summaryrefslogtreecommitdiffstats
path: root/python
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-07 19:33:14 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-07 19:33:14 +0000
commit36d22d82aa202bb199967e9512281e9a53db42c9 (patch)
tree105e8c98ddea1c1e4784a60a5a6410fa416be2de /python
parentInitial commit. (diff)
downloadfirefox-esr-36d22d82aa202bb199967e9512281e9a53db42c9.tar.xz
firefox-esr-36d22d82aa202bb199967e9512281e9a53db42c9.zip
Adding upstream version 115.7.0esr.upstream/115.7.0esrupstream
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'python')
-rw-r--r--python/README16
-rw-r--r--python/docs/index.rst228
-rw-r--r--python/gdbpp/gdbpp/__init__.py31
-rw-r--r--python/gdbpp/gdbpp/enumset.py31
-rw-r--r--python/gdbpp/gdbpp/linkedlist.py48
-rw-r--r--python/gdbpp/gdbpp/owningthread.py26
-rw-r--r--python/gdbpp/gdbpp/smartptr.py60
-rw-r--r--python/gdbpp/gdbpp/string.py21
-rw-r--r--python/gdbpp/gdbpp/tarray.py29
-rw-r--r--python/gdbpp/gdbpp/thashtable.py152
-rw-r--r--python/l10n/fluent_migrations/__init__.py0
-rw-r--r--python/l10n/fluent_migrations/bug_1552333_aboutCertError.py40
-rw-r--r--python/l10n/fluent_migrations/bug_1635548_browser_context.py82
-rw-r--r--python/l10n/fluent_migrations/bug_1738056_aboutDialog_channel.py33
-rw-r--r--python/l10n/fluent_migrations/bug_1786186_mobile_aboutConfig.py65
-rw-r--r--python/l10n/fluent_migrations/bug_1793557_extensions.py912
-rw-r--r--python/l10n/fluent_migrations/bug_1793572_webrtc.py771
-rw-r--r--python/l10n/fluent_migrations/bug_1813077_popup_notification_learn_more.py22
-rw-r--r--python/l10n/fluent_migrations/bug_1814261_mixed_content_identity_panel.py49
-rw-r--r--python/l10n/fluent_migrations/bug_1814266_identity_custom_root.py34
-rw-r--r--python/l10n/fluent_migrations/bug_1818322_mozTabList.py26
-rw-r--r--python/l10n/fluent_migrations/bug_1820654_update_manual.py30
-rw-r--r--python/l10n/fluent_migrations/bug_1821187_migrationWizard_password_file_import_strings.py27
-rw-r--r--python/l10n/fluent_migrations/bug_1821779_migrationWizard_browser_names.py38
-rw-r--r--python/l10n/fluent_migrations/bug_1828443_pocket_policy.py30
-rw-r--r--python/l10n/fluent_migrations/bug_1828767_sanitize_dialog_native_size.py77
-rw-r--r--python/l10n/fluent_migrations/bug_1830042_places.py127
-rw-r--r--python/l10n/fluent_migrations/bug_1831851_accounts.py195
-rw-r--r--python/l10n/fluent_migrations/bug_1831872_sync.py31
-rw-r--r--python/l10n/fluent_migrations/bug_1832138_ctrlTab.py37
-rw-r--r--python/l10n/fluent_migrations/bug_1832141_recently_closed.py76
-rw-r--r--python/l10n/fluent_migrations/bug_1832179_sendTabToDevice.py37
-rw-r--r--python/l10n/fluent_migrations/bug_1832186_popupwarning.py139
-rw-r--r--python/l10n/fluent_migrations/bug_1832668_firefoxView_navigation.py27
-rw-r--r--python/l10n/fluent_migrations/bug_1833228_fxviewTabList.py44
-rw-r--r--python/l10n/fluent_migrations/bug_1835559_aboutDialog_explicit_textContent.py67
-rw-r--r--python/l10n/fluent_migrations/bug_1866295_new_device_migration_strings.py22
-rw-r--r--python/l10n/fluent_migrations/bug_1867346_new_device_migration_string_replacement.py22
-rw-r--r--python/l10n/mozxchannel/__init__.py150
-rw-r--r--python/l10n/mozxchannel/projectconfig.py77
-rw-r--r--python/l10n/mozxchannel/source.py88
-rw-r--r--python/l10n/test_fluent_migrations/__init__.py0
-rw-r--r--python/l10n/test_fluent_migrations/fmt.py188
-rw-r--r--python/lldbutils/.ruff.toml4
-rw-r--r--python/lldbutils/README.txt221
-rw-r--r--python/lldbutils/lldbutils/__init__.py17
-rw-r--r--python/lldbutils/lldbutils/content.py28
-rw-r--r--python/lldbutils/lldbutils/general.py165
-rw-r--r--python/lldbutils/lldbutils/gfx.py65
-rw-r--r--python/lldbutils/lldbutils/layout.py46
-rw-r--r--python/lldbutils/lldbutils/utils.py86
-rw-r--r--python/mach/.ruff.toml4
-rw-r--r--python/mach/README.rst13
-rw-r--r--python/mach/bash-completion.sh18
-rw-r--r--python/mach/docs/commands.rst129
-rw-r--r--python/mach/docs/driver.rst32
-rw-r--r--python/mach/docs/faq.rst152
-rw-r--r--python/mach/docs/index.rst89
-rw-r--r--python/mach/docs/logging.rst100
-rw-r--r--python/mach/docs/metrics.md55
-rw-r--r--python/mach/docs/settings.rst138
-rw-r--r--python/mach/docs/telemetry.rst37
-rw-r--r--python/mach/docs/usage.rst150
-rw-r--r--python/mach/docs/windows-usage-outside-mozillabuild.rst124
-rw-r--r--python/mach/mach/__init__.py0
-rw-r--r--python/mach/mach/base.py73
-rw-r--r--python/mach/mach/commands/__init__.py0
-rw-r--r--python/mach/mach/commands/commandinfo.py487
-rw-r--r--python/mach/mach/commands/completion_templates/bash.template62
-rw-r--r--python/mach/mach/commands/completion_templates/fish.template64
-rw-r--r--python/mach/mach/commands/completion_templates/zsh.template62
-rw-r--r--python/mach/mach/commands/settings.py51
-rw-r--r--python/mach/mach/config.py415
-rw-r--r--python/mach/mach/decorators.py340
-rw-r--r--python/mach/mach/dispatcher.py516
-rw-r--r--python/mach/mach/logging.py398
-rw-r--r--python/mach/mach/main.py735
-rw-r--r--python/mach/mach/mixin/__init__.py0
-rw-r--r--python/mach/mach/mixin/logging.py52
-rw-r--r--python/mach/mach/mixin/process.py217
-rw-r--r--python/mach/mach/python_lockfile.py79
-rw-r--r--python/mach/mach/registrar.py186
-rw-r--r--python/mach/mach/requirements.py183
-rw-r--r--python/mach/mach/sentry.py222
-rw-r--r--python/mach/mach/site.py1405
-rw-r--r--python/mach/mach/telemetry.py305
-rw-r--r--python/mach/mach/telemetry_interface.py77
-rw-r--r--python/mach/mach/terminal.py76
-rw-r--r--python/mach/mach/test/__init__.py0
-rw-r--r--python/mach/mach/test/conftest.py84
-rw-r--r--python/mach/mach/test/invoke_mach_command.py4
-rw-r--r--python/mach/mach/test/providers/__init__.py0
-rw-r--r--python/mach/mach/test/providers/basic.py15
-rw-r--r--python/mach/mach/test/providers/commands.py33
-rw-r--r--python/mach/mach/test/providers/conditions.py55
-rw-r--r--python/mach/mach/test/providers/conditions_invalid.py10
-rw-r--r--python/mach/mach/test/providers/throw.py18
-rw-r--r--python/mach/mach/test/providers/throw2.py15
-rw-r--r--python/mach/mach/test/python.ini22
-rw-r--r--python/mach/mach/test/script_site_activation.py67
-rw-r--r--python/mach/mach/test/test_commands.py79
-rw-r--r--python/mach/mach/test/test_conditions.py101
-rw-r--r--python/mach/mach/test/test_config.py292
-rw-r--r--python/mach/mach/test/test_decorators.py133
-rw-r--r--python/mach/mach/test/test_dispatcher.py60
-rw-r--r--python/mach/mach/test/test_entry_point.py59
-rw-r--r--python/mach/mach/test/test_error_output.py29
-rw-r--r--python/mach/mach/test/test_logger.py48
-rw-r--r--python/mach/mach/test/test_mach.py31
-rw-r--r--python/mach/mach/test/test_site.py56
-rw-r--r--python/mach/mach/test/test_site_activation.py463
-rw-r--r--python/mach/mach/test/test_site_compatibility.py189
-rw-r--r--python/mach/mach/test/zero_microseconds.py12
-rw-r--r--python/mach/mach/util.py110
-rw-r--r--python/mach/metrics.yaml206
-rw-r--r--python/mach/pings.yaml22
-rw-r--r--python/mach/setup.cfg2
-rw-r--r--python/mach/setup.py42
-rw-r--r--python/mach_commands.py366
-rw-r--r--python/moz.build78
-rw-r--r--python/mozboot/.ruff.toml4
-rw-r--r--python/mozboot/README.rst20
-rwxr-xr-xpython/mozboot/bin/bootstrap.py439
-rw-r--r--python/mozboot/mozboot/__init__.py0
-rw-r--r--python/mozboot/mozboot/android-avds/arm.json27
-rw-r--r--python/mozboot/mozboot/android-avds/arm64.json27
-rw-r--r--python/mozboot/mozboot/android-avds/x86_64.json26
-rw-r--r--python/mozboot/mozboot/android-emulator-packages.txt2
-rw-r--r--python/mozboot/mozboot/android-packages.txt4
-rw-r--r--python/mozboot/mozboot/android-system-images-packages.txt1
-rw-r--r--python/mozboot/mozboot/android.py886
-rw-r--r--python/mozboot/mozboot/archlinux.py33
-rw-r--r--python/mozboot/mozboot/base.py733
-rw-r--r--python/mozboot/mozboot/bootstrap.py776
-rw-r--r--python/mozboot/mozboot/centosfedora.py80
-rw-r--r--python/mozboot/mozboot/debian.py83
-rw-r--r--python/mozboot/mozboot/freebsd.py70
-rw-r--r--python/mozboot/mozboot/gentoo.py29
-rw-r--r--python/mozboot/mozboot/linux_common.py93
-rw-r--r--python/mozboot/mozboot/mach_commands.py119
-rw-r--r--python/mozboot/mozboot/mozconfig.py156
-rw-r--r--python/mozboot/mozboot/mozillabuild.py235
-rw-r--r--python/mozboot/mozboot/openbsd.py34
-rw-r--r--python/mozboot/mozboot/opensuse.py63
-rw-r--r--python/mozboot/mozboot/osx.py310
-rw-r--r--python/mozboot/mozboot/rust.py185
-rw-r--r--python/mozboot/mozboot/sccache.py9
-rw-r--r--python/mozboot/mozboot/solus.py32
-rw-r--r--python/mozboot/mozboot/test/python.ini4
-rw-r--r--python/mozboot/mozboot/test/test_mozconfig.py229
-rw-r--r--python/mozboot/mozboot/util.py49
-rw-r--r--python/mozboot/mozboot/void.py41
-rw-r--r--python/mozboot/mozboot/windows.py127
-rw-r--r--python/mozboot/setup.py16
-rw-r--r--python/mozbuild/.ruff.toml9
-rw-r--r--python/mozbuild/metrics.yaml140
-rw-r--r--python/mozbuild/mozbuild/__init__.py0
-rw-r--r--python/mozbuild/mozbuild/action/__init__.py0
-rw-r--r--python/mozbuild/mozbuild/action/buildlist.py49
-rw-r--r--python/mozbuild/mozbuild/action/check_binary.py343
-rw-r--r--python/mozbuild/mozbuild/action/download_wpt_manifest.py21
-rw-r--r--python/mozbuild/mozbuild/action/dump_env.py30
-rw-r--r--python/mozbuild/mozbuild/action/dumpsymbols.py109
-rw-r--r--python/mozbuild/mozbuild/action/exe_7z_archive.py89
-rw-r--r--python/mozbuild/mozbuild/action/fat_aar.py185
-rw-r--r--python/mozbuild/mozbuild/action/file_generate.py155
-rw-r--r--python/mozbuild/mozbuild/action/file_generate_wrapper.py38
-rw-r--r--python/mozbuild/mozbuild/action/generate_symbols_file.py95
-rw-r--r--python/mozbuild/mozbuild/action/html_fragment_preprocesor.py101
-rw-r--r--python/mozbuild/mozbuild/action/install.py22
-rw-r--r--python/mozbuild/mozbuild/action/jar_maker.py16
-rw-r--r--python/mozbuild/mozbuild/action/l10n_merge.py42
-rw-r--r--python/mozbuild/mozbuild/action/langpack_localeNames.json426
-rw-r--r--python/mozbuild/mozbuild/action/langpack_manifest.py587
-rw-r--r--python/mozbuild/mozbuild/action/make_dmg.py67
-rw-r--r--python/mozbuild/mozbuild/action/make_unzip.py25
-rw-r--r--python/mozbuild/mozbuild/action/node.py137
-rw-r--r--python/mozbuild/mozbuild/action/package_generated_sources.py42
-rw-r--r--python/mozbuild/mozbuild/action/preprocessor.py24
-rw-r--r--python/mozbuild/mozbuild/action/process_define_files.py115
-rw-r--r--python/mozbuild/mozbuild/action/process_install_manifest.py125
-rw-r--r--python/mozbuild/mozbuild/action/symbols_archive.py89
-rw-r--r--python/mozbuild/mozbuild/action/test_archive.py875
-rwxr-xr-xpython/mozbuild/mozbuild/action/tooltool.py1714
-rw-r--r--python/mozbuild/mozbuild/action/unify_symbols.py49
-rw-r--r--python/mozbuild/mozbuild/action/unify_tests.py65
-rw-r--r--python/mozbuild/mozbuild/action/unpack_dmg.py52
-rw-r--r--python/mozbuild/mozbuild/action/util.py24
-rw-r--r--python/mozbuild/mozbuild/action/webidl.py19
-rw-r--r--python/mozbuild/mozbuild/action/wrap_rustc.py79
-rw-r--r--python/mozbuild/mozbuild/action/xpccheck.py109
-rwxr-xr-xpython/mozbuild/mozbuild/action/xpidl-process.py153
-rw-r--r--python/mozbuild/mozbuild/action/zip.py52
-rw-r--r--python/mozbuild/mozbuild/analyze/__init__.py0
-rw-r--r--python/mozbuild/mozbuild/analyze/hg.py176
-rw-r--r--python/mozbuild/mozbuild/android_version_code.py197
-rw-r--r--python/mozbuild/mozbuild/artifact_builds.py27
-rw-r--r--python/mozbuild/mozbuild/artifact_cache.py251
-rw-r--r--python/mozbuild/mozbuild/artifact_commands.py615
-rw-r--r--python/mozbuild/mozbuild/artifacts.py1661
-rw-r--r--python/mozbuild/mozbuild/backend/__init__.py27
-rw-r--r--python/mozbuild/mozbuild/backend/base.py389
-rw-r--r--python/mozbuild/mozbuild/backend/cargo_build_defs.py87
-rw-r--r--python/mozbuild/mozbuild/backend/clangd.py126
-rw-r--r--python/mozbuild/mozbuild/backend/common.py603
-rw-r--r--python/mozbuild/mozbuild/backend/configenvironment.py357
-rw-r--r--python/mozbuild/mozbuild/backend/cpp_eclipse.py876
-rw-r--r--python/mozbuild/mozbuild/backend/fastermake.py300
-rw-r--r--python/mozbuild/mozbuild/backend/mach_commands.py420
-rw-r--r--python/mozbuild/mozbuild/backend/make.py139
-rw-r--r--python/mozbuild/mozbuild/backend/recursivemake.py1904
-rw-r--r--python/mozbuild/mozbuild/backend/static_analysis.py52
-rw-r--r--python/mozbuild/mozbuild/backend/test_manifest.py110
-rw-r--r--python/mozbuild/mozbuild/backend/visualstudio.py712
-rw-r--r--python/mozbuild/mozbuild/base.py1110
-rw-r--r--python/mozbuild/mozbuild/bootstrap.py61
-rw-r--r--python/mozbuild/mozbuild/build_commands.py366
-rw-r--r--python/mozbuild/mozbuild/chunkify.py56
-rw-r--r--python/mozbuild/mozbuild/code_analysis/__init__.py0
-rw-r--r--python/mozbuild/mozbuild/code_analysis/mach_commands.py1976
-rw-r--r--python/mozbuild/mozbuild/code_analysis/moz.build8
-rw-r--r--python/mozbuild/mozbuild/code_analysis/utils.py138
-rw-r--r--python/mozbuild/mozbuild/codecoverage/__init__.py0
-rw-r--r--python/mozbuild/mozbuild/codecoverage/chrome_map.py175
-rw-r--r--python/mozbuild/mozbuild/codecoverage/lcov_rewriter.py777
-rw-r--r--python/mozbuild/mozbuild/codecoverage/manifest_handler.py52
-rw-r--r--python/mozbuild/mozbuild/codecoverage/packager.py71
-rw-r--r--python/mozbuild/mozbuild/compilation/__init__.py0
-rw-r--r--python/mozbuild/mozbuild/compilation/codecomplete.py55
-rw-r--r--python/mozbuild/mozbuild/compilation/database.py244
-rw-r--r--python/mozbuild/mozbuild/compilation/util.py64
-rw-r--r--python/mozbuild/mozbuild/compilation/warnings.py392
-rw-r--r--python/mozbuild/mozbuild/config_status.py184
-rw-r--r--python/mozbuild/mozbuild/configure/__init__.py1311
-rw-r--r--python/mozbuild/mozbuild/configure/check_debug_ranges.py68
-rw-r--r--python/mozbuild/mozbuild/configure/constants.py131
-rw-r--r--python/mozbuild/mozbuild/configure/help.py90
-rw-r--r--python/mozbuild/mozbuild/configure/lint.py348
-rw-r--r--python/mozbuild/mozbuild/configure/options.py614
-rw-r--r--python/mozbuild/mozbuild/configure/util.py235
-rw-r--r--python/mozbuild/mozbuild/controller/__init__.py0
-rw-r--r--python/mozbuild/mozbuild/controller/building.py1872
-rw-r--r--python/mozbuild/mozbuild/controller/clobber.py249
-rw-r--r--python/mozbuild/mozbuild/doctor.py605
-rw-r--r--python/mozbuild/mozbuild/dotproperties.py86
-rw-r--r--python/mozbuild/mozbuild/faster_daemon.py328
-rw-r--r--python/mozbuild/mozbuild/frontend/__init__.py0
-rw-r--r--python/mozbuild/mozbuild/frontend/context.py3144
-rw-r--r--python/mozbuild/mozbuild/frontend/data.py1369
-rw-r--r--python/mozbuild/mozbuild/frontend/emitter.py1892
-rw-r--r--python/mozbuild/mozbuild/frontend/gyp_reader.py497
-rw-r--r--python/mozbuild/mozbuild/frontend/mach_commands.py338
-rw-r--r--python/mozbuild/mozbuild/frontend/reader.py1432
-rw-r--r--python/mozbuild/mozbuild/frontend/sandbox.py313
-rw-r--r--python/mozbuild/mozbuild/gen_test_backend.py53
-rw-r--r--python/mozbuild/mozbuild/generated_sources.py75
-rw-r--r--python/mozbuild/mozbuild/gn_processor.py788
-rw-r--r--python/mozbuild/mozbuild/html_build_viewer.py118
-rw-r--r--python/mozbuild/mozbuild/jar.py648
-rw-r--r--python/mozbuild/mozbuild/mach_commands.py2941
-rw-r--r--python/mozbuild/mozbuild/makeutil.py209
-rw-r--r--python/mozbuild/mozbuild/mozconfig.py403
-rwxr-xr-xpython/mozbuild/mozbuild/mozconfig_loader48
-rw-r--r--python/mozbuild/mozbuild/mozinfo.py163
-rw-r--r--python/mozbuild/mozbuild/nodeutil.py126
-rw-r--r--python/mozbuild/mozbuild/preprocessor.py938
-rw-r--r--python/mozbuild/mozbuild/pythonutil.py23
-rw-r--r--python/mozbuild/mozbuild/repackaging/__init__.py0
-rw-r--r--python/mozbuild/mozbuild/repackaging/application_ini.py66
-rw-r--r--python/mozbuild/mozbuild/repackaging/deb.py694
-rw-r--r--python/mozbuild/mozbuild/repackaging/dmg.py56
-rw-r--r--python/mozbuild/mozbuild/repackaging/installer.py55
-rw-r--r--python/mozbuild/mozbuild/repackaging/mar.py93
-rw-r--r--python/mozbuild/mozbuild/repackaging/msi.py122
-rw-r--r--python/mozbuild/mozbuild/repackaging/msix.py1193
-rw-r--r--python/mozbuild/mozbuild/repackaging/pkg.py46
-rw-r--r--python/mozbuild/mozbuild/repackaging/test/python.ini4
-rw-r--r--python/mozbuild/mozbuild/repackaging/test/test_msix.py53
-rw-r--r--python/mozbuild/mozbuild/resources/html-build-viewer/build_resources.html694
-rw-r--r--python/mozbuild/mozbuild/schedules.py77
-rw-r--r--python/mozbuild/mozbuild/settings.py30
-rw-r--r--python/mozbuild/mozbuild/shellutil.py210
-rw-r--r--python/mozbuild/mozbuild/sphinx.py293
-rw-r--r--python/mozbuild/mozbuild/telemetry.py264
-rw-r--r--python/mozbuild/mozbuild/test/__init__.py0
-rw-r--r--python/mozbuild/mozbuild/test/action/data/html_fragment_preprocesor/example_basic.xml10
-rw-r--r--python/mozbuild/mozbuild/test/action/data/html_fragment_preprocesor/example_multiple_templates.xml30
-rw-r--r--python/mozbuild/mozbuild/test/action/data/html_fragment_preprocesor/example_xul.xml14
-rw-r--r--python/mozbuild/mozbuild/test/action/data/invalid/region.properties12
-rw-r--r--python/mozbuild/mozbuild/test/action/data/node/node-test-script.js11
-rw-r--r--python/mozbuild/mozbuild/test/action/test_buildlist.py96
-rw-r--r--python/mozbuild/mozbuild/test/action/test_html_fragment_preprocessor.py196
-rw-r--r--python/mozbuild/mozbuild/test/action/test_langpack_manifest.py269
-rw-r--r--python/mozbuild/mozbuild/test/action/test_node.py80
-rw-r--r--python/mozbuild/mozbuild/test/action/test_process_install_manifest.py65
-rw-r--r--python/mozbuild/mozbuild/test/backend/__init__.py0
-rw-r--r--python/mozbuild/mozbuild/test/backend/common.py253
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/build/app/moz.build54
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/build/bar.ini1
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/build/bar.js2
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/build/bar.jsm1
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/build/baz.ini2
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/build/baz.jsm2
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/build/components.manifest2
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/build/foo.css2
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/build/foo.ini1
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/build/foo.js1
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/build/foo.jsm1
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/build/jar.mn11
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/build/moz.build68
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/build/prefs.js1
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/build/qux.ini5
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/build/qux.jsm5
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/build/resource1
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/build/resource21
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/build/subdir/bar.js1
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/database/bar.c0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/database/baz.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/database/build/non-unified-compat0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/database/foo.c0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/database/moz.build14
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/database/qux.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/defines/moz.build9
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/dist-files/install.rdf0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/dist-files/main.js0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/dist-files/moz.build8
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/exports-generated/dom1.h0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/exports-generated/foo.h0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/exports-generated/gfx.h0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/exports-generated/moz.build12
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/exports-generated/mozilla1.h0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/exports/dom1.h0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/exports/dom2.h0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/exports/foo.h0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/exports/gfx.h0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/exports/moz.build8
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/exports/mozilla1.h0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/exports/mozilla2.h0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/exports/pprio.h0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/final-target-files-wildcard/bar.xyz0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/final-target-files-wildcard/foo.xyz0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/final-target-files-wildcard/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/final_target/both/moz.build6
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/final_target/dist-subdir/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/final_target/final-target/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/final_target/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/final_target/xpi-name/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/generated-files-force/foo-data0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/generated-files-force/generate-bar.py0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/generated-files-force/generate-foo.py0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/generated-files-force/moz.build14
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/generated-files/foo-data0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/generated-files/generate-bar.py0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/generated-files/generate-foo.py0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/generated-files/moz.build12
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/generated_includes/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/host-defines/moz.build9
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/host-rust-library-features/Cargo.toml13
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/host-rust-library-features/moz.build22
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/host-rust-library/Cargo.toml15
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/host-rust-library/moz.build22
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/moz.build6
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/sub/foo.h.in1
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/sub/moz.build7
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/ipdl_sources/bar/moz.build16
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/ipdl_sources/foo/moz.build16
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/ipdl_sources/ipdl/moz.build9
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/ipdl_sources/moz.build19
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/jar-manifests/moz.build7
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/linkage/moz.build11
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/linkage/prog/moz.build11
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/linkage/prog/qux/moz.build6
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/linkage/prog/qux/qux1.c0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/linkage/real/foo/foo1.c0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/linkage/real/foo/foo2.c0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/linkage/real/foo/moz.build6
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/linkage/real/moz.build14
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/linkage/shared/baz/baz1.c0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/linkage/shared/baz/moz.build6
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/linkage/shared/moz.build14
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/bar1.cc0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/bar2.cc0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/bar_helper/bar_helper1.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/bar_helper/moz.build8
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/moz.build13
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/linkage/static/moz.build12
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/linkage/templates.mozbuild23
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/local_includes/bar/baz/dummy_file_for_nonempty_directory0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/local_includes/foo/dummy_file_for_nonempty_directory0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/local_includes/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/localized-files/en-US/bar.ini0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/localized-files/en-US/foo.js0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/localized-files/moz.build9
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/en-US/localized-input0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/foo-data0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/generate-foo.py0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/inner/locales/en-US/localized-input0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/locales/en-US/localized-input0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/moz.build32
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/non-localized-input0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/en-US/localized-input0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/foo-data0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/generate-foo.py0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/moz.build22
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/non-localized-input0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/localized-generated-files/en-US/localized-input0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/localized-generated-files/foo-data0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/localized-generated-files/generate-foo.py0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/localized-generated-files/moz.build15
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/localized-generated-files/non-localized-input0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/localized-pp-files/en-US/bar.ini0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/localized-pp-files/en-US/foo.js0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/localized-pp-files/moz.build8
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-library/c-library.c2
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-library/moz.build7
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-program/c_test_program.c2
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-program/moz.build7
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-simple-programs/c_simple_program.c2
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-simple-programs/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-library/c-source.c2
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-library/cxx-library.cpp2
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-library/moz.build10
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-program/cxx_test_program.cpp2
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-program/moz.build7
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-simple-programs/cxx_simple_program.cpp2
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-simple-programs/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/moz.build35
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/simple-programs/moz.build3
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/program-paths/dist-bin/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/program-paths/dist-subdir/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/program-paths/final-target/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/program-paths/moz.build15
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/program-paths/not-installed/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/resources/bar.res.in0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/resources/cursor.cur0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/resources/desktop1.ttf0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/resources/desktop2.ttf0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/resources/extra.manifest0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/resources/font1.ttf0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/resources/font2.ttf0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/resources/foo.res0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/resources/mobile.ttf0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/resources/moz.build9
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/resources/test.manifest0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/rust-library-features/Cargo.toml15
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/rust-library-features/moz.build20
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/rust-library/Cargo.toml15
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/rust-library/moz.build19
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/rust-programs/code/Cargo.toml10
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/rust-programs/code/moz.build6
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/rust-programs/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/sources/bar.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/sources/bar.s0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/sources/baz.c0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/sources/foo.asm0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/sources/foo.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/sources/fuga.mm0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/sources/hoge.mm0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/sources/moz.build26
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/sources/qux.c0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/sources/titi.S0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/sources/toto.S0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/stub0/Makefile.in4
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/stub0/dir1/Makefile.in7
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/stub0/dir1/moz.build3
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/stub0/dir2/moz.build3
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/stub0/dir3/Makefile.in7
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/stub0/dir3/moz.build3
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/stub0/moz.build7
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/substitute_config_files/Makefile.in0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/substitute_config_files/foo.in1
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/substitute_config_files/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/another-file.sjs0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/browser.ini6
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/data/one.txt0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/data/two.txt0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/test_sub.js0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/mochitest.ini8
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/support-file.txt0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/test_foo.js0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/mochitest-common.ini1
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/mochitest.ini2
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/moz.build6
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/test_bar.js0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/test_foo.js0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/mochitest1.ini4
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/mochitest2.ini4
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/moz.build7
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/test_bar.js0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/test_foo.js0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/instrumentation.ini1
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/mochitest.ini1
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/mochitest.js0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/moz.build10
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/not_packaged.java0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-manifests-written/dir1/test_bar.js0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-manifests-written/dir1/xpcshell.ini3
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-manifests-written/mochitest.ini3
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-manifests-written/mochitest.js0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-manifests-written/moz.build9
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-manifests-written/xpcshell.ini4
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-manifests-written/xpcshell.js0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/src/moz.build12
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/test/moz.build32
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/test/test-one.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/test/test-two.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test_config/file.in3
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/test_config/moz.build3
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/variable_passthru/Makefile.in0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/variable_passthru/baz.def0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/variable_passthru/moz.build11
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.c0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.mm0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.c0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.mm0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/bar.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/foo.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/moz.build9
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/visual-studio/moz.build7
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/xpidl/bar.idl0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/xpidl/config/makefiles/xpidl/Makefile.in0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/xpidl/foo.idl0
-rw-r--r--python/mozbuild/mozbuild/test/backend/data/xpidl/moz.build6
-rw-r--r--python/mozbuild/mozbuild/test/backend/test_build.py265
-rw-r--r--python/mozbuild/mozbuild/test/backend/test_configenvironment.py73
-rw-r--r--python/mozbuild/mozbuild/test/backend/test_database.py91
-rw-r--r--python/mozbuild/mozbuild/test/backend/test_fastermake.py42
-rw-r--r--python/mozbuild/mozbuild/test/backend/test_partialconfigenvironment.py173
-rw-r--r--python/mozbuild/mozbuild/test/backend/test_recursivemake.py1307
-rw-r--r--python/mozbuild/mozbuild/test/backend/test_test_manifest.py94
-rw-r--r--python/mozbuild/mozbuild/test/backend/test_visualstudio.py63
-rw-r--r--python/mozbuild/mozbuild/test/code_analysis/test_mach_commands.py90
-rw-r--r--python/mozbuild/mozbuild/test/codecoverage/sample_lcov.info1895
-rw-r--r--python/mozbuild/mozbuild/test/codecoverage/test_lcov_rewrite.py444
-rw-r--r--python/mozbuild/mozbuild/test/common.py69
-rw-r--r--python/mozbuild/mozbuild/test/compilation/__init__.py0
-rw-r--r--python/mozbuild/mozbuild/test/compilation/test_warnings.py240
-rw-r--r--python/mozbuild/mozbuild/test/configure/common.py307
-rw-r--r--python/mozbuild/mozbuild/test/configure/data/decorators.configure53
-rw-r--r--python/mozbuild/mozbuild/test/configure/data/empty_mozconfig0
-rw-r--r--python/mozbuild/mozbuild/test/configure/data/extra.configure15
-rw-r--r--python/mozbuild/mozbuild/test/configure/data/imply_option/imm.configure37
-rw-r--r--python/mozbuild/mozbuild/test/configure/data/imply_option/infer.configure28
-rw-r--r--python/mozbuild/mozbuild/test/configure/data/imply_option/infer_ko.configure36
-rw-r--r--python/mozbuild/mozbuild/test/configure/data/imply_option/negative.configure40
-rw-r--r--python/mozbuild/mozbuild/test/configure/data/imply_option/simple.configure28
-rw-r--r--python/mozbuild/mozbuild/test/configure/data/imply_option/values.configure28
-rw-r--r--python/mozbuild/mozbuild/test/configure/data/included.configure68
-rw-r--r--python/mozbuild/mozbuild/test/configure/data/moz.configure205
-rw-r--r--python/mozbuild/mozbuild/test/configure/data/set_config.configure51
-rw-r--r--python/mozbuild/mozbuild/test/configure/data/set_define.configure51
-rw-r--r--python/mozbuild/mozbuild/test/configure/data/subprocess.configure24
-rw-r--r--python/mozbuild/mozbuild/test/configure/lint.py62
-rw-r--r--python/mozbuild/mozbuild/test/configure/macos_fake_sdk/SDKSettings.plist8
-rw-r--r--python/mozbuild/mozbuild/test/configure/test_bootstrap.py43
-rw-r--r--python/mozbuild/mozbuild/test/configure/test_checks_configure.py1169
-rw-r--r--python/mozbuild/mozbuild/test/configure/test_compile_checks.py599
-rw-r--r--python/mozbuild/mozbuild/test/configure/test_configure.py1986
-rw-r--r--python/mozbuild/mozbuild/test/configure/test_lint.py487
-rw-r--r--python/mozbuild/mozbuild/test/configure/test_moz_configure.py185
-rw-r--r--python/mozbuild/mozbuild/test/configure/test_options.py905
-rw-r--r--python/mozbuild/mozbuild/test/configure/test_toolchain_configure.py2056
-rw-r--r--python/mozbuild/mozbuild/test/configure/test_toolchain_helpers.py433
-rw-r--r--python/mozbuild/mozbuild/test/configure/test_toolkit_moz_configure.py102
-rw-r--r--python/mozbuild/mozbuild/test/configure/test_util.py539
-rw-r--r--python/mozbuild/mozbuild/test/controller/__init__.py0
-rw-r--r--python/mozbuild/mozbuild/test/controller/test_ccachestats.py866
-rw-r--r--python/mozbuild/mozbuild/test/controller/test_clobber.py214
-rw-r--r--python/mozbuild/mozbuild/test/data/Makefile0
-rw-r--r--python/mozbuild/mozbuild/test/data/bad.properties12
-rw-r--r--python/mozbuild/mozbuild/test/data/test-dir/Makefile0
-rw-r--r--python/mozbuild/mozbuild/test/data/test-dir/with/Makefile0
-rw-r--r--python/mozbuild/mozbuild/test/data/test-dir/with/without/with/Makefile0
-rw-r--r--python/mozbuild/mozbuild/test/data/test-dir/without/with/Makefile0
-rw-r--r--python/mozbuild/mozbuild/test/data/valid.properties11
-rw-r--r--python/mozbuild/mozbuild/test/frontend/__init__.py0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/allow-compiler-warnings/moz.build20
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/allow-compiler-warnings/test1.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/asflags/moz.build15
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/asflags/test1.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/asflags/test2.S0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/branding-files/bar.ico0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/branding-files/baz.png0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/branding-files/foo.xpm0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/branding-files/moz.build12
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/branding-files/quux.icns0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/compile-defines/moz.build16
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/compile-defines/test1.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/compile-flags-field-validation/moz.build15
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/compile-flags-field-validation/test1.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/compile-flags-templates/moz.build27
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/compile-flags-templates/test1.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/compile-flags-type-validation/moz.build15
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/compile-flags-type-validation/test1.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/compile-flags/moz.build22
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/compile-flags/test1.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/compile-includes/moz.build15
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/compile-includes/subdir/header.h0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/compile-includes/test1.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/config-file-substitution/moz.build6
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/Cargo.toml18
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/moz.build19
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/shallow/Cargo.toml6
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/the/depths/Cargo.toml9
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/defines/moz.build9
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/disable-compiler-warnings/moz.build20
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/disable-compiler-warnings/test1.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/disable-stl-wrapping/moz.build21
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/disable-stl-wrapping/test1.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/dist-files-missing/install.rdf0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/dist-files-missing/moz.build8
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/dist-files/install.rdf0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/dist-files/main.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/dist-files/moz.build8
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/exports-generated/foo.h0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/exports-generated/moz.build8
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/exports-generated/mozilla1.h0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/exports-missing-generated/foo.h0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/exports-missing-generated/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/exports-missing/foo.h0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/exports-missing/moz.build6
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/exports-missing/mozilla1.h0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/exports/bar.h0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/exports/baz.h0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/exports/dom1.h0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/exports/dom2.h0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/exports/dom3.h0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/exports/foo.h0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/exports/gfx.h0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/exports/mem.h0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/exports/mem2.h0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/exports/moz.build13
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/exports/mozilla1.h0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/exports/mozilla2.h0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/exports/pprio.h0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/exports/pprthred.h0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/bad-assignment/moz.build2
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/different-matchers/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/final/moz.build3
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/final/subcomponent/moz.build2
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/moz.build2
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/simple/moz.build2
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/static/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/files-info/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/final-target-pp-files-non-srcdir/moz.build7
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/generated-files-absolute-script/moz.build9
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/generated-files-absolute-script/script.py0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/generated-files-force/moz.build11
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/generated-files-method-names/moz.build13
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/generated-files-method-names/script.py0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/generated-files-no-inputs/moz.build9
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/generated-files-no-inputs/script.py0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/generated-files-no-python-script/moz.build8
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/generated-files-no-python-script/script.rb0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/generated-files-no-script/moz.build8
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/generated-files/moz.build9
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/generated-sources/a.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/generated-sources/b.cc0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/generated-sources/c.cxx0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/generated-sources/d.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/generated-sources/e.m0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/generated-sources/f.mm0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/generated-sources/g.S0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/generated-sources/h.s0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/generated-sources/i.asm0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/generated-sources/moz.build39
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/generated_includes/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/host-compile-flags/moz.build22
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/host-compile-flags/test1.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/host-program-paths/final-target/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/host-program-paths/installed/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/host-program-paths/moz.build14
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/host-program-paths/not-installed/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/host-rust-libraries/Cargo.toml15
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/host-rust-libraries/moz.build22
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/host-rust-program-no-cargo-toml/moz.build1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/host-rust-program-nonexistent-name/Cargo.toml7
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/host-rust-program-nonexistent-name/moz.build1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/host-rust-programs/Cargo.toml7
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/host-rust-programs/moz.build1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/host-sources/a.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/host-sources/b.cc0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/host-sources/c.cxx0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/host-sources/d.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/host-sources/e.mm0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/host-sources/f.mm0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/host-sources/moz.build27
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/include-basic/included.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/include-basic/moz.build7
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/include-file-stack/included-1.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/include-file-stack/included-2.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/include-file-stack/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/include-missing/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/include-outside-topsrcdir/relative.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/child.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/child2.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/grandchild/grandchild.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/parent.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/include-topsrcdir-relative/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/include-topsrcdir-relative/sibling.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/bar/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/foo/baz/moz.build7
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/foo/moz.build7
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/moz.build10
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/bar/moz.build14
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/foo/moz.build14
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/moz.build10
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/jar-manifests-multiple-files/moz.build7
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/jar-manifests/moz.build7
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/library-defines/liba/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/library-defines/libb/moz.build7
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/library-defines/libc/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/library-defines/libd/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/library-defines/moz.build11
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/link-flags/moz.build16
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/link-flags/test1.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/local_includes-filename/foo.h0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/local_includes-filename/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/local_includes-invalid/objdir/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/local_includes-invalid/srcdir/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/local_includes/bar/baz/dummy_file_for_nonempty_directory0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/local_includes/foo/dummy_file_for_nonempty_directory0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/local_includes/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/localized-files-from-generated/moz.build6
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/localized-files-no-en-us/en-US/bar.ini0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/localized-files-no-en-us/foo.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/localized-files-no-en-us/inner/locales/en-US/bar.ini0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/localized-files-no-en-us/moz.build9
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/localized-files-not-localized-generated/moz.build6
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/localized-files/en-US/bar.ini0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/localized-files/en-US/foo.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/localized-files/moz.build9
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/localized-generated-files-final-target-files/moz.build6
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/localized-generated-files-force/moz.build6
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/localized-generated-files/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/localized-pp-files/en-US/bar.ini0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/localized-pp-files/en-US/foo.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/localized-pp-files/moz.build8
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/missing-local-includes/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/missing-xpidl/moz.build6
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/moz.build29
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust1/Cargo.toml15
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust1/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust2/Cargo.toml15
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust2/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/object-conflicts/1/Test.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/object-conflicts/1/Test.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/object-conflicts/1/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/object-conflicts/2/Test.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/object-conflicts/2/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/object-conflicts/2/subdir/Test.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/object-conflicts/3/Test.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/object-conflicts/3/Test.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/object-conflicts/3/moz.build7
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/object-conflicts/4/Test.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/object-conflicts/4/Test.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/object-conflicts/4/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/program-paths/dist-bin/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/program-paths/dist-subdir/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/program-paths/final-target/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/program-paths/moz.build15
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/program-paths/not-installed/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/program/moz.build18
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/program/test_program1.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/program/test_program2.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-error-bad-dir/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-error-basic/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-error-empty-list/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-error-error-func/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-error-included-from/child.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-error-included-from/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-error-missing-include/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-error-outside-topsrcdir/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-error-read-unknown-global/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-error-repeated-dir/moz.build7
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-error-script-error/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-error-syntax/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-error-write-bad-value/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-error-write-unknown-global/moz.build7
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/a/file0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/a/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/b/file0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/b/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/file10
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/file20
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/no-intermediate-moz-build/child/file0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/no-intermediate-moz-build/child/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/parent-is-far/dir1/dir2/dir3/file0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/parent-is-far/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir1/file0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir1/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir2/file0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir2/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/file0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/resolved-flags-error/moz.build17
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/resolved-flags-error/test1.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/rust-library-dash-folding/Cargo.toml15
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/rust-library-dash-folding/moz.build19
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/rust-library-duplicate-features/Cargo.toml15
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/rust-library-duplicate-features/moz.build20
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/rust-library-features/Cargo.toml15
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/rust-library-features/moz.build20
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/rust-library-invalid-crate-type/Cargo.toml15
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/rust-library-invalid-crate-type/moz.build19
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/rust-library-name-mismatch/Cargo.toml12
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/rust-library-name-mismatch/moz.build19
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/rust-library-no-cargo-toml/moz.build19
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/rust-library-no-lib-section/Cargo.toml12
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/rust-library-no-lib-section/moz.build19
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/rust-program-no-cargo-toml/moz.build1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/rust-program-nonexistent-name/Cargo.toml7
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/rust-program-nonexistent-name/moz.build1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/rust-programs/Cargo.toml7
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/rust-programs/moz.build1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/schedules/moz.build19
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/schedules/subd/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources-just-c/d.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources-just-c/e.m0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources-just-c/g.S0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources-just-c/h.s0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources-just-c/i.asm0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources-just-c/moz.build29
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources/a.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources/b.cc0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources/c.cxx0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources/d.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources/e.m0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources/f.mm0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources/g.S0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources/h.s0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources/i.asm0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/sources/moz.build39
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/templates/templates.mozbuild21
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-harness-files-root/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-harness-files/mochitest.ini1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-harness-files/mochitest.py1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-harness-files/moz.build7
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-harness-files/runtests.py1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-harness-files/utils.py1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-install-shared-lib/moz.build16
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/moz.build14
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/one/foo.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/one/moz.build11
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/three/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/two/foo.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/two/moz.build11
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/absolute-support.ini4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/foo.txt1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/test_file.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/bar.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/foo.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/mochitest.ini7
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/test_baz.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/included-reftest.list1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/moz.build1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/reftest.list2
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-empty/empty.ini2
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-empty/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-inactive-ignored/test_inactive.html0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/common.ini1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/mochitest.ini3
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/test_foo.html1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/foo.txt1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/just-support.ini2
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y-support/dir1/bar0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y-support/foo0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y.ini4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/browser.ini4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/chrome.ini3
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/crashtest.list1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/metro.ini3
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/mochitest.ini5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/moz.build12
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/python.ini1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/reftest.list1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_a11y.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_browser.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_chrome.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_foo.py0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_metro.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_mochitest.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_xpcshell.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/xpcshell.ini5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-manifest/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file-unfiltered/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file-unfiltered/xpcshell.ini4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file/mochitest.ini1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/child/mochitest.ini4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/child/test_foo.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/support-file.txt0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/another-file.sjs0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/browser.ini6
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/data/one.txt0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/data/two.txt0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/test_sub.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/mochitest.ini9
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/support-file.txt0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/test_foo.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/another-file.sjs0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/browser.ini6
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/data/one.txt0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/data/two.txt0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/test_sub.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/mochitest.ini8
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/support-file.txt0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/test_foo.js0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/moz.build4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/test.ini4
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/test_foo0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir-missing-generated/moz.build12
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir/foo.py0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir/moz.build15
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-symbols-file/foo.symbols1
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/test-symbols-file/moz.build12
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/moz.build6
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/parallel/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/regular/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/test/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/traversal-outside-topsrcdir/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/bar/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/foo/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/bar/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/foo/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/traversal-simple/bar/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/traversal-simple/foo/biz/moz.build0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/traversal-simple/foo/moz.build2
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/traversal-simple/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/bar.cxx0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/c1.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/c2.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/foo.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/moz.build30
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/objc1.mm0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/objc2.mm0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/quux.cc0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources/bar.cxx0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources/c1.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources/c2.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources/foo.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources/moz.build30
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources/objc1.mm0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources/objc2.mm0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/unified-sources/quux.cc0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/use-nasm/moz.build15
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/use-nasm/test1.S0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/variable-passthru/bans.S0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/variable-passthru/baz.def0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/variable-passthru/moz.build13
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.mm0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.mm0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/visibility-flags/moz.build21
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/visibility-flags/test1.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/wasm-compile-flags/moz.build14
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/wasm-compile-flags/test1.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/wasm-sources/a.cpp0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/wasm-sources/b.cc0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/wasm-sources/c.cxx0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/wasm-sources/d.c0
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/wasm-sources/moz.build15
-rw-r--r--python/mozbuild/mozbuild/test/frontend/data/xpidl-module-no-sources/moz.build5
-rw-r--r--python/mozbuild/mozbuild/test/frontend/test_context.py736
-rw-r--r--python/mozbuild/mozbuild/test/frontend/test_emitter.py1877
-rw-r--r--python/mozbuild/mozbuild/test/frontend/test_namespaces.py225
-rw-r--r--python/mozbuild/mozbuild/test/frontend/test_reader.py531
-rw-r--r--python/mozbuild/mozbuild/test/frontend/test_sandbox.py536
-rw-r--r--python/mozbuild/mozbuild/test/python.ini64
-rw-r--r--python/mozbuild/mozbuild/test/repackaging/test_deb.py551
-rw-r--r--python/mozbuild/mozbuild/test/test_android_version_code.py111
-rw-r--r--python/mozbuild/mozbuild/test/test_artifact_cache.py145
-rw-r--r--python/mozbuild/mozbuild/test/test_artifacts.py115
-rw-r--r--python/mozbuild/mozbuild/test/test_base.py446
-rw-r--r--python/mozbuild/mozbuild/test/test_containers.py224
-rw-r--r--python/mozbuild/mozbuild/test/test_dotproperties.py183
-rw-r--r--python/mozbuild/mozbuild/test/test_expression.py88
-rw-r--r--python/mozbuild/mozbuild/test/test_jarmaker.py493
-rw-r--r--python/mozbuild/mozbuild/test/test_licenses.py33
-rw-r--r--python/mozbuild/mozbuild/test/test_line_endings.py45
-rw-r--r--python/mozbuild/mozbuild/test/test_makeutil.py164
-rw-r--r--python/mozbuild/mozbuild/test/test_manifest.py2081
-rw-r--r--python/mozbuild/mozbuild/test/test_mozconfig.py275
-rwxr-xr-xpython/mozbuild/mozbuild/test/test_mozinfo.py318
-rw-r--r--python/mozbuild/mozbuild/test/test_preprocessor.py832
-rw-r--r--python/mozbuild/mozbuild/test/test_pythonutil.py24
-rw-r--r--python/mozbuild/mozbuild/test/test_rewrite_mozbuild.py515
-rw-r--r--python/mozbuild/mozbuild/test/test_telemetry.py102
-rw-r--r--python/mozbuild/mozbuild/test/test_telemetry_settings.py174
-rw-r--r--python/mozbuild/mozbuild/test/test_util.py889
-rw-r--r--python/mozbuild/mozbuild/test/test_util_fileavoidwrite.py110
-rw-r--r--python/mozbuild/mozbuild/test/test_vendor.py48
-rw-r--r--python/mozbuild/mozbuild/test/test_vendor_tools.py90
-rw-r--r--python/mozbuild/mozbuild/test/vendor_requirements.in5
-rw-r--r--python/mozbuild/mozbuild/test/vendor_requirements.txt416
-rw-r--r--python/mozbuild/mozbuild/testing.py266
-rw-r--r--python/mozbuild/mozbuild/toolchains.py32
-rw-r--r--python/mozbuild/mozbuild/util.py1407
-rw-r--r--python/mozbuild/mozbuild/vendor/__init__.py0
-rw-r--r--python/mozbuild/mozbuild/vendor/host_angle.py37
-rw-r--r--python/mozbuild/mozbuild/vendor/host_base.py77
-rw-r--r--python/mozbuild/mozbuild/vendor/host_codeberg.py28
-rw-r--r--python/mozbuild/mozbuild/vendor/host_github.py27
-rw-r--r--python/mozbuild/mozbuild/vendor/host_gitlab.py26
-rw-r--r--python/mozbuild/mozbuild/vendor/host_googlesource.py32
-rw-r--r--python/mozbuild/mozbuild/vendor/mach_commands.py232
-rw-r--r--python/mozbuild/mozbuild/vendor/moz.build8
-rw-r--r--python/mozbuild/mozbuild/vendor/moz_yaml.py770
-rw-r--r--python/mozbuild/mozbuild/vendor/rewrite_mozbuild.py1286
-rwxr-xr-xpython/mozbuild/mozbuild/vendor/test_vendor_changes.sh65
-rw-r--r--python/mozbuild/mozbuild/vendor/vendor_manifest.py789
-rw-r--r--python/mozbuild/mozbuild/vendor/vendor_python.py228
-rw-r--r--python/mozbuild/mozbuild/vendor/vendor_rust.py961
-rw-r--r--python/mozbuild/mozpack/__init__.py0
-rw-r--r--python/mozbuild/mozpack/apple_pkg/Distribution.template19
-rw-r--r--python/mozbuild/mozpack/apple_pkg/PackageInfo.template19
-rw-r--r--python/mozbuild/mozpack/archive.py153
-rw-r--r--python/mozbuild/mozpack/chrome/__init__.py0
-rw-r--r--python/mozbuild/mozpack/chrome/flags.py278
-rw-r--r--python/mozbuild/mozpack/chrome/manifest.py400
-rw-r--r--python/mozbuild/mozpack/copier.py605
-rw-r--r--python/mozbuild/mozpack/dmg.py230
-rw-r--r--python/mozbuild/mozpack/errors.py151
-rw-r--r--python/mozbuild/mozpack/executables.py140
-rw-r--r--python/mozbuild/mozpack/files.py1271
-rw-r--r--python/mozbuild/mozpack/macpkg.py217
-rw-r--r--python/mozbuild/mozpack/manifests.py483
-rw-r--r--python/mozbuild/mozpack/mozjar.py842
-rw-r--r--python/mozbuild/mozpack/packager/__init__.py445
-rw-r--r--python/mozbuild/mozpack/packager/formats.py354
-rw-r--r--python/mozbuild/mozpack/packager/l10n.py304
-rw-r--r--python/mozbuild/mozpack/packager/unpack.py200
-rw-r--r--python/mozbuild/mozpack/path.py246
-rw-r--r--python/mozbuild/mozpack/pkg.py299
-rw-r--r--python/mozbuild/mozpack/test/__init__.py0
-rw-r--r--python/mozbuild/mozpack/test/data/test_data1
-rw-r--r--python/mozbuild/mozpack/test/python.ini18
-rw-r--r--python/mozbuild/mozpack/test/support/minify_js_verify.py15
-rw-r--r--python/mozbuild/mozpack/test/test_archive.py197
-rw-r--r--python/mozbuild/mozpack/test/test_chrome_flags.py150
-rw-r--r--python/mozbuild/mozpack/test/test_chrome_manifest.py176
-rw-r--r--python/mozbuild/mozpack/test/test_copier.py548
-rw-r--r--python/mozbuild/mozpack/test/test_errors.py95
-rw-r--r--python/mozbuild/mozpack/test/test_files.py1362
-rw-r--r--python/mozbuild/mozpack/test/test_manifests.py465
-rw-r--r--python/mozbuild/mozpack/test/test_mozjar.py350
-rw-r--r--python/mozbuild/mozpack/test/test_packager.py630
-rw-r--r--python/mozbuild/mozpack/test/test_packager_formats.py537
-rw-r--r--python/mozbuild/mozpack/test/test_packager_l10n.py153
-rw-r--r--python/mozbuild/mozpack/test/test_packager_unpack.py67
-rw-r--r--python/mozbuild/mozpack/test/test_path.py152
-rw-r--r--python/mozbuild/mozpack/test/test_pkg.py138
-rw-r--r--python/mozbuild/mozpack/test/test_unify.py250
-rw-r--r--python/mozbuild/mozpack/unify.py265
-rw-r--r--python/mozbuild/setup.py29
-rw-r--r--python/mozlint/.ruff.toml4
-rw-r--r--python/mozlint/mozlint/__init__.py7
-rw-r--r--python/mozlint/mozlint/cli.py445
-rw-r--r--python/mozlint/mozlint/editor.py57
-rw-r--r--python/mozlint/mozlint/errors.py33
-rw-r--r--python/mozlint/mozlint/formatters/__init__.py31
-rw-r--r--python/mozlint/mozlint/formatters/compact.py41
-rw-r--r--python/mozlint/mozlint/formatters/stylish.py156
-rw-r--r--python/mozlint/mozlint/formatters/summary.py50
-rw-r--r--python/mozlint/mozlint/formatters/treeherder.py34
-rw-r--r--python/mozlint/mozlint/formatters/unix.py33
-rw-r--r--python/mozlint/mozlint/parser.py130
-rw-r--r--python/mozlint/mozlint/pathutils.py313
-rw-r--r--python/mozlint/mozlint/result.py163
-rw-r--r--python/mozlint/mozlint/roller.py421
-rw-r--r--python/mozlint/mozlint/types.py214
-rw-r--r--python/mozlint/mozlint/util/__init__.py0
-rw-r--r--python/mozlint/mozlint/util/implementation.py35
-rw-r--r--python/mozlint/mozlint/util/string.py9
-rw-r--r--python/mozlint/setup.py26
-rw-r--r--python/mozlint/test/__init__.py0
-rw-r--r--python/mozlint/test/conftest.py66
-rw-r--r--python/mozlint/test/files/foobar.js2
-rw-r--r--python/mozlint/test/files/foobar.py3
-rw-r--r--python/mozlint/test/files/irrelevant/file.txt1
-rw-r--r--python/mozlint/test/files/no_foobar.js2
-rw-r--r--python/mozlint/test/filter/a.js0
-rw-r--r--python/mozlint/test/filter/a.py0
-rw-r--r--python/mozlint/test/filter/foo/empty.txt0
-rw-r--r--python/mozlint/test/filter/foobar/empty.txt0
-rw-r--r--python/mozlint/test/filter/subdir1/b.js0
-rw-r--r--python/mozlint/test/filter/subdir1/b.py0
-rw-r--r--python/mozlint/test/filter/subdir1/subdir3/d.js0
-rw-r--r--python/mozlint/test/filter/subdir1/subdir3/d.py0
-rw-r--r--python/mozlint/test/filter/subdir2/c.js0
-rw-r--r--python/mozlint/test/filter/subdir2/c.py0
-rw-r--r--python/mozlint/test/linters/badreturncode.yml8
-rw-r--r--python/mozlint/test/linters/excludes.yml10
-rw-r--r--python/mozlint/test/linters/excludes_empty.yml8
-rw-r--r--python/mozlint/test/linters/explicit_path.yml8
-rw-r--r--python/mozlint/test/linters/external.py74
-rw-r--r--python/mozlint/test/linters/external.yml8
-rw-r--r--python/mozlint/test/linters/global.yml8
-rw-r--r--python/mozlint/test/linters/global_payload.py38
-rw-r--r--python/mozlint/test/linters/global_skipped.yml8
-rw-r--r--python/mozlint/test/linters/invalid_exclude.yml6
-rw-r--r--python/mozlint/test/linters/invalid_extension.ym5
-rw-r--r--python/mozlint/test/linters/invalid_include.yml6
-rw-r--r--python/mozlint/test/linters/invalid_include_with_glob.yml6
-rw-r--r--python/mozlint/test/linters/invalid_support_files.yml6
-rw-r--r--python/mozlint/test/linters/invalid_type.yml5
-rw-r--r--python/mozlint/test/linters/missing_attrs.yml3
-rw-r--r--python/mozlint/test/linters/missing_definition.yml1
-rw-r--r--python/mozlint/test/linters/multiple.yml19
-rw-r--r--python/mozlint/test/linters/non_existing_exclude.yml7
-rw-r--r--python/mozlint/test/linters/non_existing_include.yml7
-rw-r--r--python/mozlint/test/linters/non_existing_support_files.yml7
-rw-r--r--python/mozlint/test/linters/raises.yml6
-rw-r--r--python/mozlint/test/linters/regex.yml10
-rw-r--r--python/mozlint/test/linters/setup.yml9
-rw-r--r--python/mozlint/test/linters/setupfailed.yml9
-rw-r--r--python/mozlint/test/linters/setupraised.yml9
-rw-r--r--python/mozlint/test/linters/slow.yml8
-rw-r--r--python/mozlint/test/linters/string.yml9
-rw-r--r--python/mozlint/test/linters/structured.yml8
-rw-r--r--python/mozlint/test/linters/support_files.yml10
-rw-r--r--python/mozlint/test/linters/warning.yml11
-rw-r--r--python/mozlint/test/linters/warning_no_code_review.yml12
-rw-r--r--python/mozlint/test/python.ini11
-rw-r--r--python/mozlint/test/runcli.py17
-rw-r--r--python/mozlint/test/test_cli.py127
-rw-r--r--python/mozlint/test/test_editor.py92
-rw-r--r--python/mozlint/test/test_formatters.py141
-rw-r--r--python/mozlint/test/test_parser.py80
-rw-r--r--python/mozlint/test/test_pathutils.py166
-rw-r--r--python/mozlint/test/test_result.py26
-rw-r--r--python/mozlint/test/test_roller.py396
-rw-r--r--python/mozlint/test/test_types.py84
-rw-r--r--python/mozperftest/.ruff.toml4
-rw-r--r--python/mozperftest/README.rst6
-rw-r--r--python/mozperftest/mozperftest/.coveragerc10
-rw-r--r--python/mozperftest/mozperftest/__init__.py13
-rw-r--r--python/mozperftest/mozperftest/argparser.py475
-rw-r--r--python/mozperftest/mozperftest/environment.py106
-rw-r--r--python/mozperftest/mozperftest/fzf/__init__.py3
-rw-r--r--python/mozperftest/mozperftest/fzf/fzf.py116
-rw-r--r--python/mozperftest/mozperftest/fzf/preview.py90
-rw-r--r--python/mozperftest/mozperftest/hooks.py63
-rw-r--r--python/mozperftest/mozperftest/layers.py177
-rw-r--r--python/mozperftest/mozperftest/mach_commands.py305
-rw-r--r--python/mozperftest/mozperftest/metadata.py44
-rw-r--r--python/mozperftest/mozperftest/metrics/__init__.py23
-rw-r--r--python/mozperftest/mozperftest/metrics/common.py356
-rw-r--r--python/mozperftest/mozperftest/metrics/consoleoutput.py59
-rw-r--r--python/mozperftest/mozperftest/metrics/exceptions.py53
-rw-r--r--python/mozperftest/mozperftest/metrics/notebook/__init__.py7
-rw-r--r--python/mozperftest/mozperftest/metrics/notebook/constant.py31
-rw-r--r--python/mozperftest/mozperftest/metrics/notebook/notebook-sections/compare85
-rw-r--r--python/mozperftest/mozperftest/metrics/notebook/notebook-sections/header12
-rw-r--r--python/mozperftest/mozperftest/metrics/notebook/notebook-sections/scatterplot15
-rw-r--r--python/mozperftest/mozperftest/metrics/notebook/perftestetl.py167
-rw-r--r--python/mozperftest/mozperftest/metrics/notebook/perftestnotebook.py79
-rw-r--r--python/mozperftest/mozperftest/metrics/notebook/template_upload_file.html39
-rw-r--r--python/mozperftest/mozperftest/metrics/notebook/transformer.py228
-rw-r--r--python/mozperftest/mozperftest/metrics/notebook/transforms/__init__.py0
-rw-r--r--python/mozperftest/mozperftest/metrics/notebook/transforms/logcattime.py121
-rw-r--r--python/mozperftest/mozperftest/metrics/notebook/transforms/single_json.py56
-rw-r--r--python/mozperftest/mozperftest/metrics/notebook/utilities.py63
-rw-r--r--python/mozperftest/mozperftest/metrics/notebookupload.py115
-rw-r--r--python/mozperftest/mozperftest/metrics/perfboard/__init__.py3
-rw-r--r--python/mozperftest/mozperftest/metrics/perfboard/dashboard.json56
-rw-r--r--python/mozperftest/mozperftest/metrics/perfboard/grafana.py87
-rw-r--r--python/mozperftest/mozperftest/metrics/perfboard/influx.py188
-rw-r--r--python/mozperftest/mozperftest/metrics/perfboard/panel.json81
-rw-r--r--python/mozperftest/mozperftest/metrics/perfboard/target.json20
-rw-r--r--python/mozperftest/mozperftest/metrics/perfherder.py374
-rw-r--r--python/mozperftest/mozperftest/metrics/utils.py149
-rw-r--r--python/mozperftest/mozperftest/metrics/visualmetrics.py221
-rw-r--r--python/mozperftest/mozperftest/runner.py280
-rw-r--r--python/mozperftest/mozperftest/schemas/intermediate-results-schema.json113
-rw-r--r--python/mozperftest/mozperftest/schemas/transformer_schema.json55
-rw-r--r--python/mozperftest/mozperftest/script.py269
-rw-r--r--python/mozperftest/mozperftest/system/__init__.py35
-rw-r--r--python/mozperftest/mozperftest/system/android.py238
-rw-r--r--python/mozperftest/mozperftest/system/android_perf_tuner.py193
-rw-r--r--python/mozperftest/mozperftest/system/android_startup.py414
-rw-r--r--python/mozperftest/mozperftest/system/example.zipbin0 -> 6588776 bytes
-rw-r--r--python/mozperftest/mozperftest/system/macos.py120
-rw-r--r--python/mozperftest/mozperftest/system/pingserver.py94
-rw-r--r--python/mozperftest/mozperftest/system/profile.py122
-rw-r--r--python/mozperftest/mozperftest/system/proxy.py232
-rw-r--r--python/mozperftest/mozperftest/test/__init__.py25
-rw-r--r--python/mozperftest/mozperftest/test/androidlog.py62
-rw-r--r--python/mozperftest/mozperftest/test/browsertime/__init__.py19
-rw-r--r--python/mozperftest/mozperftest/test/browsertime/package-lock.json1874
-rw-r--r--python/mozperftest/mozperftest/test/browsertime/package.json12
-rw-r--r--python/mozperftest/mozperftest/test/browsertime/runner.py473
-rw-r--r--python/mozperftest/mozperftest/test/browsertime/visualtools.py196
-rw-r--r--python/mozperftest/mozperftest/test/noderunner.py75
-rw-r--r--python/mozperftest/mozperftest/test/webpagetest.py413
-rw-r--r--python/mozperftest/mozperftest/test/xpcshell.py189
-rw-r--r--python/mozperftest/mozperftest/tests/__init__.py1
-rw-r--r--python/mozperftest/mozperftest/tests/conftest.py153
-rw-r--r--python/mozperftest/mozperftest/tests/data/WPT_fakekey.txt0
-rw-r--r--python/mozperftest/mozperftest/tests/data/browsertime-results-video/browsertime.json991
-rw-r--r--python/mozperftest/mozperftest/tests/data/browsertime-results-video/pages/www.bbc.com/data/video/1.mp4bin0 -> 212135 bytes
-rw-r--r--python/mozperftest/mozperftest/tests/data/browsertime-results-video/pages/www.bbc.com/news/world-middle-east-53598965/data/video/1.mp4bin0 -> 841128 bytes
-rw-r--r--python/mozperftest/mozperftest/tests/data/browsertime-results-video/pages/www.bbc.com/news/world-us-canada-53599363/data/video/1.mp4bin0 -> 510158 bytes
-rw-r--r--python/mozperftest/mozperftest/tests/data/browsertime-results/browsertime.json1
-rw-r--r--python/mozperftest/mozperftest/tests/data/failing-samples/perftest_doc_failure_example.js40
-rw-r--r--python/mozperftest/mozperftest/tests/data/firefox.dmgbin0 -> 18561 bytes
-rw-r--r--python/mozperftest/mozperftest/tests/data/home_activity.txt2806
-rw-r--r--python/mozperftest/mozperftest/tests/data/hook.py7
-rw-r--r--python/mozperftest/mozperftest/tests/data/hook_raises.py3
-rw-r--r--python/mozperftest/mozperftest/tests/data/hook_resume.py3
-rw-r--r--python/mozperftest/mozperftest/tests/data/hooks_iteration.py2
-rw-r--r--python/mozperftest/mozperftest/tests/data/hooks_state.py11
-rw-r--r--python/mozperftest/mozperftest/tests/data/logcat5511
-rw-r--r--python/mozperftest/mozperftest/tests/data/mozinfo.json1
-rw-r--r--python/mozperftest/mozperftest/tests/data/multiple_transforms_error/test_transformer_1.py6
-rw-r--r--python/mozperftest/mozperftest/tests/data/multiple_transforms_error/test_transformer_1_copy.py6
-rw-r--r--python/mozperftest/mozperftest/tests/data/perftestetl_plugin/test_transformer_perftestetl_plugin_1.py6
-rw-r--r--python/mozperftest/mozperftest/tests/data/perftestetl_plugin/test_transformer_perftestetl_plugin_2.py6
-rw-r--r--python/mozperftest/mozperftest/tests/data/samples/head.js7
-rw-r--r--python/mozperftest/mozperftest/tests/data/samples/perftest_example.js46
-rw-r--r--python/mozperftest/mozperftest/tests/data/samples/test_perftest_WPT_init_file.js113
-rw-r--r--python/mozperftest/mozperftest/tests/data/samples/test_perftest_android_startup.js34
-rw-r--r--python/mozperftest/mozperftest/tests/data/samples/test_xpcshell.js39
-rw-r--r--python/mozperftest/mozperftest/tests/data/samples/test_xpcshell_flavor2.js35
-rw-r--r--python/mozperftest/mozperftest/tests/data/samples/xpcshell.ini5
-rw-r--r--python/mozperftest/mozperftest/tests/data/xpcshell1
-rw-r--r--python/mozperftest/mozperftest/tests/fetched_artifact.zipbin0 -> 728 bytes
-rw-r--r--python/mozperftest/mozperftest/tests/support.py120
-rw-r--r--python/mozperftest/mozperftest/tests/test_android.py331
-rw-r--r--python/mozperftest/mozperftest/tests/test_android_startup.py285
-rw-r--r--python/mozperftest/mozperftest/tests/test_androidlog.py81
-rw-r--r--python/mozperftest/mozperftest/tests/test_argparser.py160
-rw-r--r--python/mozperftest/mozperftest/tests/test_browsertime.py364
-rw-r--r--python/mozperftest/mozperftest/tests/test_change_detector.py113
-rw-r--r--python/mozperftest/mozperftest/tests/test_consoleoutput.py36
-rw-r--r--python/mozperftest/mozperftest/tests/test_constant.py13
-rw-r--r--python/mozperftest/mozperftest/tests/test_environment.py158
-rw-r--r--python/mozperftest/mozperftest/tests/test_fzf.py59
-rw-r--r--python/mozperftest/mozperftest/tests/test_influx.py121
-rw-r--r--python/mozperftest/mozperftest/tests/test_ir_schema.py103
-rw-r--r--python/mozperftest/mozperftest/tests/test_layers.py88
-rw-r--r--python/mozperftest/mozperftest/tests/test_logcat_transformer.py125
-rw-r--r--python/mozperftest/mozperftest/tests/test_mach_commands.py331
-rw-r--r--python/mozperftest/mozperftest/tests/test_macos.py94
-rw-r--r--python/mozperftest/mozperftest/tests/test_metrics_utils.py97
-rw-r--r--python/mozperftest/mozperftest/tests/test_notebookupload.py121
-rw-r--r--python/mozperftest/mozperftest/tests/test_perfherder.py620
-rw-r--r--python/mozperftest/mozperftest/tests/test_perftestetl.py106
-rw-r--r--python/mozperftest/mozperftest/tests/test_perftestnotebook.py76
-rw-r--r--python/mozperftest/mozperftest/tests/test_pingserver.py38
-rw-r--r--python/mozperftest/mozperftest/tests/test_profile.py52
-rw-r--r--python/mozperftest/mozperftest/tests/test_proxy.py231
-rw-r--r--python/mozperftest/mozperftest/tests/test_runner.py48
-rw-r--r--python/mozperftest/mozperftest/tests/test_script.py99
-rw-r--r--python/mozperftest/mozperftest/tests/test_single_json_transformer.py80
-rw-r--r--python/mozperftest/mozperftest/tests/test_transformer.py161
-rw-r--r--python/mozperftest/mozperftest/tests/test_utils.py233
-rw-r--r--python/mozperftest/mozperftest/tests/test_visualmetrics.py103
-rw-r--r--python/mozperftest/mozperftest/tests/test_visualtools.py47
-rw-r--r--python/mozperftest/mozperftest/tests/test_webpagetest.py271
-rw-r--r--python/mozperftest/mozperftest/tests/test_xpcshell.py165
-rw-r--r--python/mozperftest/mozperftest/tools.py139
-rw-r--r--python/mozperftest/mozperftest/utils.py478
-rw-r--r--python/mozperftest/perfdocs/config.yml44
-rw-r--r--python/mozperftest/perfdocs/developing.rst154
-rw-r--r--python/mozperftest/perfdocs/index.rst20
-rw-r--r--python/mozperftest/perfdocs/running.rst51
-rw-r--r--python/mozperftest/perfdocs/tools.rst21
-rw-r--r--python/mozperftest/perfdocs/vision.rst66
-rw-r--r--python/mozperftest/perfdocs/writing.rst176
-rw-r--r--python/mozperftest/setup.cfg2
-rw-r--r--python/mozperftest/setup.py37
-rw-r--r--python/mozrelease/.ruff.toml4
-rw-r--r--python/mozrelease/mozrelease/__init__.py0
-rw-r--r--python/mozrelease/mozrelease/attribute_builds.py214
-rw-r--r--python/mozrelease/mozrelease/balrog.py72
-rw-r--r--python/mozrelease/mozrelease/buglist_creator.py261
-rw-r--r--python/mozrelease/mozrelease/chunking.py27
-rw-r--r--python/mozrelease/mozrelease/l10n.py17
-rw-r--r--python/mozrelease/mozrelease/mach_commands.py141
-rw-r--r--python/mozrelease/mozrelease/partner_repack.py895
-rw-r--r--python/mozrelease/mozrelease/paths.py85
-rw-r--r--python/mozrelease/mozrelease/platforms.py54
-rw-r--r--python/mozrelease/mozrelease/scriptworker_canary.py107
-rw-r--r--python/mozrelease/mozrelease/update_verify.py275
-rw-r--r--python/mozrelease/mozrelease/util.py26
-rw-r--r--python/mozrelease/mozrelease/versions.py114
-rw-r--r--python/mozrelease/setup.py25
-rw-r--r--python/mozrelease/test/data/Firefox-62.0.3.update.json74
-rw-r--r--python/mozrelease/test/data/Firefox-62.0b11-update.json74
-rw-r--r--python/mozrelease/test/data/Firefox-64.0b13.update.json9
-rw-r--r--python/mozrelease/test/data/buglist_changesets.json94
-rw-r--r--python/mozrelease/test/data/sample-update-verify.cfg4
-rw-r--r--python/mozrelease/test/data/whatsnew-62.0.3.yml65
-rw-r--r--python/mozrelease/test/data/whatsnew-release.yml65
-rw-r--r--python/mozrelease/test/python.ini7
-rw-r--r--python/mozrelease/test/test_balrog.py54
-rw-r--r--python/mozrelease/test/test_buglist_creator.py178
-rw-r--r--python/mozrelease/test/test_update_verify.py425
-rw-r--r--python/mozrelease/test/test_versions.py101
-rw-r--r--python/mozterm/.ruff.toml4
-rw-r--r--python/mozterm/mozterm/__init__.py4
-rw-r--r--python/mozterm/mozterm/terminal.py50
-rw-r--r--python/mozterm/mozterm/widgets.py67
-rw-r--r--python/mozterm/setup.cfg2
-rw-r--r--python/mozterm/setup.py30
-rw-r--r--python/mozterm/test/python.ini5
-rw-r--r--python/mozterm/test/test_terminal.py35
-rw-r--r--python/mozterm/test/test_widgets.py51
-rw-r--r--python/mozversioncontrol/.ruff.toml4
-rw-r--r--python/mozversioncontrol/mozversioncontrol/__init__.py946
-rw-r--r--python/mozversioncontrol/mozversioncontrol/repoupdate.py37
-rw-r--r--python/mozversioncontrol/setup.py28
-rw-r--r--python/mozversioncontrol/test/conftest.py84
-rw-r--r--python/mozversioncontrol/test/python.ini10
-rw-r--r--python/mozversioncontrol/test/test_branch.py57
-rw-r--r--python/mozversioncontrol/test/test_commit.py72
-rw-r--r--python/mozversioncontrol/test/test_context_manager.py28
-rw-r--r--python/mozversioncontrol/test/test_push_to_try.py81
-rw-r--r--python/mozversioncontrol/test/test_update.py63
-rw-r--r--python/mozversioncontrol/test/test_workdir_outgoing.py108
-rw-r--r--python/mozversioncontrol/test/test_working_directory.py46
-rw-r--r--python/sites/build.txt1
-rw-r--r--python/sites/common.txt1
-rw-r--r--python/sites/docs.txt68
-rw-r--r--python/sites/ipython.txt1
-rw-r--r--python/sites/lint.txt2
-rw-r--r--python/sites/mach.txt145
-rw-r--r--python/sites/perftest-side-by-side.txt2
-rw-r--r--python/sites/perftest-test.txt2
-rw-r--r--python/sites/python-test.txt1
-rw-r--r--python/sites/repackage-deb.txt1
-rw-r--r--python/sites/upload-generated-sources.txt1
-rw-r--r--python/sites/vendor.txt4
-rw-r--r--python/sites/watch.txt1
-rw-r--r--python/sites/webcompat.txt5
-rw-r--r--python/sites/wpt.txt6
1356 files changed, 151496 insertions, 0 deletions
diff --git a/python/README b/python/README
new file mode 100644
index 0000000000..e7de122d5c
--- /dev/null
+++ b/python/README
@@ -0,0 +1,16 @@
+This directory contains common Python code.
+
+The basic rule is that if Python code is cross-module (that's "module" in the
+Mozilla meaning - as in "module ownership") and is MPL-compatible, it should
+go here.
+
+What should not go here:
+
+* Vendored python modules (use third_party/python instead)
+* Python that is not MPL-compatible (see other-licenses/)
+* Python that has good reason to remain close to its "owning" (Mozilla)
+ module (e.g. it is only being consumed from there).
+
+Historical information can be found at
+https://bugzilla.mozilla.org/show_bug.cgi?id=775243
+https://bugzilla.mozilla.org/show_bug.cgi?id=1346025
diff --git a/python/docs/index.rst b/python/docs/index.rst
new file mode 100644
index 0000000000..68b16ab649
--- /dev/null
+++ b/python/docs/index.rst
@@ -0,0 +1,228 @@
+=================================
+Using third-party Python packages
+=================================
+
+Mach and its associated commands have a variety of 3rd-party Python dependencies. Many of these
+are vendored in ``third_party/python``, while others are installed at runtime via ``pip``.
+
+The dependencies of Mach itself can be found at ``python/sites/mach.txt``. Mach commands
+may have additional dependencies which are specified at ``python/sites/<site>.txt``.
+
+For example, the following Mach command would have its 3rd-party dependencies declared at
+``python/sites/foo.txt``.
+
+.. code:: python
+
+ @Command(
+ "foo-it",
+ virtualenv_name="foo",
+ )
+ # ...
+ def foo_it_command():
+ import specific_dependency
+
+The format of ``<site>.txt`` files are documented further in the
+:py:class:`~mach.requirements.MachEnvRequirements` class.
+
+Adding a Python package
+=======================
+
+There are two ways of using 3rd-party Python dependencies:
+
+* :ref:`pip install the packages <python-pip-install>`. Python dependencies with native code must
+ be installed using ``pip``. This is the recommended technique for adding new Python dependencies.
+* :ref:`Vendor the source of the Python package in-tree <python-vendor>`. Dependencies of the Mach
+ core logic or of building Firefox itself must be vendored.
+
+.. note::
+
+ For dependencies that meet both restrictions (dependency of Mach/build, *and* has
+ native code), see the :ref:`mach-and-build-native-dependencies` section below.
+
+.. _python-pip-install:
+
+``pip install`` the package
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+To add a ``pip install``-d package dependency, add it to your site's
+``python/sites/<site>.txt`` manifest file:
+
+.. code::
+
+ ...
+ pypi:new-package==<version>
+ ...
+
+.. note::
+
+ Some tasks are not permitted to use external resources, and for those we can
+ publish packages to an internal PyPI mirror.
+ See `how to upload to internal PyPI <https://wiki.mozilla.org/ReleaseEngineering/How_To/Upload_to_internal_Pypi>`_
+ for more details.
+
+.. _python-vendor:
+
+Vendoring Python packages
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+To vendor a Python package, add it to ``third_party/python/requirements.in``
+and then run ``mach vendor python``. This will update the tree of pinned
+dependencies in ``third_party/python/requirements.txt`` and download them all
+into the ``third_party/python`` directory.
+
+Next, add that package and any new transitive dependencies (you'll see them added in
+``third_party/python/requirements.txt``) to the associated site's dependency manifest in
+``python/sites/<site>.txt``:
+
+.. code::
+
+ ...
+ vendored:third_party/python/new-package
+ vendored:third_party/python/new-package-dependency-foo
+ vendored:third_party/python/new-package-dependency-bar
+ ...
+
+.. note::
+
+ The following policy applies to **ALL** vendored packages:
+
+ * Vendored PyPI libraries **MUST NOT** be modified
+ * Vendored libraries **SHOULD** be released copies of libraries available on
+ PyPI.
+
+ * When considering manually vendoring a package, discuss the situation with
+ the ``#build`` team to ensure that other, more maintainable options are exhausted.
+
+.. note::
+
+ We require that it is possible to build Firefox using only a checkout of the source,
+ without depending on a package index. This ensures that building Firefox is
+ deterministic and dependable, avoids packages from changing out from under us,
+ and means we’re not affected when 3rd party services are offline. We don't want a
+ DoS against PyPI or a random package maintainer removing an old tarball to delay
+ a Firefox chemspill. Therefore, packages required by Mach core logic or for building
+ Firefox itself must be vendored.
+
+.. _mach-and-build-native-dependencies:
+
+Mach/Build Native 3rd-party Dependencies
+========================================
+
+There are cases where Firefox is built without being able to ``pip install``, but where
+native 3rd party Python dependencies enable optional functionality. This can't be solved
+by vendoring the platform-specific libraries, as then each one would have to be stored
+multiple times in-tree according to how many platforms we wish to support.
+
+Instead, this is solved by pre-installing such native packages onto the host system
+in advance, then having Mach attempt to use such packages directly from the system.
+This feature is only viable in very specific environments, as the system Python packages
+have to be compatible with Mach's vendored packages.
+
+.. note::
+
+ All of these native build-specific dependencies **MUST** be optional requirements
+ as to support the "no strings attached" builds that only use vendored packages.
+
+To control this behaviour, the ``MACH_BUILD_PYTHON_NATIVE_PACKAGE_SOURCE`` environment
+variable can be used:
+
+.. list-table:: ``MACH_BUILD_PYTHON_NATIVE_PACKAGE_SOURCE``
+ :header-rows: 1
+
+ * - ``MACH_BUILD_PYTHON_NATIVE_PACKAGE_SOURCE``
+ - Behaviour
+ * - ``"pip"``
+ - Mach will ``pip install`` all needed dependencies from PyPI at runtime into a Python
+ virtual environment that's reused in future Mach invocations.
+ * - ``"none"``
+ - Mach will perform the build using only vendored packages. No Python virtual environment
+ will be created for Mach.
+ * - ``"system"``
+ - Mach will use the host system's Python packages as part of doing the build. This option
+ allows the usage of native Python packages without leaning on a ``pip install`` at
+ build-time. This is generally slower because the system Python packages have to
+ be asserted to be compatible with Mach. Additionally, dependency lockfiles are ignored,
+ so there's higher risk of breakage. Finally, as with ``"none"``, no Python virtualenv
+ environment is created for Mach.
+ * - ``<unset>``
+ - Same behaviour as ``"pip"`` if ``MOZ_AUTOMATION`` isn't set. Otherwise, uses
+ the same behaviour as ``"none"``.
+
+There are a couple of restrictions here:
+
+* ``MACH_BUILD_PYTHON_NATIVE_PACKAGE_SOURCE`` only applies to the top-level ``"mach"`` site,
+ the ``"common"`` site and the ``"build"`` site. All other sites will use ``pip install`` at
+ run-time as needed.
+
+* ``MACH_BUILD_PYTHON_NATIVE_PACKAGE_SOURCE="system"`` is not allowed when using any site other
+ than ``"mach"``, ``"common"`` or ``"build"``, because:
+
+ * As described in :ref:`package-compatibility` below, packages used by Mach are still
+ in scope when commands are run, and
+ * The host system is practically guaranteed to be incompatible with commands' dependency
+ lockfiles.
+
+The ``MACH_BUILD_PYTHON_NATIVE_PACKAGE_SOURCE`` environment variable fits into the following use
+cases:
+
+Mozilla CI Builds
+~~~~~~~~~~~~~~~~~
+
+We need access to the native packages of ``zstandard`` and ``psutil`` to extract archives and
+get OS information respectively. Use ``MACH_BUILD_PYTHON_NATIVE_PACKAGE_SOURCE="system"``.
+
+Mozilla CI non-Build Tasks
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+We generally don't want to create a Mach virtual environment to avoid redundant processing,
+but it's ok to ``pip install`` for specific command sites as needed, so leave
+``MACH_BUILD_PYTHON_NATIVE_PACKAGE_SOURCE`` unset (``MOZ_AUTOMATION`` implies the default
+behaviour of ``MACH_BUILD_PYTHON_NATIVE_PACKAGE_SOURCE="none"``).
+
+In cases where native packages *are* needed by Mach, use
+``MACH_BUILD_PYTHON_NATIVE_PACKAGE_SOURCE="pip"``.
+
+Downstream CI Builds
+~~~~~~~~~~~~~~~~~~~~
+
+Sometimes these builds happen in sandboxed, network-less environments, and usually these builds
+don't need any of the behaviour enabled by installing native Python dependencies.
+Use ``MACH_BUILD_PYTHON_NATIVE_PACKAGE_SOURCE="none"``.
+
+Gentoo Builds
+~~~~~~~~~~~~~
+
+When installing Firefox via the package manager, Gentoo generally builds it from source rather than
+distributing a compiled binary artifact. Accordingly, users doing a build of Firefox in this
+context don't want stray files created in ``~/.mozbuild`` or unnecessary ``pip install`` calls.
+Use ``MACH_BUILD_PYTHON_NATIVE_PACKAGE_SOURCE="none"``.
+
+Firefox Developers
+~~~~~~~~~~~~~~~~~~
+
+Leave ``MACH_BUILD_PYTHON_NATIVE_PACKAGE_SOURCE`` unset so that all Mach commands can be run,
+Python dependency lockfiles are respected, and optional behaviour is enabled by installing
+native packages.
+
+.. _package-compatibility:
+
+Package compatibility
+=====================
+
+Mach requires that all commands' package requirements be compatible with those of Mach itself.
+(This is because functions and state created by Mach are still usable from within the commands, and
+they may still need access to their associated 3rd-party modules).
+
+However, it is OK for Mach commands to have package requirements which are incompatible with each
+other. This allows the flexibility for some Mach commands to depend on modern dependencies while
+other, more mature commands may still only be compatible with a much older version.
+
+.. note::
+
+ Only one version of a package may be vendored at any given time. If two Mach commands need to
+ have conflicting packages, then at least one of them must ``pip install`` the package instead
+ of vendoring.
+
+ If a Mach command's dependency conflicts with a vendored package, and that vendored package
+ isn't needed by Mach itself, then that vendored dependency should be moved from
+ ``python/sites/mach.txt`` to its associated environment.
diff --git a/python/gdbpp/gdbpp/__init__.py b/python/gdbpp/gdbpp/__init__.py
new file mode 100644
index 0000000000..376061b679
--- /dev/null
+++ b/python/gdbpp/gdbpp/__init__.py
@@ -0,0 +1,31 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import gdb
+import gdb.printing
+
+
+class GeckoPrettyPrinter(object):
+ pp = gdb.printing.RegexpCollectionPrettyPrinter("GeckoPrettyPrinters")
+
+ def __init__(self, name, regexp):
+ self.name = name
+ self.regexp = regexp
+
+ def __call__(self, wrapped):
+ GeckoPrettyPrinter.pp.add_printer(self.name, self.regexp, wrapped)
+ return wrapped
+
+
+import gdbpp.enumset # noqa: F401
+import gdbpp.linkedlist # noqa: F401
+import gdbpp.owningthread # noqa: F401
+import gdbpp.smartptr # noqa: F401
+import gdbpp.string # noqa: F401
+import gdbpp.tarray # noqa: F401
+import gdbpp.thashtable # noqa: F401
+
+gdb.printing.register_pretty_printer(None, GeckoPrettyPrinter.pp)
diff --git a/python/gdbpp/gdbpp/enumset.py b/python/gdbpp/gdbpp/enumset.py
new file mode 100644
index 0000000000..c36e863627
--- /dev/null
+++ b/python/gdbpp/gdbpp/enumset.py
@@ -0,0 +1,31 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import gdb
+
+from gdbpp import GeckoPrettyPrinter
+
+
+@GeckoPrettyPrinter("mozilla::EnumSet", "^mozilla::EnumSet<.*>$")
+class enumset_printer(object):
+ def __init__(self, value):
+ self.value = value
+ self.enum_type = value.type.template_argument(0)
+
+ def children(self):
+ bitfield = self.value["mBitField"]
+ max_bit = (self.enum_type.sizeof * 8) - 1
+ return (
+ ("flag", gdb.Value(i).cast(self.enum_type))
+ for i in range(0, max_bit)
+ if ((bitfield & (2 ** i)) != 0)
+ )
+
+ def to_string(self):
+ return str(self.value.type)
+
+ def display_hint(self):
+ return "array"
diff --git a/python/gdbpp/gdbpp/linkedlist.py b/python/gdbpp/gdbpp/linkedlist.py
new file mode 100644
index 0000000000..1ba25a1cf0
--- /dev/null
+++ b/python/gdbpp/gdbpp/linkedlist.py
@@ -0,0 +1,48 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from gdbpp import GeckoPrettyPrinter
+
+
+# mfbt's LinkedList<T> is a doubly-linked list where the items in the list store
+# the next/prev pointers as part of themselves rather than the list structure be
+# its own independent data structure. This means:
+# - Every item may belong to at most one LinkedList instance.
+# - For our pretty printer, we only want to pretty-print the LinkedList object
+# itself. We do not want to start printing every item in the list whenever
+# we run into a LinkedListElement<T>.
+@GeckoPrettyPrinter("mozilla::LinkedList", "^mozilla::LinkedList<.*>$")
+class linkedlist_printer(object):
+ def __init__(self, value):
+ self.value = value
+ # mfbt's LinkedList has the elements of the linked list subclass from
+ # LinkedListElement<T>. We want its pointer type for casting purposes.
+ #
+ # (We want to list pointers since we expect all of these objects to be
+ # complex enough that we don't want to automatically expand them. The
+ # LinkedListElement type itself isn't small.)
+ self.t_ptr_type = value.type.template_argument(0).pointer()
+
+ def children(self):
+ # Walk mNext until we loop back around to the sentinel. The sentinel
+ # item always exists and in the zero-length base-case mNext == sentinel,
+ # so extract that immediately and update it throughout the loop.
+ sentinel = self.value["sentinel"]
+ pSentinel = sentinel.address
+ pNext = sentinel["mNext"]
+ i = 0
+ while pSentinel != pNext:
+ list_elem = pNext.dereference()
+ list_value = pNext.cast(self.t_ptr_type)
+ yield ("%d" % i, list_value)
+ pNext = list_elem["mNext"]
+ i += 1
+
+ def to_string(self):
+ return str(self.value.type)
+
+ def display_hint(self):
+ return "array"
diff --git a/python/gdbpp/gdbpp/owningthread.py b/python/gdbpp/gdbpp/owningthread.py
new file mode 100644
index 0000000000..52055b3c99
--- /dev/null
+++ b/python/gdbpp/gdbpp/owningthread.py
@@ -0,0 +1,27 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import gdb
+
+from gdbpp import GeckoPrettyPrinter
+
+
+# Pretty-printer for nsAutoOwningThread: shows the owning thread's name when
+# available, falling back to its thread id, then to the raw PRThread pointer.
+@GeckoPrettyPrinter("nsAutoOwningThread", "^nsAutoOwningThread$")
+class owning_thread_printer(object):
+    def __init__(self, value):
+        self.value = value
+
+    def to_string(self):
+        prthread_type = gdb.lookup_type("PRThread").pointer()
+        prthread = self.value["mThread"].cast(prthread_type)
+        name = prthread["name"]
+
+        # if the thread doesn't have a name try to get its thread id (might not
+        # work on !linux)
+        if not name:
+            name = prthread["tid"]
+
+        return name if name else "(PRThread *) %s" % prthread
diff --git a/python/gdbpp/gdbpp/smartptr.py b/python/gdbpp/gdbpp/smartptr.py
new file mode 100644
index 0000000000..ecc35bee28
--- /dev/null
+++ b/python/gdbpp/gdbpp/smartptr.py
@@ -0,0 +1,60 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from gdbpp import GeckoPrettyPrinter
+
+
+@GeckoPrettyPrinter("nsWeakPtr", "^nsCOMPtr<nsIWeakReference>$")
+class weak_ptr_printer(object):
+ def __init__(self, value):
+ self.value = value
+
+ def to_string(self):
+ proxy = self.value["mRawPtr"]
+ if not proxy:
+ return "[(%s) 0x0]" % proxy.type
+
+ ref_type = proxy.dynamic_type
+ weak_ptr = proxy.cast(ref_type).dereference()["mReferent"]
+ if not weak_ptr:
+ return "[(%s) %s]" % (weak_ptr.type, weak_ptr)
+
+ return "[(%s) %s]" % (weak_ptr.dynamic_type, weak_ptr)
+
+
+@GeckoPrettyPrinter("mozilla::StaticAutoPtr", "^mozilla::StaticAutoPtr<.*>$")
+@GeckoPrettyPrinter("mozilla::StaticRefPtr", "^mozilla::StaticRefPtr<.*>$")
+@GeckoPrettyPrinter("nsAutoPtr", "^nsAutoPtr<.*>$")
+@GeckoPrettyPrinter("nsCOMPtr", "^nsCOMPtr<.*>$")
+@GeckoPrettyPrinter("RefPtr", "^RefPtr<.*>$")
+class smartptr_printer(object):
+ def __init__(self, value):
+ self.value = value["mRawPtr"]
+
+ def children(self):
+ yield ("mRawPtr", self.value)
+
+ def to_string(self):
+ if not self.value:
+ type_name = str(self.value.type)
+ else:
+ type_name = str(self.value.dereference().dynamic_type.pointer())
+
+ return "[(%s)]" % (type_name)
+
+
+@GeckoPrettyPrinter("UniquePtr", "^mozilla::UniquePtr<.*>$")
+class uniqueptr_printer(object):
+ def __init__(self, value):
+ self.value = value["mTuple"]["mFirstA"]
+
+ def to_string(self):
+ if not self.value:
+ type_name = str(self.value.type)
+ else:
+ type_name = str(self.value.dereference().dynamic_type.pointer())
+
+ return "[(%s) %s]" % (type_name, str(self.value))
diff --git a/python/gdbpp/gdbpp/string.py b/python/gdbpp/gdbpp/string.py
new file mode 100644
index 0000000000..b5722f2c62
--- /dev/null
+++ b/python/gdbpp/gdbpp/string.py
@@ -0,0 +1,21 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from gdbpp import GeckoPrettyPrinter
+
+
+@GeckoPrettyPrinter("nsTSubstring", "^nsTSubstring<.*>$")
+@GeckoPrettyPrinter("nsTAutoStringN", "^nsTAutoStringN<.*>$")
+@GeckoPrettyPrinter("nsString", "^nsTString<.*>$")
+class string_printer(object):
+ def __init__(self, value):
+ self.value = value
+
+ def to_string(self):
+ return self.value["mData"]
+
+ def display_hint(self):
+ return "string"
diff --git a/python/gdbpp/gdbpp/tarray.py b/python/gdbpp/gdbpp/tarray.py
new file mode 100644
index 0000000000..a5c82253b9
--- /dev/null
+++ b/python/gdbpp/gdbpp/tarray.py
@@ -0,0 +1,29 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from gdbpp import GeckoPrettyPrinter
+
+
+@GeckoPrettyPrinter("InfallibleTArray", "^InfallibleTArray<.*>$")
+@GeckoPrettyPrinter("FallibleTArray", "^FallibleTArray<.*>$")
+@GeckoPrettyPrinter("AutoTArray", "^AutoTArray<.*>$")
+@GeckoPrettyPrinter("nsTArray", "^nsTArray<.*>$")
+class tarray_printer(object):
+ def __init__(self, value):
+ self.value = value
+ self.elem_type = value.type.template_argument(0)
+
+ def children(self):
+ length = self.value["mHdr"].dereference()["mLength"]
+ data = self.value["mHdr"] + 1
+ elements = data.cast(self.elem_type.pointer())
+ return (("%d" % i, (elements + i).dereference()) for i in range(0, int(length)))
+
+ def to_string(self):
+ return str(self.value.type)
+
+ def display_hint(self):
+ return "array"
diff --git a/python/gdbpp/gdbpp/thashtable.py b/python/gdbpp/gdbpp/thashtable.py
new file mode 100644
index 0000000000..8b0294acf6
--- /dev/null
+++ b/python/gdbpp/gdbpp/thashtable.py
@@ -0,0 +1,152 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import gdb
+
+from gdbpp import GeckoPrettyPrinter
+
+
+def walk_template_to_given_base(value, desired_tag_prefix):
+ """Given a value of some template subclass, walk up its ancestry until we
+ hit the desired type, then return the appropriate value (which will then
+ have that type).
+ """
+ # Base case
+ t = value.type
+ # It's possible that we're dealing with an alias template that looks like:
+ # template<typename Protocol>
+ # using ManagedContainer = nsTHashtable<nsPtrHashKey<Protocol>>;
+ # In which case we want to strip the indirection, and strip_typedefs()
+ # accomplishes this. (Disclaimer: I tried it and it worked and it didn't
+ # break my other use cases, if things start exploding, do reconsider.)
+ t = t.strip_typedefs()
+ if t.tag.startswith(desired_tag_prefix):
+ return value
+ for f in t.fields():
+ # we only care about the inheritance hierarchy
+ if not f.is_base_class:
+ continue
+ # This is the answer or something we're going to need to recurse into.
+ fv = value[f]
+ ft = fv.type
+ # slightly optimize by checking the tag rather than in the recursion
+ if ft.tag.startswith(desired_tag_prefix):
+ # found it!
+ return fv
+ return walk_template_to_given_base(fv, desired_tag_prefix)
+ return None
+
+
+# The templates and their inheritance hierarchy form an onion of types around
+# the nsTHashtable core at the center. All we care about is that nsTHashtable,
+# but we register for the descendant types in order to avoid the default pretty
+# printers having to unwrap those onion layers, wasting precious lines.
+@GeckoPrettyPrinter("nsClassHashtable", "^nsClassHashtable<.*>$")
+@GeckoPrettyPrinter("nsDataHashtable", "^nsDataHashtable<.*>$")
+@GeckoPrettyPrinter("nsInterfaceHashtable", "^nsInterfaceHashtable<.*>$")
+@GeckoPrettyPrinter("nsRefPtrHashtable", "^nsRefPtrHashtable<.*>$")
+@GeckoPrettyPrinter("nsBaseHashtable", "^nsBaseHashtable<.*>$")
+@GeckoPrettyPrinter("nsTHashtable", "^nsTHashtable<.*>$")
+class thashtable_printer(object):
+ def __init__(self, outer_value):
+ self.outermost_type = outer_value.type
+
+ value = walk_template_to_given_base(outer_value, "nsTHashtable<")
+ self.value = value
+
+ self.entry_type = value.type.template_argument(0)
+
+ # -- Determine whether we're a hashTABLE or a hashSET
+ # If we're a table, the entry type will be a nsBaseHashtableET template.
+ # If we're a set, it will be something like nsPtrHashKey.
+ #
+ # So, assume we're a set if we're not nsBaseHashtableET<
+ # (It should ideally also be true that the type ends with HashKey, but
+ # since nsBaseHashtableET causes us to assume "mData" exists, let's
+ # pivot based on that.)
+ self.is_table = self.entry_type.tag.startswith("nsBaseHashtableET<")
+
+ # While we know that it has a field `mKeyHash` for the hash-code and
+ # book-keeping, and a DataType field mData for the value (if we're a
+ # table), the key field frustratingly varies by key type.
+ #
+ # So we want to walk its key type to figure out the field name. And we
+ # do mean field name. The field object is no good for subscripting the
+ # value unless the field was directly owned by that value's type. But
+ # by using a string name, we save ourselves all that fanciness.
+
+ if self.is_table:
+ # For nsBaseHashtableET<KeyClass, DataType>, we want the KeyClass
+ key_type = self.entry_type.template_argument(0)
+ else:
+ # If we're a set, our entry type is the key class already!
+ key_type = self.entry_type
+ self.key_field_name = None
+ for f in key_type.fields():
+ # No need to traverse up the type hierarchy...
+ if f.is_base_class:
+ continue
+ # ...just to skip the fields we know exist...
+ if f.name == "mKeyHash" or f.name == "mData":
+ continue
+ # ...and assume the first one we find is the key.
+ self.key_field_name = f.name
+ break
+
+ def children(self):
+ table = self.value["mTable"]
+
+ # mEntryCount is the number of occupied slots/entries in the table.
+ # We can use this to avoid doing wasted memory reads.
+ entryCount = table["mEntryCount"]
+ if entryCount == 0:
+ return
+
+ # The table capacity is tracked "cleverly" in terms of how many bits
+ # the hash needs to be shifted. CapacityFromHashShift calculates this
+ # quantity, but may be inlined, so we replicate the calculation here.
+ hashType = gdb.lookup_type("mozilla::HashNumber")
+ hashBits = hashType.sizeof * 8
+ capacity = 1 << (hashBits - table["mHashShift"])
+
+ # Pierce generation-tracking EntryStore class to get at buffer. The
+ # class instance always exists, but this char* may be null.
+ store = table["mEntryStore"]["mEntryStore"]
+
+ key_field_name = self.key_field_name
+
+ # The entry store is laid out with hashes for all possible entries
+ # first, followed by all the entries.
+ pHashes = store.cast(hashType.pointer())
+ pEntries = pHashes + capacity
+ pEntries = pEntries.cast(self.entry_type.pointer())
+ seenCount = 0
+ for i in range(0, int(capacity)):
+ entryHash = (pHashes + i).dereference()
+ # An entry hash of 0 means empty, 1 means deleted sentinel, so skip
+ # if that's the case.
+ if entryHash <= 1:
+ continue
+
+ entry = (pEntries + i).dereference()
+ yield ("%d" % i, entry[key_field_name])
+ if self.is_table:
+ yield ("%d" % i, entry["mData"])
+
+ # Stop iterating if we know there are no more occupied slots.
+ seenCount += 1
+ if seenCount >= entryCount:
+ break
+
+ def to_string(self):
+ # The most specific template type is the most interesting.
+ return str(self.outermost_type)
+
+ def display_hint(self):
+ if self.is_table:
+ return "map"
+ else:
+ return "array"
diff --git a/python/l10n/fluent_migrations/__init__.py b/python/l10n/fluent_migrations/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/l10n/fluent_migrations/__init__.py
diff --git a/python/l10n/fluent_migrations/bug_1552333_aboutCertError.py b/python/l10n/fluent_migrations/bug_1552333_aboutCertError.py
new file mode 100644
index 0000000000..5c8300e01f
--- /dev/null
+++ b/python/l10n/fluent_migrations/bug_1552333_aboutCertError.py
@@ -0,0 +1,40 @@
+# coding=utf8
+
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+import fluent.syntax.ast as FTL
+from fluent.migrate.helpers import transforms_from
+from fluent.migrate.helpers import VARIABLE_REFERENCE
+from fluent.migrate import COPY, REPLACE
+
+
+def migrate(ctx):
+ """Bug 1552333 - Migrate strings from pipnss.properties to aboutCertError.ftl"""
+ ctx.add_transforms(
+ "browser/browser/aboutCertError.ftl",
+ "browser/browser/aboutCertError.ftl",
+ transforms_from(
+ """
+cert-error-symantec-distrust-admin = { COPY(from_path, "certErrorSymantecDistrustAdministrator") }
+""",
+ from_path="security/manager/chrome/pipnss/pipnss.properties",
+ ),
+ )
+ ctx.add_transforms(
+ "browser/browser/aboutCertError.ftl",
+ "browser/browser/aboutCertError.ftl",
+ [
+ FTL.Message(
+ id=FTL.Identifier("cert-error-symantec-distrust-description"),
+ value=REPLACE(
+ "security/manager/chrome/pipnss/pipnss.properties",
+ "certErrorSymantecDistrustDescription1",
+ {
+ "%1$S": VARIABLE_REFERENCE("hostname"),
+ },
+ normalize_printf=True,
+ ),
+ ),
+ ],
+ )
diff --git a/python/l10n/fluent_migrations/bug_1635548_browser_context.py b/python/l10n/fluent_migrations/bug_1635548_browser_context.py
new file mode 100644
index 0000000000..33bd0efc95
--- /dev/null
+++ b/python/l10n/fluent_migrations/bug_1635548_browser_context.py
@@ -0,0 +1,82 @@
+# coding=utf8
+
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+import fluent.syntax.ast as FTL
+from fluent.migrate.helpers import transforms_from, VARIABLE_REFERENCE
+from fluent.migrate import REPLACE, COPY
+
+
+def migrate(ctx):
+ """Bug 1635548 - Migrate browser-context.inc to Fluent, part {index}"""
+ target = "toolkit/toolkit/global/textActions.ftl"
+ reference = "toolkit/toolkit/global/textActions.ftl"
+ ctx.add_transforms(
+ target,
+ reference,
+ transforms_from(
+ """
+text-action-spell-add-to-dictionary =
+ .label = { COPY(from_path, "spellAddToDictionary.label") }
+ .accesskey = { COPY(from_path, "spellAddToDictionary.accesskey") }
+
+text-action-spell-undo-add-to-dictionary =
+ .label = { COPY(from_path, "spellUndoAddToDictionary.label") }
+ .accesskey = { COPY(from_path, "spellUndoAddToDictionary.accesskey") }
+
+text-action-spell-check-toggle =
+ .label = { COPY(from_path, "spellCheckToggle.label") }
+ .accesskey = { COPY(from_path, "spellCheckToggle.accesskey") }
+
+text-action-spell-dictionaries =
+ .label = { COPY(from_path, "spellDictionaries.label") }
+ .accesskey = { COPY(from_path, "spellDictionaries.accesskey") }
+""",
+ from_path="toolkit/chrome/global/textcontext.dtd",
+ ),
+ )
+
+ target = "toolkit/toolkit/global/textActions.ftl"
+ reference = "toolkit/toolkit/global/textActions.ftl"
+ ctx.add_transforms(
+ target,
+ reference,
+ transforms_from(
+ """
+text-action-spell-add-dictionaries =
+ .label = { COPY(from_path, "spellAddDictionaries.label") }
+ .accesskey = { COPY(from_path, "spellAddDictionaries.accesskey") }
+""",
+ from_path="browser/chrome/browser/browser.dtd",
+ ),
+ )
+
+ target = "browser/browser/browserContext.ftl"
+ reference = "browser/browser/browserContext.ftl"
+ ctx.add_transforms(
+ target,
+ reference,
+ [
+ FTL.Message(
+ id=FTL.Identifier("main-context-menu-open-link-in-container-tab"),
+ attributes=[
+ FTL.Attribute(
+ FTL.Identifier("label"),
+ REPLACE(
+ "browser/chrome/browser/browser.properties",
+ "userContextOpenLink.label",
+ {"%1$S": VARIABLE_REFERENCE("containerName")},
+ ),
+ ),
+ FTL.Attribute(
+ FTL.Identifier("accesskey"),
+ COPY(
+ "browser/chrome/browser/browser.dtd",
+ "openLinkCmdInTab.accesskey",
+ ),
+ ),
+ ],
+ )
+ ],
+ )
diff --git a/python/l10n/fluent_migrations/bug_1738056_aboutDialog_channel.py b/python/l10n/fluent_migrations/bug_1738056_aboutDialog_channel.py
new file mode 100644
index 0000000000..b867155ebf
--- /dev/null
+++ b/python/l10n/fluent_migrations/bug_1738056_aboutDialog_channel.py
@@ -0,0 +1,33 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+import fluent.syntax.ast as FTL
+import re
+
+from fluent.migrate.transforms import TransformPattern
+
+
+class INSERT_VARIABLE(TransformPattern):
+ def visit_TextElement(self, node):
+ node.value = re.sub(
+ 'current-channel"></label',
+ 'current-channel">{ $channel }</label',
+ node.value,
+ )
+ return node
+
+
+def migrate(ctx):
+ """Bug 1738056 - Convert about dialog channel listing to fluent, part {index}."""
+
+ about_dialog_ftl = "browser/browser/aboutDialog.ftl"
+ ctx.add_transforms(
+ about_dialog_ftl,
+ about_dialog_ftl,
+ [
+ FTL.Message(
+ id=FTL.Identifier("aboutdialog-channel-description"),
+ value=INSERT_VARIABLE(about_dialog_ftl, "channel-description"),
+ ),
+ ],
+ )
diff --git a/python/l10n/fluent_migrations/bug_1786186_mobile_aboutConfig.py b/python/l10n/fluent_migrations/bug_1786186_mobile_aboutConfig.py
new file mode 100644
index 0000000000..99c6673f92
--- /dev/null
+++ b/python/l10n/fluent_migrations/bug_1786186_mobile_aboutConfig.py
@@ -0,0 +1,65 @@
+# coding=utf8
+
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+import fluent.syntax.ast as FTL
+from fluent.migrate.helpers import transforms_from
+from fluent.migrate.transforms import COPY
+
+
def migrate(ctx):
    """Bug 1786186 - Migrate mobile about:config to Fluent, part {index}"""

    target = "mobile/android/mobile/android/aboutConfig.ftl"

    # The bulk of the strings migrate straight from the DTD file.
    ctx.add_transforms(
        target,
        target,
        transforms_from(
            """

config-toolbar-search =
    .placeholder = { COPY(path1, "toolbar.searchPlaceholder") }
config-new-pref-name =
    .placeholder = { COPY(path1, "newPref.namePlaceholder") }
config-new-pref-value-boolean = { COPY(path1, "newPref.valueBoolean") }
config-new-pref-value-string = { COPY(path1, "newPref.valueString") }
config-new-pref-value-integer = { COPY(path1, "newPref.valueInteger") }
config-new-pref-string =
    .placeholder = { COPY(path1, "newPref.stringPlaceholder") }
config-new-pref-number =
    .placeholder = { COPY(path1, "newPref.numberPlaceholder") }
config-new-pref-cancel-button = { COPY(path1, "newPref.cancelButton") }
config-context-menu-copy-pref-name =
    .label = { COPY(path1, "contextMenu.copyPrefName") }
config-context-menu-copy-pref-value =
    .label = { COPY(path1, "contextMenu.copyPrefValue") }
""",
            path1="mobile/android/chrome/config.dtd",
        ),
    )

    # The remaining button labels live in a .properties file; build the
    # messages from (fluent-id, properties-key) pairs, preserving order.
    properties = "mobile/android/chrome/config.properties"
    button_strings = [
        ("config-new-pref-create-button", "newPref.createButton"),
        ("config-new-pref-change-button", "newPref.changeButton"),
        ("config-pref-toggle-button", "pref.toggleButton"),
        ("config-pref-reset-button", "pref.resetButton"),
    ]
    ctx.add_transforms(
        target,
        target,
        [
            FTL.Message(
                id=FTL.Identifier(fluent_id),
                value=COPY(properties, key),
            )
            for fluent_id, key in button_strings
        ],
    )
diff --git a/python/l10n/fluent_migrations/bug_1793557_extensions.py b/python/l10n/fluent_migrations/bug_1793557_extensions.py
new file mode 100644
index 0000000000..0c04a87509
--- /dev/null
+++ b/python/l10n/fluent_migrations/bug_1793557_extensions.py
@@ -0,0 +1,912 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+import fluent.syntax.ast as FTL
+from fluent.migrate.helpers import TERM_REFERENCE, VARIABLE_REFERENCE
+from fluent.migrate.transforms import (
+ COPY,
+ COPY_PATTERN,
+ PLURALS,
+ REPLACE,
+ REPLACE_IN_TEXT,
+)
+
+
def migrate(ctx):
    """Bug 1793557 - Convert extension strings to Fluent, part {index}."""

    # Single legacy .properties source; the strings fan out into five
    # different Fluent targets depending on where they are used.
    browser_properties = "browser/chrome/browser/browser.properties"
    browser_ftl = "browser/browser/browser.ftl"
    notifications = "browser/browser/addonNotifications.ftl"
    extensions_ui = "browser/browser/extensionsUI.ftl"
    extensions = "toolkit/toolkit/global/extensions.ftl"
    permissions = "toolkit/toolkit/global/extensionPermissions.ftl"

    # browser.ftl: the unsigned-install popup notification. Only a "value"
    # attribute is produced, not a message value.
    ctx.add_transforms(
        browser_ftl,
        browser_ftl,
        [
            FTL.Message(
                id=FTL.Identifier("popup-notification-addon-install-unsigned"),
                attributes=[
                    FTL.Attribute(
                        id=FTL.Identifier("value"),
                        value=COPY(browser_properties, "addonInstall.unsigned"),
                    )
                ],
            ),
        ],
    )

    # addonNotifications.ftl: xpinstall prompts/buttons, download progress,
    # and the addon(Local)InstallError-N error strings.
    ctx.add_transforms(
        notifications,
        notifications,
        [
            FTL.Message(
                id=FTL.Identifier("xpinstall-prompt"),
                value=REPLACE(
                    browser_properties,
                    "xpinstallPromptMessage",
                    {"%1$S": TERM_REFERENCE("brand-short-name")},
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("xpinstall-prompt-header"),
                value=REPLACE(
                    browser_properties,
                    "xpinstallPromptMessage.header",
                    {"%1$S": VARIABLE_REFERENCE("host")},
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("xpinstall-prompt-message"),
                value=REPLACE(
                    browser_properties,
                    "xpinstallPromptMessage.message",
                    {"%1$S": VARIABLE_REFERENCE("host")},
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("xpinstall-prompt-header-unknown"),
                value=COPY(browser_properties, "xpinstallPromptMessage.header.unknown"),
            ),
            FTL.Message(
                id=FTL.Identifier("xpinstall-prompt-message-unknown"),
                value=COPY(
                    browser_properties, "xpinstallPromptMessage.message.unknown"
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("xpinstall-prompt-dont-allow"),
                attributes=[
                    FTL.Attribute(
                        id=FTL.Identifier("label"),
                        value=COPY(
                            browser_properties, "xpinstallPromptMessage.dontAllow"
                        ),
                    ),
                    FTL.Attribute(
                        id=FTL.Identifier("accesskey"),
                        value=COPY(
                            browser_properties,
                            "xpinstallPromptMessage.dontAllow.accesskey",
                        ),
                    ),
                ],
            ),
            FTL.Message(
                id=FTL.Identifier("xpinstall-prompt-never-allow"),
                attributes=[
                    FTL.Attribute(
                        id=FTL.Identifier("label"),
                        value=COPY(
                            browser_properties, "xpinstallPromptMessage.neverAllow"
                        ),
                    ),
                    FTL.Attribute(
                        id=FTL.Identifier("accesskey"),
                        value=COPY(
                            browser_properties,
                            "xpinstallPromptMessage.neverAllow.accesskey",
                        ),
                    ),
                ],
            ),
            FTL.Message(
                id=FTL.Identifier("xpinstall-prompt-never-allow-and-report"),
                attributes=[
                    FTL.Attribute(
                        id=FTL.Identifier("label"),
                        value=COPY(
                            browser_properties,
                            "xpinstallPromptMessage.neverAllowAndReport",
                        ),
                    ),
                    FTL.Attribute(
                        id=FTL.Identifier("accesskey"),
                        value=COPY(
                            browser_properties,
                            "xpinstallPromptMessage.neverAllowAndReport.accesskey",
                        ),
                    ),
                ],
            ),
            FTL.Message(
                id=FTL.Identifier("site-permission-install-first-prompt-midi-header"),
                value=COPY(
                    browser_properties, "sitePermissionInstallFirstPrompt.midi.header"
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("site-permission-install-first-prompt-midi-message"),
                value=COPY(
                    browser_properties, "sitePermissionInstallFirstPrompt.midi.message"
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("xpinstall-prompt-install"),
                attributes=[
                    FTL.Attribute(
                        id=FTL.Identifier("label"),
                        value=COPY(
                            browser_properties, "xpinstallPromptMessage.install"
                        ),
                    ),
                    FTL.Attribute(
                        id=FTL.Identifier("accesskey"),
                        value=COPY(
                            browser_properties,
                            "xpinstallPromptMessage.install.accesskey",
                        ),
                    ),
                ],
            ),
            FTL.Message(
                id=FTL.Identifier("xpinstall-disabled-locked"),
                value=COPY(browser_properties, "xpinstallDisabledMessageLocked"),
            ),
            FTL.Message(
                id=FTL.Identifier("xpinstall-disabled"),
                value=COPY(browser_properties, "xpinstallDisabledMessage"),
            ),
            FTL.Message(
                id=FTL.Identifier("xpinstall-disabled-button"),
                attributes=[
                    FTL.Attribute(
                        id=FTL.Identifier("label"),
                        value=COPY(browser_properties, "xpinstallDisabledButton"),
                    ),
                    FTL.Attribute(
                        id=FTL.Identifier("accesskey"),
                        value=COPY(
                            browser_properties, "xpinstallDisabledButton.accesskey"
                        ),
                    ),
                ],
            ),
            FTL.Message(
                id=FTL.Identifier("addon-install-blocked-by-policy"),
                # %3$S is mapped to an empty TextElement, i.e. the third
                # legacy argument is deliberately dropped from the Fluent copy.
                value=REPLACE(
                    browser_properties,
                    "addonInstallBlockedByPolicy",
                    {
                        "%1$S": VARIABLE_REFERENCE("addonName"),
                        "%2$S": VARIABLE_REFERENCE("addonId"),
                        "%3$S": FTL.TextElement(""),
                    },
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("addon-domain-blocked-by-policy"),
                value=COPY(browser_properties, "addonDomainBlockedByPolicy"),
            ),
            FTL.Message(
                id=FTL.Identifier("addon-install-full-screen-blocked"),
                value=COPY(browser_properties, "addonInstallFullScreenBlocked"),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-sideload-menu-item"),
                value=REPLACE(
                    browser_properties,
                    "webextPerms.sideloadMenuItem",
                    {
                        "%1$S": VARIABLE_REFERENCE("addonName"),
                        "%2$S": TERM_REFERENCE("brand-short-name"),
                    },
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-update-menu-item"),
                value=REPLACE(
                    browser_properties,
                    "webextPerms.updateMenuItem",
                    {"%1$S": VARIABLE_REFERENCE("addonName")},
                ),
            ),
            # These two already exist in browser.ftl; COPY_PATTERN duplicates
            # the current Fluent pattern into the new file.
            FTL.Message(
                id=FTL.Identifier("addon-removal-title"),
                value=COPY_PATTERN(browser_ftl, "addon-removal-title"),
            ),
            FTL.Message(
                id=FTL.Identifier("addon-removal-message"),
                value=REPLACE(
                    browser_properties,
                    "webext.remove.confirmation.message",
                    {
                        "%1$S": VARIABLE_REFERENCE("name"),
                        "%2$S": TERM_REFERENCE("brand-shorter-name"),
                    },
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("addon-removal-button"),
                value=COPY(browser_properties, "webext.remove.confirmation.button"),
            ),
            FTL.Message(
                id=FTL.Identifier("addon-removal-abuse-report-checkbox"),
                value=COPY_PATTERN(browser_ftl, "addon-removal-abuse-report-checkbox"),
            ),
            # PLURALS builds a select expression over $addonCount; the foreach
            # callback rewrites the #1 placeholder inside each plural variant.
            FTL.Message(
                id=FTL.Identifier("addon-downloading-and-verifying"),
                value=PLURALS(
                    browser_properties,
                    "addonDownloadingAndVerifying",
                    VARIABLE_REFERENCE("addonCount"),
                    foreach=lambda n: REPLACE_IN_TEXT(
                        n,
                        {"#1": VARIABLE_REFERENCE("addonCount")},
                    ),
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("addon-download-verifying"),
                value=COPY(browser_properties, "addonDownloadVerifying"),
            ),
            FTL.Message(
                id=FTL.Identifier("addon-install-cancel-button"),
                attributes=[
                    FTL.Attribute(
                        id=FTL.Identifier("label"),
                        value=COPY(
                            browser_properties, "addonInstall.cancelButton.label"
                        ),
                    ),
                    FTL.Attribute(
                        id=FTL.Identifier("accesskey"),
                        value=COPY(
                            browser_properties, "addonInstall.cancelButton.accesskey"
                        ),
                    ),
                ],
            ),
            FTL.Message(
                id=FTL.Identifier("addon-install-accept-button"),
                attributes=[
                    FTL.Attribute(
                        id=FTL.Identifier("label"),
                        value=COPY(
                            browser_properties, "addonInstall.acceptButton2.label"
                        ),
                    ),
                    FTL.Attribute(
                        id=FTL.Identifier("accesskey"),
                        value=COPY(
                            browser_properties, "addonInstall.acceptButton2.accesskey"
                        ),
                    ),
                ],
            ),
            FTL.Message(
                id=FTL.Identifier("addon-confirm-install-message"),
                value=PLURALS(
                    browser_properties,
                    "addonConfirmInstall.message",
                    VARIABLE_REFERENCE("addonCount"),
                    foreach=lambda n: REPLACE_IN_TEXT(
                        n,
                        {
                            "#1": TERM_REFERENCE("brand-short-name"),
                            "#2": VARIABLE_REFERENCE("addonCount"),
                        },
                    ),
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("addon-confirm-install-unsigned-message"),
                value=PLURALS(
                    browser_properties,
                    "addonConfirmInstallUnsigned.message",
                    VARIABLE_REFERENCE("addonCount"),
                    foreach=lambda n: REPLACE_IN_TEXT(
                        n,
                        {
                            "#1": TERM_REFERENCE("brand-short-name"),
                            "#2": VARIABLE_REFERENCE("addonCount"),
                        },
                    ),
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("addon-confirm-install-some-unsigned-message"),
                value=PLURALS(
                    browser_properties,
                    "addonConfirmInstallSomeUnsigned.message",
                    VARIABLE_REFERENCE("addonCount"),
                    foreach=lambda n: REPLACE_IN_TEXT(
                        n,
                        {
                            "#1": TERM_REFERENCE("brand-short-name"),
                            "#2": VARIABLE_REFERENCE("addonCount"),
                        },
                    ),
                ),
            ),
            # addonInstallError-N / addonLocalInstallError-N: numbered legacy
            # keys mapped to descriptively named Fluent messages.
            FTL.Message(
                id=FTL.Identifier("addon-install-error-network-failure"),
                value=COPY(browser_properties, "addonInstallError-1"),
            ),
            FTL.Message(
                id=FTL.Identifier("addon-install-error-incorrect-hash"),
                value=REPLACE(
                    browser_properties,
                    "addonInstallError-2",
                    {"%1$S": TERM_REFERENCE("brand-short-name")},
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("addon-install-error-corrupt-file"),
                value=COPY(browser_properties, "addonInstallError-3"),
            ),
            FTL.Message(
                id=FTL.Identifier("addon-install-error-file-access"),
                value=REPLACE(
                    browser_properties,
                    "addonInstallError-4",
                    {
                        "%2$S": VARIABLE_REFERENCE("addonName"),
                        "%1$S": TERM_REFERENCE("brand-short-name"),
                    },
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("addon-install-error-not-signed"),
                value=REPLACE(
                    browser_properties,
                    "addonInstallError-5",
                    {"%1$S": TERM_REFERENCE("brand-short-name")},
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("addon-install-error-invalid-domain"),
                value=REPLACE(
                    browser_properties,
                    "addonInstallError-8",
                    {"%2$S": VARIABLE_REFERENCE("addonName")},
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("addon-local-install-error-network-failure"),
                value=COPY(browser_properties, "addonLocalInstallError-1"),
            ),
            FTL.Message(
                id=FTL.Identifier("addon-local-install-error-incorrect-hash"),
                value=REPLACE(
                    browser_properties,
                    "addonLocalInstallError-2",
                    {"%1$S": TERM_REFERENCE("brand-short-name")},
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("addon-local-install-error-corrupt-file"),
                value=COPY(browser_properties, "addonLocalInstallError-3"),
            ),
            FTL.Message(
                id=FTL.Identifier("addon-local-install-error-file-access"),
                value=REPLACE(
                    browser_properties,
                    "addonLocalInstallError-4",
                    {
                        "%2$S": VARIABLE_REFERENCE("addonName"),
                        "%1$S": TERM_REFERENCE("brand-short-name"),
                    },
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("addon-local-install-error-not-signed"),
                value=COPY(browser_properties, "addonLocalInstallError-5"),
            ),
            FTL.Message(
                id=FTL.Identifier("addon-install-error-incompatible"),
                value=REPLACE(
                    browser_properties,
                    "addonInstallErrorIncompatible",
                    {
                        "%3$S": VARIABLE_REFERENCE("addonName"),
                        "%1$S": TERM_REFERENCE("brand-short-name"),
                        "%2$S": VARIABLE_REFERENCE("appVersion"),
                    },
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("addon-install-error-blocklisted"),
                value=REPLACE(
                    browser_properties,
                    "addonInstallErrorBlocklisted",
                    {"%1$S": VARIABLE_REFERENCE("addonName")},
                ),
            ),
        ],
    )

    # extensionsUI.ftl: default-search-engine prompt and post-install message.
    ctx.add_transforms(
        extensions_ui,
        extensions_ui,
        [
            FTL.Message(
                id=FTL.Identifier("webext-perms-learn-more"),
                value=COPY(browser_properties, "webextPerms.learnMore2"),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-default-search-description"),
                value=REPLACE(
                    browser_properties,
                    "webext.defaultSearch.description",
                    {
                        "%1$S": VARIABLE_REFERENCE("addonName"),
                        "%2$S": VARIABLE_REFERENCE("currentEngine"),
                        "%3$S": VARIABLE_REFERENCE("newEngine"),
                    },
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-default-search-yes"),
                attributes=[
                    FTL.Attribute(
                        id=FTL.Identifier("label"),
                        value=COPY(browser_properties, "webext.defaultSearchYes.label"),
                    ),
                    # NOTE: legacy key uses "accessKey" (capital K); the Fluent
                    # attribute is the lower-case "accesskey".
                    FTL.Attribute(
                        id=FTL.Identifier("accesskey"),
                        value=COPY(
                            browser_properties, "webext.defaultSearchYes.accessKey"
                        ),
                    ),
                ],
            ),
            FTL.Message(
                id=FTL.Identifier("webext-default-search-no"),
                attributes=[
                    FTL.Attribute(
                        id=FTL.Identifier("label"),
                        value=COPY(browser_properties, "webext.defaultSearchNo.label"),
                    ),
                    FTL.Attribute(
                        id=FTL.Identifier("accesskey"),
                        value=COPY(
                            browser_properties, "webext.defaultSearchNo.accessKey"
                        ),
                    ),
                ],
            ),
            FTL.Message(
                id=FTL.Identifier("addon-post-install-message"),
                value=REPLACE(
                    browser_properties,
                    "addonPostInstall.message3",
                    {"%1$S": VARIABLE_REFERENCE("addonName")},
                ),
            ),
        ],
    )

    # toolkit extensions.ftl: permission prompt headers/buttons, sideload and
    # update prompts, host descriptions, and site-permission add-on strings.
    ctx.add_transforms(
        extensions,
        extensions,
        [
            FTL.Message(
                id=FTL.Identifier("webext-perms-header"),
                value=REPLACE(
                    browser_properties,
                    "webextPerms.header",
                    {"%1$S": VARIABLE_REFERENCE("extension")},
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-header-with-perms"),
                value=REPLACE(
                    browser_properties,
                    "webextPerms.headerWithPerms",
                    {"%1$S": VARIABLE_REFERENCE("extension")},
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-header-unsigned"),
                value=REPLACE(
                    browser_properties,
                    "webextPerms.headerUnsigned",
                    {"%1$S": VARIABLE_REFERENCE("extension")},
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-header-unsigned-with-perms"),
                value=REPLACE(
                    browser_properties,
                    "webextPerms.headerUnsignedWithPerms",
                    {"%1$S": VARIABLE_REFERENCE("extension")},
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-add"),
                attributes=[
                    FTL.Attribute(
                        id=FTL.Identifier("label"),
                        value=COPY(browser_properties, "webextPerms.add.label"),
                    ),
                    FTL.Attribute(
                        id=FTL.Identifier("accesskey"),
                        value=COPY(browser_properties, "webextPerms.add.accessKey"),
                    ),
                ],
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-cancel"),
                attributes=[
                    FTL.Attribute(
                        id=FTL.Identifier("label"),
                        value=COPY(browser_properties, "webextPerms.cancel.label"),
                    ),
                    FTL.Attribute(
                        id=FTL.Identifier("accesskey"),
                        value=COPY(browser_properties, "webextPerms.cancel.accessKey"),
                    ),
                ],
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-sideload-header"),
                value=REPLACE(
                    browser_properties,
                    "webextPerms.sideloadHeader",
                    {"%1$S": VARIABLE_REFERENCE("extension")},
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-sideload-text"),
                value=COPY(browser_properties, "webextPerms.sideloadText2"),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-sideload-text-no-perms"),
                value=COPY(browser_properties, "webextPerms.sideloadTextNoPerms"),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-sideload-enable"),
                attributes=[
                    FTL.Attribute(
                        id=FTL.Identifier("label"),
                        value=COPY(
                            browser_properties, "webextPerms.sideloadEnable.label"
                        ),
                    ),
                    FTL.Attribute(
                        id=FTL.Identifier("accesskey"),
                        value=COPY(
                            browser_properties, "webextPerms.sideloadEnable.accessKey"
                        ),
                    ),
                ],
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-sideload-cancel"),
                attributes=[
                    FTL.Attribute(
                        id=FTL.Identifier("label"),
                        value=COPY(
                            browser_properties, "webextPerms.sideloadCancel.label"
                        ),
                    ),
                    FTL.Attribute(
                        id=FTL.Identifier("accesskey"),
                        value=COPY(
                            browser_properties, "webextPerms.sideloadCancel.accessKey"
                        ),
                    ),
                ],
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-update-text"),
                value=REPLACE(
                    browser_properties,
                    "webextPerms.updateText2",
                    {"%1$S": VARIABLE_REFERENCE("extension")},
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-update-accept"),
                attributes=[
                    FTL.Attribute(
                        id=FTL.Identifier("label"),
                        value=COPY(
                            browser_properties, "webextPerms.updateAccept.label"
                        ),
                    ),
                    FTL.Attribute(
                        id=FTL.Identifier("accesskey"),
                        value=COPY(
                            browser_properties, "webextPerms.updateAccept.accessKey"
                        ),
                    ),
                ],
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-optional-perms-header"),
                value=REPLACE(
                    browser_properties,
                    "webextPerms.optionalPermsHeader",
                    {"%1$S": VARIABLE_REFERENCE("extension")},
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-optional-perms-list-intro"),
                value=COPY(browser_properties, "webextPerms.optionalPermsListIntro"),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-optional-perms-allow"),
                attributes=[
                    FTL.Attribute(
                        id=FTL.Identifier("label"),
                        value=COPY(
                            browser_properties, "webextPerms.optionalPermsAllow.label"
                        ),
                    ),
                    FTL.Attribute(
                        id=FTL.Identifier("accesskey"),
                        value=COPY(
                            browser_properties,
                            "webextPerms.optionalPermsAllow.accessKey",
                        ),
                    ),
                ],
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-optional-perms-deny"),
                attributes=[
                    FTL.Attribute(
                        id=FTL.Identifier("label"),
                        value=COPY(
                            browser_properties, "webextPerms.optionalPermsDeny.label"
                        ),
                    ),
                    FTL.Attribute(
                        id=FTL.Identifier("accesskey"),
                        value=COPY(
                            browser_properties,
                            "webextPerms.optionalPermsDeny.accessKey",
                        ),
                    ),
                ],
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-host-description-all-urls"),
                value=COPY(browser_properties, "webextPerms.hostDescription.allUrls"),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-host-description-wildcard"),
                value=REPLACE(
                    browser_properties,
                    "webextPerms.hostDescription.wildcard",
                    {"%1$S": VARIABLE_REFERENCE("domain")},
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-host-description-too-many-wildcards"),
                value=PLURALS(
                    browser_properties,
                    "webextPerms.hostDescription.tooManyWildcards",
                    VARIABLE_REFERENCE("domainCount"),
                    foreach=lambda n: REPLACE_IN_TEXT(
                        n,
                        {"#1": VARIABLE_REFERENCE("domainCount")},
                    ),
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-host-description-one-site"),
                value=REPLACE(
                    browser_properties,
                    "webextPerms.hostDescription.oneSite",
                    {"%1$S": VARIABLE_REFERENCE("domain")},
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-host-description-too-many-sites"),
                value=PLURALS(
                    browser_properties,
                    "webextPerms.hostDescription.tooManySites",
                    VARIABLE_REFERENCE("domainCount"),
                    foreach=lambda n: REPLACE_IN_TEXT(
                        n,
                        {"#1": VARIABLE_REFERENCE("domainCount")},
                    ),
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-site-perms-header-with-gated-perms-midi"),
                value=REPLACE(
                    browser_properties,
                    "webextSitePerms.headerWithGatedPerms.midi",
                    {
                        "%1$S": VARIABLE_REFERENCE("hostname"),
                    },
                ),
            ),
            FTL.Message(
                id=FTL.Identifier(
                    "webext-site-perms-header-with-gated-perms-midi-sysex"
                ),
                value=REPLACE(
                    browser_properties,
                    "webextSitePerms.headerWithGatedPerms.midi-sysex",
                    {
                        "%1$S": VARIABLE_REFERENCE("hostname"),
                    },
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-site-perms-description-gated-perms-midi"),
                value=COPY(
                    browser_properties, "webextSitePerms.descriptionGatedPerms.midi"
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-site-perms-header-with-perms"),
                value=REPLACE(
                    browser_properties,
                    "webextSitePerms.headerWithPerms",
                    {
                        "%1$S": VARIABLE_REFERENCE("extension"),
                        "%2$S": VARIABLE_REFERENCE("hostname"),
                    },
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-site-perms-header-unsigned-with-perms"),
                value=REPLACE(
                    browser_properties,
                    "webextSitePerms.headerUnsignedWithPerms",
                    {
                        "%1$S": VARIABLE_REFERENCE("extension"),
                        "%2$S": VARIABLE_REFERENCE("hostname"),
                    },
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-site-perms-midi"),
                value=COPY(browser_properties, "webextSitePerms.description.midi"),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-site-perms-midi-sysex"),
                value=COPY(
                    browser_properties, "webextSitePerms.description.midi-sysex"
                ),
            ),
        ],
    )

    # extensionPermissions.ftl: one description per WebExtension API
    # permission, all plain copies except nativeMessaging (brand reference).
    ctx.add_transforms(
        permissions,
        permissions,
        [
            FTL.Message(
                id=FTL.Identifier("webext-perms-description-bookmarks"),
                value=COPY(browser_properties, "webextPerms.description.bookmarks"),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-description-browserSettings"),
                value=COPY(
                    browser_properties, "webextPerms.description.browserSettings"
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-description-browsingData"),
                value=COPY(browser_properties, "webextPerms.description.browsingData"),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-description-clipboardRead"),
                value=COPY(browser_properties, "webextPerms.description.clipboardRead"),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-description-clipboardWrite"),
                value=COPY(
                    browser_properties, "webextPerms.description.clipboardWrite"
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-description-declarativeNetRequest"),
                value=COPY(
                    browser_properties, "webextPerms.description.declarativeNetRequest"
                ),
            ),
            FTL.Message(
                id=FTL.Identifier(
                    "webext-perms-description-declarativeNetRequestFeedback"
                ),
                value=COPY(
                    browser_properties,
                    "webextPerms.description.declarativeNetRequestFeedback",
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-description-devtools"),
                value=COPY(browser_properties, "webextPerms.description.devtools"),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-description-downloads"),
                value=COPY(browser_properties, "webextPerms.description.downloads"),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-description-downloads-open"),
                value=COPY(
                    browser_properties, "webextPerms.description.downloads.open"
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-description-find"),
                value=COPY(browser_properties, "webextPerms.description.find"),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-description-geolocation"),
                value=COPY(browser_properties, "webextPerms.description.geolocation"),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-description-history"),
                value=COPY(browser_properties, "webextPerms.description.history"),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-description-management"),
                value=COPY(browser_properties, "webextPerms.description.management"),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-description-nativeMessaging"),
                value=REPLACE(
                    browser_properties,
                    "webextPerms.description.nativeMessaging",
                    {"%1$S": TERM_REFERENCE("brand-short-name")},
                ),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-description-notifications"),
                value=COPY(browser_properties, "webextPerms.description.notifications"),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-description-pkcs11"),
                value=COPY(browser_properties, "webextPerms.description.pkcs11"),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-description-privacy"),
                value=COPY(browser_properties, "webextPerms.description.privacy"),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-description-proxy"),
                value=COPY(browser_properties, "webextPerms.description.proxy"),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-description-sessions"),
                value=COPY(browser_properties, "webextPerms.description.sessions"),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-description-tabs"),
                value=COPY(browser_properties, "webextPerms.description.tabs"),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-description-tabHide"),
                value=COPY(browser_properties, "webextPerms.description.tabHide"),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-description-topSites"),
                value=COPY(browser_properties, "webextPerms.description.topSites"),
            ),
            FTL.Message(
                id=FTL.Identifier("webext-perms-description-webNavigation"),
                value=COPY(browser_properties, "webextPerms.description.webNavigation"),
            ),
        ],
    )
diff --git a/python/l10n/fluent_migrations/bug_1793572_webrtc.py b/python/l10n/fluent_migrations/bug_1793572_webrtc.py
new file mode 100644
index 0000000000..eb07f939a8
--- /dev/null
+++ b/python/l10n/fluent_migrations/bug_1793572_webrtc.py
@@ -0,0 +1,771 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+import fluent.syntax.ast as FTL
+from fluent.migrate.helpers import TERM_REFERENCE, VARIABLE_REFERENCE
+from fluent.migrate.transforms import (
+ COPY,
+ COPY_PATTERN,
+ PLURALS,
+ REPLACE,
+ REPLACE_IN_TEXT,
+)
+
+
+def migrate(ctx):
+ """Bug 1793572 - Convert WebRTC strings to Fluent, part {index}."""
+
+ source = "browser/chrome/browser/webrtcIndicator.properties"
+ browser = "browser/chrome/browser/browser.properties"
+ browser_ftl = "browser/browser/browser.ftl"
+ target = "browser/browser/webrtcIndicator.ftl"
+
+ ctx.add_transforms(
+ target,
+ target,
+ [
+ FTL.Message(
+ id=FTL.Identifier("webrtc-indicator-window"),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("title"),
+ value=COPY_PATTERN(target, "webrtc-indicator-title"),
+ )
+ ],
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-indicator-sharing-camera-and-microphone"),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("tooltiptext"),
+ value=COPY(
+ source, "webrtcIndicator.sharingCameraAndMicrophone.tooltip"
+ ),
+ )
+ ],
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-indicator-sharing-camera"),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("tooltiptext"),
+ value=COPY(source, "webrtcIndicator.sharingCamera.tooltip"),
+ )
+ ],
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-indicator-sharing-microphone"),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("tooltiptext"),
+ value=COPY(source, "webrtcIndicator.sharingMicrophone.tooltip"),
+ )
+ ],
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-indicator-sharing-application"),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("tooltiptext"),
+ value=COPY(
+ source, "webrtcIndicator.sharingApplication.tooltip"
+ ),
+ )
+ ],
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-indicator-sharing-screen"),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("tooltiptext"),
+ value=COPY(source, "webrtcIndicator.sharingScreen.tooltip"),
+ )
+ ],
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-indicator-sharing-window"),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("tooltiptext"),
+ value=COPY(source, "webrtcIndicator.sharingWindow.tooltip"),
+ )
+ ],
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-indicator-sharing-browser"),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("tooltiptext"),
+ value=COPY(source, "webrtcIndicator.sharingBrowser.tooltip"),
+ )
+ ],
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-indicator-menuitem-control-sharing"),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("label"),
+ value=COPY(source, "webrtcIndicator.controlSharing.menuitem"),
+ )
+ ],
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-indicator-menuitem-control-sharing-on"),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("label"),
+ value=REPLACE(
+ source,
+ "webrtcIndicator.controlSharingOn.menuitem",
+ {"%1$S": VARIABLE_REFERENCE("streamTitle")},
+ ),
+ )
+ ],
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-indicator-menuitem-sharing-camera-with"),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("label"),
+ value=REPLACE(
+ source,
+ "webrtcIndicator.sharingCameraWith.menuitem",
+ {"%1$S": VARIABLE_REFERENCE("streamTitle")},
+ ),
+ )
+ ],
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-indicator-menuitem-sharing-microphone-with"),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("label"),
+ value=REPLACE(
+ source,
+ "webrtcIndicator.sharingMicrophoneWith.menuitem",
+ {"%1$S": VARIABLE_REFERENCE("streamTitle")},
+ ),
+ )
+ ],
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-indicator-menuitem-sharing-application-with"),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("label"),
+ value=REPLACE(
+ source,
+ "webrtcIndicator.sharingApplicationWith.menuitem",
+ {"%1$S": VARIABLE_REFERENCE("streamTitle")},
+ ),
+ )
+ ],
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-indicator-menuitem-sharing-screen-with"),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("label"),
+ value=REPLACE(
+ source,
+ "webrtcIndicator.sharingScreenWith.menuitem",
+ {"%1$S": VARIABLE_REFERENCE("streamTitle")},
+ ),
+ )
+ ],
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-indicator-menuitem-sharing-window-with"),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("label"),
+ value=REPLACE(
+ source,
+ "webrtcIndicator.sharingWindowWith.menuitem",
+ {"%1$S": VARIABLE_REFERENCE("streamTitle")},
+ ),
+ )
+ ],
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-indicator-menuitem-sharing-browser-with"),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("label"),
+ value=REPLACE(
+ source,
+ "webrtcIndicator.sharingBrowserWith.menuitem",
+ {"%1$S": VARIABLE_REFERENCE("streamTitle")},
+ ),
+ )
+ ],
+ ),
+ FTL.Message(
+ id=FTL.Identifier(
+ "webrtc-indicator-menuitem-sharing-camera-with-n-tabs"
+ ),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("label"),
+ value=PLURALS(
+ source,
+ "webrtcIndicator.sharingCameraWithNTabs.menuitem",
+ VARIABLE_REFERENCE("tabCount"),
+ foreach=lambda n: REPLACE_IN_TEXT(
+ n,
+ {"#1": VARIABLE_REFERENCE("tabCount")},
+ ),
+ ),
+ )
+ ],
+ ),
+ FTL.Message(
+ id=FTL.Identifier(
+ "webrtc-indicator-menuitem-sharing-microphone-with-n-tabs"
+ ),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("label"),
+ value=PLURALS(
+ source,
+ "webrtcIndicator.sharingMicrophoneWithNTabs.menuitem",
+ VARIABLE_REFERENCE("tabCount"),
+ foreach=lambda n: REPLACE_IN_TEXT(
+ n,
+ {"#1": VARIABLE_REFERENCE("tabCount")},
+ ),
+ ),
+ )
+ ],
+ ),
+ FTL.Message(
+ id=FTL.Identifier(
+ "webrtc-indicator-menuitem-sharing-application-with-n-tabs"
+ ),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("label"),
+ value=PLURALS(
+ source,
+ "webrtcIndicator.sharingApplicationWithNTabs.menuitem",
+ VARIABLE_REFERENCE("tabCount"),
+ foreach=lambda n: REPLACE_IN_TEXT(
+ n,
+ {"#1": VARIABLE_REFERENCE("tabCount")},
+ ),
+ ),
+ )
+ ],
+ ),
+ FTL.Message(
+ id=FTL.Identifier(
+ "webrtc-indicator-menuitem-sharing-screen-with-n-tabs"
+ ),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("label"),
+ value=PLURALS(
+ source,
+ "webrtcIndicator.sharingScreenWithNTabs.menuitem",
+ VARIABLE_REFERENCE("tabCount"),
+ foreach=lambda n: REPLACE_IN_TEXT(
+ n,
+ {"#1": VARIABLE_REFERENCE("tabCount")},
+ ),
+ ),
+ )
+ ],
+ ),
+ FTL.Message(
+ id=FTL.Identifier(
+ "webrtc-indicator-menuitem-sharing-window-with-n-tabs"
+ ),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("label"),
+ value=PLURALS(
+ source,
+ "webrtcIndicator.sharingWindowWithNTabs.menuitem",
+ VARIABLE_REFERENCE("tabCount"),
+ foreach=lambda n: REPLACE_IN_TEXT(
+ n,
+ {"#1": VARIABLE_REFERENCE("tabCount")},
+ ),
+ ),
+ )
+ ],
+ ),
+ FTL.Message(
+ id=FTL.Identifier(
+ "webrtc-indicator-menuitem-sharing-browser-with-n-tabs"
+ ),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("label"),
+ value=PLURALS(
+ source,
+ "webrtcIndicator.sharingBrowserWithNTabs.menuitem",
+ VARIABLE_REFERENCE("tabCount"),
+ foreach=lambda n: REPLACE_IN_TEXT(
+ n,
+ {"#1": VARIABLE_REFERENCE("tabCount")},
+ ),
+ ),
+ )
+ ],
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-item-camera"),
+ value=REPLACE(
+ browser,
+ "getUserMedia.sharingMenuCamera",
+ {
+ "%1$S (": FTL.TextElement(""),
+ "%1$S(": FTL.TextElement(""),
+ ")": FTL.TextElement(""),
+ ")": FTL.TextElement(""),
+ },
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-item-microphone"),
+ value=REPLACE(
+ browser,
+ "getUserMedia.sharingMenuMicrophone",
+ {
+ "%1$S (": FTL.TextElement(""),
+ "%1$S(": FTL.TextElement(""),
+ ")": FTL.TextElement(""),
+ ")": FTL.TextElement(""),
+ },
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-item-audio-capture"),
+ value=REPLACE(
+ browser,
+ "getUserMedia.sharingMenuAudioCapture",
+ {
+ "%1$S (": FTL.TextElement(""),
+ "%1$S(": FTL.TextElement(""),
+ ")": FTL.TextElement(""),
+ ")": FTL.TextElement(""),
+ },
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-item-application"),
+ value=REPLACE(
+ browser,
+ "getUserMedia.sharingMenuApplication",
+ {
+ "%1$S (": FTL.TextElement(""),
+ "%1$S(": FTL.TextElement(""),
+ ")": FTL.TextElement(""),
+ ")": FTL.TextElement(""),
+ },
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-item-screen"),
+ value=REPLACE(
+ browser,
+ "getUserMedia.sharingMenuScreen",
+ {
+ "%1$S (": FTL.TextElement(""),
+ "%1$S(": FTL.TextElement(""),
+ ")": FTL.TextElement(""),
+ ")": FTL.TextElement(""),
+ },
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-item-window"),
+ value=REPLACE(
+ browser,
+ "getUserMedia.sharingMenuWindow",
+ {
+ "%1$S (": FTL.TextElement(""),
+ "%1$S(": FTL.TextElement(""),
+ ")": FTL.TextElement(""),
+ ")": FTL.TextElement(""),
+ },
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-item-browser"),
+ value=REPLACE(
+ browser,
+ "getUserMedia.sharingMenuBrowser",
+ {
+ "%1$S (": FTL.TextElement(""),
+ "%1$S(": FTL.TextElement(""),
+ ")": FTL.TextElement(""),
+ ")": FTL.TextElement(""),
+ },
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-sharing-menuitem-unknown-host"),
+ value=COPY(browser, "getUserMedia.sharingMenuUnknownHost"),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-sharing-menuitem"),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("label"),
+ value=FTL.Pattern(
+ [FTL.TextElement("{ $origin } ({ $itemList })")]
+ ),
+ ),
+ ],
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-sharing-menu"),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("label"),
+ value=COPY(browser, "getUserMedia.sharingMenu.label"),
+ ),
+ FTL.Attribute(
+ id=FTL.Identifier("accesskey"),
+ value=COPY(browser, "getUserMedia.sharingMenu.accesskey"),
+ ),
+ ],
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-allow-share-camera"),
+ value=REPLACE(
+ browser,
+ "getUserMedia.shareCamera3.message",
+ {"%1$S": VARIABLE_REFERENCE("origin")},
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-allow-share-microphone"),
+ value=REPLACE(
+ browser,
+ "getUserMedia.shareMicrophone3.message",
+ {"%1$S": VARIABLE_REFERENCE("origin")},
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-allow-share-screen"),
+ value=REPLACE(
+ browser,
+ "getUserMedia.shareScreen4.message",
+ {"%1$S": VARIABLE_REFERENCE("origin")},
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-allow-share-camera-and-microphone"),
+ value=REPLACE(
+ browser,
+ "getUserMedia.shareCameraAndMicrophone3.message",
+ {"%1$S": VARIABLE_REFERENCE("origin")},
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-allow-share-camera-and-audio-capture"),
+ value=REPLACE(
+ browser,
+ "getUserMedia.shareCameraAndAudioCapture3.message",
+ {"%1$S": VARIABLE_REFERENCE("origin")},
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-allow-share-screen-and-microphone"),
+ value=REPLACE(
+ browser,
+ "getUserMedia.shareScreenAndMicrophone4.message",
+ {"%1$S": VARIABLE_REFERENCE("origin")},
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-allow-share-screen-and-audio-capture"),
+ value=REPLACE(
+ browser,
+ "getUserMedia.shareScreenAndAudioCapture4.message",
+ {"%1$S": VARIABLE_REFERENCE("origin")},
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-allow-share-audio-capture"),
+ value=REPLACE(
+ browser,
+ "getUserMedia.shareAudioCapture3.message",
+ {"%1$S": VARIABLE_REFERENCE("origin")},
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-allow-share-speaker"),
+ value=REPLACE(
+ browser,
+ "selectAudioOutput.shareSpeaker.message",
+ {"%1$S": VARIABLE_REFERENCE("origin")},
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-allow-share-camera-unsafe-delegation"),
+ value=REPLACE(
+ browser,
+ "getUserMedia.shareCameraUnsafeDelegation2.message",
+ {
+ "%1$S": VARIABLE_REFERENCE("origin"),
+ "%2$S": VARIABLE_REFERENCE("thirdParty"),
+ },
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-allow-share-microphone-unsafe-delegations"),
+ value=REPLACE(
+ browser,
+ "getUserMedia.shareMicrophoneUnsafeDelegations2.message",
+ {
+ "%1$S": VARIABLE_REFERENCE("origin"),
+ "%2$S": VARIABLE_REFERENCE("thirdParty"),
+ },
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-allow-share-screen-unsafe-delegation"),
+ value=REPLACE(
+ browser,
+ "getUserMedia.shareScreenUnsafeDelegation2.message",
+ {
+ "%1$S": VARIABLE_REFERENCE("origin"),
+ "%2$S": VARIABLE_REFERENCE("thirdParty"),
+ },
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier(
+ "webrtc-allow-share-camera-and-microphone-unsafe-delegation"
+ ),
+ value=REPLACE(
+ browser,
+ "getUserMedia.shareCameraAndMicrophoneUnsafeDelegation2.message",
+ {
+ "%1$S": VARIABLE_REFERENCE("origin"),
+ "%2$S": VARIABLE_REFERENCE("thirdParty"),
+ },
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier(
+ "webrtc-allow-share-camera-and-audio-capture-unsafe-delegation"
+ ),
+ value=REPLACE(
+ browser,
+ "getUserMedia.shareCameraAndAudioCaptureUnsafeDelegation2.message",
+ {
+ "%1$S": VARIABLE_REFERENCE("origin"),
+ "%2$S": VARIABLE_REFERENCE("thirdParty"),
+ },
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier(
+ "webrtc-allow-share-screen-and-microphone-unsafe-delegation"
+ ),
+ value=REPLACE(
+ browser,
+ "getUserMedia.shareScreenAndMicrophoneUnsafeDelegation2.message",
+ {
+ "%1$S": VARIABLE_REFERENCE("origin"),
+ "%2$S": VARIABLE_REFERENCE("thirdParty"),
+ },
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier(
+ "webrtc-allow-share-screen-and-audio-capture-unsafe-delegation"
+ ),
+ value=REPLACE(
+ browser,
+ "getUserMedia.shareScreenAndAudioCaptureUnsafeDelegation2.message",
+ {
+ "%1$S": VARIABLE_REFERENCE("origin"),
+ "%2$S": VARIABLE_REFERENCE("thirdParty"),
+ },
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-allow-share-speaker-unsafe-delegation"),
+ value=REPLACE(
+ browser,
+ "selectAudioOutput.shareSpeakerUnsafeDelegation.message",
+ {
+ "%1$S": VARIABLE_REFERENCE("origin"),
+ "%2$S": VARIABLE_REFERENCE("thirdParty"),
+ },
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-share-screen-warning"),
+ value=COPY(browser, "getUserMedia.shareScreenWarning2.message"),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-share-browser-warning"),
+ value=REPLACE(
+ browser,
+ "getUserMedia.shareFirefoxWarning2.message",
+ {"%1$S": TERM_REFERENCE("brand-short-name")},
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-share-screen-learn-more"),
+ value=COPY(browser, "getUserMedia.shareScreen.learnMoreLabel"),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-pick-window-or-screen"),
+ value=COPY(browser, "getUserMedia.pickWindowOrScreen.label"),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-share-entire-screen"),
+ value=COPY(browser, "getUserMedia.shareEntireScreen.label"),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-share-pipe-wire-portal"),
+ value=COPY(browser, "getUserMedia.sharePipeWirePortal.label"),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-share-monitor"),
+ value=REPLACE(
+ browser,
+ "getUserMedia.shareMonitor.label",
+ {"%1$S": VARIABLE_REFERENCE("monitorIndex")},
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-share-application"),
+ value=PLURALS(
+ browser,
+ "getUserMedia.shareApplicationWindowCount.label",
+ VARIABLE_REFERENCE("windowCount"),
+ foreach=lambda n: REPLACE_IN_TEXT(
+ n,
+ {
+ "#1": VARIABLE_REFERENCE("appName"),
+ "#2": VARIABLE_REFERENCE("windowCount"),
+ },
+ ),
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-action-allow"),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("label"),
+ value=COPY(browser, "getUserMedia.allow.label"),
+ ),
+ FTL.Attribute(
+ id=FTL.Identifier("accesskey"),
+ value=COPY(browser, "getUserMedia.allow.accesskey"),
+ ),
+ ],
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-action-block"),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("label"),
+ value=COPY_PATTERN(
+ browser_ftl, "popup-screen-sharing-block.label"
+ ),
+ ),
+ FTL.Attribute(
+ id=FTL.Identifier("accesskey"),
+ value=COPY_PATTERN(
+ browser_ftl, "popup-screen-sharing-block.accesskey"
+ ),
+ ),
+ ],
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-action-always-block"),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("label"),
+ value=COPY_PATTERN(
+ browser_ftl, "popup-screen-sharing-always-block.label"
+ ),
+ ),
+ FTL.Attribute(
+ id=FTL.Identifier("accesskey"),
+ value=COPY_PATTERN(
+ browser_ftl,
+ "popup-screen-sharing-always-block.accesskey",
+ ),
+ ),
+ ],
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-action-not-now"),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("label"),
+ value=COPY(browser, "getUserMedia.notNow.label"),
+ ),
+ FTL.Attribute(
+ id=FTL.Identifier("accesskey"),
+ value=COPY(browser, "getUserMedia.notNow.accesskey"),
+ ),
+ ],
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-remember-allow-checkbox"),
+ value=COPY(browser, "getUserMedia.remember"),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-mute-notifications-checkbox"),
+ value=COPY_PATTERN(browser_ftl, "popup-mute-notifications-checkbox"),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-reason-for-no-permanent-allow-screen"),
+ value=REPLACE(
+ browser,
+ "getUserMedia.reasonForNoPermanentAllow.screen3",
+ {"%1$S": TERM_REFERENCE("brand-short-name")},
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-reason-for-no-permanent-allow-audio"),
+ value=REPLACE(
+ browser,
+ "getUserMedia.reasonForNoPermanentAllow.audio",
+ {"%1$S": TERM_REFERENCE("brand-short-name")},
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("webrtc-reason-for-no-permanent-allow-insecure"),
+ value=REPLACE(
+ browser,
+ "getUserMedia.reasonForNoPermanentAllow.insecure",
+ {"%1$S": TERM_REFERENCE("brand-short-name")},
+ ),
+ ),
+ ],
+ )
+
+ ctx.add_transforms(
+ browser_ftl,
+ browser_ftl,
+ [
+ FTL.Message(
+ id=FTL.Identifier("popup-select-window-or-screen"),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("label"),
+ value=COPY(browser, "getUserMedia.selectWindowOrScreen2.label"),
+ ),
+ FTL.Attribute(
+ id=FTL.Identifier("accesskey"),
+ value=COPY(
+ browser, "getUserMedia.selectWindowOrScreen2.accesskey"
+ ),
+ ),
+ ],
+ ),
+ ],
+ )
diff --git a/python/l10n/fluent_migrations/bug_1813077_popup_notification_learn_more.py b/python/l10n/fluent_migrations/bug_1813077_popup_notification_learn_more.py
new file mode 100644
index 0000000000..bba68d163e
--- /dev/null
+++ b/python/l10n/fluent_migrations/bug_1813077_popup_notification_learn_more.py
@@ -0,0 +1,22 @@
+# coding=utf8
+
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+from __future__ import absolute_import
+import fluent.syntax.ast as FTL
+from fluent.migrate.helpers import transforms_from
+
+
+def migrate(ctx):
+ """Bug 1813077 - Migrate xpinstallPromptMessage.learnMore to Fluent , part {index}."""
+
+ ctx.add_transforms(
+ "browser/browser/browser.ftl",
+ "browser/browser/browser.ftl",
+ transforms_from(
+ """
+popup-notification-xpinstall-prompt-learn-more = { COPY("browser/chrome/browser/browser.properties", "xpinstallPromptMessage.learnMore") }
+"""
+ ),
+ )
diff --git a/python/l10n/fluent_migrations/bug_1814261_mixed_content_identity_panel.py b/python/l10n/fluent_migrations/bug_1814261_mixed_content_identity_panel.py
new file mode 100644
index 0000000000..a135870df8
--- /dev/null
+++ b/python/l10n/fluent_migrations/bug_1814261_mixed_content_identity_panel.py
@@ -0,0 +1,49 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+import fluent.syntax.ast as FTL
+
+from fluent.migrate.transforms import TransformPattern
+
+
+class STRIP_LEARNMORE(TransformPattern):
+ # Used to remove `<a data-l10n-name="link">SOME TEXT</a>` from a string
+ def visit_TextElement(self, node):
+ link_start = node.value.find('<label data-l10n-name="link">')
+ if link_start != -1:
+ # Replace string up to the link, remove remaining spaces afterwards.
+ # Removing an extra character directly is not safe, as it could be
+ # punctuation.
+ node.value = node.value[:link_start].rstrip()
+
+ return node
+
+
+def migrate(ctx):
+ """Bug 1814261 - Use moz-support-link in the mixed-content section of the identity panel, part {index}."""
+
+ browser_ftl = "browser/browser/browser.ftl"
+ ctx.add_transforms(
+ browser_ftl,
+ browser_ftl,
+ [
+ FTL.Message(
+ id=FTL.Identifier("identity-description-active-blocked2"),
+ value=STRIP_LEARNMORE(
+ browser_ftl, "identity-description-active-blocked"
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("identity-description-passive-loaded-insecure2"),
+ value=STRIP_LEARNMORE(
+ browser_ftl, "identity-description-passive-loaded-insecure"
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("identity-description-passive-loaded-mixed2"),
+ value=STRIP_LEARNMORE(
+ browser_ftl, "identity-description-passive-loaded-mixed"
+ ),
+ ),
+ ],
+ )
diff --git a/python/l10n/fluent_migrations/bug_1814266_identity_custom_root.py b/python/l10n/fluent_migrations/bug_1814266_identity_custom_root.py
new file mode 100644
index 0000000000..7eb144230d
--- /dev/null
+++ b/python/l10n/fluent_migrations/bug_1814266_identity_custom_root.py
@@ -0,0 +1,34 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+import fluent.syntax.ast as FTL
+
+from fluent.migrate.transforms import TransformPattern
+
+
+class STRIP_LEARNMORE(TransformPattern):
+ # Used to remove `<a data-l10n-name="link">SOME TEXT</a>` from a string
+ def visit_TextElement(self, node):
+ link_start = node.value.find('<label data-l10n-name="link">')
+ # Replace string up to the link, remove remaining spaces afterwards.
+ # Removing an extra character directly is not safe, as it could be
+ # punctuation.
+ node.value = node.value[:link_start].rstrip()
+
+ return node
+
+
+def migrate(ctx):
+ """Bug 1814266 - Use moz-support-link in identity panel, part {index}."""
+
+ browser_ftl = "browser/browser/browser.ftl"
+ ctx.add_transforms(
+ browser_ftl,
+ browser_ftl,
+ [
+ FTL.Message(
+ id=FTL.Identifier("identity-description-custom-root2"),
+ value=STRIP_LEARNMORE(browser_ftl, "identity-description-custom-root"),
+ ),
+ ],
+ )
diff --git a/python/l10n/fluent_migrations/bug_1818322_mozTabList.py b/python/l10n/fluent_migrations/bug_1818322_mozTabList.py
new file mode 100644
index 0000000000..9be3037550
--- /dev/null
+++ b/python/l10n/fluent_migrations/bug_1818322_mozTabList.py
@@ -0,0 +1,26 @@
+# coding=utf8
+
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+from fluent.migrate.helpers import transforms_from
+from fluent.migrate import COPY
+
+
+def migrate(ctx):
+ """Bug 1818322 - Create MozTabList and MozTabRow reusable components, part {index}."""
+ ctx.add_transforms(
+ "toolkit/toolkit/global/mozTabList.ftl",
+ "toolkit/toolkit/global/mozTabList.ftl",
+ transforms_from(
+ """
+mztabrow-tabs-list-tab =
+ .title = {COPY_PATTERN(from_path, "firefoxview-tabs-list-tab-button.title")}
+mztabrow-dismiss-tab-button =
+ .title = {COPY_PATTERN(from_path, "firefoxview-closed-tabs-dismiss-tab.title")}
+mztabrow-just-now-timestamp = {COPY_PATTERN(from_path, "firefoxview-just-now-timestamp")}
+ """,
+ from_path="browser/browser/firefoxView.ftl",
+ ),
+ )
diff --git a/python/l10n/fluent_migrations/bug_1820654_update_manual.py b/python/l10n/fluent_migrations/bug_1820654_update_manual.py
new file mode 100644
index 0000000000..fe3a18e38b
--- /dev/null
+++ b/python/l10n/fluent_migrations/bug_1820654_update_manual.py
@@ -0,0 +1,30 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+import fluent.syntax.ast as FTL
+
+from fluent.migrate.transforms import TransformPattern
+
+
+class REPLACE_LABEL(TransformPattern):
+ # Used to replace `<label data-l10n-name="manual-link"/>`
+ def visit_TextElement(self, node):
+ node.value = node.value.replace("<label", "<a")
+
+ return node
+
+
+def migrate(ctx):
+ """Bug 1820654 - Use html:a in manualUpdate box, part {index}."""
+
+ aboutDialog_ftl = "browser/browser/aboutDialog.ftl"
+ ctx.add_transforms(
+ aboutDialog_ftl,
+ aboutDialog_ftl,
+ [
+ FTL.Message(
+ id=FTL.Identifier("aboutdialog-update-manual"),
+ value=REPLACE_LABEL(aboutDialog_ftl, "update-manual"),
+ ),
+ ],
+ )
diff --git a/python/l10n/fluent_migrations/bug_1821187_migrationWizard_password_file_import_strings.py b/python/l10n/fluent_migrations/bug_1821187_migrationWizard_password_file_import_strings.py
new file mode 100644
index 0000000000..9f74ff9837
--- /dev/null
+++ b/python/l10n/fluent_migrations/bug_1821187_migrationWizard_password_file_import_strings.py
@@ -0,0 +1,27 @@
+# coding=utf8
+
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+from __future__ import absolute_import
+import fluent.syntax.ast as FTL
+from fluent.migrate import COPY_PATTERN
+from fluent.migrate.helpers import transforms_from
+
+
+def migrate(ctx):
+ """Bug 1821187 - Copy password file import strings to migrationWizard.ftl, part {index}."""
+
+ ctx.add_transforms(
+ "browser/browser/migrationWizard.ftl",
+ "browser/browser/migrationWizard.ftl",
+ transforms_from(
+ """
+migration-passwords-from-file-csv-filter-title =
+ {COPY_PATTERN(from_path, "about-logins-import-file-picker-csv-filter-title")}
+migration-passwords-from-file-tsv-filter-title =
+ {COPY_PATTERN(from_path, "about-logins-import-file-picker-tsv-filter-title")}
+ """,
+ from_path="browser/browser/aboutLogins.ftl",
+ ),
+ )
diff --git a/python/l10n/fluent_migrations/bug_1821779_migrationWizard_browser_names.py b/python/l10n/fluent_migrations/bug_1821779_migrationWizard_browser_names.py
new file mode 100644
index 0000000000..6bcaa35da8
--- /dev/null
+++ b/python/l10n/fluent_migrations/bug_1821779_migrationWizard_browser_names.py
@@ -0,0 +1,38 @@
+# coding=utf8
+
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+from __future__ import absolute_import
+import fluent.syntax.ast as FTL
+from fluent.migrate.helpers import transforms_from
+
+
+def migrate(ctx):
+ """Bug 1821779 - Move existing browser names to migrationWizard.ftl, part {index}."""
+
+ ctx.add_transforms(
+ "browser/browser/migrationWizard.ftl",
+ "browser/browser/migrationWizard.ftl",
+ transforms_from(
+ """
+migration-wizard-migrator-display-name-brave = {COPY_PATTERN(from_path, "import-from-brave.label")}
+migration-wizard-migrator-display-name-canary = {COPY_PATTERN(from_path, "import-from-canary.label")}
+migration-wizard-migrator-display-name-chrome = {COPY_PATTERN(from_path, "import-from-chrome.label")}
+migration-wizard-migrator-display-name-chrome-beta = {COPY_PATTERN(from_path, "import-from-chrome-beta.label")}
+migration-wizard-migrator-display-name-chrome-dev = {COPY_PATTERN(from_path, "import-from-chrome-dev.label")}
+migration-wizard-migrator-display-name-chromium = {COPY_PATTERN(from_path, "import-from-chromium.label")}
+migration-wizard-migrator-display-name-chromium-360se = {COPY_PATTERN(from_path, "import-from-360se.label")}
+migration-wizard-migrator-display-name-chromium-edge = {COPY_PATTERN(from_path, "import-from-edge.label")}
+migration-wizard-migrator-display-name-chromium-edge-beta = {COPY_PATTERN(from_path, "import-from-edge-beta.label")}
+migration-wizard-migrator-display-name-edge-legacy = {COPY_PATTERN(from_path, "import-from-edge-legacy.label")}
+migration-wizard-migrator-display-name-firefox = {COPY_PATTERN(from_path, "import-from-firefox.label")}
+migration-wizard-migrator-display-name-ie = {COPY_PATTERN(from_path, "import-from-ie.label")}
+migration-wizard-migrator-display-name-opera = {COPY_PATTERN(from_path, "import-from-opera.label")}
+migration-wizard-migrator-display-name-opera-gx = {COPY_PATTERN(from_path, "import-from-opera-gx.label")}
+migration-wizard-migrator-display-name-safari = {COPY_PATTERN(from_path, "import-from-safari.label")}
+migration-wizard-migrator-display-name-vivaldi = {COPY_PATTERN(from_path, "import-from-vivaldi.label")}
+""",
+ from_path="browser/browser/migration.ftl",
+ ),
+ )
diff --git a/python/l10n/fluent_migrations/bug_1828443_pocket_policy.py b/python/l10n/fluent_migrations/bug_1828443_pocket_policy.py
new file mode 100644
index 0000000000..6f9fbe5d7b
--- /dev/null
+++ b/python/l10n/fluent_migrations/bug_1828443_pocket_policy.py
@@ -0,0 +1,30 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+import fluent.syntax.ast as FTL
+
+from fluent.migrate.transforms import TransformPattern
+
+
+class REPLACE_POCKET(TransformPattern):
+ # Replace Pocket with term `{ -pocket-brand-name }`
+ def visit_TextElement(self, node):
+ node.value = node.value.replace("Pocket", "{ -pocket-brand-name }")
+
+ return node
+
+
+def migrate(ctx):
+ """Bug 1828443 - Use Fluent term for Pocket in policy string, part {index}."""
+
+ ftl_file = "browser/browser/policies/policies-descriptions.ftl"
+ ctx.add_transforms(
+ ftl_file,
+ ftl_file,
+ [
+ FTL.Message(
+ id=FTL.Identifier("policy-DisablePocket2"),
+ value=REPLACE_POCKET(ftl_file, "policy-DisablePocket"),
+ ),
+ ],
+ )
diff --git a/python/l10n/fluent_migrations/bug_1828767_sanitize_dialog_native_size.py b/python/l10n/fluent_migrations/bug_1828767_sanitize_dialog_native_size.py
new file mode 100644
index 0000000000..9d311c3fae
--- /dev/null
+++ b/python/l10n/fluent_migrations/bug_1828767_sanitize_dialog_native_size.py
@@ -0,0 +1,77 @@
+from fluent.migrate import COPY_PATTERN
+from fluent.migrate.transforms import TransformPattern, REPLACE_IN_TEXT
+from fluent.migrate.helpers import MESSAGE_REFERENCE
+import fluent.syntax.ast as FTL
+
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+class REPLACE_PATTERN(TransformPattern):
+ """Hacky custom transform that works."""
+
+ def __init__(self, ctx, path, key, replacements, **kwargs):
+ super(REPLACE_PATTERN, self).__init__(path, key, **kwargs)
+ self.ctx = ctx
+ self.replacements = replacements
+
+ def visit_Pattern(self, source):
+ source = self.generic_visit(source)
+ target = FTL.Pattern([])
+ for element in source.elements:
+ if isinstance(element, FTL.TextElement):
+ pattern = REPLACE_IN_TEXT(element, self.replacements)(self.ctx)
+ target.elements += pattern.elements
+ else:
+ target.elements += [element]
+ return target
+
+
+def replace_with_min_size_transform(ctx, file, identifier, to_identifier=None):
+ if to_identifier is None:
+ to_identifier = identifier + "2"
+ attributes = [
+ FTL.Attribute(
+ id=FTL.Identifier("title"),
+ value=COPY_PATTERN(file, "{}.title".format(identifier)),
+ ),
+ FTL.Attribute(
+ id=FTL.Identifier("style"),
+ value=REPLACE_PATTERN(
+ ctx,
+ file,
+ "{}.style".format(identifier),
+ {
+ "width:": FTL.TextElement("min-width:"),
+ "height:": FTL.TextElement("min-height:"),
+ },
+ ),
+ ),
+ ]
+
+ return FTL.Message(
+ id=FTL.Identifier(to_identifier),
+ attributes=attributes,
+ )
+
+
+def migrate(ctx):
+ """Bug 1828767 - Migrate sanitize dialog to use min-width, part {index}."""
+ ctx.add_transforms(
+ "browser/browser/sanitize.ftl",
+ "browser/browser/sanitize.ftl",
+ [
+ replace_with_min_size_transform(
+ ctx,
+ "browser/browser/sanitize.ftl",
+ "dialog-title",
+ "sanitize-dialog-title",
+ ),
+ replace_with_min_size_transform(
+ ctx,
+ "browser/browser/sanitize.ftl",
+ "dialog-title-everything",
+ "sanitize-dialog-title-everything",
+ ),
+ ],
+ )
diff --git a/python/l10n/fluent_migrations/bug_1830042_places.py b/python/l10n/fluent_migrations/bug_1830042_places.py
new file mode 100644
index 0000000000..0109d3952f
--- /dev/null
+++ b/python/l10n/fluent_migrations/bug_1830042_places.py
@@ -0,0 +1,127 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+import fluent.syntax.ast as FTL
+from fluent.migrate.helpers import TERM_REFERENCE, transforms_from, VARIABLE_REFERENCE
+from fluent.migrate.transforms import COPY, PLURALS, REPLACE, REPLACE_IN_TEXT
+
+
+def migrate(ctx):
+ """Bug 1830042 - Convert places.properties to Fluent, part {index}."""
+
+ source = "browser/chrome/browser/places/places.properties"
+ target = "browser/browser/places.ftl"
+ ctx.add_transforms(
+ target,
+ target,
+ [
+ FTL.Message(
+ id=FTL.Identifier("places-details-pane-no-items"),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("value"),
+ value=COPY(source, "detailsPane.noItems"),
+ )
+ ],
+ ),
+ FTL.Message(
+ id=FTL.Identifier("places-details-pane-items-count"),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("value"),
+ value=PLURALS(
+ source,
+ "detailsPane.itemsCountLabel",
+ VARIABLE_REFERENCE("count"),
+ foreach=lambda n: REPLACE_IN_TEXT(
+ n,
+ {"#1": VARIABLE_REFERENCE("count")},
+ ),
+ ),
+ )
+ ],
+ ),
+ FTL.Message(
+ id=FTL.Identifier("places-locked-prompt"),
+ value=REPLACE(
+ source,
+ "lockPrompt.text",
+ {"%1$S": TERM_REFERENCE("brand-short-name")},
+ ),
+ ),
+ ]
+ + transforms_from(
+ """
+places-empty-bookmarks-folder =
+ .label = { COPY(source, "bookmarksMenuEmptyFolder") }
+places-delete-page =
+ .label =
+ { $count ->
+ [1] { COPY(source, "cmd.deleteSinglePage.label") }
+ *[other] { COPY(source, "cmd.deleteMultiplePages.label") }
+ }
+ .accesskey = { COPY(source, "cmd.deleteSinglePage.accesskey") }
+places-create-bookmark =
+ .label =
+ { $count ->
+ [1] { COPY(source, "cmd.bookmarkSinglePage2.label") }
+ *[other] { COPY(source, "cmd.bookmarkMultiplePages2.label") }
+ }
+ .accesskey = { COPY(source, "cmd.bookmarkSinglePage2.accesskey") }
+places-search-bookmarks =
+ .placeholder = { COPY(source, "searchBookmarks") }
+places-search-history =
+ .placeholder = { COPY(source, "searchHistory") }
+places-search-downloads =
+ .placeholder = { COPY(source, "searchDownloads") }
+places-view-sortby-name =
+ .label = { COPY(source, "view.sortBy.1.name.label") }
+ .accesskey = { COPY(source, "view.sortBy.1.name.accesskey") }
+places-view-sortby-url =
+ .label = { COPY(source, "view.sortBy.1.url.label") }
+ .accesskey = { COPY(source, "view.sortBy.1.url.accesskey") }
+places-view-sortby-date =
+ .label = { COPY(source, "view.sortBy.1.date.label") }
+ .accesskey = { COPY(source, "view.sortBy.1.date.accesskey") }
+places-view-sortby-visit-count =
+ .label = { COPY(source, "view.sortBy.1.visitCount.label") }
+ .accesskey = { COPY(source, "view.sortBy.1.visitCount.accesskey") }
+places-view-sortby-date-added =
+ .label = { COPY(source, "view.sortBy.1.dateAdded.label") }
+ .accesskey = { COPY(source, "view.sortBy.1.dateAdded.accesskey") }
+places-view-sortby-last-modified =
+ .label = { COPY(source, "view.sortBy.1.lastModified.label") }
+ .accesskey = { COPY(source, "view.sortBy.1.lastModified.accesskey") }
+places-view-sortby-tags =
+ .label = { COPY(source, "view.sortBy.1.tags.label") }
+ .accesskey = { COPY(source, "view.sortBy.1.tags.accesskey") }
+""",
+ source=source,
+ ),
+ )
+
+ ctx.add_transforms(
+ "browser/browser/placesPrompts.ftl",
+ "browser/browser/placesPrompts.ftl",
+ [
+ FTL.Message(
+ id=FTL.Identifier("places-error-title"),
+ value=FTL.Pattern([FTL.Placeable(TERM_REFERENCE("brand-short-name"))]),
+ ),
+ ]
+ + transforms_from(
+ """
+places-no-title = { COPY(source, "noTitle") }
+places-bookmarks-backup-title = { COPY(source, "bookmarksBackupTitle") }
+places-bookmarks-restore-alert-title = { COPY(source, "bookmarksRestoreAlertTitle") }
+places-bookmarks-restore-alert = { COPY(source, "bookmarksRestoreAlert") }
+places-bookmarks-restore-title = { COPY(source, "bookmarksRestoreTitle") }
+places-bookmarks-restore-filter-name = { COPY(source, "bookmarksRestoreFilterName") }
+places-bookmarks-restore-format-error = { COPY(source, "bookmarksRestoreFormatError") }
+places-bookmarks-restore-parse-error = { COPY(source, "bookmarksRestoreParseError") }
+places-bookmarks-import = { COPY(source, "SelectImport") }
+places-bookmarks-export = { COPY(source, "EnterExport") }
+""",
+ source=source,
+ ),
+ )
diff --git a/python/l10n/fluent_migrations/bug_1831851_accounts.py b/python/l10n/fluent_migrations/bug_1831851_accounts.py
new file mode 100644
index 0000000000..d50a0192c8
--- /dev/null
+++ b/python/l10n/fluent_migrations/bug_1831851_accounts.py
@@ -0,0 +1,195 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+import fluent.syntax.ast as FTL
+from fluent.migrate.helpers import VARIABLE_REFERENCE
+from fluent.migrate.transforms import COPY, PLURALS, REPLACE, REPLACE_IN_TEXT
+
+
+def migrate(ctx):
+ """Bug 1831851 - Migrate accounts.properties to Fluent, part {index}."""
+
+ accounts = "browser/chrome/browser/accounts.properties"
+ accounts_ftl = "browser/browser/accounts.ftl"
+ preferences_ftl = "browser/browser/preferences/preferences.ftl"
+
+ ctx.add_transforms(
+ accounts_ftl,
+ accounts_ftl,
+ [
+ FTL.Message(
+ id=FTL.Identifier("account-reconnect"),
+ value=REPLACE(
+ accounts,
+ "reconnectDescription",
+ {"%1$S": VARIABLE_REFERENCE("email")},
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("account-verify"),
+ value=REPLACE(
+ accounts, "verifyDescription", {"%1$S": VARIABLE_REFERENCE("email")}
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("account-send-to-all-devices-titlecase"),
+ value=COPY(accounts, "sendToAllDevices.menuitem"),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("account-manage-devices-titlecase"),
+ value=COPY(accounts, "manageDevices.menuitem"),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("account-send-tab-to-device-singledevice-status"),
+ value=COPY(accounts, "sendTabToDevice.singledevice.status"),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("account-send-tab-to-device-singledevice-learnmore"),
+ value=COPY(accounts, "sendTabToDevice.singledevice"),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("account-send-tab-to-device-connectdevice"),
+ value=COPY(accounts, "sendTabToDevice.connectdevice"),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("account-send-tab-to-device-verify-status"),
+ value=COPY(accounts, "sendTabToDevice.verify.status"),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("account-send-tab-to-device-verify"),
+ value=COPY(accounts, "sendTabToDevice.verify"),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("account-connection-title"),
+ value=FTL.Pattern(
+ [
+ FTL.Placeable(
+ FTL.TermReference(
+ id=FTL.Identifier("fxaccount-brand-name"),
+ arguments=FTL.CallArguments(
+ named=[
+ FTL.NamedArgument(
+ FTL.Identifier("capitalization"),
+ FTL.StringLiteral("title"),
+ )
+ ]
+ ),
+ )
+ )
+ ]
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("account-connection-connected-with"),
+ value=REPLACE(
+ accounts,
+ "otherDeviceConnectedBody",
+ {"%1$S": VARIABLE_REFERENCE("deviceName")},
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("account-connection-connected-with-noname"),
+ value=COPY(accounts, "otherDeviceConnectedBody.noDeviceName"),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("account-connection-connected"),
+ value=COPY(accounts, "thisDeviceConnectedBody"),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("account-connection-disconnected"),
+ value=COPY(accounts, "thisDeviceDisconnectedBody"),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("account-single-tab-arriving-title"),
+ value=COPY(accounts, "tabArrivingNotification.title"),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("account-single-tab-arriving-from-device-title"),
+ value=REPLACE(
+ accounts,
+ "tabArrivingNotificationWithDevice.title",
+ {"%1$S": VARIABLE_REFERENCE("deviceName")},
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("account-single-tab-arriving-truncated-url"),
+ value=REPLACE(
+ accounts,
+ "singleTabArrivingWithTruncatedURL.body",
+ {"%1$S": VARIABLE_REFERENCE("url")},
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("account-multiple-tabs-arriving-title"),
+ value=COPY(accounts, "multipleTabsArrivingNotification.title"),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("account-multiple-tabs-arriving-from-single-device"),
+ value=PLURALS(
+ accounts,
+ "unnamedTabsArrivingNotification2.body",
+ VARIABLE_REFERENCE("tabCount"),
+ foreach=lambda n: REPLACE_IN_TEXT(
+ n,
+ {
+ "#1": VARIABLE_REFERENCE("tabCount"),
+ "#2": VARIABLE_REFERENCE("deviceName"),
+ },
+ ),
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier(
+ "account-multiple-tabs-arriving-from-multiple-devices"
+ ),
+ value=PLURALS(
+ accounts,
+ "unnamedTabsArrivingNotificationMultiple2.body",
+ VARIABLE_REFERENCE("tabCount"),
+ foreach=lambda n: REPLACE_IN_TEXT(
+ n,
+ {"#1": VARIABLE_REFERENCE("tabCount")},
+ ),
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("account-multiple-tabs-arriving-from-unknown-device"),
+ value=PLURALS(
+ accounts,
+ "unnamedTabsArrivingNotificationNoDevice.body",
+ VARIABLE_REFERENCE("tabCount"),
+ foreach=lambda n: REPLACE_IN_TEXT(
+ n,
+ {"#1": VARIABLE_REFERENCE("tabCount")},
+ ),
+ ),
+ ),
+ ],
+ )
+
+ ctx.add_transforms(
+ preferences_ftl,
+ preferences_ftl,
+ [
+ FTL.Message(
+ id=FTL.Identifier("sync-verification-sent-title"),
+ value=COPY(accounts, "verificationSentTitle"),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("sync-verification-sent-body"),
+ value=REPLACE(
+ accounts,
+ "verificationSentBody",
+ {"%1$S": VARIABLE_REFERENCE("email")},
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("sync-verification-not-sent-title"),
+ value=COPY(accounts, "verificationNotSentTitle"),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("sync-verification-not-sent-body"),
+ value=COPY(accounts, "verificationNotSentBody"),
+ ),
+ ],
+ )
diff --git a/python/l10n/fluent_migrations/bug_1831872_sync.py b/python/l10n/fluent_migrations/bug_1831872_sync.py
new file mode 100644
index 0000000000..f66c27bf5a
--- /dev/null
+++ b/python/l10n/fluent_migrations/bug_1831872_sync.py
@@ -0,0 +1,31 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+import fluent.syntax.ast as FTL
+from fluent.migrate.helpers import TERM_REFERENCE, VARIABLE_REFERENCE
+from fluent.migrate.transforms import REPLACE
+
+
+def migrate(ctx):
+ """Bug 1831872 - Migrate sync.properties to Fluent, part {index}."""
+
+ source = "services/sync/sync.properties"
+ target = "toolkit/services/accounts.ftl"
+ ctx.add_transforms(
+ target,
+ target,
+ [
+ FTL.Message(
+ id=FTL.Identifier("account-client-name"),
+ value=REPLACE(
+ source,
+ "client.name2",
+ {
+ "%1$S": VARIABLE_REFERENCE("user"),
+ "%2$S": TERM_REFERENCE("brand-short-name"),
+ "%3$S": VARIABLE_REFERENCE("system"),
+ },
+ ),
+ ),
+ ],
+ )
diff --git a/python/l10n/fluent_migrations/bug_1832138_ctrlTab.py b/python/l10n/fluent_migrations/bug_1832138_ctrlTab.py
new file mode 100644
index 0000000000..195e3cee49
--- /dev/null
+++ b/python/l10n/fluent_migrations/bug_1832138_ctrlTab.py
@@ -0,0 +1,37 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+import fluent.syntax.ast as FTL
+from fluent.migrate.helpers import VARIABLE_REFERENCE
+from fluent.migrate.transforms import PLURALS, REPLACE_IN_TEXT
+
+
+def migrate(ctx):
+ """Bug 1832138 - Convert browser-ctrlTab.js to Fluent, part {index}."""
+
+ browser = "browser/chrome/browser/browser.properties"
+ target = "browser/browser/tabbrowser.ftl"
+
+ ctx.add_transforms(
+ target,
+ target,
+ [
+ FTL.Message(
+ id=FTL.Identifier("tabbrowser-ctrl-tab-list-all-tabs"),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("label"),
+ value=PLURALS(
+ browser,
+ "ctrlTab.listAllTabs.label",
+ VARIABLE_REFERENCE("tabCount"),
+ foreach=lambda n: REPLACE_IN_TEXT(
+ n,
+ {"#1": VARIABLE_REFERENCE("tabCount")},
+ ),
+ ),
+ )
+ ],
+ ),
+ ],
+ )
diff --git a/python/l10n/fluent_migrations/bug_1832141_recently_closed.py b/python/l10n/fluent_migrations/bug_1832141_recently_closed.py
new file mode 100644
index 0000000000..530f6f05dd
--- /dev/null
+++ b/python/l10n/fluent_migrations/bug_1832141_recently_closed.py
@@ -0,0 +1,76 @@
+import fluent.syntax.ast as FTL
+from fluent.migrate import COPY_PATTERN, PLURALS, REPLACE_IN_TEXT
+from fluent.migrate.helpers import VARIABLE_REFERENCE
+
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+class CUSTOM_PLURALS(PLURALS):
+ def __call__(self, ctx):
+ pattern = super().__call__(ctx)
+ el = pattern.elements[0]
+ if isinstance(el, FTL.Placeable) and isinstance(
+ el.expression, FTL.SelectExpression
+ ):
+ selexp = el.expression
+ else:
+ selexp = FTL.SelectExpression(
+ VARIABLE_REFERENCE("tabCount"),
+ [FTL.Variant(FTL.Identifier("other"), pattern, default=True)],
+ )
+ pattern = FTL.Pattern([FTL.Placeable(selexp)])
+ selexp.variants[0:0] = [
+ FTL.Variant(
+ FTL.NumberLiteral("0"),
+ FTL.Pattern([FTL.Placeable(VARIABLE_REFERENCE("winTitle"))]),
+ )
+ ]
+ return pattern
+
+
+def migrate(ctx):
+ """Bug 1832141 - Migrate strings to recentlyClosed.ftl, part {index}."""
+
+ appmenu = "browser/browser/appmenu.ftl"
+ browser = "browser/chrome/browser/browser.properties"
+ menubar = "browser/browser/menubar.ftl"
+ target = "browser/browser/recentlyClosed.ftl"
+
+ ctx.add_transforms(
+ target,
+ target,
+ [
+ FTL.Message(
+ id=FTL.Identifier("recently-closed-menu-reopen-all-tabs"),
+ value=COPY_PATTERN(menubar, "menu-history-reopen-all-tabs"),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("recently-closed-menu-reopen-all-windows"),
+ value=COPY_PATTERN(menubar, "menu-history-reopen-all-windows"),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("recently-closed-panel-reopen-all-tabs"),
+ value=COPY_PATTERN(appmenu, "appmenu-reopen-all-tabs"),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("recently-closed-panel-reopen-all-windows"),
+ value=COPY_PATTERN(appmenu, "appmenu-reopen-all-windows"),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("recently-closed-undo-close-window-label"),
+ value=CUSTOM_PLURALS(
+ browser,
+ "menuUndoCloseWindowLabel",
+ VARIABLE_REFERENCE("tabCount"),
+ foreach=lambda n: REPLACE_IN_TEXT(
+ n,
+ {
+ "#1": VARIABLE_REFERENCE("winTitle"),
+ "#2": VARIABLE_REFERENCE("tabCount"),
+ },
+ ),
+ ),
+ ),
+ ],
+ )
diff --git a/python/l10n/fluent_migrations/bug_1832179_sendTabToDevice.py b/python/l10n/fluent_migrations/bug_1832179_sendTabToDevice.py
new file mode 100644
index 0000000000..526b8a90e0
--- /dev/null
+++ b/python/l10n/fluent_migrations/bug_1832179_sendTabToDevice.py
@@ -0,0 +1,37 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+import fluent.syntax.ast as FTL
+from fluent.migrate.helpers import VARIABLE_REFERENCE
+from fluent.migrate.transforms import PLURALS, REPLACE_IN_TEXT
+
+
+def migrate(ctx):
+ """Bug 1832179 - Convert sendTabsToDevice.label string to Fluent, part {index}."""
+
+ browser = "browser/chrome/browser/browser.properties"
+ target = "browser/browser/sync.ftl"
+
+ ctx.add_transforms(
+ target,
+ target,
+ [
+ FTL.Message(
+ id=FTL.Identifier("fxa-menu-send-tab-to-device"),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("label"),
+ value=PLURALS(
+ browser,
+ "sendTabsToDevice.label",
+ VARIABLE_REFERENCE("tabCount"),
+ foreach=lambda n: REPLACE_IN_TEXT(
+ n,
+ {"#1": VARIABLE_REFERENCE("tabCount")},
+ ),
+ ),
+ )
+ ],
+ ),
+ ],
+ )
diff --git a/python/l10n/fluent_migrations/bug_1832186_popupwarning.py b/python/l10n/fluent_migrations/bug_1832186_popupwarning.py
new file mode 100644
index 0000000000..4239899725
--- /dev/null
+++ b/python/l10n/fluent_migrations/bug_1832186_popupwarning.py
@@ -0,0 +1,139 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+import fluent.syntax.ast as FTL
+from fluent.migrate.helpers import TERM_REFERENCE, VARIABLE_REFERENCE
+from fluent.migrate.transforms import (
+ COPY,
+ PLURALS,
+ REPLACE,
+ REPLACE_IN_TEXT,
+)
+
+
+def migrate(ctx):
+ """Bug 1832186 - Migrate popup warning strings to Fluent, part {index}."""
+
+ source = "browser/chrome/browser/browser.properties"
+ target = "browser/browser/browser.ftl"
+
+ ctx.add_transforms(
+ target,
+ target,
+ [
+ FTL.Message(
+ id=FTL.Identifier("popup-warning-message"),
+ value=PLURALS(
+ source,
+ "popupWarning.message",
+ VARIABLE_REFERENCE("popupCount"),
+ foreach=lambda n: REPLACE_IN_TEXT(
+ n,
+ {
+ "#1": TERM_REFERENCE("brand-short-name"),
+ "#2": VARIABLE_REFERENCE("popupCount"),
+ },
+ ),
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("popup-warning-exceeded-message"),
+ value=PLURALS(
+ source,
+ "popupWarning.exceeded.message",
+ VARIABLE_REFERENCE("popupCount"),
+ foreach=lambda n: REPLACE_IN_TEXT(
+ n,
+ {
+ "#1": TERM_REFERENCE("brand-short-name"),
+ "#2": VARIABLE_REFERENCE("popupCount"),
+ },
+ ),
+ ),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("popup-warning-button"),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("label"),
+ value=FTL.Pattern(
+ [
+ FTL.Placeable(
+ FTL.SelectExpression(
+ selector=FTL.FunctionReference(
+ FTL.Identifier("PLATFORM"),
+ FTL.CallArguments(),
+ ),
+ variants=[
+ FTL.Variant(
+ key=FTL.Identifier("windows"),
+ value=COPY(
+ source, "popupWarningButton"
+ ),
+ ),
+ FTL.Variant(
+ key=FTL.Identifier("other"),
+ value=COPY(
+ source, "popupWarningButtonUnix"
+ ),
+ default=True,
+ ),
+ ],
+ )
+ )
+ ]
+ ),
+ ),
+ FTL.Attribute(
+ id=FTL.Identifier("accesskey"),
+ value=FTL.Pattern(
+ [
+ FTL.Placeable(
+ FTL.SelectExpression(
+ selector=FTL.FunctionReference(
+ FTL.Identifier("PLATFORM"),
+ FTL.CallArguments(),
+ ),
+ variants=[
+ FTL.Variant(
+ key=FTL.Identifier("windows"),
+ value=COPY(
+ source,
+ "popupWarningButton.accesskey",
+ ),
+ ),
+ FTL.Variant(
+ key=FTL.Identifier("other"),
+ value=COPY(
+ source,
+ "popupWarningButtonUnix.accesskey",
+ ),
+ default=True,
+ ),
+ ],
+ )
+ )
+ ]
+ ),
+ ),
+ ],
+ ),
+ FTL.Message(
+ id=FTL.Identifier("popup-show-popup-menuitem"),
+ attributes=[
+ FTL.Attribute(
+ id=FTL.Identifier("label"),
+ value=REPLACE(
+ source,
+ "popupShowPopupPrefix",
+ {
+ "%1$S": VARIABLE_REFERENCE("popupURI"),
+ "‘": FTL.TextElement("“"),
+                            "’": FTL.TextElement("”"),
+ },
+ ),
+ )
+ ],
+ ),
+ ],
+ )
diff --git a/python/l10n/fluent_migrations/bug_1832668_firefoxView_navigation.py b/python/l10n/fluent_migrations/bug_1832668_firefoxView_navigation.py
new file mode 100644
index 0000000000..c742f64b36
--- /dev/null
+++ b/python/l10n/fluent_migrations/bug_1832668_firefoxView_navigation.py
@@ -0,0 +1,27 @@
+# coding=utf8
+
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+from fluent.migrate.helpers import transforms_from
+from fluent.migrate import COPY
+
+
+def migrate(ctx):
+ """Bug 1832668 - Add new side navigation component to Firefox View Next page, part {index}."""
+ ctx.add_transforms(
+ "browser/browser/firefoxView.ftl",
+ "browser/browser/firefoxView.ftl",
+ transforms_from(
+ """
+firefoxview-overview-nav = {COPY_PATTERN(from_path, "firefoxview-overview-navigation")}
+ .title = {COPY_PATTERN(from_path, "firefoxview-overview-navigation")}
+firefoxview-history-nav = {COPY_PATTERN(from_path, "firefoxview-history-navigation")}
+ .title = {COPY_PATTERN(from_path, "firefoxview-history-navigation")}
+firefoxview-opentabs-nav = {COPY_PATTERN(from_path, "firefoxview-opentabs-navigation")}
+ .title = {COPY_PATTERN(from_path, "firefoxview-opentabs-navigation")}
+ """,
+ from_path="browser/browser/firefoxView.ftl",
+ ),
+ )
diff --git a/python/l10n/fluent_migrations/bug_1833228_fxviewTabList.py b/python/l10n/fluent_migrations/bug_1833228_fxviewTabList.py
new file mode 100644
index 0000000000..43d0d8834c
--- /dev/null
+++ b/python/l10n/fluent_migrations/bug_1833228_fxviewTabList.py
@@ -0,0 +1,44 @@
+# coding=utf8
+
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+from fluent.migrate.helpers import transforms_from
+from fluent.migrate import COPY
+
+
+def migrate(ctx):
+ """Bug 1833228 - Remove "moz-" from moz-tab-list component and associated .ftl, .css files, part {index}."""
+ ctx.add_transforms(
+ "browser/browser/fxviewTabList.ftl",
+ "browser/browser/fxviewTabList.ftl",
+ transforms_from(
+ """
+fxviewtabrow-open-menu-button =
+ .title = {COPY_PATTERN(from_path, "mztabrow-open-menu-button.title")}
+fxviewtabrow-date = {COPY_PATTERN(from_path, "mztabrow-date")}
+fxviewtabrow-time = {COPY_PATTERN(from_path, "mztabrow-time")}
+fxviewtabrow-tabs-list-tab =
+ .title = {COPY_PATTERN(from_path, "mztabrow-tabs-list-tab.title")}
+fxviewtabrow-dismiss-tab-button =
+ .title = {COPY_PATTERN(from_path, "mztabrow-dismiss-tab-button.title")}
+fxviewtabrow-just-now-timestamp = {COPY_PATTERN(from_path, "mztabrow-just-now-timestamp")}
+fxviewtabrow-delete = {COPY_PATTERN(from_path, "mztabrow-delete")}
+ .accesskey = {COPY_PATTERN(from_path, "mztabrow-delete.accesskey")}
+fxviewtabrow-forget-about-this-site = {COPY_PATTERN(from_path, "mztabrow-forget-about-this-site")}
+ .accesskey = {COPY_PATTERN(from_path, "mztabrow-forget-about-this-site.accesskey")}
+fxviewtabrow-open-in-window = {COPY_PATTERN(from_path, "mztabrow-open-in-window")}
+ .accesskey = {COPY_PATTERN(from_path, "mztabrow-open-in-window.accesskey")}
+fxviewtabrow-open-in-private-window = {COPY_PATTERN(from_path, "mztabrow-open-in-private-window")}
+ .accesskey = {COPY_PATTERN(from_path, "mztabrow-open-in-private-window.accesskey")}
+fxviewtabrow-add-bookmark = {COPY_PATTERN(from_path, "mztabrow-add-bookmark")}
+ .accesskey = {COPY_PATTERN(from_path, "mztabrow-add-bookmark.accesskey")}
+fxviewtabrow-save-to-pocket = {COPY_PATTERN(from_path, "mztabrow-save-to-pocket")}
+ .accesskey = {COPY_PATTERN(from_path, "mztabrow-save-to-pocket.accesskey")}
+fxviewtabrow-copy-link = {COPY_PATTERN(from_path, "mztabrow-copy-link")}
+ .accesskey = {COPY_PATTERN(from_path, "mztabrow-copy-link.accesskey")}
+ """,
+ from_path="browser/browser/mozTabList.ftl",
+ ),
+ )
diff --git a/python/l10n/fluent_migrations/bug_1835559_aboutDialog_explicit_textContent.py b/python/l10n/fluent_migrations/bug_1835559_aboutDialog_explicit_textContent.py
new file mode 100644
index 0000000000..1d73d62692
--- /dev/null
+++ b/python/l10n/fluent_migrations/bug_1835559_aboutDialog_explicit_textContent.py
@@ -0,0 +1,67 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+import fluent.syntax.ast as FTL
+
+from fluent.migrate.transforms import TransformPattern
+
+
+class INSERT_VAR_IN_TAG(TransformPattern):
+ def visit_TextElement(self, node):
+ old_str = node.value
+ new_str = old_str
+ # update-downloading / update-downloading-message
+ new_str = new_str.replace(
+ '<label data-l10n-name="download-status"/>',
+ '<label data-l10n-name="download-status">{ $transfer }</label>',
+ )
+ # update-manual / update-internal-error
+ new_str = new_str.replace(
+ '<label data-l10n-name="manual-link"/>',
+ '<label data-l10n-name="manual-link">{ $displayUrl }</label>',
+ )
+ # aboutdialog-update-manual
+ new_str = new_str.replace(
+ '<a data-l10n-name="manual-link"/>',
+ '<a data-l10n-name="manual-link">{ $displayUrl }</a>',
+ )
+ if old_str == new_str and "$" not in old_str:
+ print("Warning: Failed to insert var in link in {}".format(old_str))
+ else:
+ node.value = new_str
+ return node
+
+
+def migrate(ctx):
+ """Bug 1835559, insert textContent var in a/label elements, part {index}."""
+
+ aboutDialog_ftl = "browser/browser/aboutDialog.ftl"
+ ctx.add_transforms(
+ aboutDialog_ftl,
+ aboutDialog_ftl,
+ [
+ FTL.Message(
+ id=FTL.Identifier("settings-update-downloading"),
+ value=INSERT_VAR_IN_TAG(aboutDialog_ftl, "update-downloading"),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("aboutdialog-update-downloading"),
+ value=INSERT_VAR_IN_TAG(aboutDialog_ftl, "update-downloading-message"),
+ ),
+ # Note: while we're renaming anyway: strip "aboutdialog-" prefix
+ # because it is used in main.inc.xhtml, not aboutDialog.xhtml.
+ FTL.Message(
+ id=FTL.Identifier("settings-update-manual-with-link"),
+ value=INSERT_VAR_IN_TAG(aboutDialog_ftl, "aboutdialog-update-manual"),
+ ),
+ # This is the actual update-manual message in aboutDialog.xhtml.
+ FTL.Message(
+ id=FTL.Identifier("aboutdialog-update-manual-with-link"),
+ value=INSERT_VAR_IN_TAG(aboutDialog_ftl, "update-manual"),
+ ),
+ FTL.Message(
+ id=FTL.Identifier("update-internal-error2"),
+ value=INSERT_VAR_IN_TAG(aboutDialog_ftl, "update-internal-error"),
+ ),
+ ],
+ )
diff --git a/python/l10n/fluent_migrations/bug_1866295_new_device_migration_strings.py b/python/l10n/fluent_migrations/bug_1866295_new_device_migration_strings.py
new file mode 100644
index 0000000000..a8ccb8c145
--- /dev/null
+++ b/python/l10n/fluent_migrations/bug_1866295_new_device_migration_strings.py
@@ -0,0 +1,22 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+from fluent.migrate.helpers import transforms_from
+
+
+def migrate(ctx):
+ """Bug 1866295 - Land new strings for device migration ASRouter messages, part {index}."""
+
+ source = "browser/browser/newtab/asrouter.ftl"
+ target = source
+
+ ctx.add_transforms(
+ target,
+ target,
+ transforms_from(
+ """
+device-migration-fxa-spotlight-getting-new-device-primary-button = {COPY_PATTERN(from_path, "device-migration-fxa-spotlight-primary-button")}
+""",
+ from_path=source,
+ ),
+ )
diff --git a/python/l10n/fluent_migrations/bug_1867346_new_device_migration_string_replacement.py b/python/l10n/fluent_migrations/bug_1867346_new_device_migration_string_replacement.py
new file mode 100644
index 0000000000..a926bff41a
--- /dev/null
+++ b/python/l10n/fluent_migrations/bug_1867346_new_device_migration_string_replacement.py
@@ -0,0 +1,22 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+from fluent.migrate.helpers import transforms_from
+
+
+def migrate(ctx):
+ """Bug 1867346 - Replace a string for device migration ASRouter messages, part {index}."""
+
+ source = "browser/browser/newtab/asrouter.ftl"
+ target = source
+
+ ctx.add_transforms(
+ target,
+ target,
+ transforms_from(
+ """
+device-migration-fxa-spotlight-getting-new-device-header-2 = {COPY_PATTERN(from_path, "fxa-sync-cfr-header")}
+""",
+ from_path=source,
+ ),
+ )
diff --git a/python/l10n/mozxchannel/__init__.py b/python/l10n/mozxchannel/__init__.py
new file mode 100644
index 0000000000..66ee3966ca
--- /dev/null
+++ b/python/l10n/mozxchannel/__init__.py
@@ -0,0 +1,150 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import shutil
+from collections import defaultdict
+from dataclasses import dataclass, field
+from datetime import datetime
+from pathlib import Path
+
+import hglib
+from compare_locales import merge
+from mozpack import path as mozpath
+
+from . import projectconfig, source
+
+
+def get_default_config(topsrcdir, strings_path):
+ assert isinstance(topsrcdir, Path)
+ assert isinstance(strings_path, Path)
+ return {
+ "strings": {
+ "path": strings_path,
+ "url": "https://hg.mozilla.org/l10n/gecko-strings-quarantine/",
+ "heads": {"default": "default"},
+ "update_on_pull": True,
+ "push_url": "ssh://hg.mozilla.org/l10n/gecko-strings-quarantine/",
+ },
+ "source": {
+ "mozilla-unified": {
+ "path": topsrcdir,
+ "url": "https://hg.mozilla.org/mozilla-unified/",
+ "heads": {
+ # This list of repositories is ordered, starting with the
+ # one with the most recent content (central) to the oldest
+ # (ESR). In case two ESR versions are supported, the oldest
+ # ESR goes last (e.g. esr78 goes after esr91).
+ "central": "mozilla-central",
+ "beta": "releases/mozilla-beta",
+ "release": "releases/mozilla-release",
+ "esr102": "releases/mozilla-esr102",
+ },
+ "config_files": [
+ "browser/locales/l10n.toml",
+ "mobile/android/locales/l10n.toml",
+ ],
+ },
+ },
+ }
+
+
+@dataclass
+class TargetRevs:
+ target: bytes = None
+ revs: list = field(default_factory=list)
+
+
+@dataclass
+class CommitRev:
+ repo: str
+ rev: bytes
+
+ @property
+ def message(self):
+ return (
+ f"X-Channel-Repo: {self.repo}\n"
+ f'X-Channel-Revision: {self.rev.decode("ascii")}'
+ )
+
+
+class CrossChannelCreator:
+ def __init__(self, config):
+ self.config = config
+ self.strings_path = config["strings"]["path"]
+ self.message = (
+ f"cross-channel content for {datetime.utcnow().strftime('%Y-%m-%d %H:%M')}"
+ )
+
+ def create_content(self):
+ self.prune_target()
+ revs = []
+ for repo_name, repo_config in self.config["source"].items():
+ with hglib.open(repo_config["path"]) as repo:
+ revs.extend(self.create_for_repo(repo, repo_name, repo_config))
+ self.commit(revs)
+ return 0
+
+ def prune_target(self):
+ for leaf in self.config["strings"]["path"].iterdir():
+ if leaf.name == ".hg":
+ continue
+ shutil.rmtree(leaf)
+
+ def create_for_repo(self, repo, repo_name, repo_config):
+ print(f"Processing {repo_name} in {repo_config['path']}")
+ source_target_revs = defaultdict(TargetRevs)
+ revs_for_commit = []
+ parse_kwargs = {
+ "env": {"l10n_base": str(self.strings_path.parent)},
+ "ignore_missing_includes": True,
+ }
+ for head, head_name in repo_config["heads"].items():
+ print(f"Gathering files for {head}")
+ rev = repo.log(revrange=head)[0].node
+ revs_for_commit.append(CommitRev(head_name, rev))
+ p = source.HgTOMLParser(repo, rev)
+ project_configs = []
+ for config_file in repo_config["config_files"]:
+ project_configs.append(p.parse(config_file, **parse_kwargs))
+ project_configs[-1].set_locales(["en-US"], deep=True)
+ hgfiles = source.HGFiles(repo, rev, project_configs)
+ for targetpath, refpath, _, _ in hgfiles:
+ source_target_revs[refpath].revs.append(rev)
+ source_target_revs[refpath].target = targetpath
+ root = repo.root()
+ print(f"Writing {repo_name} content to target")
+ for refpath, targetrevs in source_target_revs.items():
+ local_ref = mozpath.relpath(refpath, root)
+ content = self.get_content(local_ref, repo, targetrevs.revs)
+ target_dir = mozpath.dirname(targetrevs.target)
+ if not os.path.isdir(target_dir):
+ os.makedirs(target_dir)
+ with open(targetrevs.target, "wb") as fh:
+ fh.write(content)
+ return revs_for_commit
+
+ def commit(self, revs):
+ message = self.message + "\n\n"
+ if "TASK_ID" in os.environ:
+ message += f"X-Task-ID: {os.environ['TASK_ID']}\n\n"
+ message += "\n".join(rev.message for rev in revs)
+ with hglib.open(self.strings_path) as repo:
+ repo.commit(message=message, addremove=True)
+
+ def get_content(self, local_ref, repo, revs):
+ if local_ref.endswith(b".toml"):
+ return self.get_config_content(local_ref, repo, revs)
+ if len(revs) < 2:
+ return repo.cat([b"path:" + local_ref], rev=revs[0])
+ contents = [repo.cat([b"path:" + local_ref], rev=rev) for rev in revs]
+ try:
+ return merge.merge_channels(local_ref.decode("utf-8"), contents)
+ except merge.MergeNotSupportedError:
+ return contents[0]
+
+ def get_config_content(self, local_ref, repo, revs):
+ # We don't support merging toml files
+ content = repo.cat([b"path:" + local_ref], rev=revs[0])
+ return projectconfig.process_config(content)
diff --git a/python/l10n/mozxchannel/projectconfig.py b/python/l10n/mozxchannel/projectconfig.py
new file mode 100644
index 0000000000..23d8120a3c
--- /dev/null
+++ b/python/l10n/mozxchannel/projectconfig.py
@@ -0,0 +1,77 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import re
+
+from compare_locales import mozpath
+
+# The local path where we write the config files to
+TARGET_PATH = b"_configs"
+
+
+def process_config(toml_content):
+ """Process TOML configuration content to match the l10n setup for
+ the reference localization, return target_path and content.
+
+ The code adjusts basepath, [[paths]], and [[includes]]
+ """
+ # adjust basepath in content. '.' works in practice, also in theory?
+ new_base = mozpath.relpath(b".", TARGET_PATH)
+ if not new_base:
+ new_base = b"." # relpath to '.' is '', sadly
+ base_line = b'\nbasepath = "%s"' % new_base
+ content1 = re.sub(br"^\s*basepath\s*=\s*.+", base_line, toml_content, flags=re.M)
+
+ # process [[paths]]
+ start = 0
+ content2 = b""
+ for m in re.finditer(
+ br"\[\[\s*paths\s*\]\].+?(?=\[|\Z)", content1, re.M | re.DOTALL
+ ):
+ content2 += content1[start : m.start()]
+ path_content = m.group()
+ l10n_line = re.search(br"^\s*l10n\s*=.*$", path_content, flags=re.M).group()
+ # remove variable expansions
+ new_reference = re.sub(br"{\s*\S+\s*}", b"", l10n_line)
+ # make the l10n a reference line
+ new_reference = re.sub(br"^(\s*)l10n(\s*=)", br"\1reference\2", new_reference)
+ content2 += re.sub(
+ br"^\s*reference\s*=.*$", new_reference, path_content, flags=re.M
+ )
+ start = m.end()
+ content2 += content1[start:]
+
+ start = 0
+ content3 = b""
+ for m in re.finditer(
+ br"\[\[\s*includes\s*\]\].+?(?=\[|\Z)", content2, re.M | re.DOTALL
+ ):
+ content3 += content2[start : m.start()]
+ include_content = m.group()
+ m_ = re.search(br'^\s*path = "(.+?)"', include_content, flags=re.M)
+ content3 += (
+ include_content[: m_.start(1)]
+ + generate_filename(m_.group(1))
+ + include_content[m_.end(1) :]
+ )
+ start = m.end()
+ content3 += content2[start:]
+
+ return content3
+
+
+def generate_filename(path):
+ segs = path.split(b"/")
+ # strip /locales/ from filename
+ segs = [seg for seg in segs if seg != b"locales"]
+ # strip variables from filename
+ segs = [seg for seg in segs if not seg.startswith(b"{") and not seg.endswith(b"}")]
+ if segs[-1] == b"l10n.toml":
+ segs.pop()
+ segs[-1] += b".toml"
+ outpath = b"-".join(segs)
+ if TARGET_PATH != b".":
+ # prepend the target path, if it's not '.'
+ outpath = mozpath.join(TARGET_PATH, outpath)
+ return outpath
diff --git a/python/l10n/mozxchannel/source.py b/python/l10n/mozxchannel/source.py
new file mode 100644
index 0000000000..b9d2067980
--- /dev/null
+++ b/python/l10n/mozxchannel/source.py
@@ -0,0 +1,88 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import toml
+from compare_locales import mozpath, paths
+from compare_locales.paths.matcher import expand
+
+from .projectconfig import generate_filename
+
+
+class HGFiles(paths.ProjectFiles):
+ def __init__(self, repo, rev, projects):
+ self.repo = repo
+ self.ctx = repo[rev]
+ self.root = repo.root()
+ self.manifest = None
+ self.configs_map = {}
+ # get paths for our TOML files
+ for p in projects:
+ all_configpaths = {
+ mozpath.abspath(c.path).encode("utf-8") for c in p.configs
+ }
+ for refpath in all_configpaths:
+ local_path = mozpath.relpath(refpath, self.root)
+ if local_path not in self.ctx:
+ print("ignoring", refpath)
+ continue
+ targetpath = b"/".join(
+ (
+ expand(None, "{l10n_base}", p.environ).encode("utf-8"),
+ b"en-US",
+ generate_filename(local_path),
+ )
+ )
+ self.configs_map[refpath] = targetpath
+ super(HGFiles, self).__init__("en-US", projects)
+ for m in self.matchers:
+ m["l10n"].encoding = "utf-8"
+ if "reference" in m:
+ m["reference"].encoding = "utf-8"
+ if self.exclude:
+ for m in self.exclude.matchers:
+ m["l10n"].encoding = "utf-8"
+ if "reference" in m:
+ m["reference"].encoding = "utf-8"
+
+ def _files(self, matcher):
+ for f in self.ctx.manifest():
+ f = mozpath.join(self.root, f)
+ if matcher.match(f):
+ yield f
+
+ def __iter__(self):
+ for t in super(HGFiles, self).__iter__():
+ yield t
+ for refpath, targetpath in self.configs_map.items():
+ yield targetpath, refpath, None, set()
+
+ def match(self, path):
+ m = super(HGFiles, self).match(path)
+ if m:
+ return m
+ for refpath, targetpath in self.configs_map.items():
+ if path in [refpath, targetpath]:
+ return targetpath, refpath, None, set()
+
+
+class HgTOMLParser(paths.TOMLParser):
+ "subclass to load from our hg context"
+
+ def __init__(self, repo, rev):
+ self.repo = repo
+ self.rev = rev
+ self.root = repo.root().decode("utf-8")
+
+ def load(self, parse_ctx):
+ try:
+ path = parse_ctx.path
+ local_path = "path:" + mozpath.relpath(path, self.root)
+ data = self.repo.cat(files=[local_path.encode("utf-8")], rev=self.rev)
+ except Exception:
+ raise paths.ConfigNotFound(parse_ctx.path)
+
+ try:
+ parse_ctx.data = toml.loads(data.decode())
+ except toml.TomlDecodeError as e:
+ raise RuntimeError(f"In file '{parse_ctx.path}':\n {e!s}") from e
diff --git a/python/l10n/test_fluent_migrations/__init__.py b/python/l10n/test_fluent_migrations/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/l10n/test_fluent_migrations/__init__.py
diff --git a/python/l10n/test_fluent_migrations/fmt.py b/python/l10n/test_fluent_migrations/fmt.py
new file mode 100644
index 0000000000..150a942e78
--- /dev/null
+++ b/python/l10n/test_fluent_migrations/fmt.py
@@ -0,0 +1,188 @@
+import codecs
+import logging
+import os
+import re
+import shutil
+import sys
+from difflib import unified_diff
+
+import hglib
+import mozpack.path as mozpath
+from compare_locales.merge import merge_channels
+from compare_locales.paths.configparser import TOMLParser
+from compare_locales.paths.files import ProjectFiles
+from fluent.migrate import validator
+from fluent.syntax import FluentParser, FluentSerializer
+from mach.util import get_state_dir
+
+
def inspect_migration(path):
    """Validate recipe and extract some metadata.

    Delegates to fluent.migrate's Validator; returns whatever the validator
    reports for the recipe file at ``path``.
    """
    return validator.Validator.validate(path)
+
+
def prepare_object_dir(cmd):
    """Prepare object dir to have an up-to-date clone of gecko-strings.

    We run this once per mach invocation, for all tested migrations.

    Returns the per-objdir l10n working directory.
    """
    obj_dir = mozpath.join(cmd.topobjdir, "python", "l10n")
    if not os.path.exists(obj_dir):
        os.makedirs(obj_dir)
    # The gecko-strings clone lives in the shared mach state dir so it can be
    # reused across objdirs: update it if it exists, clone it otherwise.
    state_dir = get_state_dir()
    if os.path.exists(mozpath.join(state_dir, "gecko-strings")):
        cmd.run_process(
            ["hg", "pull", "-u"], cwd=mozpath.join(state_dir, "gecko-strings")
        )
    else:
        cmd.run_process(
            ["hg", "clone", "https://hg.mozilla.org/l10n/gecko-strings"],
            cwd=state_dir,
        )
    return obj_dir
+
+
def diff_resources(left_path, right_path):
    """Print a unified diff of two Fluent resources to stdout.

    Both files are parsed and re-serialized (keeping junk entries) before
    diffing, so purely cosmetic serialization differences are normalized away.
    """
    parser = FluentParser(with_spans=False)
    serializer = FluentSerializer(with_junk=True)

    def normalized_lines(path):
        with codecs.open(path, encoding="utf-8") as fh:
            resource = parser.parse(fh.read())
        return serializer.serialize(resource).splitlines(True)

    left_lines = normalized_lines(left_path)
    right_lines = normalized_lines(right_path)
    sys.stdout.writelines(unified_diff(left_lines, right_lines, left_path, right_path))
+
+
def test_migration(cmd, obj_dir, to_test, references):
    """Test the given recipe.

    This creates a workdir by l10n-merging gecko-strings and the m-c source,
    to mimic gecko-strings after the patch to test landed.
    It then runs the recipe with a gecko-strings clone as localization, both
    dry and wet.
    It inspects the generated commits, and shows a diff between the merged
    reference and the generated content.
    The diff is intended to be visually inspected. Some changes might be
    expected, in particular when formatting of the en-US strings is different.

    Returns 0 on success, 1 when any check failed.
    """
    rv = 0
    migration_name = os.path.splitext(os.path.split(to_test)[1])[0]
    work_dir = mozpath.join(obj_dir, migration_name)

    paths = os.path.normpath(to_test).split(os.sep)
    # Migration modules should be in a sub-folder of l10n.
    migration_module = (
        ".".join(paths[paths.index("l10n") + 1 : -1]) + "." + migration_name
    )

    # Always start from a clean work dir.
    if os.path.exists(work_dir):
        shutil.rmtree(work_dir)
    os.makedirs(mozpath.join(work_dir, "reference"))
    l10n_toml = mozpath.join(
        cmd.topsrcdir, cmd.substs["MOZ_BUILD_APP"], "locales", "l10n.toml"
    )
    pc = TOMLParser().parse(l10n_toml, env={"l10n_base": work_dir})
    pc.set_locales(["reference"])
    files = ProjectFiles("reference", [pc])
    for ref in references:
        if ref != mozpath.normpath(ref):
            cmd.log(
                logging.ERROR,
                "fluent-migration-test",
                {
                    "file": to_test,
                    "ref": ref,
                },
                'Reference path "{ref}" needs to be normalized for {file}',
            )
            rv = 1
            continue
        full_ref = mozpath.join(work_dir, "reference", ref)
        m = files.match(full_ref)
        if m is None:
            raise ValueError("Bad reference path: " + ref)
        m_c_path = m[1]
        g_s_path = mozpath.join(work_dir, "gecko-strings", ref)
        # l10n-merge the gecko-strings and mozilla-central copies of the
        # reference file; a missing file merges as empty content.
        resources = []
        for f in (g_s_path, m_c_path):
            if os.path.exists(f):
                # Fixed: use context managers so file handles don't leak.
                with open(f, "rb") as fh:
                    resources.append(fh.read())
            else:
                resources.append(b"")
        ref_dir = os.path.dirname(full_ref)
        if not os.path.exists(ref_dir):
            os.makedirs(ref_dir)
        with open(full_ref, "wb") as fh:
            fh.write(merge_channels(ref, resources))
    client = hglib.clone(
        source=mozpath.join(get_state_dir(), "gecko-strings"),
        dest=mozpath.join(work_dir, "en-US"),
    )
    client.open()
    try:
        old_tip = client.tip().node
        run_migration = [
            cmd._virtualenv_manager.python_path,
            "-m",
            "fluent.migrate.tool",
            "--lang",
            "en-US",
            "--reference-dir",
            mozpath.join(work_dir, "reference"),
            "--localization-dir",
            mozpath.join(work_dir, "en-US"),
            "--dry-run",
            migration_module,
        ]
        # Run once dry to validate the recipe, then for real.
        cmd.run_process(
            run_migration,
            cwd=work_dir,
            line_handler=print,
        )
        # drop --dry-run
        run_migration.pop(-2)
        cmd.run_process(
            run_migration,
            cwd=work_dir,
            line_handler=print,
        )
        tip = client.tip().node
        if old_tip == tip:
            cmd.log(
                logging.WARN,
                "fluent-migration-test",
                {
                    "file": to_test,
                },
                "No migration applied for {file}",
            )
            return rv
        messages = [
            entry.desc.decode("utf-8")
            for entry in client.log(b"::%s - ::%s" % (tip, old_tip))
        ]
    finally:
        # Fixed: close the hg command server instead of leaking it.
        client.close()
    for ref in references:
        diff_resources(
            mozpath.join(work_dir, "reference", ref),
            mozpath.join(work_dir, "en-US", ref),
        )
    bug = re.search("[0-9]{5,}", migration_name)
    # Just check first message for bug number, they're all following the same pattern
    if bug is None or bug.group() not in messages[0]:
        rv = 1
        cmd.log(
            logging.ERROR,
            "fluent-migration-test",
            {
                "file": to_test,
            },
            "Missing or wrong bug number for {file}",
        )
    if any("part {}".format(n + 1) not in msg for n, msg in enumerate(messages)):
        rv = 1
        cmd.log(
            logging.ERROR,
            "fluent-migration-test",
            {
                "file": to_test,
            },
            'Commit messages should have "part {{index}}" for {file}',
        )
    return rv
diff --git a/python/lldbutils/.ruff.toml b/python/lldbutils/.ruff.toml
new file mode 100644
index 0000000000..7c7717fa13
--- /dev/null
+++ b/python/lldbutils/.ruff.toml
@@ -0,0 +1,4 @@
+extend = "../../pyproject.toml"
+
+[isort]
+known-first-party = ["lldbutils"]
diff --git a/python/lldbutils/README.txt b/python/lldbutils/README.txt
new file mode 100644
index 0000000000..fd7e90675c
--- /dev/null
+++ b/python/lldbutils/README.txt
@@ -0,0 +1,221 @@
+lldb debugging functionality for Gecko
+======================================
+
+This directory contains a module, lldbutils, which is imported by the
+in-tree .lldbinit file. The lldbutils modules define some lldb commands
+that are handy for debugging Gecko.
+
+If you want to add a new command or Python-implemented type summary, either add
+it to one of the existing broad area Python files (such as lldbutils/layout.py
+for layout-related commands) or create a new file if none of the existing files
+is appropriate. If you add a new file, make sure you add it to __all__ in
+lldbutils/__init__.py.
+
+
+Supported commands
+------------------
+
+Most commands below that can take a pointer to an object also support being
+called with a smart pointer like nsRefPtr or nsCOMPtr.
+
+
+* frametree EXPR, ft EXPR
+ frametreelimited EXPR, ftl EXPR
+
+ Shows information about a frame tree. EXPR is an expression that
+ is evaluated, and must be an nsIFrame*. frametree displays the
+ entire frame tree that contains the given frame. frametreelimited
+ displays a subtree of the frame tree rooted at the given frame.
+
+ (lldb) p this
+ (nsBlockFrame *) $4 = 0x000000011687fcb8
+ (lldb) ftl this
+ Block(div)(-1)@0x11687fcb8 {0,0,7380,690} [state=0002100000d04601] [content=0x11688c0c0] [sc=0x11687f990:-moz-scrolled-content]<
+ line 0x116899130: count=1 state=inline,clean,prevmarginclean,not impacted,not wrapped,before:nobr,after:nobr[0x100] {60,0,0,690} ink-overflow=60,510,0,0 scr-overflow=60,510,0,0 <
+ Text(0)""@0x1168990c0 {60,510,0,0} [state=0001000020404000] [content=0x11687ca10] [sc=0x11687fd88:-moz-non-element,parent=0x11687eb00] [run=0x115115e80][0,0,T]
+ >
+ >
+ (lldb) ft this
+ Viewport(-1)@0x116017430 [view=0x115efe190] {0,0,60,60} [state=000b063000002623] [sc=0x1160170f8:-moz-viewport]<
+ HTMLScroll(html)(-1)@0x1160180d0 {0,0,0,0} [state=000b020000000403] [content=0x115e4d640] [sc=0x116017768:-moz-viewport-scroll]<
+ ...
+ Canvas(html)(-1)@0x116017e08 {0,0,60,60} ink-overflow=0,0,8340,2196 scr-overflow=0,0,8220,2196 [state=000b002000000601] [content=0x115e4d640] [sc=0x11687e0f8:-moz-scrolled-canvas]<
+ Block(html)(-1)@0x11687e578 {0,0,60,2196} ink-overflow=0,0,8340,2196 scr-overflow=0,0,8220,2196 [state=000b100000d00601] [content=0x115e4d640] [sc=0x11687e4b8,parent=0x0]<
+ line 0x11687ec48: count=1 state=block,clean,prevmarginclean,not impacted,not wrapped,before:nobr,after:nobr[0x48] bm=480 {480,480,0,1236} ink-overflow=360,426,7980,1410 scr-overflow=480,480,7740,1236 <
+ Block(body)(1)@0x11687ebb0 {480,480,0,1236} ink-overflow=-120,-54,7980,1410 scr-overflow=0,0,7740,1236 [state=000b120000100601] [content=0x115ed8980] [sc=0x11687e990]<
+ line 0x116899170: count=1 state=inline,clean,prevmarginclean,not impacted,not wrapped,before:nobr,after:nobr[0x0] {0,0,7740,1236} ink-overflow=-120,-54,7980,1410 scr-overflow=0,0,7740,1236 <
+ nsTextControlFrame@0x11687f068 {0,66,7740,1170} ink-overflow=-120,-120,7980,1410 scr-overflow=0,0,7740,1170 [state=0002000000004621] [content=0x115ca2c50] [sc=0x11687ea40]<
+ HTMLScroll(div)(-1)@0x11687f6b0 {180,240,7380,690} [state=0002000000084409] [content=0x11688c0c0] [sc=0x11687eb00]<
+ Block(div)(-1)@0x11687fcb8 {0,0,7380,690} [state=0002100000d04601] [content=0x11688c0c0] [sc=0x11687f990:-moz-scrolled-content]<
+ line 0x116899130: count=1 state=inline,clean,prevmarginclean,not impacted,not wrapped,before:nobr,after:nobr[0x100] {60,0,0,690} ink-overflow=60,510,0,0 scr-overflow=60,510,0,0 <
+ Text(0)""@0x1168990c0 {60,510,0,0} [state=0001000020404000] [content=0x11687ca10] [sc=0x11687fd88:-moz-non-element,parent=0x11687eb00] [run=0x115115e80][0,0,T]
+ ...
+
+
+* js
+
+ Dumps the current JS stack.
+
+ (lldb) js
+ 0 anonymous(aForce = false) ["chrome://browser/content/browser.js":13414]
+ this = [object Object]
+ 1 updateAppearance() ["chrome://browser/content/browser.js":13326]
+ this = [object Object]
+ 2 handleEvent(aEvent = [object Event]) ["chrome://browser/content/tabbrowser.xml":3811]
+ this = [object XULElement]
+
+
+* prefcnt EXPR
+
+ Shows the refcount of a given object. EXPR is an expression that is
+ evaluated, and can be either a pointer to or an actual refcounted
+ object. The object can be a standard nsISupports-like refcounted
+ object, a cycle-collected object or a mozilla::RefCounted<T> object.
+
+ (lldb) p this
+ (nsHTMLDocument *) $1 = 0x0000000116e9d800
+ (lldb) prefcnt this
+ 20
+ (lldb) p mDocumentURI
+ (nsCOMPtr<nsIURI>) $3 = {
+ mRawPtr = 0x0000000117163e50
+ }
+ (lldb) prefcnt mDocumentURI
+ 11
+
+
+* pstate EXPR
+
+ Shows the frame state bits (using their symbolic names) of a given frame.
+ EXPR is an expression that is evaluated, and must be an nsIFrame*.
+
+ (lldb) p this
+ (nsTextFrame *) $1 = 0x000000011f470b10
+ (lldb) p/x mState
+ (nsFrameState) $2 = 0x0000004080604000
+ (lldb) pstate this
+ TEXT_HAS_NONCOLLAPSED_CHARACTERS | TEXT_END_OF_LINE | TEXT_START_OF_LINE | NS_FRAME_PAINTED_THEBES | NS_FRAME_INDEPENDENT_SELECTION
+
+
+* ptag EXPR
+
+ Shows the DOM tag name of a node. EXPR is an expression that is
+ evaluated, and can be either an nsINode pointer or a concrete DOM
+ object.
+
+ (lldb) p this
+ (nsHTMLDocument *) $0 = 0x0000000116e9d800
+ (lldb) ptag this
+ (PermanentAtomImpl *) $1 = 0x0000000110133ac0 u"#document"
+ (lldb) p this->GetRootElement()
+ (mozilla::dom::HTMLSharedElement *) $2 = 0x0000000118429780
+ (lldb) ptag $2
+ (PermanentAtomImpl *) $3 = 0x0000000110123b80 u"html"
+
+
+Supported type summaries and synthetic children
+-----------------------------------------------
+
+In lldb terminology, type summaries are rules for how to display a value
+when using the "expression" command (or its familiar-to-gdb-users "p" alias),
+and synthetic children are fake member variables or array elements also
+added by custom rules.
+
+For objects that do have synthetic children defined for them, like nsTArray,
+the "expr -R -- EXPR" command can be used to show its actual member variables.
+
+
+* nsAString, nsACString,
+ nsFixedString, nsFixedCString,
+ nsAutoString, nsAutoCString
+
+ Strings have a type summary that shows the actual string.
+
+ (lldb) frame info
+ frame #0: 0x000000010400cfea XUL`nsCSSParser::ParseProperty(this=0x00007fff5fbf5248, aPropID=eCSSProperty_margin_top, aPropValue=0x00007fff5fbf53f8, aSheetURI=0x0000000115ae8c00, aBaseURI=0x0000000115ae8c00, aSheetPrincipal=0x000000010ff9e040, aDeclaration=0x00000001826fd580, aChanged=0x00007fff5fbf5247, aIsImportant=false, aIsSVGMode=false) + 74 at nsCSSParser.cpp:12851
+ (lldb) p aPropValue
+ (const nsAString) $16 = u"-25px"
+
+ (lldb) p this
+ (nsHTMLDocument *) $18 = 0x0000000115b56000
+ (lldb) p mContentType
+ (nsCString) $19 = {
+ nsACString = "text/html"
+ }
+
+* nscolor
+
+ nscolors (32-bit RGBA colors) have a type summary that shows the color as
+ one of the CSS 2.1 color keywords, a six digit hex color, an rgba() color,
+ or the "transparent" keyword.
+
+ (lldb) p this
+ (nsTextFrame *) $0 = 0x00000001168245e0
+ (lldb) p *this->StyleColor()
+ (const nsStyleColor) $1 = {
+ mColor = lime
+ }
+ (lldb) expr -R -- *this->StyleColor()
+ (const nsStyleColor) $2 = {
+ mColor = 4278255360
+ }
+
+* nsIAtom
+
+ Atoms have a type summary that shows the string value inside the atom.
+
+ (lldb) frame info
+ frame #0: 0x00000001028b8c49 XUL`mozilla::dom::Element::GetBoolAttr(this=0x0000000115ca1c50, aAttr=0x000000011012a640) const + 25 at Element.h:907
+ (lldb) p aAttr
+ (PermanentAtomImpl *) $1 = 0x000000011012a640 u"readonly"
+
+* nsTArray and friends
+
+ nsTArrays and their auto and fallible varieties have synthetic children
+ for their elements. This means when displaying them with "expr" (or "p"),
+ they will be shown like regular arrays, rather than showing the mHdr and
+ other fields.
+
+ (lldb) frame info
+ frame #0: 0x00000001043eb8a8 XUL`SVGTextFrame::DoGlyphPositioning(this=0x000000012f3f8778) + 248 at SVGTextFrame.cpp:4940
+ (lldb) p charPositions
+ (nsTArray<nsPoint>) $5 = {
+ [0] = {
+ mozilla::gfx::BasePoint<int, nsPoint> = {
+ x = 0
+ y = 816
+ }
+ }
+ [1] = {
+ mozilla::gfx::BasePoint<int, nsPoint> = {
+ x = 426
+ y = 816
+ }
+ }
+ [2] = {
+ mozilla::gfx::BasePoint<int, nsPoint> = {
+ x = 906
+ y = 816
+ }
+ }
+ }
+ (lldb) expr -R -- charPositions
+ (nsTArray<nsPoint>) $4 = {
+ nsTArray_Impl<nsPoint, nsTArrayInfallibleAllocator> = {
+ nsTArray_base<nsTArrayInfallibleAllocator, nsTArray_CopyWithMemutils> = {
+ mHdr = 0x000000012f3f1b80
+ }
+ }
+ }
+
+* nsTextNode, nsTextFragment
+
+ Text nodes have a type summary that shows the nsTextFragment in the
+ nsTextNode, which itself has a type summary that shows the text
+ content.
+
+ (lldb) p this
+ (nsTextFrame *) $14 = 0x000000011811bb10
+ (lldb) p mContent
+ (nsTextNode *) $15 = 0x0000000118130110 "Search or enter address"
+
diff --git a/python/lldbutils/lldbutils/__init__.py b/python/lldbutils/lldbutils/__init__.py
new file mode 100644
index 0000000000..e1263153df
--- /dev/null
+++ b/python/lldbutils/lldbutils/__init__.py
@@ -0,0 +1,17 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+import lldb
+
+__all__ = ["content", "general", "gfx", "layout", "utils"]
+
+
def init():
    """Import every lldbutils submodule and run its init(debugger) hook.

    Submodules that don't define init are skipped silently.
    """
    for name in __all__:
        hook = None
        try:
            module = __import__("lldbutils." + name, globals(), locals(), ["init"])
            hook = module.init
        except AttributeError:
            pass
        if hook:
            hook(lldb.debugger)
diff --git a/python/lldbutils/lldbutils/content.py b/python/lldbutils/lldbutils/content.py
new file mode 100644
index 0000000000..8f2bf22ccd
--- /dev/null
+++ b/python/lldbutils/lldbutils/content.py
@@ -0,0 +1,28 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+from lldbutils import utils
+
+
def summarize_text_fragment(valobj, internal_dict):
    """Type summary for nsTextFragment: show the fragment's text content.

    Picks the one-byte or two-byte character buffer based on the mIs2b
    state bit and formats mLength characters from it.
    """
    content_union = valobj.GetChildAtIndex(0)
    state_union = valobj.GetChildAtIndex(1).GetChildMemberWithName("mState")
    length = state_union.GetChildMemberWithName("mLength").GetValueAsUnsigned(0)
    is_two_byte = state_union.GetChildMemberWithName("mIs2b").GetValueAsUnsigned(0)
    buffer_member = "m2b" if is_two_byte else "m1b"
    ptr = content_union.GetChildMemberWithName(buffer_member)
    return utils.format_string(ptr, length)
+
+
def ptag(debugger, command, result, dict):
    """Displays the tag name of a content node."""
    expression = "expr (%s)->mNodeInfo.mRawPtr->mInner.mName" % command
    debugger.HandleCommand(expression)
+
+
def init(debugger):
    # Register the nsTextFragment type summary and the ptag command with
    # the given lldb debugger instance.
    debugger.HandleCommand(
        "type summary add nsTextFragment -F lldbutils.content.summarize_text_fragment"
    )
    debugger.HandleCommand("command script add -f lldbutils.content.ptag ptag")
diff --git a/python/lldbutils/lldbutils/general.py b/python/lldbutils/lldbutils/general.py
new file mode 100644
index 0000000000..577b3847fa
--- /dev/null
+++ b/python/lldbutils/lldbutils/general.py
@@ -0,0 +1,165 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+import lldb
+
+from lldbutils import utils
+
+
def summarize_string(valobj, internal_dict):
    # Type summary for the ns*String family: read the character pointer
    # (mData) and length (mLength) and format the text like lldb would.
    data = valobj.GetChildMemberWithName("mData")
    length = valobj.GetChildMemberWithName("mLength").GetValueAsUnsigned(0)
    return utils.format_string(data, length)
+
+
def summarize_atom(valobj, internal_dict):
    # Type summary for nsAtom: evaluate GetUTF16String() in the target
    # process to obtain the character buffer, then format mLength chars.
    # NOTE(review): this uses the globally selected target via
    # lldb.debugger rather than valobj's own target — assumed equivalent.
    target = lldb.debugger.GetSelectedTarget()
    length = valobj.GetChildMemberWithName("mLength").GetValueAsUnsigned()
    string = target.EvaluateExpression(
        "(char16_t*)%s.GetUTF16String()" % valobj.GetName()
    )
    return utils.format_string(string, length)
+
+
class TArraySyntheticChildrenProvider:
    """Synthetic children provider exposing nsTArray elements as [i] children.

    The array's heap header is pointed to by mHdr; elements are laid out
    contiguously right after the header, each element_size bytes.
    """

    def __init__(self, valobj, internal_dict):
        self.valobj = valobj
        self.header = self.valobj.GetChildMemberWithName("mHdr")
        self.element_type = self.valobj.GetType().GetTemplateArgumentType(0)
        self.element_size = self.element_type.GetByteSize()
        header_size = self.header.GetType().GetPointeeType().GetByteSize()
        self.element_base_addr = self.header.GetValueAsUnsigned(0) + header_size

    def num_children(self):
        # The element count lives in the header's mLength field.
        return (
            self.header.Dereference()
            .GetChildMemberWithName("mLength")
            .GetValueAsUnsigned(0)
        )

    def get_child_index(self, name):
        # Map a child name (a decimal index) to its index, or None for
        # non-numeric or out-of-range names.
        try:
            index = int(name)
            if index < self.num_children():
                # Fixed: the original fell through and returned None even
                # for valid indices, so name-based child lookup could
                # never succeed.
                return index
        except (TypeError, ValueError):
            # int() raises these for non-numeric names; treat as no match.
            pass
        return None

    def get_child_at_index(self, index):
        if index >= self.num_children():
            return None
        addr = self.element_base_addr + index * self.element_size
        return self.valobj.CreateValueFromAddress(
            "[%d]" % index, addr, self.element_type
        )
+
+
def prefcnt(debugger, command, result, dict):
    """Displays the refcount of an object."""
    # We handle regular nsISupports-like refcounted objects, cycle
    # collected objects, and mozilla::RefCounted<T> objects.
    target = debugger.GetSelectedTarget()
    process = target.GetProcess()
    thread = process.GetSelectedThread()
    frame = thread.GetSelectedFrame()
    obj = frame.EvaluateExpression(command)
    if obj.GetError().Fail():
        print("could not evaluate expression")
        return
    # Strip pointer / smart-pointer indirection so members are reachable.
    obj = utils.dereference(obj)
    field = obj.GetChildMemberWithName("mRefCnt")
    if field.GetError().Fail():
        # Cycle-collected classes name their refcount field "refCnt".
        field = obj.GetChildMemberWithName("refCnt")
    if field.GetError().Fail():
        print("not a refcounted object")
        return
    # Dispatch on the concrete refcount type to extract the numeric value.
    refcnt_type = field.GetType().GetCanonicalType().GetName()
    if refcnt_type == "nsAutoRefCnt":
        print(field.GetChildMemberWithName("mValue").GetValueAsUnsigned(0))
    elif refcnt_type == "nsCycleCollectingAutoRefCnt":
        # The low two bits are flags; the count is stored shifted left by 2.
        print(
            field.GetChildMemberWithName("mRefCntAndFlags").GetValueAsUnsigned(0) >> 2
        )
    elif refcnt_type == "mozilla::ThreadSafeAutoRefCnt":
        print(
            field.GetChildMemberWithName("mValue")
            .GetChildMemberWithName("mValue")
            .GetValueAsUnsigned(0)
        )
    elif refcnt_type == "int":  # non-atomic mozilla::RefCounted object
        print(field.GetValueAsUnsigned(0))
    elif refcnt_type == "mozilla::Atomic<int>":  # atomic mozilla::RefCounted object
        print(field.GetChildMemberWithName("mValue").GetValueAsUnsigned(0))
    else:
        print("unknown mRefCnt type " + refcnt_type)
+
+
# Used to work around http://llvm.org/bugs/show_bug.cgi?id=22211
def callfunc(debugger, command, result, dict):
    """Calls a function for which debugger information is unavailable by getting its address
    from the symbol table. The function is assumed to return void."""

    if "(" not in command:
        print("Usage: callfunc your_function(args)")
        return

    # Split "func(arg1, arg2)" on the first "(". Note that args keeps the
    # user's trailing ")", which closes the call expression built below.
    command_parts = command.split("(")
    funcname = command_parts[0].strip()
    args = command_parts[1]

    target = debugger.GetSelectedTarget()
    symbols = target.FindFunctions(funcname).symbols
    if not symbols:
        print('Could not find a function symbol for a function called "%s"' % funcname)
        return

    sym = symbols[0]
    arg_types = "()"
    if sym.name and sym.name.startswith(funcname + "("):
        # The demangled symbol name is "func(int, char*)"; reuse its
        # argument list so the function-pointer cast has a real signature.
        arg_types = sym.name[len(funcname) :]
    # Cast the raw symbol address to void(*)(<arg_types>) and invoke it.
    debugger.HandleCommand(
        "print ((void(*)%s)0x%0x)(%s"
        % (arg_types, sym.addr.GetLoadAddress(target), args)
    )
+
+
def init(debugger):
    # Register the string/atom type summaries, the nsTArray-family
    # synthetic children providers, and the prefcnt/callfunc commands.
    debugger.HandleCommand(
        "type summary add nsAString -F lldbutils.general.summarize_string"
    )
    debugger.HandleCommand(
        "type summary add nsACString -F lldbutils.general.summarize_string"
    )
    debugger.HandleCommand(
        "type summary add nsFixedString -F lldbutils.general.summarize_string"
    )
    debugger.HandleCommand(
        "type summary add nsFixedCString -F lldbutils.general.summarize_string"
    )
    debugger.HandleCommand(
        "type summary add nsAutoString -F lldbutils.general.summarize_string"
    )
    debugger.HandleCommand(
        "type summary add nsAutoCString -F lldbutils.general.summarize_string"
    )
    debugger.HandleCommand(
        "type summary add nsAtom -F lldbutils.general.summarize_atom"
    )
    debugger.HandleCommand(
        'type synthetic add -x "nsTArray<" -l lldbutils.general.TArraySyntheticChildrenProvider'
    )
    debugger.HandleCommand(
        'type synthetic add -x "AutoTArray<" -l lldbutils.general.TArraySyntheticChildrenProvider'  # NOQA: E501
    )
    debugger.HandleCommand(
        'type synthetic add -x "FallibleTArray<" -l lldbutils.general.TArraySyntheticChildrenProvider'  # NOQA: E501
    )
    # NOTE(review): the duplicated "-f module.func" in the two commands
    # below looks redundant but harmless — confirm before cleaning up.
    debugger.HandleCommand(
        "command script add -f lldbutils.general.prefcnt -f lldbutils.general.prefcnt prefcnt"
    )
    debugger.HandleCommand(
        "command script add -f lldbutils.general.callfunc -f lldbutils.general.callfunc callfunc"
    )
diff --git a/python/lldbutils/lldbutils/gfx.py b/python/lldbutils/lldbutils/gfx.py
new file mode 100644
index 0000000000..7622dd4f9c
--- /dev/null
+++ b/python/lldbutils/lldbutils/gfx.py
@@ -0,0 +1,65 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
def summarize_nscolor(valobj, internal_dict):
    """Format an nscolor as a CSS 2.1 keyword, a hex color, an rgba()
    color, or the "transparent" keyword."""
    keyword_for_hex = {
        "#800000": "maroon",
        "#ff0000": "red",
        "#ffa500": "orange",
        "#ffff00": "yellow",
        "#808000": "olive",
        "#800080": "purple",
        "#ff00ff": "fuchsia",
        "#ffffff": "white",
        "#00ff00": "lime",
        "#008000": "green",
        "#000080": "navy",
        "#0000ff": "blue",
        "#00ffff": "aqua",
        "#008080": "teal",
        "#000000": "black",
        "#c0c0c0": "silver",
        "#808080": "gray",
    }
    rgba = valobj.GetValueAsUnsigned(0)
    if rgba == 0:
        return "transparent"
    red = rgba & 0xFF
    green = (rgba >> 8) & 0xFF
    blue = (rgba >> 16) & 0xFF
    alpha = (rgba >> 24) & 0xFF
    if alpha != 0xFF:
        return "rgba(%d, %d, %d, %f)" % (red, green, blue, alpha / 255.0)
    hex_color = "#%02x%02x%02x" % (red, green, blue)
    return keyword_for_hex.get(hex_color, hex_color)
+
+
def summarize_rect(valobj, internal_dict):
    """Format a rect value as "x, y, width, height" from its members."""
    members = ("x", "y", "width", "height")
    values = [valobj.GetChildMemberWithName(m).GetValue() for m in members]
    return "%s, %s, %s, %s" % tuple(values)
+
+
def rect_is_empty(valobj):
    """Return True when the rect's width or height is not positive."""
    width = valobj.GetChildMemberWithName("width").GetValueAsSigned()
    height = valobj.GetChildMemberWithName("height").GetValueAsSigned()
    return not (width > 0 and height > 0)
+
+
def init(debugger):
    # Register the nscolor and rect type summaries with the given lldb
    # debugger instance.
    debugger.HandleCommand(
        "type summary add nscolor -v -F lldbutils.gfx.summarize_nscolor"
    )
    debugger.HandleCommand("type summary add nsRect -v -F lldbutils.gfx.summarize_rect")
    debugger.HandleCommand(
        "type summary add nsIntRect -v -F lldbutils.gfx.summarize_rect"
    )
    debugger.HandleCommand(
        "type summary add gfxRect -v -F lldbutils.gfx.summarize_rect"
    )
diff --git a/python/lldbutils/lldbutils/layout.py b/python/lldbutils/lldbutils/layout.py
new file mode 100644
index 0000000000..c70216bff5
--- /dev/null
+++ b/python/lldbutils/lldbutils/layout.py
@@ -0,0 +1,46 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
def frametree(debugger, command, result, dict):
    """Dumps the frame tree containing the given nsIFrame*."""
    debugger.HandleCommand("expr (%s)->DumpFrameTree()" % command)
+
+
def frametree_pixels(debugger, command, result, dict):
    """Dumps the frame tree containing the given nsIFrame* in CSS pixels."""
    debugger.HandleCommand("expr (%s)->DumpFrameTreeInCSSPixels()" % command)
+
+
def frametreelimited(debugger, command, result, dict):
    """Dumps the subtree of a frame tree rooted at the given nsIFrame*."""
    debugger.HandleCommand("expr (%s)->DumpFrameTreeLimited()" % command)
+
+
def frametreelimited_pixels(debugger, command, result, dict):
    """Dumps the subtree of a frame tree rooted at the given nsIFrame*
    in CSS pixels."""
    debugger.HandleCommand("expr (%s)->DumpFrameTreeLimitedInCSSPixels()" % command)
+
+
def pstate(debugger, command, result, dict):
    """Displays a frame's state bits symbolically."""
    debugger.HandleCommand("expr mozilla::PrintFrameState(%s)" % command)
+
+
def init(debugger):
    # Register the frame-tree dump commands and their short aliases with
    # the given lldb debugger instance.
    debugger.HandleCommand("command script add -f lldbutils.layout.frametree frametree")
    debugger.HandleCommand(
        "command script add -f lldbutils.layout.frametree_pixels frametree_pixels"
    )
    debugger.HandleCommand(
        "command script add -f lldbutils.layout.frametreelimited frametreelimited"
    )
    debugger.HandleCommand(
        "command script add -f lldbutils.layout.frametreelimited_pixels frametreelimited_pixels"
    )
    debugger.HandleCommand("command alias ft frametree")
    debugger.HandleCommand("command alias ftp frametree_pixels")
    debugger.HandleCommand("command alias ftl frametreelimited")
    debugger.HandleCommand("command alias ftlp frametreelimited_pixels")
    debugger.HandleCommand("command script add -f lldbutils.layout.pstate pstate")
diff --git a/python/lldbutils/lldbutils/utils.py b/python/lldbutils/lldbutils/utils.py
new file mode 100644
index 0000000000..fdcf7da366
--- /dev/null
+++ b/python/lldbutils/lldbutils/utils.py
@@ -0,0 +1,86 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
def format_char(c):
    """Return the lldb-style printable form of character code *c*.

    Control characters map to their backslash escapes, other bytes in
    0x80-0xFF to \\xNN, characters above U+00FF to \\uNNNN, and printable
    ASCII to the character itself.
    """
    if c == 0:
        return "\\0"
    elif c == 0x07:
        return "\\a"
    elif c == 0x08:
        return "\\b"
    elif c == 0x0C:
        return "\\f"
    elif c == 0x0A:
        return "\\n"
    elif c == 0x0D:
        return "\\r"
    elif c == 0x09:
        return "\\t"
    elif c == 0x0B:
        return "\\v"
    elif c == 0x5C:
        # Fixed: a literal backslash must itself be escaped in the quoted
        # output; the original returned a single (unescaped) backslash.
        return "\\\\"
    elif c == 0x22:
        return '\\"'
    elif c == 0x27:
        return "\\'"
    elif c < 0x20 or (0x80 <= c <= 0xFF):
        # Remaining control characters and non-ASCII bytes as hex escapes.
        return "\\x%02x" % c
    elif c >= 0x0100:
        return "\\u%04x" % c
    else:
        return chr(c)
+
+
# Take an SBValue that is either a char* or char16_t* and formats it like lldb
# would when printing it.
def format_string(lldb_value, length=100):
    # `length` is the maximum number of characters to read, not the exact
    # string length; reading stops early at a NUL terminator.
    ptr = lldb_value.GetValueAsUnsigned(0)
    char_type = lldb_value.GetType().GetPointeeType()
    if char_type.GetByteSize() == 1:
        s = '"'
        size = 1
        mask = 0xFF
    elif char_type.GetByteSize() == 2:
        # Two-byte characters get the u"..." prefix, matching lldb output.
        s = 'u"'
        size = 2
        mask = 0xFFFF
    else:
        return "(cannot format string with char type %s)" % char_type.GetName()
    i = 0
    terminated = False
    while i < length:
        # Read one character out of target memory and mask to char width.
        c = (
            lldb_value.CreateValueFromAddress(
                "x", ptr + i * size, char_type
            ).GetValueAsUnsigned(0)
            & mask
        )
        if c == 0:
            terminated = True
            break
        s += format_char(c)
        i = i + 1
    s += '"'
    # NOTE(review): this condition looks unreachable — the loop only exits
    # with i == length unless it breaks on a NUL, in which case terminated
    # is True. It was probably meant to flag truncation at the limit
    # (i == length). Left as-is; confirm intent before changing.
    if not terminated and i != length:
        s += "..."
    return s
+
+
# Dereferences a raw pointer, nsCOMPtr, RefPtr, nsAutoPtr, already_AddRefed or
# mozilla::RefPtr; otherwise returns the value unchanged.
def dereference(lldb_value):
    if lldb_value.TypeIsPointerType():
        return lldb_value.Dereference()
    type_name = lldb_value.GetType().GetUnqualifiedType().GetName()
    # Smart pointers that keep their raw pointer in an mRawPtr member.
    smart_ptr_prefixes = (
        "nsCOMPtr<",
        "RefPtr<",
        "nsAutoPtr<",
        "already_AddRefed<",
    )
    if type_name.startswith(smart_ptr_prefixes):
        return lldb_value.GetChildMemberWithName("mRawPtr")
    # mozilla::RefPtr keeps its raw pointer in a member named "ptr".
    if type_name.startswith("mozilla::RefPtr<"):
        return lldb_value.GetChildMemberWithName("ptr")
    return lldb_value
diff --git a/python/mach/.ruff.toml b/python/mach/.ruff.toml
new file mode 100644
index 0000000000..82b1a04648
--- /dev/null
+++ b/python/mach/.ruff.toml
@@ -0,0 +1,4 @@
+extend = "../../pyproject.toml"
+
+[isort]
+known-first-party = ["mach"]
diff --git a/python/mach/README.rst b/python/mach/README.rst
new file mode 100644
index 0000000000..7c2e00becb
--- /dev/null
+++ b/python/mach/README.rst
@@ -0,0 +1,13 @@
+====
+mach
+====
+
+Mach (German for *do*) is a generic command dispatcher for the command
+line.
+
+To use mach, you install the mach core (a Python package), create an
+executable *driver* script (named whatever you want), and write mach
+commands. When the *driver* is executed, mach dispatches to the
+requested command handler automatically.
+
+To learn more, read the docs in ``docs/``.
diff --git a/python/mach/bash-completion.sh b/python/mach/bash-completion.sh
new file mode 100644
index 0000000000..13935cf88c
--- /dev/null
+++ b/python/mach/bash-completion.sh
@@ -0,0 +1,18 @@
# Bash programmable-completion handler for the mach driver. Asks mach
# itself ("mach-completion") for the candidate targets matching the
# current command line.
function _mach()
{
    local cur targets
    COMPREPLY=()

    # Calling `mach-completion` with -h/--help would result in the
    # help text being used as the completion targets.
    if [[ $COMP_LINE == *"-h"* || $COMP_LINE == *"--help"* ]]; then
        return 0
    fi

    # Load the list of targets
    targets=`"${COMP_WORDS[0]}" mach-completion ${COMP_LINE}`
    cur="${COMP_WORDS[COMP_CWORD]}"
    # Offer only the targets that extend the word being completed.
    COMPREPLY=( $(compgen -W "$targets" -- ${cur}) )
    return 0
}
complete -o default -F _mach mach
diff --git a/python/mach/docs/commands.rst b/python/mach/docs/commands.rst
new file mode 100644
index 0000000000..7547193000
--- /dev/null
+++ b/python/mach/docs/commands.rst
@@ -0,0 +1,129 @@
+.. _mach_commands:
+
+=====================
+Implementing Commands
+=====================
+
+Mach commands are defined via Python decorators.
+
+All the relevant decorators are defined in the *mach.decorators* module.
+The important decorators are as follows:
+
+:py:func:`Command <mach.decorators.Command>`
+ A function decorator that denotes that the function should be called when
+ the specified command is requested. The decorator takes a command name
+ as its first argument and a number of additional arguments to
+ configure the behavior of the command. The decorated function must take a
+ ``command_context`` argument as its first.
+ ``command_context`` is a properly configured instance of a ``MozbuildObject``
+ subclass, meaning it can be used for accessing things like the current config
+ and running processes.
+
+:py:func:`CommandArgument <mach.decorators.CommandArgument>`
+ A function decorator that defines an argument to the command. Its
+ arguments are essentially proxied to ArgumentParser.add_argument()
+
+:py:func:`SubCommand <mach.decorators.SubCommand>`
+ A function decorator that denotes that the function should be a
+ sub-command to an existing ``@Command``. The decorator takes the
+ parent command name as its first argument and the sub-command name
+ as its second argument.
+
+ ``@CommandArgument`` can be used on ``@SubCommand`` instances just
+ like they can on ``@Command`` instances.
+
+
+Here is a complete example:
+
+.. code-block:: python
+
+ from mach.decorators import (
+ CommandArgument,
+ Command,
+ )
+
+ @Command('doit', help='Do ALL OF THE THINGS.')
+ @CommandArgument('--force', '-f', action='store_true',
+ help='Force doing it.')
+ def doit(command_context, force=False):
+ # Do stuff here.
+
+When the module is loaded, the decorators tell mach about all handlers.
+When mach runs, it takes the assembled metadata from these handlers and
+hooks it up to the command line driver. Under the hood, arguments passed
+to the decorators are being used to help mach parse command arguments,
+formulate arguments to the methods, etc. See the documentation in the
+:py:mod:`mach.base` module for more.
+
+The Python modules defining mach commands do not need to live inside the
+main mach source tree.
+
+Conditionally Filtering Commands
+================================
+
+Sometimes it might only make sense to run a command given a certain
+context. For example, running tests only makes sense if the product
+they are testing has been built, and said build is available. To make
+sure a command is only runnable from within a correct context, you can
+define a series of conditions on the
+:py:func:`Command <mach.decorators.Command>` decorator.
+
+A condition is simply a function that takes an instance of the
+:py:func:`mozbuild.base.MachCommandBase` class as an argument, and
+returns ``True`` or ``False``. If any of the conditions defined on a
+command return ``False``, the command will not be runnable. The
+docstring of a condition function is used in error messages, to explain
+why the command cannot currently be run.
+
+Here is an example:
+
+.. code-block:: python
+
+ from mach.decorators import (
+ Command,
+ )
+
+ def build_available(cls):
+ """The build needs to be available."""
+ return cls.build_path is not None
+
+ @Command('run_tests', conditions=[build_available])
+ def run_tests(command_context):
+ # Do stuff here.
+
+By default all commands without any conditions applied will be runnable,
+but it is possible to change this behaviour by setting
+``require_conditions`` to ``True``:
+
+.. code-block:: python
+
+ m = mach.main.Mach()
+ m.require_conditions = True
+
+Minimizing Code in Commands
+===========================
+
+Mach command modules, classes, and methods work best when they are
+minimal dispatchers. The reason is import bloat. Currently, the mach
+core needs to import every Python file potentially containing mach
+commands for every command invocation. If you have dozens of commands or
+commands in modules that import a lot of Python code, these imports
+could slow mach down and waste memory.
+
+It is thus recommended that mach modules, classes, and methods do as
+little work as possible. Ideally the module should only import from
+the :py:mod:`mach` package. If you need external modules, you should
+import them from within the command method.
+
+To keep code size small, the body of a command method should be limited
+to:
+
+1. Obtaining user input (parsing arguments, prompting, etc)
+2. Calling into some other Python package
+3. Formatting output
+
+Of course, these recommendations can be ignored if you want to risk
+slower performance.
+
+In the future, the mach driver may cache the dispatching information or
+have it intelligently loaded to facilitate lazy loading.
diff --git a/python/mach/docs/driver.rst b/python/mach/docs/driver.rst
new file mode 100644
index 0000000000..8a2a99a2f5
--- /dev/null
+++ b/python/mach/docs/driver.rst
@@ -0,0 +1,32 @@
+.. _mach_driver:
+
+=======
+Drivers
+=======
+
+Entry Points
+============
+
+It is possible to use setuptools' entry points to load commands
+directly from python packages. A mach entry point is a function which
+returns a list of files or directories containing mach command
+providers. e.g.:
+
+.. code-block:: python
+
+ def list_providers():
+ providers = []
+ here = os.path.abspath(os.path.dirname(__file__))
+ for p in os.listdir(here):
+ if p.endswith('.py'):
+ providers.append(os.path.join(here, p))
+ return providers
+
+See http://pythonhosted.org/setuptools/setuptools.html#dynamic-discovery-of-services-and-plugins
+for more information on creating an entry point. To search for entry
+point plugins, you can call
+:py:meth:`mach.main.Mach.load_commands_from_entry_point`. e.g.:
+
+.. code-block:: python
+
+ mach.load_commands_from_entry_point("mach.external.providers")
diff --git a/python/mach/docs/faq.rst b/python/mach/docs/faq.rst
new file mode 100644
index 0000000000..a640f83e87
--- /dev/null
+++ b/python/mach/docs/faq.rst
@@ -0,0 +1,152 @@
+.. _mach_faq:
+
+==========================
+Frequently Asked Questions
+==========================
+
+How do I report bugs?
+---------------------
+
+Bugs against the ``mach`` core can be filed in Bugzilla in the `Firefox
+Build System :: Mach
+Core <https://bugzilla.mozilla.org/enter_bug.cgi?product=Firefox%20Build%20System&component=Mach%20Core>`__ component.
+
+.. note::
+
+ Most ``mach`` bugs are bugs in individual commands, not bugs in the core
+ ``mach`` code. Bugs for individual commands should be filed against the
+ component that command is related to. For example, bugs in the
+ *build* command should be filed against *Firefox Build System ::
+ General*. Bugs against testing commands should be filed somewhere in
+ the *Testing* product.
+
+How do I debug a command failing with a Python exception?
+---------------------------------------------------------
+
+You can run a command and break into ``pdb``, the Python debugger,
+when the command is invoked with:
+
+.. code-block:: shell
+
+ ./mach --debug-command FAILING-COMMAND ARGS ...
+
+How do I debug ``mach`` itself?
+-------------------------------
+
+If you are editing the mach code, or other Python modules you can
+open the terminal and start debugging with pdb with the following:
+
+.. code-block:: shell
+
+ python3 -m pdb ./mach <command>
+
+How do I debug ``pytest`` tests?
+--------------------------------
+
+First, before debugging, run ``./mach python-test`` once to ensure that
+the testing virtualenv is up-to-date:
+
+.. code-block:: shell
+
+ ./mach python-test path/to/test.py
+
+Then, using the testing virtualenv, debug the test file:
+
+.. code-block:: shell
+
+ <objdir>/_virtualenvs/python-test/bin/python -m pdb path/to/test.py
+
+How do I profile a slow command?
+--------------------------------
+
+To diagnose bottlenecks, you can collect a performance profile:
+
+.. code-block:: shell
+
+ ./mach --profile-command SLOW-COMMAND ARGS ...
+
+Then, you can visualize ``mach_profile_SLOW-COMMAND.cProfile`` using
+`snakeviz <https://jiffyclub.github.io/snakeviz/>`__:
+
+.. code-block:: shell
+
+ # If you don't have snakeviz installed yet:
+ python3 -m pip install snakeviz
+ python3 -m snakeviz mach_profile_SLOW-COMMAND.cProfile
+
+How do I profile ``mach`` itself?
+---------------------------------
+
+Since ``--profile-command`` only profiles commands, you'll need to invoke ``cProfile``
+directly to profile ``mach`` itself:
+
+.. code-block:: shell
+
+ python3 -m cProfile -o mach.cProfile ./mach ...
+ python3 -m snakeviz mach.cProfile
+
+Is ``mach`` a build system?
+---------------------------
+
+No. ``mach`` is just a generic command dispatching tool that happens to have
+a few commands that interact with the real build system. Historically,
+``mach`` *was* born to become a better interface to the build system.
+However, its potential beyond just build system interaction was quickly
+realized and ``mach`` grew to fit those needs.
+
+How do I add features to ``mach``?
+----------------------------------
+If you would like to add a new feature to ``mach`` that cannot be implemented as
+a ``mach`` command, the first step is to file a bug in the
+``Firefox Build System :: Mach Core`` component.
+
+Should I implement X as a ``mach`` command?
+-------------------------------------------
+
+There are no hard or fast rules. Generally speaking, if you have some
+piece of functionality or action that is useful to multiple people
+(especially if it results in productivity wins), then you should
+consider implementing a ``mach`` command for it.
+
+Some other cases where you should consider implementing something as a
+``mach`` command:
+
+- When your tool is a random script in the tree. Random scripts are
+ hard to find and may not conform to coding conventions or best
+ practices. ``Mach`` provides a framework in which your tool can live that
+ will put it in a better position to succeed than if it were on its
+ own.
+- When the alternative is a ``make`` target. The build team generally does
+ not like one-off ``make`` targets that aren't part of building (read:
+ compiling) the tree. This includes things related to testing and
+ packaging. These weigh down ``Makefiles`` and add to the burden of
+ maintaining the build system. Instead, you are encouraged to
+ implement ancillary functionality in Python. If you do implement something
+ in Python, hooking it up to ``mach`` is often trivial.
+
+How do I use 3rd-party Python packages in my ``mach`` command?
+--------------------------------------------------------------
+
+See :ref:`Using third-party Python packages`.
+
+How does ``mach`` fit into the modules system?
+----------------------------------------------
+
+Mozilla operates with a `modules governance
+system <https://www.mozilla.org/about/governance/policies/module-ownership/>`__ where
+there are different components with different owners. There is not
+currently a ``mach`` module. There may or may never be one; currently ``mach``
+is owned by the build team.
+
+Even if a ``mach`` module were established, ``mach`` command modules would
+likely never belong to it. Instead, ``mach`` command modules are owned by the
+team/module that owns the system they interact with. In other words, ``mach``
+is not a power play to consolidate authority for tooling. Instead, it aims to
+expose that tooling through a common, shared interface.
+
+
+Who do I contact for help or to report issues?
+----------------------------------------------
+
+You can ask questions in
+`#build <https://chat.mozilla.org/#/room/#build:mozilla.org>`__.
diff --git a/python/mach/docs/index.rst b/python/mach/docs/index.rst
new file mode 100644
index 0000000000..752fe93219
--- /dev/null
+++ b/python/mach/docs/index.rst
@@ -0,0 +1,89 @@
+====
+Mach
+====
+
+Mach (German for *do*) is a generic command dispatcher for the command
+line.
+
+To use mach, you install the mach core (a Python package), create an
+executable *driver* script (named whatever you want), and write mach
+commands. When the *driver* is executed, mach dispatches to the
+requested command handler automatically.
+
+.. raw:: html
+
+ <h2>Features</h2>
+
+----
+
+On a high level, mach is similar to using argparse with subparsers (for
+command handling). When you dig deeper, mach offers a number of
+additional features:
+
+Distributed command definitions
+ With optparse/argparse, you have to define your commands on a central
+ parser instance. With mach, you annotate your command methods with
+ decorators and mach finds and dispatches to them automatically.
+
+Command categories
+ Mach commands can be grouped into categories when displayed in help.
+ This is currently not possible with argparse.
+
+Logging management
+ Mach provides a facility for logging (both classical text and
+ structured) that is available to any command handler.
+
+Settings files
+ Mach provides a facility for reading settings from an ini-like file
+ format.
+
+.. raw:: html
+
+ <h2>Components</h2>
+
+----
+
+Mach is conceptually composed of the following components:
+
+core
+ The mach core is the core code powering mach. This is a Python package
+ that contains all the business logic that makes mach work. The mach
+ core is common to all mach deployments.
+
+commands
+ These are what mach dispatches to. Commands are simply Python methods
+ registered as command names. The set of commands is unique to the
+ environment mach is deployed in.
+
+driver
+ The *driver* is the entry-point to mach. It is simply an executable
+ script that loads the mach core, tells it where commands can be found,
+ then asks the mach core to handle the current request. The driver is
+ unique to the deployed environment. But, it's usually based on an
+ example from this source tree.
+
+.. raw:: html
+
+ <h2>Project State</h2>
+
+----
+
+mach was originally written as a command dispatching framework to aid
+Firefox development. While the code is mostly generic, there are still
+some pieces that closely tie it to Mozilla/Firefox. The goal is for
+these to eventually be removed and replaced with generic features so
+mach is suitable for anybody to use. Until then, mach may not be the
+best fit for you.
+
+.. toctree::
+ :maxdepth: 1
+ :hidden:
+
+ usage
+ commands
+ driver
+ logging
+ settings
+ telemetry
+ windows-usage-outside-mozillabuild
+ faq
diff --git a/python/mach/docs/logging.rst b/python/mach/docs/logging.rst
new file mode 100644
index 0000000000..ff245cf032
--- /dev/null
+++ b/python/mach/docs/logging.rst
@@ -0,0 +1,100 @@
+.. _mach_logging:
+
+=======
+Logging
+=======
+
+Mach configures a built-in logging facility so commands can easily log
+data.
+
+What sets the logging facility apart from most loggers you've seen is
+that it encourages structured logging. Instead of conventional logging
+where simple strings are logged, the internal logging mechanism logs all
+events with the following pieces of information:
+
+* A string *action*
+* A dict of log message fields
+* A formatting string
+
+Essentially, instead of assembling a human-readable string at
+logging-time, you create an object holding all the pieces of data that
+will constitute your logged event. For each unique type of logged event,
+you assign an *action* name.
+
+Depending on how logging is configured, your logged event could get
+written a couple of different ways.
+
+JSON Logging
+============
+
+Where machines are the intended target of the logging data, a JSON
+logger is configured. The JSON logger assembles an array consisting of
+the following elements:
+
+* Decimal wall clock time in seconds since UNIX epoch
+* String *action* of message
+* Object with structured message data
+
+The JSON-serialized array is written to a configured file handle.
+Consumers of this logging stream can just perform a readline() then feed
+that into a JSON deserializer to reconstruct the original logged
+message. They can key off the *action* element to determine how to
+process individual events. There is no need to invent a parser.
+Convenient, isn't it?
+
+Logging for Humans
+==================
+
+Where humans are the intended consumer of a log message, the structured
+log message is converted to a more human-friendly form. This is done by
+utilizing the *formatting* string provided at log time. The logger
+simply calls the *format* method of the formatting string, passing the
+dict containing the message's fields.
+
+When *mach* is used in a terminal that supports it, the logging facility
+also supports terminal features such as colorization. This is done
+automatically in the logging layer - there is no need to control this at
+logging time.
+
+In addition, messages intended for humans typically prepend every line
+with the time passed since the application started.
+
+Logging HOWTO
+=============
+
+Structured logging piggybacks on top of Python's built-in logging
+infrastructure provided by the *logging* package. We accomplish this by
+taking advantage of *logging.Logger.log()*'s *extra* argument. To this
+argument, we pass a dict with the fields *action* and *params*. These
+are the string *action* and dict of message fields, respectively. The
+formatting string is passed as the *msg* argument, like normal.
+
+If you were logging to a logger directly, you would do something like:
+
+.. code-block:: python
+
+ logger.log(logging.INFO, 'My name is {name}',
+ extra={'action': 'my_name', 'params': {'name': 'Gregory'}})
+
+The JSON logging would produce something like::
+
+ [1339985554.306338, "my_name", {"name": "Gregory"}]
+
+Human logging would produce something like::
+
+ 0.52 My name is Gregory
+
+Since there is a lot of complexity using logger.log directly, it is
+recommended to go through a wrapping layer that hides part of the
+complexity for you. The easiest way to do this is by utilizing the
+LoggingMixin:
+
+.. code-block:: python
+
+ import logging
+ from mach.mixin.logging import LoggingMixin
+
+ class MyClass(LoggingMixin):
+ def foo(self):
+ self.log(logging.INFO, 'foo_start', {'bar': True},
+ 'Foo performed. Bar: {bar}')
diff --git a/python/mach/docs/metrics.md b/python/mach/docs/metrics.md
new file mode 100644
index 0000000000..8c826f54a9
--- /dev/null
+++ b/python/mach/docs/metrics.md
@@ -0,0 +1,55 @@
+<!-- AUTOGENERATED BY glean_parser. DO NOT EDIT. -->
+
+# Metrics
+This document enumerates the metrics collected by this project using the [Glean SDK](https://mozilla.github.io/glean/book/index.html).
+This project may depend on other projects which also collect metrics.
+This means you might have to go searching through the dependency tree to get a full picture of everything collected by this project.
+
+# Pings
+
+ - [usage](#usage)
+
+
+## usage
+
+Sent when the mach invocation is completed (regardless of result). Contains information about the mach invocation that was made, its result, and some details about the current environment and hardware.
+
+
+This ping includes the [client id](https://mozilla.github.io/glean/book/user/pings/index.html#the-client_info-section).
+
+**Data reviews for this ping:**
+
+- <https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34>
+
+**Bugs related to this ping:**
+
+- <https://bugzilla.mozilla.org/show_bug.cgi?id=1291053>
+
+The following metrics are added to the ping:
+
+| Name | Type | Description | Data reviews | Extras | Expiration | [Data Sensitivity](https://wiki.mozilla.org/Firefox/Data_Collection) |
+| --- | --- | --- | --- | --- | --- | --- |
+| mach.argv |[string_list](https://mozilla.github.io/glean/book/user/metrics/string_list.html) |Parameters provided to mach. Absolute paths are sanitized to be relative to one of a few key base paths, such as the "$topsrcdir", "$topobjdir", or "$HOME". For example: "/home/mozilla/dev/firefox/python/mozbuild" would be replaced with "$topsrcdir/python/mozbuild". If a valid replacement base path cannot be found, the path is replaced with "<path omitted>". |[1](https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34)||never | |
+| mach.command |[string](https://mozilla.github.io/glean/book/user/metrics/string.html) |The name of the mach command that was invoked, such as "build", "doc", or "try". |[1](https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34)||never | |
+| mach.duration |[timespan](https://mozilla.github.io/glean/book/user/metrics/timespan.html) |How long it took for the command to complete. |[1](https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34)||never | |
+| mach.success |[boolean](https://mozilla.github.io/glean/book/user/metrics/boolean.html) |True if the mach invocation succeeded. |[1](https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34)||never | |
+| mach.system.cpu_brand |[string](https://mozilla.github.io/glean/book/user/metrics/string.html) |CPU brand string from CPUID. |[1](https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34)||never | |
+| mach.system.distro |[string](https://mozilla.github.io/glean/book/user/metrics/string.html) |The name of the operating system distribution. |[1](https://bugzilla.mozilla.org/show_bug.cgi?id=1655845#c3)||never | |
+| mach.system.distro_version |[string](https://mozilla.github.io/glean/book/user/metrics/string.html) |The high-level OS version. |[1](https://bugzilla.mozilla.org/show_bug.cgi?id=1655845#c3)||never | |
+| mach.system.logical_cores |[counter](https://mozilla.github.io/glean/book/user/metrics/counter.html) |Number of logical CPU cores present. |[1](https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34)||never | |
+| mach.system.memory |[memory_distribution](https://mozilla.github.io/glean/book/user/metrics/memory_distribution.html) |Amount of system memory. |[1](https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34)||never | |
+| mach.system.physical_cores |[counter](https://mozilla.github.io/glean/book/user/metrics/counter.html) |Number of physical CPU cores present. |[1](https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34)||never | |
+| mozbuild.artifact |[boolean](https://mozilla.github.io/glean/book/user/metrics/boolean.html) |True if `--enable-artifact-builds`. |[1](https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34)||never | |
+| mozbuild.ccache |[boolean](https://mozilla.github.io/glean/book/user/metrics/boolean.html) |True if `--with-ccache`. |[1](https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34)||never | |
+| mozbuild.clobber |[boolean](https://mozilla.github.io/glean/book/user/metrics/boolean.html) |True if the build was a clobber/full build. |[1](https://bugzilla.mozilla.org/show_bug.cgi?id=1526072#c15)||never | |
+| mozbuild.compiler |[string](https://mozilla.github.io/glean/book/user/metrics/string.html) |The compiler type in use (CC_TYPE), such as "clang" or "gcc". |[1](https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34)||never | |
+| mozbuild.debug |[boolean](https://mozilla.github.io/glean/book/user/metrics/boolean.html) |True if `--enable-debug`. |[1](https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34)||never | |
+| mozbuild.icecream |[boolean](https://mozilla.github.io/glean/book/user/metrics/boolean.html) |True if icecream in use. |[1](https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34)||never | |
+| mozbuild.opt |[boolean](https://mozilla.github.io/glean/book/user/metrics/boolean.html) |True if `--enable-optimize`. |[1](https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34)||never | |
+| mozbuild.project |[string](https://mozilla.github.io/glean/book/user/metrics/string.html) |The project being built. |[1](https://bugzilla.mozilla.org/show_bug.cgi?id=1654084#c2)||never | |
+| mozbuild.sccache |[boolean](https://mozilla.github.io/glean/book/user/metrics/boolean.html) |True if ccache in use is sccache. |[1](https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34)||never | |
+
+
+Data categories are [defined here](https://wiki.mozilla.org/Firefox/Data_Collection).
+
+<!-- AUTOGENERATED BY glean_parser. DO NOT EDIT. -->
diff --git a/python/mach/docs/settings.rst b/python/mach/docs/settings.rst
new file mode 100644
index 0000000000..4daba37472
--- /dev/null
+++ b/python/mach/docs/settings.rst
@@ -0,0 +1,138 @@
+.. _mach_settings:
+
+========
+Settings
+========
+
+Mach can read settings in from a set of configuration files. These
+configuration files are either named ``machrc`` or ``.machrc`` and
+are specified by the bootstrap script. In mozilla-central, these files
+can live in ``~/.mozbuild`` and/or ``topsrcdir``.
+
+Settings can be specified anywhere, and used both by the mach core and
+individual commands.
+
+
+Core Settings
+=============
+
+These settings are implemented by mach core.
+
+* alias - Create a command alias. This is useful if you want to alias a command to something else, optionally including some defaults. It can either be used to create an entire new command, or provide defaults for an existing one. For example:
+
+.. parsed-literal::
+
+ [alias]
+ mochitest = mochitest -f browser
+ browser-test = mochitest -f browser
+
+
+Defining Settings
+=================
+
+Settings need to be explicitly defined, along with their type,
+otherwise mach will throw when trying to access them.
+
+To define settings, use the :func:`~decorators.SettingsProvider`
+decorator in an existing mach command module. E.g:
+
+.. code-block:: python
+
+ from mach.decorators import SettingsProvider
+ from mozbuild.base import MachCommandBase
+
+ @SettingsProvider
+ class ArbitraryClassName(MachCommandBase):
+ config_settings = [
+ ('foo.bar', 'string', "A helpful description"),
+ ('foo.baz', 'int', "Another description", 0, {'choices': set([0,1,2])}),
+ ]
+
+Classes decorated with ``@SettingsProvider`` must specify a variable called
+``config_settings`` that is a list of tuples. Alternatively, they can specify
+a function called ``config_settings`` that returns a list of tuples.
+
+Each tuple is of the form:
+
+.. code-block:: python
+
+ ('<section>.<option>', '<type>', '<description>', default, extra)
+
+``type`` is a string and can be one of:
+string, boolean, int, pos_int, path
+
+``description`` is a string explaining how to define the settings and
+where they get used. Descriptions should ideally be multi-line paragraphs
+where the first line acts as a short description.
+
+``default`` is optional, and provides a default value in case none was
+specified by any of the configuration files.
+
+``extra`` is also optional and is a dict containing additional key/value
+pairs to add to the setting's metadata. The following keys may be specified
+in the ``extra`` dict:
+
+ * ``choices`` - A set of allowed values for the setting.
+
+Wildcards
+---------
+
+Sometimes a section should allow arbitrarily defined options from the user, such
+as the ``alias`` section mentioned above. To define a section like this, use ``*``
+as the option name. For example:
+
+.. parsed-literal::
+
+ ('foo.*', 'string', 'desc')
+
+This allows configuration files like this:
+
+.. parsed-literal::
+
+ [foo]
+ arbitrary1 = some string
+ arbitrary2 = some other string
+
+
+Finding Settings
+================
+
+You can see which settings are available as well as their description and
+expected values by running:
+
+.. parsed-literal::
+
+ ./mach settings # or
+ ./mach settings --list
+
+
+Accessing Settings
+==================
+
+Now that the settings are defined and documented, they're accessible from
+individual mach commands from the mach command context.
+For example:
+
+.. code-block:: python
+
+ from mach.decorators import (
+ Command,
+ SettingsProvider,
+ )
+ from mozbuild.base import MachCommandBase
+
+ @SettingsProvider
+ class ExampleSettings(object):
+ config_settings = [
+ ('a.b', 'string', 'desc', 'default'),
+ ('foo.bar', 'string', 'desc',),
+ ('foo.baz', 'int', 'desc', 0, {'choices': set([0,1,2])}),
+ ]
+
+ @Command('command', category='misc',
+ description='Prints a setting')
+ def command(command_context):
+ settings = command_context._mach_context.settings
+ print(settings.a.b)
+ for option in settings.foo:
+ print(settings.foo[option])
diff --git a/python/mach/docs/telemetry.rst b/python/mach/docs/telemetry.rst
new file mode 100644
index 0000000000..2d185a970e
--- /dev/null
+++ b/python/mach/docs/telemetry.rst
@@ -0,0 +1,37 @@
+.. _mach_telemetry:
+
+==============
+Mach Telemetry
+==============
+
+`Glean <https://mozilla.github.io/glean/>`_ is used to collect telemetry, and uses the metrics
+defined in the ``metrics.yaml`` files in-tree.
+These files are all documented in a single :ref:`generated file here<metrics>`.
+
+.. toctree::
+ :maxdepth: 1
+
+ metrics
+
+Adding Metrics to a new Command
+===============================
+
+If you would like to submit telemetry metrics from your mach ``@Command``, you should take two steps:
+
+#. Parameterize your ``@Command`` annotation with ``metrics_path``.
+#. Use the ``command_context.metrics`` handle provided by ``MachCommandBase``
+
+For example::
+
+ METRICS_PATH = os.path.abspath(os.path.join(__file__, '..', '..', 'metrics.yaml'))
+
+ @Command('custom-command', metrics_path=METRICS_PATH)
+ def custom_command(command_context):
+ command_context.metrics.custom.foo.set('bar')
+
+Updating Generated Metrics Docs
+===============================
+
+When a ``metrics.yaml`` is added/changed/removed, :ref:`the metrics document<metrics>` will need to be updated::
+
+ ./mach doc mach-telemetry
diff --git a/python/mach/docs/usage.rst b/python/mach/docs/usage.rst
new file mode 100644
index 0000000000..a32b35395c
--- /dev/null
+++ b/python/mach/docs/usage.rst
@@ -0,0 +1,150 @@
+.. _mach_usage:
+
+==========
+User Guide
+==========
+
+Mach is the central entry point for most operations that can be performed in
+mozilla-central.
+
+
+Command Help
+------------
+
+To see an overview of all the available commands, run:
+
+.. code-block:: shell
+
+ $ ./mach help
+
+For more detailed information on a specific command, run:
+
+.. code-block:: shell
+
+ $ ./mach help <command>
+
+If a command has subcommands listed, you can see more details on the subcommand
+by running:
+
+.. code-block:: shell
+
+ $ ./mach help <command> <subcommand>
+
+Alternatively, you can pass ``-h/--help``. For example, all of the
+following are valid:
+
+.. code-block:: shell
+
+ $ ./mach help try
+ $ ./mach help try fuzzy
+ $ ./mach try -h
+ $ ./mach try fuzzy --help
+
+
+Tab Completion
+--------------
+
+There are commands built-in to ``mach`` that can generate a fast tab completion
+script for various shells. Supported shells are currently ``bash``, ``zsh`` and
+``fish``. These generated scripts will slowly become out of date over time, so
+you may want to create a cron task to periodically re-generate them.
+
+See below for installation instructions:
+
+Bash
+~~~~
+
+.. code-block:: shell
+
+ $ mach mach-completion bash -f _mach
+ $ sudo mv _mach /etc/bash_completion.d
+
+Bash (homebrew)
+~~~~~~~~~~~~~~~
+
+.. code-block:: shell
+
+ $ mach mach-completion bash -f $(brew --prefix)/etc/bash_completion.d/mach.bash-completion
+
+Zsh
+~~~
+
+.. code-block:: shell
+
+ $ mkdir ~/.zfunc
+ $ mach mach-completion zsh -f ~/.zfunc/_mach
+
+then edit ~/.zshrc and add:
+
+.. code-block:: shell
+
+ fpath+=~/.zfunc
+ autoload -U compinit && compinit
+
+You can use any directory of your choosing.
+
+Zsh (oh-my-zsh)
+~~~~~~~~~~~~~~~
+
+.. code-block:: shell
+
+ $ mkdir $ZSH/plugins/mach
+ $ mach mach-completion zsh -f $ZSH/plugins/mach/_mach
+
+then edit ~/.zshrc and add 'mach' to your enabled plugins:
+
+.. code-block:: shell
+
+ plugins=(mach ...)
+
+Zsh (prezto)
+~~~~~~~~~~~~
+
+.. code-block:: shell
+
+ $ mach mach-completion zsh -f ~/.zprezto/modules/completion/external/src/_mach
+
+Fish
+~~~~
+
+.. code-block:: shell
+
+ $ ./mach mach-completion fish -f ~/.config/fish/completions/mach.fish
+
+Fish (homebrew)
+~~~~~~~~~~~~~~~
+
+.. code-block:: shell
+
+ $ ./mach mach-completion fish -f (brew --prefix)/share/fish/vendor_completions.d/mach.fish
+
+
+User Settings
+-------------
+
+Some mach commands can read configuration from a ``machrc`` file. The default
+location for this file is ``~/.mozbuild/machrc`` (you'll need to create it).
+This can also be set to a different location by setting the ``MACHRC``
+environment variable.
+
+For a list of all the available settings, run:
+
+.. code-block:: shell
+
+ $ ./mach settings
+
+The settings file follows the ``ini`` format, e.g:
+
+.. code-block:: ini
+
+ [alias]
+ eslint = lint -l eslint
+
+ [build]
+ telemetry = true
+
+ [try]
+ default = fuzzy
+
+
+.. _bash completion: https://searchfox.org/mozilla-central/source/python/mach/bash-completion.sh
diff --git a/python/mach/docs/windows-usage-outside-mozillabuild.rst b/python/mach/docs/windows-usage-outside-mozillabuild.rst
new file mode 100644
index 0000000000..6a034fd384
--- /dev/null
+++ b/python/mach/docs/windows-usage-outside-mozillabuild.rst
@@ -0,0 +1,124 @@
+==========================================
+Using Mach on Windows Outside MozillaBuild
+==========================================
+
+.. note::
+
+ These docs still require that you've followed the :ref:`Building Firefox On Windows` guide.
+
+`MozillaBuild <https://wiki.mozilla.org/MozillaBuild>`__ is required to build
+Firefox on Windows, because it provides necessary unix-y tools such as ``sh`` and ``awk``.
+
+Traditionally, to interact with Mach and the Firefox Build System, Windows
+developers would have to do so from within the MozillaBuild shell. This could be
+disadvantageous for two main reasons:
+
+1. The MozillaBuild environment is unix-y and based on ``bash``, which may be unfamiliar
+ for developers used to the Windows Command Prompt or Powershell.
+2. There have been long-standing stability issues with MozillaBuild - this is due to
+ the fragile interface point between the underlying "MSYS" tools and "native Windows"
+ binaries.
+
+It is now (experimentally!) possible to invoke Mach directly from other command line
+environments, such as Powershell, Command Prompt, or even a developer-managed MSYS2
+environment. Windows Terminal should work as well, for those on the "cutting edge".
+
+.. note::
+
+ If you're using a Cygwin-based environment such as MSYS2, it'll probably be
+ best to use the Windows-native version of Python (as described below) instead of a Python
+ distribution provided by the environment's package manager. Otherwise you'll likely run into
+ compatibility issues:
+
+ * Cygwin/MSYS Python will run into compatibility issues with Mach due to its unexpected Unix-y
+ conventions despite Mach assuming it's on a "Windows" platform. Additionally, there may
+ be performance issues.
+ * MinGW Python will encounter issues building native packages because they'll expect the
+ MSVC toolchain.
+
+.. warning::
+
+ This is only recommended for more advanced Windows developers: this work is experimental
+ and may run into unexpected failures!
+
+Following are steps for preparing Windows-native (Command Prompt/Powershell) usage of Mach:
+
+1. Install Python
+~~~~~~~~~~~~~~~~~
+
+Download Python from the `the official website <https://www.python.org/downloads/windows/>`__.
+
+.. note::
+
+ To avoid Mach compatibility issues with recent Python releases, it's recommended to install
+ the 2nd-most recent "major version". For example, at time of writing, the current modern Python
+ version is 3.10.1, so a safe version to install would be the most recent 3.9 release.
+
+You'll want to download the "Windows installer (64-bit)" associated with the release you've chosen.
+During installation, ensure that you check the "Add Python 3.x to PATH" option, otherwise you might
+`encounter issues running Mercurial <https://bz.mercurial-scm.org/show_bug.cgi?id=6635>`__.
+
+.. note::
+
+ Due to issues with Python DLL import failures with pip-installed binaries, it's not
+ recommended to use the Windows Store release of Python.
+
+2. Modify your PATH
+~~~~~~~~~~~~~~~~~~~
+
+The Python "user site-packages directory" needs to be added to your ``PATH`` so that packages
+installed via ``pip install --user`` (such as ``hg``) can be invoked from the command-line.
+
+1. From the Start menu, go to the Control Panel entry for "Edit environment variables
+ for your account".
+2. Double-click the ``Path`` row in the top list of variables. Click "New" to add a new item to
+ the list.
+3. In a Command Prompt window, resolve the Python directory with the command
+ ``python -c "import site; import os; print(os.path.abspath(os.path.join(site.getusersitepackages(), '..', 'Scripts')))"``.
+4. Paste the output into the new item entry in the "Edit environment variable" window.
+5. Click "New" again, and add the ``bin`` folder of MozillaBuild: probably ``C:\mozilla-build\bin``.
+6. Click "OK".
+
+3. Install Version Control System
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If you're using Mercurial, you'll need to install it to your Windows-native Python:
+
+.. code-block:: shell
+
+ pip3 install --user mercurial windows-curses
+
+If you're using Git with Cinnabar, follow its `setup instructions <https://github.com/glandium/git-cinnabar#setup>`__.
+
+4. Set Powershell Execution Policy
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If you're using Powershell, Windows will raise an error by default when you try to invoke
+``.\mach.ps1``:
+
+.. code::
+
+ .\mach : File <topsrcdir>\mach.ps1 cannot be loaded because running scripts is disabled on this system. For
+ more information, see about_Execution_Policies at https:/go.microsoft.com/fwlink/?LinkID=135170.
+ At line:1 char:1
+
+To work around this:
+
+1. From the Start menu, type in "Powershell", then right-click on the best match and click
+ "Run as administrator"
+2. Run the command ``Set-ExecutionPolicy RemoteSigned``
+3. Close the Administrator Powershell window, and open a regular Powershell window
+4. Go to your Firefox checkout (likely ``C:\mozilla-source\mozilla-unified``)
+5. Test the new execution policy by running ``.\mach bootstrap``. If it doesn't immediately fail
+ with the error about "Execution Policies", then the problem is resolved.
+
+Success!
+~~~~~~~~
+
+At this point, you should be able to invoke Mach and manage your version control system outside
+of MozillaBuild.
+
+.. tip::
+
+ `See here <https://crisal.io/words/2022/11/22/msys2-firefox-development.html>`__ for a detailed guide on
+ installing and customizing a development environment with MSYS2, zsh, and Windows Terminal.
diff --git a/python/mach/mach/__init__.py b/python/mach/mach/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mach/mach/__init__.py
diff --git a/python/mach/mach/base.py b/python/mach/mach/base.py
new file mode 100644
index 0000000000..fac17e9b03
--- /dev/null
+++ b/python/mach/mach/base.py
@@ -0,0 +1,73 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+class CommandContext(object):
+ """Holds run-time state so it can easily be passed to command providers."""
+
+ def __init__(
+ self, cwd: str, settings=None, log_manager=None, commands=None, **kwargs
+ ):
+ self.cwd = cwd
+ self.settings = settings
+ self.log_manager = log_manager
+ self.commands = commands
+ self.is_interactive = None # Filled in after args are parsed
+ self.telemetry = None # Filled in after args are parsed
+ self.command_attrs = {}
+
+ for k, v in kwargs.items():
+ setattr(self, k, v)
+
+
+class MachError(Exception):
+ """Base class for all errors raised by mach itself."""
+
+
+class NoCommandError(MachError):
+ """No command was passed into mach."""
+
+ def __init__(self, namespace):
+ MachError.__init__(self)
+ self.namespace = namespace
+
+
+class UnknownCommandError(MachError):
+ """Raised when we attempted to execute an unknown command."""
+
+ def __init__(self, command, verb, suggested_commands=None):
+ MachError.__init__(self)
+
+ self.command = command
+ self.verb = verb
+ self.suggested_commands = suggested_commands or []
+
+
+class UnrecognizedArgumentError(MachError):
+ """Raised when an unknown argument is passed to mach."""
+
+ def __init__(self, command, arguments):
+ MachError.__init__(self)
+
+ self.command = command
+ self.arguments = arguments
+
+
+class FailedCommandError(Exception):
+ """Raised by commands to signal a handled failure to be printed by mach
+
+ When caught by mach a FailedCommandError will print message and exit
+    with ``exit_code``. The optional ``reason`` is a string in cases where
+ other scripts may wish to handle the exception, though this is generally
+ intended to communicate failure to mach.
+ """
+
+ def __init__(self, message, exit_code=1, reason=""):
+ Exception.__init__(self, message)
+ self.exit_code = exit_code
+ self.reason = reason
+
+
+class MissingFileError(MachError):
+ """Attempted to load a mach commands file that doesn't exist."""
diff --git a/python/mach/mach/commands/__init__.py b/python/mach/mach/commands/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mach/mach/commands/__init__.py
diff --git a/python/mach/mach/commands/commandinfo.py b/python/mach/mach/commands/commandinfo.py
new file mode 100644
index 0000000000..12c4b240ea
--- /dev/null
+++ b/python/mach/mach/commands/commandinfo.py
@@ -0,0 +1,487 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import re
+import subprocess
+import sys
+from itertools import chain
+from pathlib import Path
+
+import attr
+from mozbuild.util import memoize
+
+from mach.decorators import Command, CommandArgument, SubCommand
+
+COMPLETION_TEMPLATES_DIR = Path(__file__).resolve().parent / "completion_templates"
+
+
+@attr.s
+class CommandInfo(object):
+ name = attr.ib(type=str)
+ description = attr.ib(type=str)
+ subcommands = attr.ib(type=list)
+ options = attr.ib(type=dict)
+ subcommand = attr.ib(type=str, default=None)
+
+
+def render_template(shell, context):
+ filename = "{}.template".format(shell)
+ with open(COMPLETION_TEMPLATES_DIR / filename) as fh:
+ template = fh.read()
+ return template % context
+
+
+@memoize
+def command_handlers(command_context):
+ """A dictionary of command handlers keyed by command name."""
+ return command_context._mach_context.commands.command_handlers
+
+
+@memoize
+def commands(command_context):
+ """A sorted list of all command names."""
+ return sorted(command_handlers(command_context))
+
+
+def _get_parser_options(parser):
+ options = {}
+ for action in parser._actions:
+ # ignore positional args
+ if not action.option_strings:
+ continue
+
+ # ignore suppressed args
+ if action.help == argparse.SUPPRESS:
+ continue
+
+ options[tuple(action.option_strings)] = action.help or ""
+ return options
+
+
+@memoize
+def global_options(command_context):
+ """Return a dict of global options.
+
+ Of the form `{("-o", "--option"): "description"}`.
+ """
+ for group in command_context._mach_context.global_parser._action_groups:
+ if group.title == "Global Arguments":
+ return _get_parser_options(group)
+
+
+@memoize
+def _get_handler_options(handler):
+ """Return a dict of options for the given handler.
+
+ Of the form `{("-o", "--option"): "description"}`.
+ """
+ options = {}
+ for option_strings, val in handler.arguments:
+ # ignore positional args
+ if option_strings[0][0] != "-":
+ continue
+
+ options[tuple(option_strings)] = val.get("help", "")
+
+ if handler._parser:
+ options.update(_get_parser_options(handler.parser))
+
+ return options
+
+
+def _get_handler_info(handler):
+ try:
+ options = _get_handler_options(handler)
+ except (Exception, SystemExit):
+ # We don't want misbehaving commands to break tab completion,
+ # ignore any exceptions.
+ options = {}
+
+ subcommands = []
+ for sub in sorted(handler.subcommand_handlers):
+ subcommands.append(_get_handler_info(handler.subcommand_handlers[sub]))
+
+ return CommandInfo(
+ name=handler.name,
+ description=handler.description or "",
+ options=options,
+ subcommands=subcommands,
+ subcommand=handler.subcommand,
+ )
+
+
+@memoize
+def commands_info(command_context):
+ """Return a list of CommandInfo objects for each command."""
+ commands_info = []
+ # Loop over self.commands() rather than self.command_handlers().items() for
+ # alphabetical order.
+ for c in commands(command_context):
+ commands_info.append(_get_handler_info(command_handlers(command_context)[c]))
+ return commands_info
+
+
+@Command("mach-commands", category="misc", description="List all mach commands.")
+def run_commands(command_context):
+ print("\n".join(commands(command_context)))
+
+
+@Command(
+ "mach-debug-commands",
+ category="misc",
+ description="Show info about available mach commands.",
+)
+@CommandArgument(
+ "match",
+ metavar="MATCH",
+ default=None,
+ nargs="?",
+ help="Only display commands containing given substring.",
+)
+def run_debug_commands(command_context, match=None):
+ import inspect
+
+ for command, handler in command_handlers(command_context).items():
+ if match and match not in command:
+ continue
+
+ func = handler.func
+
+ print(command)
+ print("=" * len(command))
+ print("")
+ print("File: %s" % inspect.getsourcefile(func))
+ print("Function: %s" % func.__name__)
+ print("")
+
+
+@Command(
+ "mach-completion",
+ category="misc",
+ description="Prints a list of completion strings for the specified command.",
+)
+@CommandArgument(
+ "args", default=None, nargs=argparse.REMAINDER, help="Command to complete."
+)
+def run_completion(command_context, args):
+ if not args:
+ print("\n".join(commands(command_context)))
+ return
+
+ is_help = "help" in args
+ command = None
+ for i, arg in enumerate(args):
+ if arg in commands(command_context):
+ command = arg
+ args = args[i + 1 :]
+ break
+
+ # If no command is typed yet, just offer the commands.
+ if not command:
+ print("\n".join(commands(command_context)))
+ return
+
+ handler = command_handlers(command_context)[command]
+ # If a subcommand was typed, update the handler.
+ for arg in args:
+ if arg in handler.subcommand_handlers:
+ handler = handler.subcommand_handlers[arg]
+ break
+
+ targets = sorted(handler.subcommand_handlers.keys())
+ if is_help:
+ print("\n".join(targets))
+ return
+
+ targets.append("help")
+ targets.extend(chain(*_get_handler_options(handler).keys()))
+ print("\n".join(targets))
+
+
+def _zsh_describe(value, description=None):
+ value = '"' + value.replace(":", "\\:")
+ if description:
+ description = subprocess.list2cmdline(
+ [re.sub(r'(["\'#&;`|*?~<>^()\[\]{}$\\\x0A\xFF])', r"\\\1", description)]
+ ).lstrip('"')
+
+ if description.endswith('"') and not description.endswith(r"\""):
+ description = description[:-1]
+
+ value += ":{}".format(description)
+
+ value += '"'
+
+ return value
+
+
+@SubCommand(
+ "mach-completion",
+ "bash",
+ description="Print mach completion script for bash shell",
+)
+@CommandArgument(
+ "-f",
+ "--file",
+ dest="outfile",
+ default=None,
+ help="File path to save completion script.",
+)
+def completion_bash(command_context, outfile):
+ commands_subcommands = []
+ case_options = []
+ case_subcommands = []
+ for i, cmd in enumerate(commands_info(command_context)):
+ # Build case statement for options.
+ options = []
+ for opt_strs, description in cmd.options.items():
+ for opt in opt_strs:
+ options.append(_zsh_describe(opt, None).strip('"'))
+
+ if options:
+ case_options.append(
+ "\n".join(
+ [
+ " ({})".format(cmd.name),
+ ' opts="${{opts}} {}"'.format(" ".join(options)),
+ " ;;",
+ "",
+ ]
+ )
+ )
+
+ # Build case statement for subcommand options.
+ for sub in cmd.subcommands:
+ options = []
+ for opt_strs, description in sub.options.items():
+ for opt in opt_strs:
+ options.append(_zsh_describe(opt, None))
+
+ if options:
+ case_options.append(
+ "\n".join(
+ [
+ ' ("{} {}")'.format(sub.name, sub.subcommand),
+ ' opts="${{opts}} {}"'.format(" ".join(options)),
+ " ;;",
+ "",
+ ]
+ )
+ )
+
+ # Build case statement for subcommands.
+ subcommands = [_zsh_describe(s.subcommand, None) for s in cmd.subcommands]
+ if subcommands:
+ commands_subcommands.append(
+ '[{}]=" {} "'.format(
+ cmd.name, " ".join([h.subcommand for h in cmd.subcommands])
+ )
+ )
+
+ case_subcommands.append(
+ "\n".join(
+ [
+ " ({})".format(cmd.name),
+ ' subs="${{subs}} {}"'.format(" ".join(subcommands)),
+ " ;;",
+ "",
+ ]
+ )
+ )
+
+ globalopts = [
+ opt for opt_strs in global_options(command_context) for opt in opt_strs
+ ]
+ context = {
+ "case_options": "\n".join(case_options),
+ "case_subcommands": "\n".join(case_subcommands),
+ "commands": " ".join(commands(command_context)),
+ "commands_subcommands": " ".join(sorted(commands_subcommands)),
+ "globalopts": " ".join(sorted(globalopts)),
+ }
+
+ outfile = open(outfile, "w") if outfile else sys.stdout
+ print(render_template("bash", context), file=outfile)
+
+
+@SubCommand(
+ "mach-completion",
+ "zsh",
+ description="Print mach completion script for zsh shell",
+)
+@CommandArgument(
+ "-f",
+ "--file",
+ dest="outfile",
+ default=None,
+ help="File path to save completion script.",
+)
+def completion_zsh(command_context, outfile):
+ commands_descriptions = []
+ commands_subcommands = []
+ case_options = []
+ case_subcommands = []
+ for i, cmd in enumerate(commands_info(command_context)):
+ commands_descriptions.append(_zsh_describe(cmd.name, cmd.description))
+
+ # Build case statement for options.
+ options = []
+ for opt_strs, description in cmd.options.items():
+ for opt in opt_strs:
+ options.append(_zsh_describe(opt, description))
+
+ if options:
+ case_options.append(
+ "\n".join(
+ [
+ " ({})".format(cmd.name),
+ " opts+=({})".format(" ".join(options)),
+ " ;;",
+ "",
+ ]
+ )
+ )
+
+ # Build case statement for subcommand options.
+ for sub in cmd.subcommands:
+ options = []
+ for opt_strs, description in sub.options.items():
+ for opt in opt_strs:
+ options.append(_zsh_describe(opt, description))
+
+ if options:
+ case_options.append(
+ "\n".join(
+ [
+ " ({} {})".format(sub.name, sub.subcommand),
+ " opts+=({})".format(" ".join(options)),
+ " ;;",
+ "",
+ ]
+ )
+ )
+
+ # Build case statement for subcommands.
+ subcommands = [
+ _zsh_describe(s.subcommand, s.description) for s in cmd.subcommands
+ ]
+ if subcommands:
+ commands_subcommands.append(
+ '[{}]=" {} "'.format(
+ cmd.name, " ".join([h.subcommand for h in cmd.subcommands])
+ )
+ )
+
+ case_subcommands.append(
+ "\n".join(
+ [
+ " ({})".format(cmd.name),
+ " subs+=({})".format(" ".join(subcommands)),
+ " ;;",
+ "",
+ ]
+ )
+ )
+
+ globalopts = []
+ for opt_strings, description in global_options(command_context).items():
+ for opt in opt_strings:
+ globalopts.append(_zsh_describe(opt, description))
+
+ context = {
+ "case_options": "\n".join(case_options),
+ "case_subcommands": "\n".join(case_subcommands),
+ "commands": " ".join(sorted(commands_descriptions)),
+ "commands_subcommands": " ".join(sorted(commands_subcommands)),
+ "globalopts": " ".join(sorted(globalopts)),
+ }
+
+ outfile = open(outfile, "w") if outfile else sys.stdout
+ print(render_template("zsh", context), file=outfile)
+
+
+@SubCommand(
+ "mach-completion",
+ "fish",
+ description="Print mach completion script for fish shell",
+)
+@CommandArgument(
+ "-f",
+ "--file",
+ dest="outfile",
+ default=None,
+ help="File path to save completion script.",
+)
+def completion_fish(command_context, outfile):
+ def _append_opt_strs(comp, opt_strs):
+ for opt in opt_strs:
+ if opt.startswith("--"):
+ comp += " -l {}".format(opt[2:])
+ elif opt.startswith("-"):
+ comp += " -s {}".format(opt[1:])
+ return comp
+
+ globalopts = []
+ for opt_strs, description in global_options(command_context).items():
+ comp = (
+ "complete -c mach -n '__fish_mach_complete_no_command' "
+ "-d '{}'".format(description.replace("'", "\\'"))
+ )
+ comp = _append_opt_strs(comp, opt_strs)
+ globalopts.append(comp)
+
+ cmds = []
+ cmds_opts = []
+ for i, cmd in enumerate(commands_info(command_context)):
+ cmds.append(
+ "complete -c mach -f -n '__fish_mach_complete_no_command' "
+ "-a {} -d '{}'".format(cmd.name, cmd.description.replace("'", "\\'"))
+ )
+
+ cmds_opts += ["# {}".format(cmd.name)]
+
+ subcommands = " ".join([s.subcommand for s in cmd.subcommands])
+ for opt_strs, description in cmd.options.items():
+ comp = (
+ "complete -c mach -A -n '__fish_mach_complete_command {} {}' "
+ "-d '{}'".format(cmd.name, subcommands, description.replace("'", "\\'"))
+ )
+ comp = _append_opt_strs(comp, opt_strs)
+ cmds_opts.append(comp)
+
+ for sub in cmd.subcommands:
+
+ for opt_strs, description in sub.options.items():
+ comp = (
+ "complete -c mach -A -n '__fish_mach_complete_subcommand {} {}' "
+ "-d '{}'".format(
+ sub.name, sub.subcommand, description.replace("'", "\\'")
+ )
+ )
+ comp = _append_opt_strs(comp, opt_strs)
+ cmds_opts.append(comp)
+
+ description = sub.description or ""
+ description = description.replace("'", "\\'")
+ comp = (
+ "complete -c mach -A -n '__fish_mach_complete_command {} {}' "
+ "-d '{}' -a {}".format(
+ cmd.name, subcommands, description, sub.subcommand
+ )
+ )
+ cmds_opts.append(comp)
+
+ if i < len(commands(command_context)) - 1:
+ cmds_opts.append("")
+
+ context = {
+ "commands": " ".join(commands(command_context)),
+ "command_completions": "\n".join(cmds),
+ "command_option_completions": "\n".join(cmds_opts),
+ "global_option_completions": "\n".join(globalopts),
+ }
+
+ outfile = open(outfile, "w") if outfile else sys.stdout
+ print(render_template("fish", context), file=outfile)
diff --git a/python/mach/mach/commands/completion_templates/bash.template b/python/mach/mach/commands/completion_templates/bash.template
new file mode 100644
index 0000000000..5372308702
--- /dev/null
+++ b/python/mach/mach/commands/completion_templates/bash.template
@@ -0,0 +1,62 @@
+_mach_complete()
+{
+ local com coms comsubs cur opts script sub subs
+ COMPREPLY=()
+ declare -A comsubs=( %(commands_subcommands)s )
+
+ _get_comp_words_by_ref -n : cur words
+ # for an alias, get the real script behind it
+ if [[ $(type -t ${words[0]}) == "alias" ]]; then
+ script=$(alias ${words[0]} | sed -E "s/alias ${words[0]}='(.*)'/\\1/")
+ else
+ script=${words[0]}
+ fi
+ # lookup for command and subcommand
+ for word in ${words[@]:1}; do
+ if [[ $word == -* ]]; then
+ continue
+ fi
+
+ if [[ -z $com ]]; then
+ com=$word
+ elif [[ "${comsubs[$com]}" == *" $word "* ]]; then
+ sub=$word
+ break
+ fi
+ done
+ # completing for an option
+ if [[ ${cur} == -* ]] ; then
+ if [[ -n $com ]]; then
+ if [[ -n $sub ]]; then
+ optkey="$com $sub"
+ else
+ optkey="$com"
+ fi
+ case $optkey in
+%(case_options)s
+ esac
+ else
+ # no command, complete global options
+ opts="%(globalopts)s"
+ fi
+ COMPREPLY=($(compgen -W "${opts}" -- ${cur}))
+ __ltrim_colon_completions "$cur"
+ return 0;
+ # completing for a command
+ elif [[ $cur == $com ]]; then
+ coms="%(commands)s"
+ COMPREPLY=($(compgen -W "${coms}" -- ${cur}))
+ __ltrim_colon_completions "$cur"
+ return 0
+ else
+ if [[ -z $sub ]]; then
+ case "$com" in
+%(case_subcommands)s
+ esac
+ COMPREPLY=($(compgen -W "${subs}" -- ${cur}))
+ __ltrim_colon_completions "$cur"
+ fi
+ return 0
+ fi
+}
+complete -o default -F _mach_complete mach
diff --git a/python/mach/mach/commands/completion_templates/fish.template b/python/mach/mach/commands/completion_templates/fish.template
new file mode 100644
index 0000000000..8373ee4080
--- /dev/null
+++ b/python/mach/mach/commands/completion_templates/fish.template
@@ -0,0 +1,64 @@
+function __fish_mach_complete_no_command
+ for i in (commandline -opc)
+ if contains -- $i %(commands)s
+ return 1
+ end
+ end
+ return 0
+end
+
+function __fish_mach_complete_command_matches
+ for i in (commandline -opc)
+ if contains -- $i %(commands)s
+ set com $i
+ break
+ end
+ end
+
+ if not set -q com
+ return 1
+ end
+
+ if test "$com" != "$argv"
+ return 1
+ end
+ return 0
+end
+
+function __fish_mach_complete_command
+ __fish_mach_complete_command_matches $argv[1]
+ if test $status -ne 0
+ return 1
+ end
+
+ # If a subcommand is already entered, don't complete, we should defer to
+ # '__fish_mach_complete_subcommand'.
+ for i in (commandline -opc)
+ if contains -- $i $argv[2..-1]
+ return 1
+ end
+ end
+ return 0
+end
+
+function __fish_mach_complete_subcommand
+ __fish_mach_complete_command_matches $argv[1]
+ if test $status -ne 0
+ return 1
+ end
+
+ # Command matches, now check for subcommand
+ for i in (commandline -opc)
+ if contains -- $i $argv[2]
+ return 0
+ end
+ end
+ return 1
+end
+
+# global options
+%(global_option_completions)s
+# commands
+%(command_completions)s
+# command options
+%(command_option_completions)s
diff --git a/python/mach/mach/commands/completion_templates/zsh.template b/python/mach/mach/commands/completion_templates/zsh.template
new file mode 100644
index 0000000000..21677841ef
--- /dev/null
+++ b/python/mach/mach/commands/completion_templates/zsh.template
@@ -0,0 +1,62 @@
+#compdef mach
+_mach_complete()
+{
+ local com coms comsubs cur optkey opts state sub subs
+ cur=${words[${#words[@]}]}
+ typeset -A comsubs
+ comsubs=( %(commands_subcommands)s )
+
+ # lookup for command and subcommand
+ for word in ${words[@]:1}; do
+ if [[ $word == -* ]]; then
+ continue
+ fi
+
+ if [[ -z $com ]]; then
+ com=$word
+ elif [[ ${comsubs[$com]} == *" $word "* ]]; then
+ sub=$word
+ break
+ fi
+ done
+
+ # check for a subcommand
+ if [[ $cur == $com ]]; then
+ state="command"
+ coms=(%(commands)s)
+ elif [[ ${cur} == -* ]]; then
+ state="option"
+ if [[ -z $com ]]; then
+ # no command, use global options
+ opts=(%(globalopts)s)
+ fi
+ fi
+ case $state in
+ (command)
+ _describe 'command' coms
+ ;;
+ (option)
+ if [[ -n $sub ]]; then
+ optkey="$com $sub"
+ else
+ optkey="$com"
+ fi
+ case $optkey in
+%(case_options)s
+ esac
+ _describe 'option' opts
+ ;;
+ *)
+ if [[ -z $sub ]]; then
+ # if we're completing a command with subcommands, add them here
+ case "$com" in
+%(case_subcommands)s
+ esac
+ _describe 'subcommand' subs
+ fi
+ # also fallback to file completion
+ _arguments '*:file:_files'
+ esac
+}
+_mach_complete "$@"
+compdef _mach_complete mach
diff --git a/python/mach/mach/commands/settings.py b/python/mach/mach/commands/settings.py
new file mode 100644
index 0000000000..8e168a3921
--- /dev/null
+++ b/python/mach/mach/commands/settings.py
@@ -0,0 +1,51 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from textwrap import TextWrapper
+
+from mach.config import TYPE_CLASSES
+from mach.decorators import Command, CommandArgument
+
+
+# Interact with settings for mach.
+
+# Currently, we only provide functionality to view what settings are
+# available. In the future, this module will be used to modify settings, help
+# people create configs via a wizard, etc.
+
+
+@Command("settings", category="devenv", description="Show available config settings.")
+@CommandArgument(
+ "-l",
+ "--list",
+ dest="short",
+ action="store_true",
+ help="Show settings in a concise list",
+)
+def run_settings(command_context, short=None):
+ """List available settings."""
+ types = {v: k for k, v in TYPE_CLASSES.items()}
+ wrapper = TextWrapper(initial_indent="# ", subsequent_indent="# ")
+ for i, section in enumerate(sorted(command_context._mach_context.settings)):
+ if not short:
+ print("%s[%s]" % ("" if i == 0 else "\n", section))
+
+ for option in sorted(command_context._mach_context.settings[section]._settings):
+ meta = command_context._mach_context.settings[section].get_meta(option)
+ desc = meta["description"]
+
+ if short:
+ print("%s.%s -- %s" % (section, option, desc.splitlines()[0]))
+ continue
+
+ if option == "*":
+ option = "<option>"
+
+ if "choices" in meta:
+ value = "{%s}" % ", ".join(meta["choices"])
+ else:
+ value = "<%s>" % types[meta["type_cls"]]
+
+ print(wrapper.fill(desc))
+ print(";%s=%s" % (option, value))
diff --git a/python/mach/mach/config.py b/python/mach/mach/config.py
new file mode 100644
index 0000000000..5428a9edad
--- /dev/null
+++ b/python/mach/mach/config.py
@@ -0,0 +1,415 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+r"""
+This file defines classes for representing config data/settings.
+
+Config data is modeled as key-value pairs. Keys are grouped together into named
+sections. Individual config settings (options) have metadata associated with
+them. This metadata includes type, default value, valid values, etc.
+
+The main interface to config data is the ConfigSettings class. 1 or more
+ConfigProvider classes are associated with ConfigSettings and define what
+settings are available.
+"""
+
+import collections
+import collections.abc
+import sys
+from functools import wraps
+from pathlib import Path
+from typing import List, Union
+
+import six
+from six import string_types
+from six.moves.configparser import NoSectionError, RawConfigParser
+
+
class ConfigException(Exception):
    """Raised for invalid configuration operations, e.g. registering a
    provider after finalization or registering a duplicate setting."""

    pass
+
+
class ConfigType(object):
    """Abstract base class for config values."""

    @staticmethod
    def validate(value):
        """Validates a Python value conforms to this type.

        Raises a TypeError or ValueError if it doesn't conform. Does not do
        anything if the value is valid.
        """

    @staticmethod
    def from_config(config, section, option):
        """Obtain the value of this type from a RawConfigParser.

        Receives a RawConfigParser instance, a str section name, and the str
        option in that section to retrieve.

        The implementation may assume the option exists in the RawConfigParser
        instance.

        Implementations are not expected to validate the value. But, they
        should return the appropriate Python type.
        """

    @staticmethod
    def to_config(value):
        """Convert a Python value to the string form stored in config files.

        The base implementation returns the value unchanged; subclasses
        override it when the stored representation differs (e.g. booleans).
        """
        return value


class StringType(ConfigType):
    @staticmethod
    def validate(value):
        # This module only runs on Python 3 (it relies on pathlib/typing and
        # collections.abc), so a plain str check replaces the old
        # six.string_types one.
        if not isinstance(value, str):
            raise TypeError("expected a string, got %r" % type(value).__name__)

    @staticmethod
    def from_config(config, section, option):
        return config.get(section, option)


class BooleanType(ConfigType):
    @staticmethod
    def validate(value):
        if not isinstance(value, bool):
            raise TypeError("expected a bool, got %r" % type(value).__name__)

    @staticmethod
    def from_config(config, section, option):
        return config.getboolean(section, option)

    @staticmethod
    def to_config(value):
        # RawConfigParser.getboolean() accepts these lowercase spellings.
        return "true" if value else "false"


class IntegerType(ConfigType):
    @staticmethod
    def validate(value):
        if not isinstance(value, int):
            raise TypeError("expected an int, got %r" % type(value).__name__)

    @staticmethod
    def from_config(config, section, option):
        return config.getint(section, option)


class PositiveIntegerType(IntegerType):
    @staticmethod
    def validate(value):
        if not isinstance(value, int):
            raise TypeError("expected an int, got %r" % type(value).__name__)

        # NOTE: zero is accepted despite the name; only negative values are
        # rejected, matching how existing settings use this type.
        if value < 0:
            raise ValueError("value must not be negative: %d" % value)


class PathType(StringType):
    """Paths are stored and validated exactly like strings; the distinct
    class lets settings advertise a more precise type name ("path")."""

    @staticmethod
    def validate(value):
        if not isinstance(value, str):
            raise TypeError("expected a path string, got %r" % type(value).__name__)

    @staticmethod
    def from_config(config, section, option):
        return config.get(section, option)


# Maps the short type names used in setting declarations to their classes.
TYPE_CLASSES = {
    "string": StringType,
    "boolean": BooleanType,
    "int": IntegerType,
    "pos_int": PositiveIntegerType,
    "path": PathType,
}
+
+
class DefaultValue(object):
    """Sentinel distinguishing "no default registered" from a default of
    None. The class object itself is used as the marker value in setting
    metadata and section lookups."""

    pass
+
+
def reraise_attribute_error(func):
    """Used to make sure __getattr__ wrappers around __getitem__
    raise AttributeError instead of KeyError.

    The previous implementation handed the caught KeyError instance to
    ``six.reraise()``, which re-raises the instance it is given (the ``tp``
    argument is only consulted when ``value`` is None) - so callers still
    saw a KeyError and AttributeError-based protocols (``hasattr``,
    ``getattr`` with a default) misbehaved. Raising a fresh AttributeError
    chained to the KeyError fixes that and removes the six dependency.
    """

    @wraps(func)
    def _(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except KeyError as e:
            # Preserve the message and keep the original exception as the
            # cause for debugging.
            raise AttributeError(str(e)) from e

    return _
+
+
class ConfigSettings(collections.abc.Mapping):
    """Interface for configuration settings.

    This is the main interface to the configuration.

    A configuration is a collection of sections. Each section contains
    key-value pairs.

    When an instance is created, the caller first registers ConfigProvider
    instances with it. This tells the ConfigSettings what individual settings
    are available and defines extra metadata associated with those settings.
    This is used for validation, etc.

    Once ConfigProvider instances are registered, a config is populated. It can
    be loaded from files or populated by hand.

    ConfigSettings instances are accessed like dictionaries or by using
    attributes. e.g. the section "foo" is accessed through either
    settings.foo or settings['foo'].

    Sections are modeled by the ConfigSection class which is defined inside
    this one. They look just like dicts or classes with attributes. To access
    the "bar" option in the "foo" section:

        value = settings.foo.bar
        value = settings['foo']['bar']
        value = settings.foo['bar']

    Assignment is similar:

        settings.foo.bar = value
        settings['foo']['bar'] = value
        settings['foo'].bar = value

    You can even delete user-assigned values:

        del settings.foo.bar
        del settings['foo']['bar']

    If there is a default, it will be returned.

    When settings are mutated, they are validated against the registered
    providers. Setting unknown settings or setting values to illegal values
    will result in exceptions being raised.
    """

    class ConfigSection(collections.abc.MutableMapping, object):
        """Represents an individual config section."""

        def __init__(self, config, name, settings):
            # Write through object.__setattr__ so our own __setattr__
            # (which writes into the config) isn't triggered.
            object.__setattr__(self, "_config", config)
            object.__setattr__(self, "_name", name)
            object.__setattr__(self, "_settings", settings)

            wildcard = any(s == "*" for s in self._settings)
            object.__setattr__(self, "_wildcard", wildcard)

        @property
        def options(self):
            try:
                return self._config.options(self._name)
            except NoSectionError:
                return []

        def get_meta(self, option):
            """Return the metadata dict for ``option``, falling back to the
            wildcard ("*") registration when present."""
            if option in self._settings:
                return self._settings[option]
            if self._wildcard:
                return self._settings["*"]
            raise KeyError("Option not registered with provider: %s" % option)

        def _validate(self, option, value):
            meta = self.get_meta(option)
            meta["type_cls"].validate(value)

            if "choices" in meta and value not in meta["choices"]:
                raise ValueError(
                    "Value '%s' must be one of: %s"
                    % (value, ", ".join(sorted(meta["choices"])))
                )

        # MutableMapping interface
        def __len__(self):
            return len(self.options)

        def __iter__(self):
            return iter(self.options)

        def __contains__(self, k):
            return self._config.has_option(self._name, k)

        def __getitem__(self, k):
            meta = self.get_meta(k)

            if self._config.has_option(self._name, k):
                v = meta["type_cls"].from_config(self._config, self._name, k)
            else:
                v = meta.get("default", DefaultValue)

            # Identity check: DefaultValue is a sentinel, and an equality
            # comparison could be fooled by a value with a custom __eq__.
            if v is DefaultValue:
                raise KeyError("No default value registered: %s" % k)

            self._validate(k, v)
            return v

        def __setitem__(self, k, v):
            self._validate(k, v)
            meta = self.get_meta(k)

            if not self._config.has_section(self._name):
                self._config.add_section(self._name)

            self._config.set(self._name, k, meta["type_cls"].to_config(v))

        def __delitem__(self, k):
            self._config.remove_option(self._name, k)

            # Prune empty sections.
            if not len(self._config.options(self._name)):
                self._config.remove_section(self._name)

        @reraise_attribute_error
        def __getattr__(self, k):
            return self.__getitem__(k)

        @reraise_attribute_error
        def __setattr__(self, k, v):
            self.__setitem__(k, v)

        @reraise_attribute_error
        def __delattr__(self, k):
            self.__delitem__(k)

    def __init__(self):
        self._config = RawConfigParser()
        # Keep option names case-sensitive; the parser default lower-cases.
        self._config.optionxform = str

        self._settings = {}
        self._sections = {}
        self._finalized = False

    def load_file(self, filename: Union[str, Path]):
        """Load a config from a single file path."""
        self.load_files([Path(filename)])

    def load_files(self, filenames: List[Path]):
        """Load a config from files specified by their paths.

        Files are loaded in the order given. Subsequent files will overwrite
        values from previous files. If a file does not exist, it will be
        ignored.
        """
        filtered = [f for f in filenames if f.exists()]

        # NOTE(review): files are opened with the locale-default encoding;
        # confirm whether machrc files should be read as UTF-8 explicitly.
        fps = [open(f, "rt") for f in filtered]
        self.load_fps(fps)
        for fp in fps:
            fp.close()

    def load_fps(self, fps):
        """Load config data by reading file objects."""

        for fp in fps:
            # read_file() is the modern spelling; readfp() was deprecated in
            # Python 3.2 and removed in 3.12.
            self._config.read_file(fp)

    def write(self, fh):
        """Write the config to a file object."""
        self._config.write(fh)

    @classmethod
    def _format_metadata(cls, type_cls, description, default=DefaultValue, extra=None):
        """Formats and returns the metadata for a setting.

        Each setting must have:

          type_cls -- a ConfigType-derived type defining the type of the
          setting, or a str key into TYPE_CLASSES naming one.

          description -- str describing how to use the setting and where it
          applies.

        Each setting has the following optional parameters:

          default -- The default value for the setting. If DefaultValue (the
          default) there is no default.

          extra -- A dict of additional key/value pairs to add to the
          setting metadata.
        """
        if isinstance(type_cls, str):
            type_cls = TYPE_CLASSES[type_cls]

        meta = {"description": description, "type_cls": type_cls}

        # Identity comparison: DefaultValue is a sentinel, and "!=" could
        # invoke an arbitrary __eq__ on the supplied default.
        if default is not DefaultValue:
            meta["default"] = default

        if extra:
            meta.update(extra)

        return meta

    def register_provider(self, provider):
        """Register a SettingsProvider with this settings interface."""

        if self._finalized:
            raise ConfigException("Providers cannot be registered after finalized.")

        settings = provider.config_settings
        if callable(settings):
            settings = settings()

        # First pass: detect duplicates within this provider.
        config_settings = collections.defaultdict(dict)
        for setting in settings:
            section, option = setting[0].split(".")

            if option in config_settings[section]:
                raise ConfigException(
                    "Setting has already been registered: %s.%s" % (section, option)
                )

            meta = self._format_metadata(*setting[1:])
            config_settings[section][option] = meta

        # Second pass: merge into the global registry, detecting duplicates
        # across providers.
        for section_name, settings in config_settings.items():
            section = self._settings.get(section_name, {})

            for k, v in settings.items():
                if k in section:
                    raise ConfigException(
                        "Setting already registered: %s.%s" % (section_name, k)
                    )

                section[k] = v

            self._settings[section_name] = section

    def _finalize(self):
        # Lazily build ConfigSection wrappers once; after this no new
        # providers may register.
        if self._finalized:
            return

        for section, settings in self._settings.items():
            s = ConfigSettings.ConfigSection(self._config, section, settings)
            self._sections[section] = s

        self._finalized = True

    # Mapping interface.
    def __len__(self):
        return len(self._settings)

    def __iter__(self):
        self._finalize()

        return iter(self._sections.keys())

    def __contains__(self, k):
        return k in self._settings

    def __getitem__(self, k):
        self._finalize()

        return self._sections[k]

    # Allow attribute access because it looks nice.
    @reraise_attribute_error
    def __getattr__(self, k):
        return self.__getitem__(k)
diff --git a/python/mach/mach/decorators.py b/python/mach/mach/decorators.py
new file mode 100644
index 0000000000..fe4443e168
--- /dev/null
+++ b/python/mach/mach/decorators.py
@@ -0,0 +1,340 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import collections
+import collections.abc
+from typing import Optional
+
+from mozbuild.base import MachCommandBase
+
+from .base import MachError
+from .registrar import Registrar
+
+
class _MachCommand(object):
    """Container for mach command metadata."""

    __slots__ = (
        # Content from decorator arguments to define the command.
        "name",
        "subcommand",
        "category",
        "description",
        "conditions",
        "_parser",
        "arguments",
        "argument_group_names",
        "virtualenv_name",
        "ok_if_tests_disabled",
        # By default, subcommands will be sorted. If this is set to
        # 'declaration', they will be left in declaration order.
        "order",
        # This is the function or callable that will be called when
        # the command is invoked
        "func",
        # The path to the `metrics.yaml` file that describes data that telemetry will
        # gather for this command. This path is optional.
        "metrics_path",
        # Dict of string to _MachCommand defining sub-commands for this
        # command.
        "subcommand_handlers",
        # For subcommands, the global order that the subcommand's declaration
        # was seen.
        "decl_order",
        # Whether to disable automatic logging to last_log.json for the command.
        "no_auto_log",
    )

    def __init__(
        self,
        name=None,
        subcommand=None,
        category=None,
        description=None,
        conditions=None,
        parser=None,
        order=None,
        virtualenv_name=None,
        ok_if_tests_disabled=False,
        no_auto_log=False,
    ):
        self.name = name
        self.subcommand = subcommand
        self.category = category
        self.description = description
        self.conditions = conditions or []
        self._parser = parser
        self.arguments = []
        self.argument_group_names = []
        self.virtualenv_name = virtualenv_name
        self.order = order
        # This flag only makes sense on test-running commands.
        if ok_if_tests_disabled and category != "testing":
            raise ValueError(
                "ok_if_tests_disabled should only be set for " "`testing` mach commands"
            )
        self.ok_if_tests_disabled = ok_if_tests_disabled

        # These are filled in later by the decorators and the registrar.
        self.func = None
        self.metrics_path = None
        self.subcommand_handlers = {}
        self.decl_order = None
        self.no_auto_log = no_auto_log

    def create_instance(self, context, virtualenv_name):
        """Instantiate a fresh MachCommandBase subclass for running this
        command in the given context."""
        metrics = None
        if self.metrics_path:
            metrics = context.telemetry.metrics(self.metrics_path)

        # This ensures the resulting class is defined inside `mach` so that logging
        # works as expected, and has a meaningful name
        subclass = type(self.name, (MachCommandBase,), {})
        return subclass(
            context,
            virtualenv_name=virtualenv_name,
            metrics=metrics,
            no_auto_log=self.no_auto_log,
        )

    @property
    def parser(self):
        # Creating CLI parsers at command dispatch time can be expensive. Make
        # it possible to lazy load them by using functions.
        if callable(self._parser):
            self._parser = self._parser()

        return self._parser

    @property
    def docstring(self):
        # The long-form help text comes from the decorated function's
        # docstring.
        return self.func.__doc__

    def __ior__(self, other):
        """Merge another _MachCommand into this one, filling in every
        attribute of ours that is currently unset (falsy)."""
        if not isinstance(other, _MachCommand):
            raise ValueError("can only operate on _MachCommand instances")

        for a in self.__slots__:
            if not getattr(self, a):
                setattr(self, a, getattr(other, a))

        return self

    def register(self, func):
        """Register the command in the Registrar with the function to be called on invocation."""
        if not self.subcommand:
            # Top-level command.
            if not self.conditions and Registrar.require_conditions:
                return

            msg = (
                "Mach command '%s' implemented incorrectly. "
                + "Conditions argument must take a list "
                + "of functions. Found %s instead."
            )

            if not isinstance(self.conditions, collections.abc.Iterable):
                msg = msg % (self.name, type(self.conditions))
                raise MachError(msg)

            for c in self.conditions:
                if not hasattr(c, "__call__"):
                    msg = msg % (self.name, type(c))
                    raise MachError(msg)

            self.func = func

            Registrar.register_command_handler(self)

        else:
            # Sub-command: attach to the already-registered parent command.
            if self.name not in Registrar.command_handlers:
                raise MachError(
                    "Command referenced by sub-command does not exist: %s" % self.name
                )

            self.func = func
            parent = Registrar.command_handlers[self.name]

            if self.subcommand in parent.subcommand_handlers:
                raise MachError("sub-command already defined: %s" % self.subcommand)

            parent.subcommand_handlers[self.subcommand] = self
+
+
class Command(object):
    """Decorator for functions or methods that provide a mach command.

    The decorator accepts arguments that define basic attributes of the
    command. The following arguments are recognized:

    category -- The string category to which this command belongs. Mach's
        help will group commands by category.

    description -- A brief description of what the command does.

    parser -- an optional argparse.ArgumentParser instance or callable
        that returns an argparse.ArgumentParser instance to use as the
        basis for the command arguments.

    For example:

    .. code-block:: python

        @Command('foo', category='misc', description='Run the foo action')
        def foo(self, command_context):
            pass
    """

    def __init__(self, name, metrics_path: Optional[str] = None, **kwargs):
        # Build the metadata container first; metrics_path is not a
        # _MachCommand constructor argument, so attach it afterwards.
        command = _MachCommand(name=name, **kwargs)
        command.metrics_path = metrics_path
        self._mach_command = command

    def __call__(self, func):
        # Make sure the decorated function carries a metadata container,
        # then fold our attributes into it and register the command.
        if not hasattr(func, "_mach_command"):
            func._mach_command = _MachCommand()

        command = func._mach_command
        command |= self._mach_command
        command.register(func)

        return func
+
+
class SubCommand(object):
    """Decorator for functions or methods that provide a sub-command.

    Mach commands can have sub-commands. e.g. ``mach command foo`` or
    ``mach command bar``. Each sub-command has its own parser and is
    effectively its own mach command.

    The decorator accepts arguments that define basic attributes of the
    sub command:

    command -- The string of the command this sub command should be
        attached to.

    subcommand -- The string name of the sub command to register.

    description -- A textual description for this sub command.
    """

    # Monotonic counter recording declaration order across all sub-commands.
    global_order = 0

    def __init__(
        self,
        command,
        subcommand,
        description=None,
        parser=None,
        metrics_path: Optional[str] = None,
        virtualenv_name: Optional[str] = None,
    ):
        mach_command = _MachCommand(
            name=command,
            subcommand=subcommand,
            description=description,
            parser=parser,
            virtualenv_name=virtualenv_name,
        )
        # Remember where in the overall declaration sequence this
        # sub-command appeared, so "declaration" ordering can be honored.
        mach_command.decl_order = SubCommand.global_order
        SubCommand.global_order += 1

        mach_command.metrics_path = metrics_path
        self._mach_command = mach_command

    def __call__(self, func):
        if not hasattr(func, "_mach_command"):
            func._mach_command = _MachCommand()

        merged = func._mach_command
        merged |= self._mach_command
        merged.register(func)

        return func
+
+
class CommandArgument(object):
    """Decorator for additional arguments to mach subcommands.

    This decorator should be used to add arguments to mach commands. Arguments
    to the decorator are proxied to ArgumentParser.add_argument().

    For example:

    .. code-block:: python

        @Command('foo', help='Run the foo action')
        @CommandArgument('-b', '--bar', action='store_true', default=False,
            help='Enable bar mode.')
        def foo(self, command_context):
            pass
    """

    def __init__(self, *args, **kwargs):
        if kwargs.get("nargs") == argparse.REMAINDER:
            # The dispatcher assumes REMAINDER arguments have exactly one
            # name and only a restricted set of keyword options; enforce
            # that contract at declaration time.
            assert len(args) == 1
            permitted = ("default", "nargs", "help", "group", "metavar")
            assert all(k in permitted for k in kwargs)

        self._command_args = (args, kwargs)

    def __call__(self, func):
        if not hasattr(func, "_mach_command"):
            func._mach_command = _MachCommand()

        # Decorators apply bottom-up, so prepend to keep source order.
        func._mach_command.arguments.insert(0, self._command_args)

        return func
+
+
class CommandArgumentGroup(object):
    """Decorator for additional argument groups to mach commands.

    This decorator should be used to add arguments groups to mach commands.
    Arguments to the decorator are proxied to
    ArgumentParser.add_argument_group().

    For example:

    .. code-block: python

        @Command('foo', helps='Run the foo action')
        @CommandArgumentGroup('group1')
        @CommandArgument('-b', '--bar', group='group1', action='store_true',
            default=False, help='Enable bar mode.')
        def foo(self, command_context):
            pass

    The name should be chosen so that it makes sense as part of the phrase
    'Command Arguments for <name>' because that's how it will be shown in the
    help message.
    """

    def __init__(self, group_name):
        self._group_name = group_name

    def __call__(self, func):
        if not hasattr(func, "_mach_command"):
            func._mach_command = _MachCommand()

        # Decorators apply bottom-up, so prepend to keep source order.
        groups = func._mach_command.argument_group_names
        groups.insert(0, self._group_name)

        return func
+
+
def SettingsProvider(cls):
    """Class decorator to denote that this class provides Mach settings.

    When this decorator is encountered, the underlying class will automatically
    be registered with the Mach registrar and will (likely) be hooked up to the
    mach driver.
    """
    if hasattr(cls, "config_settings"):
        Registrar.register_settings_provider(cls)
        return cls

    raise MachError(
        "@SettingsProvider must contain a config_settings attribute. It "
        "may either be a list of tuples, or a callable that returns a list "
        "of tuples. Each tuple must be of the form:\n"
        "(<section>.<option>, <type_cls>, <description>, <default>, <choices>)\n"
        "as specified by ConfigSettings._format_metadata."
    )
diff --git a/python/mach/mach/dispatcher.py b/python/mach/mach/dispatcher.py
new file mode 100644
index 0000000000..95287eac40
--- /dev/null
+++ b/python/mach/mach/dispatcher.py
@@ -0,0 +1,516 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import difflib
+import shlex
+import sys
+from operator import itemgetter
+
+from .base import NoCommandError, UnknownCommandError, UnrecognizedArgumentError
+from .decorators import SettingsProvider
+
+
# Provides the machrc setting backing command aliases; the dispatcher checks
# settings.alias before resolving a command name.
@SettingsProvider
class DispatchSettings:
    # Wildcard registration: any option name under [alias] is accepted and
    # typed as a string.
    config_settings = [
        (
            "alias.*",
            "string",
            """
Create a command alias of the form `<alias>=<command> <args>`.
Aliases can also be used to set default arguments:
<command>=<command> <args>
""".strip(),
        ),
    ]
+
+
class CommandFormatter(argparse.HelpFormatter):
    """Custom formatter to format just a subcommand."""

    def add_usage(self, *args):
        # Intentionally suppress the "usage:" block; this formatter renders
        # supplemental command help alongside a parser that already shows one.
        pass
+
+
+class CommandAction(argparse.Action):
+ """An argparse action that handles mach commands.
+
+ This class is essentially a reimplementation of argparse's sub-parsers
+ feature. We first tried to use sub-parsers. However, they were missing
+ features like grouping of commands (http://bugs.python.org/issue14037).
+
+ The way this works involves light magic and a partial understanding of how
+ argparse works.
+
+ Arguments registered with an argparse.ArgumentParser have an action
+ associated with them. An action is essentially a class that when called
+ does something with the encountered argument(s). This class is one of those
+ action classes.
+
+ An instance of this class is created doing something like:
+
+ parser.add_argument('command', action=CommandAction, registrar=r)
+
+ Note that a mach.registrar.Registrar instance is passed in. The Registrar
+ holds information on all the mach commands that have been registered.
+
+ When this argument is registered with the ArgumentParser, an instance of
+ this class is instantiated. One of the subtle but important things it does
+ is tell the argument parser that it's interested in *all* of the remaining
+ program arguments. So, when the ArgumentParser calls this action, we will
+ receive the command name plus all of its arguments.
+
+ For more, read the docs in __call__.
+ """
+
    def __init__(
        self,
        option_strings,
        dest,
        required=True,
        default=None,
        registrar=None,
        context=None,
    ):
        # A proper API would have **kwargs here. However, since we are a little
        # hacky, we intentionally omit it as a way of detecting potentially
        # breaking changes with argparse's implementation.
        #
        # In a similar vein, default is passed in but is not needed, so we drop
        # it.
        #
        # help=SUPPRESS hides this pseudo-argument from help output;
        # nargs=REMAINDER makes argparse hand us every remaining CLI token.
        argparse.Action.__init__(
            self,
            option_strings,
            dest,
            required=required,
            help=argparse.SUPPRESS,
            nargs=argparse.REMAINDER,
        )

        # Registrar holding all known mach commands, consulted at dispatch.
        self._mach_registrar = registrar
        # Context giving access to settings (e.g. alias lookup) and telemetry.
        self._context = context
+
    def __call__(self, parser, namespace, values, option_string=None):
        """This is called when the ArgumentParser has reached our arguments.

        Since we always register ourselves with nargs=argparse.REMAINDER,
        values should be a list of remaining arguments to parse. The first
        argument should be the name of the command to invoke and all remaining
        arguments are arguments for that command.

        The gist of the flow is that we look at the command being invoked. If
        it's *help*, we handle that specially (because argparse's default help
        handler isn't satisfactory). Else, we create a new, independent
        ArgumentParser instance for just the invoked command (based on the
        information contained in the command registrar) and feed the arguments
        into that parser. We then merge the results with the main
        ArgumentParser.
        """
        if namespace.help:
            # -h or --help is in the global arguments.
            self._handle_main_help(parser, namespace.verbose)
            sys.exit(0)
        elif values:
            command = values[0].lower()
            args = values[1:]
            if command == "help":
                if args and args[0] not in ["-h", "--help"]:
                    # Make sure args[0] is indeed a command.
                    self._handle_command_help(parser, args[0], args)
                else:
                    self._handle_main_help(parser, namespace.verbose)
                sys.exit(0)
            elif "-h" in args or "--help" in args:
                # -h or --help is in the command arguments.
                if "--" in args:
                    # -- is in command arguments
                    if (
                        "-h" in args[: args.index("--")]
                        or "--help" in args[: args.index("--")]
                    ):
                        # Honor -h or --help only if it appears before --
                        self._handle_command_help(parser, command, args)
                        sys.exit(0)
                else:
                    self._handle_command_help(parser, command, args)
                    sys.exit(0)
        else:
            raise NoCommandError(namespace)

        # First see if this is a user-defined alias
        if command in self._context.settings.alias:
            alias = self._context.settings.alias[command]
            defaults = shlex.split(alias)
            command = defaults.pop(0)
            args = defaults + args

        if command not in self._mach_registrar.command_handlers:
            # Try to find similar commands, may raise UnknownCommandError.
            command = self._suggest_command(command)

        handler = self._mach_registrar.command_handlers.get(command)

        prog = command
        usage = "%(prog)s [global arguments] " + command + " [command arguments]"

        subcommand = None

        # If there are sub-commands, parse the intent out immediately.
        if handler.subcommand_handlers and args:
            # mach <command> help <subcommand>
            if set(args[: args.index("--")] if "--" in args else args).intersection(
                ("help", "--help")
            ):
                self._handle_subcommand_help(parser, handler, args)
                sys.exit(0)
            # mach <command> <subcommand> ...
            elif args[0] in handler.subcommand_handlers:
                subcommand = args[0]
                # From here on, dispatch to the sub-command's handler.
                handler = handler.subcommand_handlers[subcommand]
                prog = prog + " " + subcommand
                usage = (
                    "%(prog)s [global arguments] "
                    + command
                    + " "
                    + subcommand
                    + " [command arguments]"
                )
                args.pop(0)

        # We create a new parser, populate it with the command's arguments,
        # then feed all remaining arguments to it, merging the results
        # with ourselves. This is essentially what argparse subparsers
        # do.

        parser_args = {
            "add_help": False,
            "usage": usage,
        }

        remainder = None

        if handler.parser:
            subparser = handler.parser
            subparser.context = self._context
            subparser.prog = subparser.prog + " " + prog
            # Pull REMAINDER arguments out of the parser so they can pick up
            # extras anywhere on the line (handled after parse_known_args).
            for arg in subparser._actions[:]:
                if arg.nargs == argparse.REMAINDER:
                    subparser._actions.remove(arg)
                    remainder = (
                        (arg.dest,),
                        {"default": arg.default, "nargs": arg.nargs, "help": arg.help},
                    )
        else:
            subparser = argparse.ArgumentParser(**parser_args)

        for arg in handler.arguments:
            # Remove our group keyword; it's not needed here.
            group_name = arg[1].get("group")
            if group_name:
                del arg[1]["group"]

            if arg[1].get("nargs") == argparse.REMAINDER:
                # parse_known_args expects all argparse.REMAINDER ('...')
                # arguments to be all stuck together. Instead, we want them to
                # pick any extra argument, wherever they are.
                # Assume a limited CommandArgument for those arguments.
                assert len(arg[0]) == 1
                assert all(k in ("default", "nargs", "help", "metavar") for k in arg[1])
                remainder = arg
            else:
                subparser.add_argument(*arg[0], **arg[1])

        # We define the command information on the main parser result so as to
        # not interfere with arguments passed to the command.
        setattr(namespace, "mach_handler", handler)
        setattr(namespace, "command", command)
        setattr(namespace, "subcommand", subcommand)

        command_namespace, extra = subparser.parse_known_args(args)
        setattr(namespace, "command_args", command_namespace)
        if remainder:
            (name,), options = remainder
            # parse_known_args usefully puts all arguments after '--' in
            # extra, but also puts '--' there. We don't want to pass it down
            # to the command handler. Note that if multiple '--' are on the
            # command line, only the first one is removed, so that subsequent
            # ones are passed down.
            if "--" in extra:
                extra.remove("--")

            # Commands with argparse.REMAINDER arguments used to force the
            # other arguments to be '+' prefixed. If a user now passes such
            # an argument, it will silently end up in extra. So, check if any
            # of the allowed arguments appear in a '+' prefixed form, and error
            # out if that's the case.
            # NOTE(review): this loop reuses (shadows) the outer
            # ``args``/``arg`` names; the earlier values are not needed past
            # this point, but renaming would make the flow clearer.
            for args, _ in handler.arguments:
                for arg in args:
                    arg = arg.replace("-", "+", 1)
                    if arg in extra:
                        raise UnrecognizedArgumentError(command, [arg])

            if extra:
                setattr(command_namespace, name, extra)
            else:
                setattr(command_namespace, name, options.get("default", []))
        elif extra:
            raise UnrecognizedArgumentError(command, extra)
+
    def _handle_main_help(self, parser, verbose):
        """Print the top-level help: commands grouped by category, with
        condition-disabled commands listed only when ``verbose``."""
        # Since we don't need full sub-parser support for the main help output,
        # we create groups in the ArgumentParser and populate each group with
        # arguments corresponding to command names. This has the side-effect
        # that argparse renders it nicely.
        r = self._mach_registrar
        disabled_commands = []

        # Categories map to (title, description, priority); render highest
        # priority first.
        cats = [(k, v[2]) for k, v in r.categories.items()]
        sorted_cats = sorted(cats, key=itemgetter(1), reverse=True)
        for category, priority in sorted_cats:
            group = None

            for command in sorted(r.commands_by_category[category]):
                handler = r.command_handlers[command]

                # Instantiate a handler class to see if it should be filtered
                # out for the current context or not. Condition functions can be
                # applied to the command's decorator.
                if handler.conditions:
                    instance = handler.create_instance(
                        self._context, handler.virtualenv_name
                    )

                    is_filtered = False
                    for c in handler.conditions:
                        if not c(instance):
                            is_filtered = True
                            break
                    if is_filtered:
                        description = handler.description
                        disabled_command = {
                            "command": command,
                            "description": description,
                        }
                        disabled_commands.append(disabled_command)
                        continue

                # Create the category's group lazily so a category whose
                # commands are all filtered out produces no header.
                if group is None:
                    title, description, _priority = r.categories[category]
                    group = parser.add_argument_group(title, description)

                description = handler.description
                group.add_argument(command, help=description, action="store_true")

        if disabled_commands and "disabled" in r.categories:
            title, description, _priority = r.categories["disabled"]
            group = parser.add_argument_group(title, description)
            if verbose:
                for c in disabled_commands:
                    group.add_argument(
                        c["command"], help=c["description"], action="store_true"
                    )

        parser.print_help()
+
    def _populate_command_group(self, parser, handler, group):
        """Add the handler's arguments to ``group`` on ``parser``, routing
        arguments declared with a ``group`` keyword into their named extra
        group."""
        extra_groups = {}
        for group_name in handler.argument_group_names:
            group_full_name = "Command Arguments for " + group_name
            extra_groups[group_name] = parser.add_argument_group(group_full_name)

        for arg in handler.arguments:
            # Apply our group keyword.
            # NOTE(review): ``del`` mutates the kwargs dict stored on the
            # handler, and rebinding ``group`` here means any *later*
            # argument without an explicit group lands in the most recently
            # used named group rather than the original one - confirm both
            # behaviors are intended.
            group_name = arg[1].get("group")
            if group_name:
                del arg[1]["group"]
                group = extra_groups[group_name]
            group.add_argument(*arg[0], **arg[1])
+
    def _get_command_arguments_help(self, handler):
        """Build and return a secondary ArgumentParser whose help output
        describes only the command's own arguments."""
        # This code is worth explaining. Because we are doing funky things with
        # argument registration to allow the same option in both global and
        # command arguments, we can't simply put all arguments on the same
        # parser instance because argparse would complain. We can't register an
        # argparse subparser here because it won't properly show help for
        # global arguments. So, we employ a strategy similar to command
        # execution where we construct a 2nd, independent ArgumentParser for
        # just the command data then supplement the main help's output with
        # this 2nd parser's. We use a custom formatter class to ignore some of
        # the help output.
        parser_args = {
            "formatter_class": CommandFormatter,
            "add_help": False,
        }

        if handler.parser:
            c_parser = handler.parser
            c_parser.context = self._context
            c_parser.formatter_class = NoUsageFormatter
            # Accessing _action_groups is a bit shady. We are highly dependent
            # on the argparse implementation not changing. We fail fast to
            # detect upstream changes so we can intelligently react to them.
            group = c_parser._action_groups[1]

            # By default argparse adds two groups called "positional arguments"
            # and "optional arguments". We want to rename these to reflect standard
            # mach terminology.
            c_parser._action_groups[0].title = "Command Parameters"
            c_parser._action_groups[1].title = "Command Arguments"

            if not handler.description:
                handler.description = c_parser.description
            c_parser.description = None
        else:
            c_parser = argparse.ArgumentParser(**parser_args)
            group = c_parser.add_argument_group("Command Arguments")

        self._populate_command_group(c_parser, handler, group)

        return c_parser
+
+ def _handle_command_help(self, parser, command, args):
+ handler = self._mach_registrar.command_handlers.get(command)
+
+ if not handler:
+ raise UnknownCommandError(command, "query")
+
+ if handler.subcommand_handlers:
+ self._handle_subcommand_help(parser, handler, args)
+ return
+
+ c_parser = self._get_command_arguments_help(handler)
+
+ # Set the long help of the command to the docstring (if present) or
+ # the command decorator description argument (if present).
+ if handler.docstring:
+ parser.description = format_docstring(handler.docstring)
+ elif handler.description:
+ parser.description = handler.description
+
+ parser.usage = "%(prog)s [global arguments] " + command + " [command arguments]"
+
+ # This is needed to preserve line endings in the description field,
+ # which may be populated from a docstring.
+ parser.formatter_class = argparse.RawDescriptionHelpFormatter
+ parser.print_help()
+ print("")
+ c_parser.print_help()
+
+ def _handle_subcommand_main_help(self, parser, handler):
+ parser.usage = (
+ "%(prog)s [global arguments] "
+ + handler.name
+ + " subcommand [subcommand arguments]"
+ )
+ group = parser.add_argument_group("Sub Commands")
+
+ def by_decl_order(item):
+ return item[1].decl_order
+
+ def by_name(item):
+ return item[1].subcommand
+
+ subhandlers = handler.subcommand_handlers.items()
+ for subcommand, subhandler in sorted(
+ subhandlers,
+ key=by_decl_order if handler.order == "declaration" else by_name,
+ ):
+ group.add_argument(
+ subcommand, help=subhandler.description, action="store_true"
+ )
+
+ if handler.docstring:
+ parser.description = format_docstring(handler.docstring)
+
+ c_parser = self._get_command_arguments_help(handler)
+
+ parser.formatter_class = argparse.RawDescriptionHelpFormatter
+
+ parser.print_help()
+ print("")
+ c_parser.print_help()
+
+ def _handle_subcommand_help(self, parser, handler, args):
+ subcommand = set(args).intersection(list(handler.subcommand_handlers.keys()))
+ if not subcommand:
+ return self._handle_subcommand_main_help(parser, handler)
+
+ subcommand = subcommand.pop()
+ subhandler = handler.subcommand_handlers[subcommand]
+
+ # Initialize the parser if necessary
+ subhandler.parser
+
+ c_parser = subhandler.parser or argparse.ArgumentParser(add_help=False)
+ c_parser.formatter_class = CommandFormatter
+
+ group = c_parser.add_argument_group("Sub Command Arguments")
+ self._populate_command_group(c_parser, subhandler, group)
+
+ if subhandler.docstring:
+ parser.description = format_docstring(subhandler.docstring)
+
+ parser.formatter_class = argparse.RawDescriptionHelpFormatter
+ parser.usage = (
+ "%(prog)s [global arguments] "
+ + handler.name
+ + " "
+ + subcommand
+ + " [command arguments]"
+ )
+
+ parser.print_help()
+ print("")
+ c_parser.print_help()
+
+ def _suggest_command(self, command):
+ names = [h.name for h in self._mach_registrar.command_handlers.values()]
+ # We first try to look for a valid command that is very similar to the given command.
+ suggested_commands = difflib.get_close_matches(command, names, cutoff=0.8)
+ # If we find more than one matching command, or no command at all,
+ # we give command suggestions instead (with a lower matching threshold).
+ # All commands that start with the given command (for instance:
+ # 'mochitest-plain', 'mochitest-chrome', etc. for 'mochitest-')
+ # are also included.
+ if len(suggested_commands) != 1:
+ suggested_commands = set(
+ difflib.get_close_matches(command, names, cutoff=0.5)
+ )
+ suggested_commands |= {cmd for cmd in names if cmd.startswith(command)}
+ raise UnknownCommandError(command, "run", suggested_commands)
+ sys.stderr.write(
+ "We're assuming the '%s' command is '%s' and we're "
+ "executing it for you.\n\n" % (command, suggested_commands[0])
+ )
+ return suggested_commands[0]
+
+
class NoUsageFormatter(argparse.HelpFormatter):
    """Help formatter that suppresses the usage section entirely."""

    def _format_usage(self, *_args, **_kwargs):
        # Whatever argparse asks for, render no usage line at all.
        return ""
+
+
def format_docstring(docstring):
    """Format a raw docstring into something suitable for presentation.

    Implements the trim() algorithm from PEP 257: the first line is
    stripped, the common leading indentation of subsequent lines is
    removed, tabs are expanded, and blank lines are dropped from both
    ends. Returns "" for a falsy input.
    """
    if not docstring:
        return ""

    lines = docstring.expandtabs().splitlines()

    # Smallest indentation shared by every non-blank line after the first.
    margin = sys.maxsize
    for line in lines[1:]:
        body = line.lstrip()
        if body:
            margin = min(margin, len(line) - len(body))

    cleaned = [lines[0].strip()]
    if margin < sys.maxsize:
        cleaned.extend(line[margin:].rstrip() for line in lines[1:])

    # Drop blank lines from the tail, then from the head.
    while cleaned and not cleaned[-1]:
        cleaned.pop()
    while cleaned and not cleaned[0]:
        cleaned.pop(0)

    return "\n".join(cleaned)
diff --git a/python/mach/mach/logging.py b/python/mach/mach/logging.py
new file mode 100644
index 0000000000..d39f336cc0
--- /dev/null
+++ b/python/mach/mach/logging.py
@@ -0,0 +1,398 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This file contains logging functionality for mach. It essentially provides
+# support for a structured logging framework built on top of Python's built-in
+# logging framework.
+
+import codecs
+import json
+import logging
+import os
+import sys
+import time
+
+import blessed
+import six
+from mozbuild.util import mozilla_build_version
+from packaging.version import Version
+
IS_WINDOWS = sys.platform.startswith("win")

if IS_WINDOWS:
    import msvcrt
    from ctypes import byref, windll
    from ctypes.wintypes import DWORD

    # Console mode flag that makes the Windows console interpret ANSI
    # escape sequences (see SetConsoleMode in the Windows console API).
    ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x0004

    def enable_virtual_terminal_processing(file_descriptor):
        """Enable ANSI escape sequence handling on the console backing
        ``file_descriptor``.

        Raises OSError if the descriptor has no OS handle (e.g. a pipe in
        unit tests); callers are expected to tolerate that.
        """
        handle = msvcrt.get_osfhandle(file_descriptor)
        # The original wrapped the calls below in ``try/except Exception
        # as e: raise e`` — a no-op handler. Removed; errors propagate
        # unchanged.
        mode = DWORD()
        windll.kernel32.GetConsoleMode(handle, byref(mode))
        mode.value |= ENABLE_VIRTUAL_TERMINAL_PROCESSING
        windll.kernel32.SetConsoleMode(handle, mode.value)
+
+
def enable_blessed():
    """Return True when blessed (ANSI escape sequence) output should be used.

    Non-Windows platforms always qualify. On Windows, the NO_ANSI
    environment variable is an explicit opt-out, and MozillaBuild must be
    at least 4.0.2 — the first release that supports ANSI escape
    sequences.
    """
    if not IS_WINDOWS:
        # Only Windows has issues enabling blessed and interpreting
        # ANSI escape sequences.
        return True

    if os.environ.get("NO_ANSI"):
        return False

    return mozilla_build_version() >= Version("4.0.2")
+
+
+# stdout and stderr may not necessarily be set up to write Unicode output, so
+# reconfigure them if necessary.
+def _wrap_stdstream(fh):
+ if fh in (sys.stderr, sys.stdout):
+ encoding = sys.getdefaultencoding()
+ encoding = "utf-8" if encoding in ("ascii", "charmap") else encoding
+ if six.PY2:
+ return codecs.getwriter(encoding)(fh, errors="replace")
+ else:
+ return codecs.getwriter(encoding)(fh.buffer, errors="replace")
+ else:
+ return fh
+
+
def format_seconds(total):
    """Format number of seconds to MM:SS.DD form (minutes padded to
    width 2, seconds zero-padded with centiseconds)."""
    minutes = int(total // 60)
    seconds = total - minutes * 60
    return "%2d:%05.2f" % (minutes, seconds)
+
+
class ConvertToStructuredFilter(logging.Filter):
    """Logging filter that coerces plain records into structured ones.

    Records already carrying ``action`` and ``params`` pass through
    untouched; anything else is rewritten into the structured shape under
    the catch-all "unstructured" action, with the rendered message stored
    as the single ``msg`` parameter.
    """

    def filter(self, record):
        structured = hasattr(record, "action") and hasattr(record, "params")
        if not structured:
            record.action = "unstructured"
            record.params = {"msg": record.getMessage()}
            record.msg = "{msg}"

        # This filter only normalizes; it never drops a record.
        return True
+
+
class StructuredJSONFormatter(logging.Formatter):
    """Log formatter that writes a structured JSON entry.

    Each record renders as the JSON array ``[created, action, params]``,
    where ``created`` is the record's UNIX timestamp. Missing fields fall
    back to "UNKNOWN" / {}.
    """

    def format(self, record):
        payload = [
            record.created,
            getattr(record, "action", "UNKNOWN"),
            getattr(record, "params", {}),
        ]
        return json.dumps(payload)
+
+
class StructuredHumanFormatter(logging.Formatter):
    """Log formatter that writes structured messages for humans.

    It is important that this formatter never be added to a logger that
    produces unstructured/classic log messages. If it is, the call to format()
    could fail because the string could contain things (like JSON) that look
    like formatting character sequences.

    Because of this limitation, format() will fail with a KeyError if an
    unstructured record is passed or if the structured message is malformed.
    """

    def __init__(self, start_time, write_interval=False, write_times=True):
        """``start_time`` is the UNIX timestamp the elapsed column is
        measured from; ``write_interval`` switches the column to deltas
        between consecutive records; ``write_times`` disables it entirely.
        """
        # Initialize the base Formatter so inherited helpers (e.g.
        # formatException(), usesTime()) have the state they expect.
        # The original skipped this call.
        super().__init__()
        self.start_time = start_time
        self.write_interval = write_interval
        self.write_times = write_times
        self.last_time = None

    def format(self, record):
        """Render a structured record. ``record.msg`` is treated as a
        str.format template filled from ``record.params``."""
        formatted_msg = record.msg.format(**getattr(record, "params", {}))

        elapsed_time = (
            format_seconds(self._time(record)) + " " if self.write_times else ""
        )

        rv = elapsed_time + formatted_msg
        formatted_stack_trace_result = formatted_stack_trace(record, self)

        if formatted_stack_trace_result != "":
            # Prefix every traceback line with the elapsed-time column so
            # multi-line output stays aligned.
            # NOTE(review): the first traceback line ends up with the
            # prefix twice (once here, once via the replace below) —
            # preserved as-is from the original.
            stack_trace = "\n" + elapsed_time + formatted_stack_trace_result
            rv += stack_trace.replace("\n", f"\n{elapsed_time}")

        return rv

    def _time(self, record):
        """Return seconds since start (or since the previous record when
        ``write_interval`` is set), updating interval bookkeeping."""
        t = record.created - self.start_time

        if self.write_interval and self.last_time is not None:
            t = record.created - self.last_time

        self.last_time = record.created

        return t
+
+
class StructuredTerminalFormatter(StructuredHumanFormatter):
    """Log formatter for structured messages writing to a terminal,
    colorizing the elapsed-time column and well-known test-status
    prefixes."""

    def set_terminal(self, terminal):
        """Attach a terminal (or None) and cache its attribute-reset
        (sgr0) sequence."""
        self.terminal = terminal
        self._sgr0 = terminal.normal if terminal else ""

    def format(self, record):
        message = record.msg.format(**getattr(record, "params", {}))

        if self.write_times:
            prefix = self.terminal.blue(format_seconds(self._time(record))) + " "
        else:
            prefix = ""

        rv = prefix + self._colorize(message) + self._sgr0

        trace = formatted_stack_trace(record, self)
        if trace != "":
            block = "\n" + prefix + trace
            rv += block.replace("\n", f"\n{prefix}")

        # Some processes (notably Clang) don't reset terminal attributes after
        # printing newlines, which can leave the terminal in a wonky state.
        # Work around this by appending the sgr0 sequence after every line to
        # reset all attributes. Programs relying on the next line inheriting
        # attributes lose that, but it beats a "corrupted" terminal.
        return rv + self._sgr0

    def _colorize(self, s):
        """Highlight known test-status prefixes, keeping any leading
        "REFTEST " marker in front of the colored text."""
        if not self.terminal:
            return s

        result = s

        has_reftest_marker = s.startswith("REFTEST ")
        if has_reftest_marker:
            s = s[len("REFTEST "):]

        # (prefix to match, number of leading chars to color, color fn).
        # TEST-UNEXPECTED colors 20 chars to cover e.g. TEST-UNEXPECTED-FAIL.
        markers = (
            ("TEST-PASS", 9, self.terminal.green),
            ("TEST-UNEXPECTED", 20, self.terminal.red),
            ("TEST-START", 10, self.terminal.yellow),
            ("TEST-INFO", 9, self.terminal.yellow),
        )
        for marker, width, paint in markers:
            if s.startswith(marker):
                result = paint(s[:width]) + s[width:]
                break

        if has_reftest_marker:
            # NOTE(review): when no marker matched, ``result`` still holds
            # the full original string, so the REFTEST prefix gets
            # duplicated here — preserved from the original.
            result = "REFTEST " + result

        return result
+
+
def formatted_stack_trace(record, formatter):
    """Return the record's exception/stack text, or "" when it has none.

    Mirrors part of the standard library's logging.Formatter.format():
    the converted traceback is cached on ``record.exc_text`` (it is
    constant), and any stack info is appended after a newline.
    """
    rv = ""

    if record.exc_info:
        if not record.exc_text:
            # Cache the traceback text to avoid converting it repeatedly.
            record.exc_text = formatter.formatException(record.exc_info)
        if record.exc_text:
            rv = record.exc_text

    if record.stack_info:
        # Matches the original: when rv is empty this still inserts a
        # leading newline before the stack text.
        if rv[-1:] != "\n":
            rv += "\n"
        rv += formatter.formatStack(record.stack_info)

    return rv
+
+
class LoggingManager(object):
    """Holds and controls global logging state.

    An application should instantiate one of these and configure it as needed.

    This class provides a mechanism to configure the output of logging data
    both from mach and from the overall logging system (e.g. from other
    modules).
    """

    def __init__(self):
        # Reference point for the elapsed-time column in human-readable output.
        self.start_time = time.time()

        self.json_handlers = []
        self.terminal_handler = None
        self.terminal_formatter = None

        self.root_logger = logging.getLogger()
        self.root_logger.setLevel(logging.DEBUG)

        # Installing NullHandler on the root logger ensures that *all* log
        # messages have at least one handler. This prevents Python from
        # complaining about "no handlers could be found for logger XXX."
        self.root_logger.addHandler(logging.NullHandler())

        mach_logger = logging.getLogger("mach")
        mach_logger.setLevel(logging.DEBUG)

        self.structured_filter = ConvertToStructuredFilter()

        # Loggers whose records are structured (carry action/params);
        # handlers added below get attached to each of these.
        self.structured_loggers = [mach_logger]

        self._terminal = None

    def create_terminal(self):
        """Try to create a blessed Terminal for ANSI-capable output.

        Leaves ``self._terminal`` as None when blessed is disabled, the
        stream is not a TTY, or terminal setup fails.
        """
        if enable_blessed():
            # Sometimes blessed fails to set up the terminal, in that case, silently fail.
            try:
                terminal = blessed.Terminal(stream=_wrap_stdstream(sys.stdout))

                if terminal.is_a_tty:
                    self._terminal = terminal
            except Exception:
                pass

    @property
    def terminal(self):
        # The blessed Terminal created by create_terminal(), or None.
        return self._terminal

    def add_json_handler(self, fh):
        """Enable JSON logging on the specified file object."""

        # Configure the consumer of structured messages.
        handler = logging.StreamHandler(stream=fh)
        handler.setFormatter(StructuredJSONFormatter())
        handler.setLevel(logging.DEBUG)

        # And hook it up.
        for logger in self.structured_loggers:
            logger.addHandler(handler)

        self.json_handlers.append(handler)

    def add_terminal_logging(
        self, fh=sys.stdout, level=logging.INFO, write_interval=False, write_times=True
    ):
        """Enable logging to the terminal."""
        self.create_terminal()

        if IS_WINDOWS:
            try:
                # fileno() can raise in some cases, like unit tests.
                # so we can try to enable this but if we fail it's fine
                enable_virtual_terminal_processing(sys.stdout.fileno())
                enable_virtual_terminal_processing(sys.stderr.fileno())
            except Exception:
                pass

        fh = _wrap_stdstream(fh)
        formatter = StructuredHumanFormatter(
            self.start_time, write_interval=write_interval, write_times=write_times
        )

        # Upgrade to the colorizing formatter when a real terminal exists.
        if self.terminal:
            formatter = StructuredTerminalFormatter(
                self.start_time, write_interval=write_interval, write_times=write_times
            )
            formatter.set_terminal(self.terminal)

        handler = logging.StreamHandler(stream=fh)
        handler.setFormatter(formatter)
        handler.setLevel(level)

        for logger in self.structured_loggers:
            logger.addHandler(handler)

        self.terminal_handler = handler
        self.terminal_formatter = formatter

    def replace_terminal_handler(self, handler):
        """Replace the installed terminal handler.

        Returns the old handler or None if none was configured.
        If the new handler is None, removes any existing handler and disables
        logging to the terminal.
        """
        old = self.terminal_handler

        if old:
            for logger in self.structured_loggers:
                logger.removeHandler(old)

        if handler:
            for logger in self.structured_loggers:
                logger.addHandler(handler)

        self.terminal_handler = handler

        return old

    def enable_unstructured(self):
        """Enable logging of unstructured messages."""
        if self.terminal_handler:
            # The filter rewrites classic records into structured form so
            # the structured formatters can render them.
            self.terminal_handler.addFilter(self.structured_filter)
            self.root_logger.addHandler(self.terminal_handler)

    def disable_unstructured(self):
        """Disable logging of unstructured messages."""
        if self.terminal_handler:
            self.terminal_handler.removeFilter(self.structured_filter)
            self.root_logger.removeHandler(self.terminal_handler)

    def register_structured_logger(self, logger, terminal=True, json=True):
        """Register a structured logger.

        This needs to be called for all structured loggers that don't chain up
        to the mach logger in order for their output to be captured.

        ``terminal`` and ``json`` control which of the already-installed
        handler kinds get attached to ``logger``.
        """
        self.structured_loggers.append(logger)

        if terminal and self.terminal_handler:
            logger.addHandler(self.terminal_handler)

        if json:
            for handler in self.json_handlers:
                logger.addHandler(handler)

    def enable_all_structured_loggers(self, terminal=True, json=True):
        """Enable logging of all structured messages from all loggers.

        ``terminal`` and ``json`` determine which log handlers to operate
        on. By default, all known handlers are operated on.
        """

        # Glean makes logs that we're not interested in, so we squelch them.
        logging.getLogger("glean").setLevel(logging.CRITICAL)

        # Remove current handlers from all loggers so we don't double
        # register handlers.
        for logger in self.root_logger.manager.loggerDict.values():
            # Some entries might be logging.PlaceHolder.
            if not isinstance(logger, logging.Logger):
                continue

            if terminal:
                logger.removeHandler(self.terminal_handler)

            if json:
                for handler in self.json_handlers:
                    logger.removeHandler(handler)

        # Wipe out existing registered structured loggers since they
        # all propagate to root logger.
        self.structured_loggers = []
        self.register_structured_logger(self.root_logger, terminal=terminal, json=json)
diff --git a/python/mach/mach/main.py b/python/mach/mach/main.py
new file mode 100644
index 0000000000..9ab880341d
--- /dev/null
+++ b/python/mach/mach/main.py
@@ -0,0 +1,735 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This module provides functionality for the command-line build tool
+# (mach). It is packaged as a module because everything is a library.
+
import argparse
import codecs
import errno
import imp
import importlib.util
import logging
import os
import sys
import traceback
import types
import uuid
from collections.abc import Iterable
from pathlib import Path
from typing import Dict, List, Union

from .base import (
    CommandContext,
    FailedCommandError,
    MachError,
    MissingFileError,
    NoCommandError,
    UnknownCommandError,
    UnrecognizedArgumentError,
)
from .config import ConfigSettings
from .dispatcher import CommandAction
from .logging import LoggingManager
from .registrar import Registrar
from .sentry import NoopErrorReporter, register_sentry
from .telemetry import create_telemetry_from_environment, report_invocation_metrics
from .util import UserError, setenv
+
# Appended to the error templates below; the %s is filled with the failure
# category passed to ``./mach busted file``.
SUGGEST_MACH_BUSTED_TEMPLATE = r"""
You can invoke ``./mach busted`` to check if this issue is already on file. If it
isn't, please use ``./mach busted file %s`` to report it. If ``./mach busted`` is
misbehaving, you can also inspect the dependencies of bug 1543241.
""".lstrip()

# Shown when mach itself (not an invoked command) raises.
MACH_ERROR_TEMPLATE = (
    r"""
The error occurred in mach itself. This is likely a bug in mach itself or a
fundamental problem with a loaded module.

""".lstrip()
    + SUGGEST_MACH_BUSTED_TEMPLATE
)

# Printed before the traceback details of any reported failure.
ERROR_FOOTER = r"""
If filing a bug, please include the full output of mach, including this error
message.

The details of the failure are as follows:
""".lstrip()

# Shown for UserError failures, which are not reported as mach bugs.
USER_ERROR = r"""
This is a user error and does not appear to be a bug in mach.
""".lstrip()

# Shown when the invoked command's own implementation raises.
COMMAND_ERROR_TEMPLATE = (
    r"""
The error occurred in the implementation of the invoked mach command.

This should never occur and is likely a bug in the implementation of that
command.
""".lstrip()
    + SUGGEST_MACH_BUSTED_TEMPLATE
)

# Shown when code called *by* the command (not the command itself) raises.
MODULE_ERROR_TEMPLATE = (
    r"""
The error occurred in code that was called by the mach command. This is either
a bug in the called code itself or in the way that mach is calling it.
""".lstrip()
    + SUGGEST_MACH_BUSTED_TEMPLATE
)

# Shown when mach is invoked with no command at all.
NO_COMMAND_ERROR = r"""
It looks like you tried to run mach without a command.

Run ``mach help`` to show a list of commands.
""".lstrip()

# %s slots: verb (e.g. "run"), the unknown command, suggestion text.
UNKNOWN_COMMAND_ERROR = r"""
It looks like you are trying to %s an unknown mach command: %s
%s
Run ``mach help`` to show a list of commands.
""".lstrip()

# %s slots: verb, comma-joined list of suggested command names.
SUGGESTED_COMMANDS_MESSAGE = r"""
Did you want to %s any of these commands instead: %s?
"""

# %s slots: command name, the unrecognized arguments.
UNRECOGNIZED_ARGUMENT_ERROR = r"""
It looks like you passed an unrecognized argument into mach.

The %s command does not accept the arguments: %s
""".lstrip()

# %s slot: the offending entry point (its repr).
INVALID_ENTRY_POINT = r"""
Entry points should return a list of command providers or directories
containing command providers. The following entry point is invalid:

    %s

You are seeing this because there is an error in an external module attempting
to implement a mach command. Please fix the error, or uninstall the module from
your system.
""".lstrip()
+
+
class ArgumentParser(argparse.ArgumentParser):
    """Custom implementation argument parser to make things look pretty."""

    def error(self, message):
        """Custom error reporter to give more helpful text on bad commands."""
        if not message.startswith("argument command: invalid choice"):
            # Anything that isn't a bad command name goes to argparse.
            # The base error() prints usage and exits, so control never
            # returns here.
            argparse.ArgumentParser.error(self, message)
            assert False

        print("Invalid command specified. The list of commands is below.\n")
        self.print_help()
        sys.exit(1)

    def format_help(self):
        """Return base help text with argparse's terse brace-wrapped
        subcommand summary stripped, leaving only the pretty list."""
        text = argparse.ArgumentParser.format_help(self)

        # argparse renders subcommands twice — a silly "{foo,bar}" summary
        # line followed by the per-command list:
        #
        # Commands:
        #   {foo,bar}
        #     foo  Do foo.
        #     bar  Do bar.
        #
        # Drop the summary line, keep the list.
        marker = "Commands:\n  {"
        summary_start = text.find(marker)

        if summary_start == -1:
            return text

        summary_end = text.find("}\n", summary_start)
        assert summary_end != -1

        keep_until = summary_start + len("Commands:\n")
        resume_from = summary_end + len("}\n")
        return text[0:keep_until] + text[resume_from:]
+
+
class ContextWrapper(object):
    """Proxy around a command context that lazily fills missing attributes.

    Attribute reads hit the wrapped context first; on a miss, the handler
    callable is invoked with the attribute name and its result is stored
    back on the context for next time. Attribute writes always go
    straight to the context.
    """

    def __init__(self, context, handler):
        # Bypass our own __setattr__ so these land on the wrapper itself
        # instead of being forwarded to the context.
        object.__setattr__(self, "_context", context)
        object.__setattr__(self, "_handler", handler)

    def __getattribute__(self, key):
        context = object.__getattribute__(self, "_context")
        try:
            return getattr(context, key)
        except AttributeError as missing:
            try:
                value = object.__getattribute__(self, "_handler")(key)
            except (AttributeError, TypeError):
                # TypeError covers legacy handlers written before the key
                # argument existed; re-raise the original lookup failure.
                raise missing
            setattr(self, key, value)
            return value

    def __setattr__(self, key, value):
        setattr(object.__getattribute__(self, "_context"), key, value)
+
+
class MachCommandReference:
    """A reference to a mach command.

    Holds the metadata for a mach command — currently just the path of
    the module implementing it.
    """

    # Path of the module implementing the command.
    module: Path

    def __init__(self, module: Union[str, Path]):
        # Normalize eagerly so every consumer sees a Path.
        self.module = Path(module)
+
+
+class Mach(object):
+ """Main mach driver type.
+
+ This type is responsible for holding global mach state and dispatching
+ a command from arguments.
+
+ The following attributes may be assigned to the instance to influence
+ behavior:
+
+ populate_context_handler -- If defined, it must be a callable. The
+ callable signature is the following:
+ populate_context_handler(key=None)
+ It acts as a fallback getter for the mach.base.CommandContext
+ instance.
+ This allows to augment the context instance with arbitrary data
+ for use in command handlers.
+
+ require_conditions -- If True, commands that do not have any condition
+ functions applied will be skipped. Defaults to False.
+
+ settings_paths -- A list of files or directories in which to search
+ for settings files to load.
+
+ """
+
+ USAGE = """%(prog)s [global arguments] command [command arguments]
+
+mach (German for "do") is the main interface to the Mozilla build system and
+common developer tasks.
+
+You tell mach the command you want to perform and it does it for you.
+
+Some common commands are:
+
+ %(prog)s build Build/compile the source tree.
+ %(prog)s help Show full help, including the list of all commands.
+
+To see more help for a specific command, run:
+
+ %(prog)s help <command>
+"""
+
+ def __init__(self, cwd: str):
+ assert Path(cwd).is_dir()
+
+ self.cwd = cwd
+ self.log_manager = LoggingManager()
+ self.logger = logging.getLogger(__name__)
+ self.settings = ConfigSettings()
+ self.settings_paths = []
+
+ if "MACHRC" in os.environ:
+ self.settings_paths.append(os.environ["MACHRC"])
+
+ self.log_manager.register_structured_logger(self.logger)
+ self.populate_context_handler = None
+
+ def load_commands_from_directory(self, path: Path):
+ """Scan for mach commands from modules in a directory.
+
+ This takes a path to a directory, loads the .py files in it, and
+ registers and found mach command providers with this mach instance.
+ """
+ for f in sorted(path.iterdir()):
+ if not f.suffix == ".py" or f.name == "__init__.py":
+ continue
+
+ full_path = path / f
+ module_name = f"mach.commands.{str(f)[0:-3]}"
+
+ self.load_commands_from_file(full_path, module_name=module_name)
+
+ def load_commands_from_file(self, path: Union[str, Path], module_name=None):
+ """Scan for mach commands from a file.
+
+ This takes a path to a file and loads it as a Python module under the
+ module name specified. If no name is specified, a random one will be
+ chosen.
+ """
+ if module_name is None:
+ # Ensure parent module is present otherwise we'll (likely) get
+ # an error due to unknown parent.
+ if "mach.commands" not in sys.modules:
+ mod = imp.new_module("mach.commands")
+ sys.modules["mach.commands"] = mod
+
+ module_name = f"mach.commands.{uuid.uuid4().hex}"
+
+ try:
+ imp.load_source(module_name, str(path))
+ except IOError as e:
+ if e.errno != errno.ENOENT:
+ raise
+
+ raise MissingFileError(f"{path} does not exist")
+
+ def load_commands_from_spec(
+ self, spec: Dict[str, MachCommandReference], topsrcdir: str, missing_ok=False
+ ):
+ """Load mach commands based on the given spec.
+
+ Takes a dictionary mapping command names to their metadata.
+ """
+ modules = set(spec[command].module for command in spec)
+
+ for path in modules:
+ try:
+ self.load_commands_from_file(topsrcdir / path)
+ except MissingFileError:
+ if not missing_ok:
+ raise
+
+ def load_commands_from_entry_point(self, group="mach.providers"):
+ """Scan installed packages for mach command provider entry points. An
+ entry point is a function that returns a list of paths to files or
+ directories containing command providers.
+
+ This takes an optional group argument which specifies the entry point
+ group to use. If not specified, it defaults to 'mach.providers'.
+ """
+ try:
+ import pkg_resources
+ except ImportError:
+ print(
+ "Could not find setuptools, ignoring command entry points",
+ file=sys.stderr,
+ )
+ return
+
+ for entry in pkg_resources.iter_entry_points(group=group, name=None):
+ paths = entry.load()()
+ if not isinstance(paths, Iterable):
+ print(INVALID_ENTRY_POINT % entry)
+ sys.exit(1)
+
+ for path in paths:
+ path = Path(path)
+ if path.is_file():
+ self.load_commands_from_file(path)
+ elif path.is_dir():
+ self.load_commands_from_directory(path)
+ else:
+ print(f"command provider '{path}' does not exist")
+
    def define_category(self, name, title, description, priority=50):
        """Provide a description for a named command category.

        Delegates to the global Registrar. ``priority`` orders categories
        in help output — presumably lower-priority categories sort
        differently; confirm against the registrar/dispatcher.
        """

        Registrar.register_category(name, title, description, priority)
+
    @property
    def require_conditions(self):
        # Proxy for the global Registrar flag: when True, commands without
        # any condition functions applied are skipped (see class docstring).
        return Registrar.require_conditions

    @require_conditions.setter
    def require_conditions(self, value):
        # Forward assignment to the shared Registrar singleton.
        Registrar.require_conditions = value
+
    def run(self, argv, stdin=None, stdout=None, stderr=None):
        """Runs mach with arguments provided from the command line.

        ``argv`` is the argument list (without the program name). The
        optional ``stdin``/``stdout``/``stderr`` temporarily replace the
        ``sys`` streams for the duration of the run; the originals — and
        ``os.environ`` — are restored before returning.

        Returns the integer exit code that should be used. 0 means success. All
        other values indicate failure.
        """
        # Placeholder reporter; _run() installs a real one when possible.
        sentry = NoopErrorReporter()

        # If no encoding is defined, we default to UTF-8 because without this
        # Python 2.7 will assume the default encoding of ASCII. This will blow
        # up with UnicodeEncodeError as soon as it encounters a non-ASCII
        # character in a unicode instance. We simply install a wrapper around
        # the streams and restore once we have finished.
        stdin = sys.stdin if stdin is None else stdin
        stdout = sys.stdout if stdout is None else stdout
        stderr = sys.stderr if stderr is None else stderr

        orig_stdin = sys.stdin
        orig_stdout = sys.stdout
        orig_stderr = sys.stderr

        sys.stdin = stdin
        sys.stdout = stdout
        sys.stderr = stderr

        # Snapshot the environment so command-induced changes don't leak.
        orig_env = dict(os.environ)

        try:
            # Load settings as early as possible so things in dispatcher.py
            # can use them.
            for provider in Registrar.settings_providers:
                self.settings.register_provider(provider)

            setting_paths_to_pass = [Path(path) for path in self.settings_paths]
            self.load_settings(setting_paths_to_pass)

            # NOTE(review): dead on Python 3 (this file uses f-string
            # syntax elsewhere, so it never runs under 2.x); kept as-is.
            if sys.version_info < (3, 0):
                if stdin.encoding is None:
                    sys.stdin = codecs.getreader("utf-8")(stdin)

                if stdout.encoding is None:
                    sys.stdout = codecs.getwriter("utf-8")(stdout)

                if stderr.encoding is None:
                    sys.stderr = codecs.getwriter("utf-8")(stderr)

            # Allow invoked processes (which may not have a handle on the
            # original stdout file descriptor) to know if the original stdout
            # is a TTY. This provides a mechanism to allow said processes to
            # enable emitting escape codes, for example.
            if os.isatty(orig_stdout.fileno()):
                setenv("MACH_STDOUT_ISATTY", "1")

            return self._run(argv)
        except KeyboardInterrupt:
            print("mach interrupted by signal or user action. Stopping.")
            return 1

        except Exception:
            # _run swallows exceptions in invoked handlers and converts them to
            # a proper exit code. So, the only scenario where we should get an
            # exception here is if _run itself raises. If _run raises, that's a
            # bug in mach (or a loaded command module being silly) and thus
            # should be reported differently.
            self._print_error_header(argv, sys.stdout)
            print(MACH_ERROR_TEMPLATE % "general")

            exc_type, exc_value, exc_tb = sys.exc_info()
            stack = traceback.extract_tb(exc_tb)

            sentry_event_id = sentry.report_exception(exc_value)
            self._print_exception(
                sys.stdout, exc_type, exc_value, stack, sentry_event_id=sentry_event_id
            )

            return 1

        finally:
            # Restore the environment and the original process streams.
            os.environ.clear()
            os.environ.update(orig_env)

            sys.stdin = orig_stdin
            sys.stdout = orig_stdout
            sys.stderr = orig_stderr
+
def _run(self, argv):
    """Parse global arguments, dispatch the requested command, and map any
    failure onto a printed report plus an integer exit status.

    Returns 0 on success (or when usage was printed for an empty argv) and
    1 — or the command's own exit code — on failure.
    """
    # Sentry needs a topsrcdir to inspect; without a populate_context_handler
    # we cannot locate one, so fall back to the no-op reporter.
    if self.populate_context_handler:
        topsrcdir = Path(self.populate_context_handler("topdir"))
        sentry = register_sentry(argv, self.settings, topsrcdir)
    else:
        sentry = NoopErrorReporter()

    context = CommandContext(
        cwd=self.cwd,
        settings=self.settings,
        log_manager=self.log_manager,
        commands=Registrar,
    )

    if self.populate_context_handler:
        context = ContextWrapper(context, self.populate_context_handler)

    parser = self.get_argument_parser(context)
    context.global_parser = parser

    if not len(argv):
        # We don't register the usage until here because if it is globally
        # registered, argparse always prints it. This is not desired when
        # running with --help.
        parser.usage = Mach.USAGE
        parser.print_usage()
        return 0

    try:
        args = parser.parse_args(argv)
    except NoCommandError:
        print(NO_COMMAND_ERROR)
        return 1
    except UnknownCommandError as e:
        # Offer "did you mean" suggestions when the parser computed any.
        suggestion_message = (
            SUGGESTED_COMMANDS_MESSAGE % (e.verb, ", ".join(e.suggested_commands))
            if e.suggested_commands
            else ""
        )
        print(UNKNOWN_COMMAND_ERROR % (e.verb, e.command, suggestion_message))
        return 1
    except UnrecognizedArgumentError as e:
        print(UNRECOGNIZED_ARGUMENT_ERROR % (e.command, " ".join(e.arguments)))
        return 1

    if not hasattr(args, "mach_handler"):
        raise MachError("ArgumentParser result missing mach handler info.")

    # Interactive mode requires real TTYs on both stdout and stderr and is
    # forced off in automation (MOZ_AUTOMATION).
    context.is_interactive = (
        args.is_interactive
        and sys.__stdout__.isatty()
        and sys.__stderr__.isatty()
        and not os.environ.get("MOZ_AUTOMATION", None)
    )
    context.telemetry = create_telemetry_from_environment(self.settings)

    handler = getattr(args, "mach_handler")
    report_invocation_metrics(context.telemetry, handler.name)

    # Add JSON logging to a file if requested.
    if args.logfile:
        self.log_manager.add_json_handler(args.logfile)

    # Up the logging level if requested.
    log_level = logging.INFO
    if args.verbose:
        log_level = logging.DEBUG

    self.log_manager.register_structured_logger(logging.getLogger("mach"))

    # Timestamps are suppressed on request, or in environments (automation,
    # MACH_NO_WRITE_TIMES) where they add noise.
    write_times = True
    if (
        args.log_no_times
        or "MACH_NO_WRITE_TIMES" in os.environ
        or "MOZ_AUTOMATION" in os.environ
    ):
        write_times = False

    # Always enable terminal logging. The log manager figures out if we are
    # actually in a TTY or are a pipe and does the right thing.
    self.log_manager.add_terminal_logging(
        level=log_level, write_interval=args.log_interval, write_times=write_times
    )

    if args.settings_file:
        # Argument parsing has already happened, so settings that apply
        # to command line handling (e.g alias, defaults) will be ignored.
        self.load_settings([Path(args.settings_file)])

    try:
        return Registrar._run_command_handler(
            handler,
            context,
            debug_command=args.debug_command,
            profile_command=args.profile_command,
            **vars(args.command_args),
        )
    except KeyboardInterrupt as ki:
        # Propagate Ctrl+C; the caller decides how to surface it.
        raise ki
    except FailedCommandError as e:
        print(e)
        return e.exit_code
    except UserError:
        # We explicitly don't report UserErrors to Sentry.
        exc_type, exc_value, exc_tb = sys.exc_info()
        # The first two frames are us and are never used.
        stack = traceback.extract_tb(exc_tb)[2:]
        self._print_error_header(argv, sys.stdout)
        print(USER_ERROR)
        self._print_exception(sys.stdout, exc_type, exc_value, stack)
        return 1
    except Exception:
        exc_type, exc_value, exc_tb = sys.exc_info()
        sentry_event_id = sentry.report_exception(exc_value)

        # The first two frames are us and are never used.
        stack = traceback.extract_tb(exc_tb)[2:]

        # If we have nothing on the stack, the exception was raised as part
        # of calling the @Command method itself. This likely means a
        # mismatch between @CommandArgument and arguments to the method.
        # e.g. there exists a @CommandArgument without the corresponding
        # argument on the method. We handle that here until the module
        # loader grows the ability to validate better.
        if not len(stack):
            print(COMMAND_ERROR_TEMPLATE % handler.name)
            self._print_exception(
                sys.stdout,
                exc_type,
                exc_value,
                traceback.extract_tb(exc_tb),
                sentry_event_id=sentry_event_id,
            )
            return 1

        # Split the frames into those from the module containing the
        # command and everything else.
        command_frames = []
        other_frames = []

        initial_file = stack[0][0]

        for frame in stack:
            if frame[0] == initial_file:
                command_frames.append(frame)
            else:
                other_frames.append(frame)

        # If the exception was in the module providing the command, it's
        # likely the bug is in the mach command module, not something else.
        # If there are other frames, the bug is likely not the mach
        # command's fault.
        self._print_error_header(argv, sys.stdout)

        if len(other_frames):
            print(MODULE_ERROR_TEMPLATE % handler.name)
        else:
            print(COMMAND_ERROR_TEMPLATE % handler.name)

        self._print_exception(
            sys.stdout, exc_type, exc_value, stack, sentry_event_id=sentry_event_id
        )

        return 1
+
def log(self, level, action, params, format_str):
    """Record a structured log event on this instance's logger.

    ``action`` names the event kind and ``params`` carries its metadata;
    both ride along in the ``extra`` dict so structured handlers can
    recover them. ``format_str`` is the human-readable rendering.
    """
    structured_extra = {"action": action, "params": params}
    self.logger.log(level, format_str, extra=structured_extra)
+
def _print_error_header(self, argv, fh):
    """Write the standard "Error running mach" banner, including the argv
    that was being processed, to the file-like object *fh*."""
    fh.write("Error running mach:\n\n")
    fh.write(" {!r}\n\n".format(argv))
+
def _print_exception(self, fh, exc_type, exc_value, stack, sentry_event_id=None):
    """Write a formatted exception report to *fh*: the standard footer,
    the exception summary, the given traceback frames and — when an event
    was recorded — the Sentry event ID."""
    fh.write(ERROR_FOOTER)
    fh.write("\n")

    for summary_line in traceback.format_exception_only(exc_type, exc_value):
        fh.write(summary_line)

    fh.write("\n")
    for frame_line in traceback.format_list(stack):
        fh.write(frame_line)

    if sentry_event_id:
        fh.write("\nSentry event ID: {}\n".format(sentry_event_id))
+
def load_settings(self, paths: List[Path]):
    """Load the specified settings files.

    Each entry in *paths* may be a settings file itself, or a directory,
    in which case the basenames ``machrc`` then ``.machrc`` are searched
    for in that order. Entries resolving to no file are skipped.
    """
    candidates = ("machrc", ".machrc")

    def resolve(base: Path):
        # A direct file wins; otherwise probe the candidate basenames.
        if base.is_file():
            return base
        for candidate in candidates:
            target = base / candidate
            if target.is_file():
                return target
        return None

    resolved = [resolve(path) for path in paths]
    self.settings.load_files([found for found in resolved if found])
+
def get_argument_parser(self, context):
    """Build the top-level argument parser for the mach CLI.

    Global options are registered first — their registration order drives
    the generated help output — and the catch-all ``command`` argument is
    added last because CommandAction consumes every remaining argument.
    """
    parser = ArgumentParser(
        add_help=False,
        usage="%(prog)s [global arguments] command [command arguments]",
    )

    # WARNING!!! If you add a global argument here, also add it to the
    # global argument handling in the top-level `mach` script.
    group = parser.add_argument_group("Global Arguments")

    group.add_argument(
        "-v",
        "--verbose",
        dest="verbose",
        action="store_true",
        default=False,
        help="Print verbose output.",
    )
    group.add_argument(
        "-l",
        "--log-file",
        dest="logfile",
        metavar="FILENAME",
        type=argparse.FileType("a"),
        help="Filename to write log data to.",
    )
    group.add_argument(
        "--log-interval",
        dest="log_interval",
        action="store_true",
        default=False,
        help="Prefix log line with interval from last message rather "
        "than relative time. Note that this is NOT execution time "
        "if there are parallel operations.",
    )
    group.add_argument(
        "--no-interactive",
        dest="is_interactive",
        action="store_false",
        help="Automatically selects the default option on any "
        "interactive prompts. If the output is not a terminal, "
        "then --no-interactive is assumed.",
    )
    # Inside an Emacs shell buffer, timestamps are suppressed by default.
    group.add_argument(
        "--log-no-times",
        dest="log_no_times",
        action="store_true",
        default="INSIDE_EMACS" in os.environ,
        help="Do not prefix log lines with times. By default, "
        "mach will prefix each output line with the time since "
        "command start.",
    )
    group.add_argument(
        "-h",
        "--help",
        dest="help",
        action="store_true",
        default=False,
        help="Show this help message.",
    )
    group.add_argument(
        "--debug-command",
        action="store_true",
        help="Start a Python debugger when command is dispatched.",
    )
    group.add_argument(
        "--profile-command",
        action="store_true",
        help="Capture a Python profile of the mach process as command is dispatched.",
    )
    group.add_argument(
        "--settings",
        dest="settings_file",
        metavar="FILENAME",
        default=None,
        help="Path to settings file.",
    )

    # Must be last: CommandAction swallows all remaining arguments and
    # argparse parses arguments in the order they were added.
    parser.add_argument(
        "command", action=CommandAction, registrar=Registrar, context=context
    )

    return parser
diff --git a/python/mach/mach/mixin/__init__.py b/python/mach/mach/mixin/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mach/mach/mixin/__init__.py
diff --git a/python/mach/mach/mixin/logging.py b/python/mach/mach/mixin/logging.py
new file mode 100644
index 0000000000..4ba6955a2d
--- /dev/null
+++ b/python/mach/mach/mixin/logging.py
@@ -0,0 +1,52 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import logging
+
+
class LoggingMixin(object):
    """Mix-in granting classes a lazily created logger plus a structured
    log() helper."""

    def populate_logger(self, name=None):
        """Attach a ``logging.Logger`` to this instance as ``self._logger``.

        Does nothing when a logger is already present, so repeated calls
        are safe. When *name* is omitted, the logger is named after the
        instance's module and class (``module.ClassName``).
        """
        if hasattr(self, "_logger"):
            return

        logger_name = name
        if logger_name is None:
            logger_name = "{}.{}".format(self.__module__, self.__class__.__name__)

        self._logger = logging.getLogger(logger_name)

    def log(self, level, action, params, format_str):
        """Emit a structured log event.

        *level* is a ``logging.*`` constant such as ``logging.INFO``.
        *action* enumerates the kind of event — each distinct event type
        should use a distinct action string. *params* is the dict of
        metadata constituting the event; both travel in ``extra`` so
        structured handlers can serialize them. *format_str* converts the
        event back to human-readable form via ``format()`` over *params*.

        Example usage:

        .. code-block:: python

            self.log(logging.DEBUG, 'login', {'username': 'johndoe'},
                'User login: {username}')
        """
        structured_extra = {"action": action, "params": params}
        self._logger.log(level, format_str, extra=structured_extra)
diff --git a/python/mach/mach/mixin/process.py b/python/mach/mach/mixin/process.py
new file mode 100644
index 0000000000..d5fd733a17
--- /dev/null
+++ b/python/mach/mach/mixin/process.py
@@ -0,0 +1,217 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This module provides mixins to perform process execution.
+
+import logging
+import os
+import signal
+import subprocess
+import sys
+from pathlib import Path
+from typing import Optional
+
+from mozprocess.processhandler import ProcessHandlerMixin
+
+from .logging import LoggingMixin
+
# Perform detection of operating system environment. This is used by command
# execution. We only do this once to save redundancy. Yes, this can fail module
# loading. That is arguably OK.
if "SHELL" in os.environ:
    # POSIX-like environments advertise their shell directly.
    _current_shell = os.environ["SHELL"]
elif "MOZILLABUILD" in os.environ:
    # MozillaBuild on Windows: prefer the msys2 sh.exe when present,
    # otherwise fall back to the older msys layout.
    mozillabuild = os.environ["MOZILLABUILD"]
    if (Path(mozillabuild) / "msys2").exists():
        _current_shell = mozillabuild + "/msys2/usr/bin/sh.exe"
    else:
        _current_shell = mozillabuild + "/msys/bin/sh.exe"
elif "COMSPEC" in os.environ:
    # Plain Windows: use whatever COMSPEC names (normally cmd.exe).
    _current_shell = os.environ["COMSPEC"]
elif sys.platform != "win32":
    # Fall back to a standard shell.
    _current_shell = "/bin/sh"
else:
    raise Exception("Could not detect environment shell!")

# True when running under an msys/MinGW environment on Windows; used by
# ProcessExecutionMixin._normalize_command to rewrite shell invocations.
_in_msys = False

if (
    os.environ.get("MSYSTEM", None) in ("MINGW32", "MINGW64")
    or "MOZILLABUILD" in os.environ
):
    _in_msys = True

    # msys shells must be invoked via their .exe name.
    if not _current_shell.lower().endswith(".exe"):
        _current_shell += ".exe"
+
+
class LineHandlingEarlyReturn(Exception):
    """Raised by a run_process line_handler callback to stop the line from
    also being logged (see the handleLine helper in run_process)."""

    pass
+
+
class ProcessExecutionMixin(LoggingMixin):
    """Mix-in that provides process execution functionality."""

    def run_process(
        self,
        args=None,
        cwd: Optional[str] = None,
        append_env=None,
        explicit_env=None,
        log_name=None,
        log_level=logging.INFO,
        line_handler=None,
        require_unix_environment=False,
        ensure_exit_code=0,
        ignore_children=False,
        pass_thru=False,
        python_unbuffered=True,
    ):
        """Runs a single process to completion.

        Takes a list of arguments to run where the first item is the
        executable. Runs the command in the specified directory and
        with optional environment variables.

        append_env -- Dict of environment variables to append to the current
            set of environment variables.
        explicit_env -- Dict of environment variables to set for the new
            process. Any existing environment variables will be ignored.
            The dict itself is never mutated by this method.

        require_unix_environment if True will ensure the command is executed
        within a UNIX environment. Basically, if we are on Windows, it will
        execute the command via an appropriate UNIX-like shell.

        ignore_children is proxied to mozprocess's ignore_children.

        ensure_exit_code is used to ensure the exit code of a process matches
        what is expected. If it is an integer, we raise an Exception if the
        exit code does not match this value. If it is True, we ensure the exit
        code is 0. If it is False, we don't perform any exit code validation.

        pass_thru is a special execution mode where the child process inherits
        this process's standard file handles (stdin, stdout, stderr) as well as
        additional file descriptors. It should be used for interactive processes
        where buffering from mozprocess could be an issue. pass_thru does not
        use mozprocess. Therefore, arguments like log_name, line_handler,
        and ignore_children have no effect.

        When python_unbuffered is set, the PYTHONUNBUFFERED environment variable
        will be set in the child process. This is normally advantageous (see bug
        1627873) but is detrimental in certain circumstances (specifically, we
        have seen issues when using pass_thru mode to open a Python subshell, as
        in bug 1628838). This variable should be set to False to avoid bustage
        in those circumstances.
        """
        args = self._normalize_command(args, require_unix_environment)

        self.log(logging.INFO, "new_process", {"args": " ".join(args)}, "{args}")

        def handle_line(line):
            # Converts str to unicode on Python 2 and bytes to str on Python 3.
            if isinstance(line, bytes):
                line = line.decode(sys.stdout.encoding or "utf-8", "replace")

            if line_handler:
                try:
                    line_handler(line)
                except LineHandlingEarlyReturn:
                    # The handler asked us not to log this line.
                    return

            if line.startswith("BUILDTASK") or not log_name:
                return

            self.log(log_level, log_name, {"line": line.rstrip()}, "{line}")

        # Build the child environment in a fresh dict. explicit_env is
        # copied rather than aliased (the previous code assigned it
        # directly), so the append_env/PYTHONUNBUFFERED updates below can
        # never leak back into the caller's dict.
        use_env = {}
        if explicit_env:
            use_env.update(explicit_env)
        else:
            use_env.update(os.environ)

        if append_env:
            use_env.update(append_env)

        if python_unbuffered:
            use_env["PYTHONUNBUFFERED"] = "1"

        self.log(logging.DEBUG, "process", {"env": str(use_env)}, "Environment: {env}")

        if pass_thru:
            proc = subprocess.Popen(args, cwd=cwd, env=use_env, close_fds=False)
            status = None
            # Leave it to the subprocess to handle Ctrl+C. If it terminates as
            # a result of Ctrl+C, proc.wait() will return a status code, and,
            # we get out of the loop. If it doesn't, like e.g. gdb, we continue
            # waiting.
            while status is None:
                try:
                    status = proc.wait()
                except KeyboardInterrupt:
                    pass
        else:
            p = ProcessHandlerMixin(
                args,
                cwd=cwd,
                env=use_env,
                processOutputLine=[handle_line],
                universal_newlines=True,
                ignore_children=ignore_children,
            )
            p.run()
            p.processOutput()
            status = None
            sig = None
            while status is None:
                try:
                    if sig is None:
                        status = p.wait()
                    else:
                        status = p.kill(sig=sig)
                except KeyboardInterrupt:
                    # First Ctrl+C forwards SIGINT to the child; a second
                    # Ctrl+C escalates to SIGKILL.
                    if sig is None:
                        sig = signal.SIGINT
                    elif sig == signal.SIGINT:
                        # If we've already tried SIGINT, escalate.
                        sig = signal.SIGKILL

        if ensure_exit_code is False:
            return status

        if ensure_exit_code is True:
            ensure_exit_code = 0

        if status != ensure_exit_code:
            raise Exception(
                "Process executed with non-0 exit code %d: %s" % (status, args)
            )

        return status

    def _normalize_command(self, args, require_unix_environment):
        """Adjust command arguments to run in the necessary environment.

        This exists mainly to facilitate execution of programs requiring a *NIX
        shell when running on Windows. The caller specifies whether a shell
        environment is required. If it is and we are running on Windows but
        aren't running in the UNIX-like msys environment, then we rewrite the
        command to execute via a shell.
        """
        assert isinstance(args, list) and len(args)

        if not require_unix_environment or not _in_msys:
            return args

        # Always munge Windows-style into Unix style for the command.
        prog = args[0].replace("\\", "/")

        # PyMake removes the C: prefix. But, things seem to work here
        # without it. Not sure what that's about.

        # We run everything through the msys shell. We need to use
        # '-c' and pass all the arguments as one argument because that is
        # how sh works.
        cline = subprocess.list2cmdline([prog] + args[1:])
        return [_current_shell, "-c", cline]
diff --git a/python/mach/mach/python_lockfile.py b/python/mach/mach/python_lockfile.py
new file mode 100644
index 0000000000..78f201d4ed
--- /dev/null
+++ b/python/mach/mach/python_lockfile.py
@@ -0,0 +1,79 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import shutil
+import subprocess
+import sys
+from pathlib import Path
+
+import toml
+from packaging.requirements import Requirement
+
+
class PoetryLockfiles:
    """Pair of lockfile paths produced by lockfile generation: the native
    poetry lockfile and its pip-style ("requirements.txt") export."""

    def __init__(self, poetry_lockfile: Path, pip_lockfile: Path):
        self.poetry_lockfile = poetry_lockfile
        self.pip_lockfile = pip_lockfile
+
+
class PoetryHandle:
    """Drives poetry inside a scratch directory to resolve a collection of
    Python requirements into reproducible lockfiles."""

    def __init__(self, work_dir: Path):
        self._work_dir = work_dir
        # Maps package name -> version-specifier string; becomes the
        # [tool.poetry.dependencies] table of the generated pyproject.toml.
        self._dependencies = {}

    def add_requirement(self, requirement: Requirement):
        """Record a single parsed requirement to be resolved."""
        self._dependencies[requirement.name] = str(requirement.specifier)

    def add_requirements_in_file(self, requirements_in: Path):
        """Record every requirement listed in a pip-style requirements file.

        Comment lines and blank lines are skipped; previously a blank line
        reached Requirement(), which raises on an empty string.
        """
        with open(requirements_in) as requirements_file:
            for raw_line in requirements_file:
                line = raw_line.strip()
                if not line or line.startswith("#"):
                    continue

                self.add_requirement(Requirement(line))

    def reuse_existing_lockfile(self, lockfile_path: Path):
        """Make minimal number of changes to the lockfile to satisfy new requirements"""
        shutil.copy(str(lockfile_path), str(self._work_dir / "poetry.lock"))

    def generate_lockfiles(self, do_update):
        """Generate pip-style lockfiles that satisfy provided requirements

        One lockfile will be made for all mandatory requirements, and then an extra,
        compatible lockfile will be created for each optional requirement.

        Args:
            do_update: if True, then implicitly upgrade the versions of transitive
                dependencies

        Returns:
            A PoetryLockfiles naming the generated poetry.lock and
            requirements.txt in the work directory.
        """

        # Poetry insists on a full project definition, so fabricate a
        # minimal pyproject.toml around the collected dependencies.
        poetry_config = {
            "name": "poetry-test",
            "description": "",
            "version": "0",
            "authors": [],
            "dependencies": {"python": "^3.7"},
        }
        poetry_config["dependencies"].update(self._dependencies)

        pyproject = {"tool": {"poetry": poetry_config}}
        with open(self._work_dir / "pyproject.toml", "w") as pyproject_file:
            toml.dump(pyproject, pyproject_file)

        self._run_poetry(["lock"] + (["--no-update"] if not do_update else []))
        self._run_poetry(["export", "-o", "requirements.txt"])

        return PoetryLockfiles(
            self._work_dir / "poetry.lock",
            self._work_dir / "requirements.txt",
        )

    def _run_poetry(self, args):
        # Run poetry as a module of the current interpreter, inside the
        # scratch directory; raises CalledProcessError on failure.
        subprocess.check_call(
            [sys.executable, "-m", "poetry"] + args, cwd=self._work_dir
        )
diff --git a/python/mach/mach/registrar.py b/python/mach/mach/registrar.py
new file mode 100644
index 0000000000..75481596f4
--- /dev/null
+++ b/python/mach/mach/registrar.py
@@ -0,0 +1,186 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import time
+from cProfile import Profile
+from pathlib import Path
+
+import six
+
+from .base import MachError
+
# Printed when a command's registered conditions fail for the current
# context; interpolated with (command name, rendered failed-condition list).
INVALID_COMMAND_CONTEXT = r"""
It looks like you tried to run a mach command from an invalid context. The %s
command failed to meet the following conditions: %s

Run |mach help| to show a list of all commands available to the current context.
""".lstrip()
+
+
class MachRegistrar(object):
    """Container for mach command and config providers."""

    def __init__(self):
        # Maps command name -> command handler object.
        self.command_handlers = {}
        # Maps category name -> set of command names registered under it.
        self.commands_by_category = {}
        # Classes contributing configuration settings.
        self.settings_providers = set()
        # Maps category name -> (title, description, priority).
        self.categories = {}
        self.require_conditions = False
        # Depth of nested dispatch() calls, reported to post-run hooks so
        # they can tell top-level invocations from command-invoked commands.
        self.command_depth = 0

    def register_command_handler(self, handler):
        """Register *handler* under its name; its category must already have
        been declared via register_category()."""
        name = handler.name

        if not handler.category:
            raise MachError(
                "Cannot register a mach command without a " "category: %s" % name
            )

        if handler.category not in self.categories:
            raise MachError(
                "Cannot register a command to an undefined "
                "category: %s -> %s" % (name, handler.category)
            )

        self.command_handlers[name] = handler
        self.commands_by_category[handler.category].add(name)

    def register_settings_provider(self, cls):
        # Record a class that contributes configuration settings.
        self.settings_providers.add(cls)

    def register_category(self, name, title, description, priority=50):
        """Declare a command category before commands are registered into it."""
        self.categories[name] = (title, description, priority)
        self.commands_by_category[name] = set()

    @classmethod
    def _condition_failed_message(cls, name, conditions):
        # Render INVALID_COMMAND_CONTEXT, listing each failed condition by
        # its function name plus (when present) its docstring.
        msg = ["\n"]
        for c in conditions:
            part = [" %s" % getattr(c, "__name__", c)]
            if c.__doc__ is not None:
                part.append(c.__doc__)
            msg.append(" - ".join(part))
        return INVALID_COMMAND_CONTEXT % (name, "\n".join(msg))

    @classmethod
    def _instance(_, handler, context, **kwargs):
        # NOTE: the conventional `cls` slot is named `_` because it is unused.
        if context is None:
            raise ValueError("Expected a non-None context.")

        # Give the embedding application a chance to observe the dispatch
        # before the command instance is created.
        prerun = getattr(context, "pre_dispatch_handler", None)
        if prerun:
            prerun(context, handler, args=kwargs)

        context.handler = handler
        return handler.create_instance(context, handler.virtualenv_name)

    @classmethod
    def _fail_conditions(_, handler, instance):
        # Return the subset of the handler's condition callables that
        # evaluate falsy for *instance*.
        fail_conditions = []
        if handler.conditions:
            for c in handler.conditions:
                if not c(instance):
                    fail_conditions.append(c)

        return fail_conditions

    def _run_command_handler(
        self, handler, context, debug_command=False, profile_command=False, **kwargs
    ):
        """Create the command instance, verify its conditions, then invoke it
        (optionally under pdb and/or cProfile) and return its integer exit
        status."""
        instance = MachRegistrar._instance(handler, context, **kwargs)
        fail_conditions = MachRegistrar._fail_conditions(handler, instance)
        if fail_conditions:
            print(
                MachRegistrar._condition_failed_message(handler.name, fail_conditions)
            )
            return 1

        self.command_depth += 1
        fn = handler.func
        if handler.virtualenv_name:
            instance.activate_virtualenv()

        profile = None
        if profile_command:
            profile = Profile()
            profile.enable()

        start_time = time.monotonic()

        if debug_command:
            import pdb

            result = pdb.runcall(fn, instance, **kwargs)
        else:
            result = fn(instance, **kwargs)

        end_time = time.monotonic()

        if profile_command:
            profile.disable()
            profile_file = (
                Path(context.topdir) / f"mach_profile_{handler.name}.cProfile"
            )
            profile.dump_stats(profile_file)
            print(
                f'Mach command profile created at "{profile_file}". To visualize, use '
                f"snakeviz:"
            )
            print("python3 -m pip install snakeviz")
            print(f"python3 -m snakeviz {profile_file.name}")

        # A command returning None means success (exit status 0).
        result = result or 0
        assert isinstance(result, six.integer_types)

        if not debug_command:
            # Post-run hooks are skipped under the debugger so interactive
            # pauses don't pollute the recorded timing.
            postrun = getattr(context, "post_dispatch_handler", None)
            if postrun:
                postrun(
                    context,
                    handler,
                    instance,
                    not result,
                    start_time,
                    end_time,
                    self.command_depth,
                    args=kwargs,
                )
        self.command_depth -= 1

        return result

    def dispatch(self, name, context, argv=None, subcommand=None, **kwargs):
        """Dispatch/run a command.

        Commands can use this to call other commands.
        """
        handler = self.command_handlers[name]

        if subcommand:
            handler = handler.subcommand_handlers[subcommand]

        if handler.parser:
            parser = handler.parser

            # save and restore existing defaults so **kwargs don't persist across
            # subsequent invocations of Registrar.dispatch()
            old_defaults = parser._defaults.copy()
            parser.set_defaults(**kwargs)
            kwargs, unknown = parser.parse_known_args(argv or [])
            kwargs = vars(kwargs)
            parser._defaults = old_defaults

            if unknown:
                if subcommand:
                    name = "{} {}".format(name, subcommand)
                parser.error(
                    "unrecognized arguments for {}: {}".format(
                        name, ", ".join(["'{}'".format(arg) for arg in unknown])
                    )
                )

        return self._run_command_handler(handler, context, **kwargs)
+
+
# Shared module-level registrar: command registration and the mach driver
# both operate on this single instance.
Registrar = MachRegistrar()
diff --git a/python/mach/mach/requirements.py b/python/mach/mach/requirements.py
new file mode 100644
index 0000000000..d5141e23f6
--- /dev/null
+++ b/python/mach/mach/requirements.py
@@ -0,0 +1,183 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import os
+from pathlib import Path
+
+from packaging.requirements import Requirement
+
# Message raised when a Thunderbird child manifest declares a "pypi" or
# "pypi-optional" entry, which is not permitted.
THUNDERBIRD_PYPI_ERROR = """
Thunderbird requirements definitions cannot include PyPI packages.
""".strip()
+
+
class PthSpecifier:
    """A "pth" manifest entry: a path (relative to topsrcdir) destined for
    the virtualenv's mach.pth file."""

    def __init__(self, path: str):
        self.path = path
+
+
class PypiSpecifier:
    """A "pypi" manifest entry wrapping a parsed package requirement."""

    def __init__(self, requirement):
        self.requirement = requirement


class PypiOptionalSpecifier(PypiSpecifier):
    """A "pypi-optional" manifest entry; additionally carries the
    human-readable repercussion describing what is lost when the package
    cannot be installed."""

    def __init__(self, repercussion, requirement):
        super().__init__(requirement)
        self.repercussion = repercussion
+
+
class MachEnvRequirements:
    """Requirements associated with a "site dependency manifest", as
    defined in "python/sites/".

    Represents the dependencies of a site. Manifest source files consist of
    colon-delimited fields: the first field names the action, the rest are
    its arguments. Supported actions:

    pth -- add the argument path to "mach.pth" under the virtualenv's
        site-packages directory.

    pypi -- fetch the package, plus dependencies, from PyPI.

    pypi-optional -- attempt to install the package and dependencies from
        PyPI, but keep using the site even when installation fails.

    packages.txt -- the argument path is a child manifest, processed as if
        its contents were concatenated into the current manifest.

    thunderbird-packages.txt -- a Thunderbird child manifest; only
        activated when working on Thunderbird, and forbidden from
        containing "pypi" or "pypi-optional" entries.
    """

    def __init__(self):
        self.requirements_paths = []
        self.pth_requirements = []
        self.pypi_requirements = []
        self.pypi_optional_requirements = []
        self.vendored_requirements = []

    def pths_as_absolute(self, topsrcdir: str):
        """Return the pth and vendored entry paths anchored at *topsrcdir*,
        case-normalized for the host platform."""
        root = Path(topsrcdir)
        absolute_paths = []
        for specifier in self.pth_requirements + self.vendored_requirements:
            absolute_paths.append(os.path.normcase(root / specifier.path))
        return absolute_paths

    @classmethod
    def from_requirements_definition(
        cls,
        topsrcdir: str,
        is_thunderbird,
        only_strict_requirements,
        requirements_definition,
    ):
        """Parse *requirements_definition* (plus any child manifests it
        references) into a populated MachEnvRequirements."""
        requirements = cls()
        _parse_mach_env_requirements(
            requirements,
            Path(requirements_definition),
            Path(topsrcdir),
            is_thunderbird,
            only_strict_requirements,
        )
        return requirements
+
+
def _parse_mach_env_requirements(
    requirements_output,
    root_requirements_path: Path,
    topsrcdir: Path,
    is_thunderbird,
    only_strict_requirements,
):
    """Populate *requirements_output* (a MachEnvRequirements) by parsing the
    manifest at *root_requirements_path* and, recursively, any child
    manifests it references.

    Raises Exception for a missing manifest file, an unknown action, or a
    PyPI entry inside a Thunderbird child manifest;
    UnexpectedFlexibleRequirementException propagates from
    _parse_package_specifier when a non-pinned requirement is found while
    *only_strict_requirements* is set.
    """

    def _parse_requirements_line(
        current_requirements_path: Path, line, line_number, is_thunderbird_packages_txt
    ):
        line = line.strip()
        if not line or line.startswith("#"):
            return

        # Manifest entries have the shape "<action>:<params>".
        action, params = line.rstrip().split(":", maxsplit=1)
        if action == "pth":
            path = topsrcdir / params
            if not path.exists():
                # In sparse checkouts, not all paths will be populated.
                return

            requirements_output.pth_requirements.append(PthSpecifier(params))
        elif action == "vendored":
            requirements_output.vendored_requirements.append(PthSpecifier(params))
        elif action == "packages.txt":
            # Child manifest: processed as if inlined here.
            _parse_requirements_definition_file(
                topsrcdir / params,
                is_thunderbird_packages_txt,
            )
        elif action == "pypi":
            if is_thunderbird_packages_txt:
                raise Exception(THUNDERBIRD_PYPI_ERROR)

            requirements_output.pypi_requirements.append(
                PypiSpecifier(
                    _parse_package_specifier(params, only_strict_requirements)
                )
            )
        elif action == "pypi-optional":
            if is_thunderbird_packages_txt:
                raise Exception(THUNDERBIRD_PYPI_ERROR)

            if len(params.split(":", maxsplit=1)) != 2:
                raise Exception(
                    "Expected pypi-optional package to have a repercussion "
                    'description in the format "package:fallback explanation", '
                    'found "{}"'.format(params)
                )
            # Split on the FIRST colon only: the repercussion text may itself
            # contain colons, and the previous unbounded split(":") made this
            # 2-tuple unpacking raise ValueError in that case.
            raw_requirement, repercussion = params.split(":", maxsplit=1)
            requirements_output.pypi_optional_requirements.append(
                PypiOptionalSpecifier(
                    repercussion,
                    _parse_package_specifier(raw_requirement, only_strict_requirements),
                )
            )
        elif action == "thunderbird-packages.txt":
            # Only honored when working on Thunderbird; skipped otherwise.
            if is_thunderbird:
                _parse_requirements_definition_file(
                    topsrcdir / params, is_thunderbird_packages_txt=True
                )
        else:
            raise Exception("Unknown requirements definition action: %s" % action)

    def _parse_requirements_definition_file(
        requirements_path: Path, is_thunderbird_packages_txt
    ):
        """Parse requirements file into list of requirements"""
        if not requirements_path.is_file():
            raise Exception(f'Missing requirements file at "{requirements_path}"')

        requirements_output.requirements_paths.append(str(requirements_path))

        with open(requirements_path, "r") as requirements_file:
            lines = [line for line in requirements_file]

        for number, line in enumerate(lines, start=1):
            _parse_requirements_line(
                requirements_path, line, number, is_thunderbird_packages_txt
            )

    _parse_requirements_definition_file(root_requirements_path, False)
+
+
class UnexpectedFlexibleRequirementException(Exception):
    """Raised when a requirement is not strictly pinned ("==") even though
    only strictly-pinned requirements are permitted."""

    def __init__(self, raw_requirement):
        # Keep the offending requirement string for the caller's report.
        self.raw_requirement = raw_requirement
+
+
def _parse_package_specifier(raw_requirement, only_strict_requirements):
    """Parse a requirement string, enforcing exact ("==") version pins when
    *only_strict_requirements* is set."""
    requirement = Requirement(raw_requirement)

    if only_strict_requirements:
        has_flexible_pin = any(
            spec.operator != "==" for spec in requirement.specifier
        )
        if has_flexible_pin:
            raise UnexpectedFlexibleRequirementException(raw_requirement)
    return requirement
diff --git a/python/mach/mach/sentry.py b/python/mach/mach/sentry.py
new file mode 100644
index 0000000000..5008f8a40c
--- /dev/null
+++ b/python/mach/mach/sentry.py
@@ -0,0 +1,222 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import abc
+import re
+from pathlib import Path
+from threading import Thread
+
+import sentry_sdk
+from mozversioncontrol import (
+ InvalidRepoPath,
+ MissingUpstreamRepo,
+ MissingVCSTool,
+ get_repository_object,
+)
+from six import string_types
+
+from mach.telemetry import is_telemetry_enabled
+from mach.util import get_state_dir
+
+# https://sentry.io/organizations/mozilla/projects/mach/
+_SENTRY_DSN = (
+ "https://5cfe351fb3a24e8d82c751252b48722b@o1069899.ingest.sentry.io/6250014"
+)
+
+
class ErrorReporter(object):
    """Interface for reporting unhandled exceptions to error-tracking software.

    NOTE(review): this class inherits from object, not abc.ABC, so
    instantiation is not actually blocked; @abc.abstractmethod serves as
    documentation of the interface here.
    """

    @abc.abstractmethod
    def report_exception(self, exception):
        """Report the exception to remote error-tracking software."""
+
+
class SentryErrorReporter(ErrorReporter):
    """Reports errors using Sentry."""

    def report_exception(self, exception):
        # Delegate to the Sentry SDK and return its capture_exception result;
        # events may still be scrubbed/dropped by the before_send hook
        # installed in register_sentry().
        return sentry_sdk.capture_exception(exception)
+
+
class NoopErrorReporter(ErrorReporter):
    """Drops errors instead of reporting them.

    This is useful in cases where error-reporting is specifically disabled, such as
    when telemetry hasn't been allowed.
    """

    def report_exception(self, exception):
        # Intentionally a no-op; returns None where SentryErrorReporter would
        # return the SDK's capture result.
        return None
+
+
def register_sentry(argv, settings, topsrcdir: Path):
    """Set up Sentry error reporting, gated on telemetry being enabled.

    Args:
        argv: the Mach command line, recorded as a Sentry breadcrumb.
        settings: Mach settings, consulted for the telemetry opt-in.
        topsrcdir: path to the source checkout.

    Returns:
        A SentryErrorReporter, or a NoopErrorReporter when telemetry is
        disabled.
    """
    if not is_telemetry_enabled(settings):
        return NoopErrorReporter()

    global _is_unmodified_mach_core_thread
    # Checking whether mach files were locally modified requires VCS queries;
    # run it on a background daemon thread so it overlaps normal Mach work.
    # _process_event() join()s this thread before consuming the result.
    _is_unmodified_mach_core_thread = Thread(
        target=_is_unmodified_mach_core,
        args=[topsrcdir],
        daemon=True,
    )
    _is_unmodified_mach_core_thread.start()

    # All events pass through _process_event, which scrubs them and may drop
    # them entirely.
    sentry_sdk.init(
        _SENTRY_DSN, before_send=lambda event, _: _process_event(event, topsrcdir)
    )
    sentry_sdk.add_breadcrumb(message="./mach {}".format(" ".join(argv)))
    return SentryErrorReporter()
+
+
def _process_event(sentry_event, topsrcdir: Path):
    """Sentry "before_send" hook: gate and scrub an event before submission.

    Returns the scrubbed event, or None (implicit) to drop it.
    """
    # Returning nothing causes the event to be dropped:
    # https://docs.sentry.io/platforms/python/configuration/filtering/#using-beforesend
    repo = _get_repository_object(topsrcdir)
    if repo is None:
        # We don't know the repo state, so we don't know if mach files are
        # unmodified.
        return

    base_ref = repo.base_ref_as_hg()
    if not base_ref:
        # If we don't know which revision this exception is attached to, then it's
        # not worth sending
        return

    # Wait for the background check started in register_sentry(); only report
    # errors reproducible from an unmodified checkout. Note: a result of None
    # (thread failed or never ran) is treated the same as "modified".
    _is_unmodified_mach_core_thread.join()
    if not _is_unmodified_mach_core_result:
        return

    # Scrubbing passes: normalize generated module ids, strip machine-specific
    # paths, and remove the hostname.
    for map_fn in (_settle_mach_module_id, _patch_absolute_paths, _delete_server_name):
        sentry_event = map_fn(sentry_event, topsrcdir)

    # Tag the event with the hg revision so it can be mapped back to source.
    sentry_event["release"] = "hg-rev-{}".format(base_ref)
    return sentry_event
+
+
def _settle_mach_module_id(sentry_event, _):
    """Replace generated mach.commands module ids with a static placeholder.

    Sentry groups issues according to the stack frames and their associated
    "module" properties, but command modules are reported with a generated id,
    e.g. "mach.commands.26a828ef5164403eaff4305ab4cb0fab". Substituting the
    static string "<generated>" makes grouping behave as expected.
    """
    generated_id = re.compile(r"mach\.commands\.[a-f0-9]{32}")
    frames = sentry_event["exception"]["values"][0]["stacktrace"]["frames"]
    for frame in frames:
        module_name = frame.get("module")
        if module_name:
            frame["module"] = generated_id.sub(
                "mach.commands.<generated>", module_name
            )
    return sentry_event
+
+
def _patch_absolute_paths(sentry_event, topsrcdir: Path):
    """Scrub machine-specific absolute paths (and thus usernames) from an event."""
    # As discussed here (https://bugzilla.mozilla.org/show_bug.cgi?id=1636251#c28),
    # we remove usernames from file names with a best-effort basis. The most likely
    # place for usernames to manifest in Sentry information is within absolute paths,
    # such as: "/home/mitch/dev/firefox/mach"
    # We replace the state_dir, obj_dir, src_dir with "<...>" placeholders.
    # Note that we also do a blanket find-and-replace of the user's name with "<user>",
    # which may have ill effects if the user's name is, by happenstance, a substring
    # of some other value within the Sentry event.
    # (NOTE(review): in the code below the home directory is replaced with "~",
    # which is what covers the username in practice.)
    def recursive_patch(value, needle, replacement):
        # Apply "needle -> replacement" to every string inside a nested
        # JSON-like structure: lists, dicts (including their keys), strings.
        if isinstance(value, list):
            return [recursive_patch(v, needle, replacement) for v in value]
        elif isinstance(value, dict):
            for key in list(value.keys()):
                next_value = value.pop(key)
                key = needle.sub(replacement, key)
                value[key] = recursive_patch(next_value, needle, replacement)
            return value
        elif isinstance(value, string_types):
            return needle.sub(replacement, value)
        else:
            return value

    for (target_path, replacement) in (
        (get_state_dir(), "<statedir>"),
        (str(topsrcdir), "<topsrcdir>"),
        (str(Path.home()), "~"),
    ):
        # Sentry converts "vars" to their "representations". When paths are in local
        # variables on Windows, "C:\Users\MozillaUser\Desktop" becomes
        # "'C:\\Users\\MozillaUser\\Desktop'". To still catch this case, we "repr"
        # the home directory and scrub the beginning and end quotes, then
        # find-and-replace on that.
        repr_path = repr(target_path)[1:-1]

        for target in (target_path, repr_path):
            # Paths in the Sentry event aren't consistent:
            # * On *nix, they're mostly forward slashes.
            # * On *nix, not all absolute paths start with a leading forward slash.
            # * On Windows, they're mostly backslashes.
            # * On Windows, `.extra."sys.argv"` uses forward slashes.
            # * The Python variables in-scope captured by the Sentry report may be
            #   inconsistent, even for a single path. For example, on
            #   Windows, Mach calculates the state_dir as "C:\Users\<user>/.mozbuild".

            # Handle the case where not all absolute paths start with a leading
            # forward slash: make the initial slash optional in the search string.
            if target.startswith("/"):
                target = "/?" + target[1:]

            # Handle all possible slash variants: our search string should match
            # both forward slashes and backslashes. This is done by dynamically
            # replacing each "/" and "\" with the regex "[\/\\]" (match both).
            slash_regex = re.compile(r"[\/\\]")
            # The regex module parses string backslash escapes before compiling the
            # regex, so we need to add more backslashes:
            # "[\\/\\\\]" => [\/\\] => match "/" and "\"
            target = slash_regex.sub(r"[\\/\\\\]", target)

            # Compile the regex and patch the event.
            needle_regex = re.compile(target, re.IGNORECASE)
            sentry_event = recursive_patch(sentry_event, needle_regex, replacement)
    return sentry_event
+
+
def _delete_server_name(sentry_event, _):
    """Remove the machine's hostname ("server_name") from the event.

    Uses pop() with a default so that an event which (unexpectedly) lacks the
    key doesn't raise KeyError inside the before_send hook — an exception
    there would cause the event to be lost with an SDK-internal error instead
    of being scrubbed and sent.
    """
    sentry_event.pop("server_name", None)
    return sentry_event
+
+
def _get_repository_object(topsrcdir: Path):
    """Return a mozversioncontrol repository object for topsrcdir, or None.

    None is returned when topsrcdir isn't a recognized checkout
    (InvalidRepoPath) or the VCS tool isn't installed (MissingVCSTool).
    """
    try:
        return get_repository_object(str(topsrcdir))
    except (InvalidRepoPath, MissingVCSTool):
        return None
+
+
def _is_unmodified_mach_core(topsrcdir: Path):
    """Determine whether mach is unmodified compared to the public tree.

    The outcome is stored in the module-level global
    "_is_unmodified_mach_core_result" (this function runs on a background
    thread, so it communicates via the global rather than a return value).

    To avoid submitting Sentry events for errors caused by user's
    local changes, we attempt to detect if mach (or code affecting mach)
    has been modified in the user's local state:
    * In a revision off of a "ancestor to central" revision, or:
    * In the working, uncommitted state.

    If "$topsrcdir/mach" and "*.py" haven't been touched, then we can be
    pretty confident that the Mach behaviour that caused the exception
    also exists in the public tree.
    """
    global _is_unmodified_mach_core_result

    repo = _get_repository_object(topsrcdir)
    if repo is None:
        # _get_repository_object returns None for unusable checkouts; without
        # it we can't tell whether mach was modified, so err on the side of
        # not reporting. (Previously this raised AttributeError silently in
        # the daemon thread, leaving the result as None — same net effect,
        # but by accident.)
        _is_unmodified_mach_core_result = False
        return

    try:
        files = set(repo.get_outgoing_files()) | set(repo.get_changed_files())
        # Generator instead of a materialized list lets any() short-circuit.
        _is_unmodified_mach_core_result = not any(
            file == "mach" or file.endswith(".py") for file in files
        )
    except MissingUpstreamRepo:
        # If we don't know the upstream state, we don't know if the mach files
        # have been unmodified.
        _is_unmodified_mach_core_result = False
+
+
# Result of the background check started by register_sentry(): True/False once
# _is_unmodified_mach_core() has run; None until then. _process_event() treats
# a falsy value (including None) as "modified" and drops the event.
_is_unmodified_mach_core_result = None
# Daemon Thread running _is_unmodified_mach_core(); _process_event() joins it
# before reading the result above.
_is_unmodified_mach_core_thread = None
diff --git a/python/mach/mach/site.py b/python/mach/mach/site.py
new file mode 100644
index 0000000000..58c1eac3fa
--- /dev/null
+++ b/python/mach/mach/site.py
@@ -0,0 +1,1405 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This file contains code for managing the Python import scope for Mach. This
+# generally involves populating a Python virtualenv.
+
+import ast
+import enum
+import functools
+import json
+import os
+import platform
+import shutil
+import site
+import subprocess
+import sys
+import sysconfig
+import tempfile
+from contextlib import contextmanager
+from pathlib import Path
+from typing import Callable, Optional
+
+from mach.requirements import (
+ MachEnvRequirements,
+ UnexpectedFlexibleRequirementException,
+)
+
# Name of the .pth file installed into Mach-managed virtualenvs.
PTH_FILENAME = "mach.pth"
# Metadata file written at the root (prefix) of each Mach-managed virtualenv.
METADATA_FILENAME = "moz_virtualenv_metadata.json"
# The following virtualenvs *may* be used in a context where they aren't allowed to
# install pip packages over the network. In such a case, they must access unvendored
# python packages via the system environment.
PIP_NETWORK_INSTALL_RESTRICTED_VIRTUALENVS = ("mach", "build", "common")

# True on native Windows and Cygwin; the os.sep check excludes environments
# where the platform reports win32 but uses "/" as the separator.
_is_windows = sys.platform == "cygwin" or (sys.platform == "win32" and os.sep == "\\")
+
+
class VenvModuleNotFoundException(Exception):
    """Raised when the Python "venv" module can't be imported.

    The error message points users at their system package manager, since
    some distributions ship "venv" separately from the interpreter.
    """

    def __init__(self):
        msg = (
            'Mach was unable to find the "venv" module, which is needed '
            "to create virtual environments in Python. You may need to "
            "install it manually using the package manager for your system."
        )
        # The original "super(Exception, self).__init__(msg)" skipped
        # Exception in the MRO and called BaseException.__init__ directly;
        # the zero-argument super() form is the conventional equivalent.
        super().__init__(msg)
+
+
class VirtualenvOutOfDateException(Exception):
    """Raised when an already-activated virtualenv is found to be out-of-date.

    Rebuilding wouldn't take effect until the next Mach invocation, so the
    user is asked to resolve the situation and retry.
    """

    pass
+
+
class MozSiteMetadataOutOfDateError(Exception):
    """Raised when a site's on-disk metadata is missing fields or not finalized."""

    pass
+
+
class InstallPipRequirementsException(Exception):
    """Raised when installing or validating a pip requirements file fails."""

    pass
+
+
class SiteUpToDateResult:
    """Outcome of an up-to-date check: a flag plus an optional human-readable reason."""

    def __init__(self, is_up_to_date, reason=None):
        self.is_up_to_date = is_up_to_date
        # reason is only expected to be populated when is_up_to_date is False.
        self.reason = reason
+
+
class SitePackagesSource(enum.Enum):
    """Where a Moz-managed site imports its pip-installed packages from.

    NONE: no pip packages; only vendored/first-party modules are on the path.
    SYSTEM: pip packages are imported from the external (system) Python site.
    VENV: packages are "pip install"-ed into an on-disk virtualenv.
    """

    NONE = "none"
    SYSTEM = "system"
    VENV = "pip"

    @classmethod
    def for_mach(cls):
        """Resolve the Mach site's package source from environment variables.

        MACH_BUILD_PYTHON_NATIVE_PACKAGE_SOURCE takes precedence; the
        deprecated MACH_USE_SYSTEM_PYTHON and MOZ_AUTOMATION select NONE as a
        fallback, with VENV as the default otherwise.
        """
        source = os.environ.get("MACH_BUILD_PYTHON_NATIVE_PACKAGE_SOURCE", "").lower()
        if source == "system":
            source = SitePackagesSource.SYSTEM
        elif source == "none":
            source = SitePackagesSource.NONE
        elif source == "pip":
            source = SitePackagesSource.VENV
        elif source:
            # Any other non-empty value is a user error.
            raise Exception(
                "Unexpected MACH_BUILD_PYTHON_NATIVE_PACKAGE_SOURCE value, expected one "
                'of "system", "pip", "none", or to not be set'
            )

        mach_use_system_python = bool(os.environ.get("MACH_USE_SYSTEM_PYTHON"))
        if source:
            if mach_use_system_python:
                raise Exception(
                    "The MACH_BUILD_PYTHON_NATIVE_PACKAGE_SOURCE environment variable is "
                    "set, so the MACH_USE_SYSTEM_PYTHON variable is redundant and "
                    "should be unset."
                )
            return source

        # Only print this warning once for the Mach site, so we don't spam it every
        # time a site handle is created.
        if mach_use_system_python:
            print(
                'The "MACH_USE_SYSTEM_PYTHON" environment variable is deprecated, '
                "please unset it or replace it with either "
                '"MACH_BUILD_PYTHON_NATIVE_PACKAGE_SOURCE=system" or '
                '"MACH_BUILD_PYTHON_NATIVE_PACKAGE_SOURCE=none"'
            )

        return (
            SitePackagesSource.NONE
            if (mach_use_system_python or os.environ.get("MOZ_AUTOMATION"))
            else SitePackagesSource.VENV
        )
+
+
class MozSiteMetadata:
    """Details about a Moz-managed python site

    When a Moz-managed site is active, its associated metadata is available
    at "MozSiteMetadata.current".

    Sites that have associated virtualenvs (so, those that aren't strictly leaning on
    the external python packages) will have their metadata written to
    <prefix>/moz_virtualenv_metadata.json.
    """

    # Used to track which virtualenv has been activated in-process.
    current: Optional["MozSiteMetadata"] = None

    def __init__(
        self,
        hex_version: int,
        site_name: str,
        mach_site_packages_source: SitePackagesSource,
        original_python: "ExternalPythonSite",
        prefix: str,
    ):
        """
        Args:
            hex_version: The python version number from sys.hexversion
            site_name: The name of the site this metadata is associated with
            mach_site_packages_source: Where the Mach site imports
                its pip-installed dependencies from
            original_python: The external Python site that was
                used to invoke Mach. Usually the system Python, such as /usr/bin/python3
            prefix: The same value as "sys.prefix" is when running within the
                associated Python site. The same thing as the "virtualenv root".
        """
        # (Doc fix: the original docstring also described a
        # "site_packages_source" arg that doesn't exist in this signature.)

        self.hex_version = hex_version
        self.site_name = site_name
        self.mach_site_packages_source = mach_site_packages_source
        # original_python is needed for commands that tweak the system, such
        # as "./mach install-moz-phab".
        self.original_python = original_python
        self.prefix = prefix

    def write(self, is_finalized):
        """Serialize this metadata to <prefix>/moz_virtualenv_metadata.json.

        Args:
            is_finalized: False while the associated virtualenv is still being
                built, True once it's ready for use. from_path() treats
                non-finalized metadata as out-of-date.
        """
        raw = {
            "hex_version": self.hex_version,
            "virtualenv_name": self.site_name,
            "mach_site_packages_source": self.mach_site_packages_source.name,
            "original_python_executable": self.original_python.python_path,
            "is_finalized": is_finalized,
        }
        with open(os.path.join(self.prefix, METADATA_FILENAME), "w") as file:
            json.dump(raw, file)

    def __eq__(self, other):
        return (
            type(self) == type(other)
            and self.hex_version == other.hex_version
            and self.site_name == other.site_name
            and self.mach_site_packages_source == other.mach_site_packages_source
            # On Windows, execution environment can lead to different cases. Normalize.
            and Path(self.original_python.python_path)
            == Path(other.original_python.python_path)
        )

    @classmethod
    def from_runtime(cls):
        """Return the in-process active metadata, or read it from sys.prefix."""
        if cls.current:
            return cls.current

        return cls.from_path(sys.prefix)

    @classmethod
    def from_path(cls, prefix):
        """Read site metadata from the virtualenv rooted at "prefix".

        Returns None when no metadata file exists. Raises
        MozSiteMetadataOutOfDateError when the metadata is missing fields or
        was never finalized.
        """
        metadata_path = os.path.join(prefix, METADATA_FILENAME)
        out_of_date_exception = MozSiteMetadataOutOfDateError(
            f'The virtualenv at "{prefix}" is out-of-date.'
        )
        try:
            with open(metadata_path, "r") as file:
                raw = json.load(file)

            if not raw.get("is_finalized", False):
                raise out_of_date_exception

            return cls(
                raw["hex_version"],
                raw["virtualenv_name"],
                SitePackagesSource[raw["mach_site_packages_source"]],
                ExternalPythonSite(raw["original_python_executable"]),
                # Bug fix: previously "metadata_path" (the JSON file itself)
                # was passed here, but the constructor documents "prefix" as
                # the virtualenv root — write() joins METADATA_FILENAME onto
                # it, which would have produced a bogus nested path.
                prefix,
            )
        except FileNotFoundError:
            return None
        except KeyError:
            # A field referenced above is absent from the JSON.
            raise out_of_date_exception

    @contextmanager
    def update_current_site(self, executable):
        """Updates necessary global state when a site is activated

        Due to needing to fetch some state before the actual activation happens, this
        is represented as a context manager and should be used as follows:

        with metadata.update_current_site(executable):
            # Perform the actual implementation of changing the site, whether that is
            # by exec-ing "activate_this.py" in a virtualenv, modifying the sys.path
            # directly, or some other means
            ...
        """

        # Grab a handle on pkg_resources (if available) *before* activation so
        # its package index can be refreshed after sys.path changes.
        try:
            import pkg_resources
        except ModuleNotFoundError:
            pkg_resources = None

        yield
        MozSiteMetadata.current = self

        sys.executable = executable

        if pkg_resources:
            # Rebuild the working_set based on the new sys.path.
            pkg_resources._initialize_master_working_set()
+
+
class MachSiteManager:
    """Represents the activate-able "import scope" Mach needs

    Whether running independently, using the system packages, or automatically managing
    dependencies with "pip install", this class provides an easy handle to verify
    that the "site" is up-to-date (whether that means that system packages don't
    collide with vendored packages, or that the on-disk virtualenv needs rebuilding).

    Note that, this is a *virtual* site: an on-disk Python virtualenv
    is only created if there will be "pip installs" into the Mach site.
    """

    def __init__(
        self,
        topsrcdir: str,
        virtualenv_root: Optional[str],
        requirements: MachEnvRequirements,
        original_python: "ExternalPythonSite",
        site_packages_source: SitePackagesSource,
    ):
        """
        Args:
            topsrcdir: The path to the Firefox repo
            virtualenv_root: The path to the associated Mach virtualenv,
                if any
            requirements: The requirements associated with the Mach site, parsed from
                the file at python/sites/mach.txt
            original_python: The external Python site that was used to invoke Mach.
                If Mach invocations are nested, then "original_python" refers to
                Python site that was used to start Mach first.
                Usually the system Python, such as /usr/bin/python3.
            site_packages_source: Where the Mach site will import its pip-installed
                dependencies from
        """
        self._topsrcdir = topsrcdir
        self._site_packages_source = site_packages_source
        self._requirements = requirements
        self._virtualenv_root = virtualenv_root
        self._metadata = MozSiteMetadata(
            sys.hexversion,
            "mach",
            site_packages_source,
            original_python,
            self._virtualenv_root,
        )

    @classmethod
    def from_environment(cls, topsrcdir: str, get_state_dir: Callable[[], str]):
        """
        Args:
            topsrcdir: The path to the Firefox repo
            get_state_dir: A function that resolves the path to the checkout-scoped
                state_dir, generally ~/.mozbuild/srcdirs/<checkout-based-dir>/
        """

        requirements = resolve_requirements(topsrcdir, "mach")
        # Mach needs to operate in environments in which no pip packages are installed
        # yet, and the system isn't guaranteed to have the packages we need. For example,
        # "./mach bootstrap" can't have any dependencies.
        # So, all external dependencies of Mach's must be optional.
        assert (
            not requirements.pypi_requirements
        ), "Mach pip package requirements must be optional."

        # external_python is the Python interpreter that invoked Mach for this process.
        external_python = ExternalPythonSite(sys.executable)

        # original_python is the first Python interpreter that invoked the top-level
        # Mach process. This is different from "external_python" when there's nested
        # Mach invocations.
        active_metadata = MozSiteMetadata.from_runtime()
        if active_metadata:
            original_python = active_metadata.original_python
        else:
            original_python = external_python

        source = SitePackagesSource.for_mach()
        # An on-disk virtualenv is only needed when pip packages will be
        # installed into it.
        virtualenv_root = (
            _mach_virtualenv_root(get_state_dir())
            if source == SitePackagesSource.VENV
            else None
        )
        return cls(
            topsrcdir,
            virtualenv_root,
            requirements,
            original_python,
            source,
        )

    def _up_to_date(self):
        """Check whether the Mach site can be used as-is.

        Returns a SiteUpToDateResult. For the SYSTEM source this also verifies
        (via _assert_pip_check) that system packages don't conflict with our
        vendored requirements.
        """
        if self._site_packages_source == SitePackagesSource.NONE:
            return SiteUpToDateResult(True)
        elif self._site_packages_source == SitePackagesSource.SYSTEM:
            _assert_pip_check(self._sys_path(), "mach", self._requirements)
            return SiteUpToDateResult(True)
        elif self._site_packages_source == SitePackagesSource.VENV:
            environment = self._virtualenv()
            return _is_venv_up_to_date(
                environment,
                self._pthfile_lines(environment),
                self._requirements,
                self._metadata,
            )

    def ensure(self, *, force=False):
        """Build (or rebuild) the Mach virtualenv if it's missing or stale.

        Args:
            force: rebuild even when the site appears up-to-date.

        Raises:
            VirtualenvOutOfDateException: if the stale virtualenv is the one
                currently activated, since rebuilding it in-process wouldn't
                take effect until the next Mach invocation.
        """
        result = self._up_to_date()
        if force or not result.is_up_to_date:
            if Path(sys.prefix) == Path(self._metadata.prefix):
                # If the Mach virtualenv is already activated, then the changes caused
                # by rebuilding the virtualenv won't take effect until the next time
                # Mach is used, which can lead to confusing one-off errors.
                # Instead, request that the user resolve the out-of-date situation,
                # *then* come back and run the intended command.
                raise VirtualenvOutOfDateException(result.reason)
            self._build()

    def attempt_populate_optional_packages(self):
        """Best-effort install of Mach's optional pip packages.

        No-op unless this Mach site installs packages into a virtualenv.
        """
        if self._site_packages_source != SitePackagesSource.VENV:
            # Bug fix: this branch used "pass", which fell through to
            # self._virtualenv() below and tripped its VENV-only assertion
            # for the SYSTEM/NONE sources. "return" was clearly intended.
            return

        self._virtualenv().install_optional_packages(
            self._requirements.pypi_optional_requirements
        )

    def activate(self):
        """Activate the Mach site in the current Python context."""
        assert not MozSiteMetadata.current

        self.ensure()
        with self._metadata.update_current_site(
            self._virtualenv().python_path
            if self._site_packages_source == SitePackagesSource.VENV
            else sys.executable,
        ):
            # Reset the sys.path to insulate ourselves from the environment.
            # This should be safe to do, since activation of the Mach site happens so
            # early in the Mach lifecycle that no packages should have been imported
            # from external sources yet.
            sys.path = self._sys_path()
            if self._site_packages_source == SitePackagesSource.VENV:
                # Activate the Mach virtualenv in the current Python context. This
                # automatically adds the virtualenv's "site-packages" to our scope, in
                # addition to our first-party/vendored modules since they're specified
                # in the "mach.pth" file.
                activate_virtualenv(self._virtualenv())

    def _build(self):
        """Create the on-disk Mach virtualenv, when one is needed."""
        if self._site_packages_source != SitePackagesSource.VENV:
            # The Mach virtualenv doesn't have a physical virtualenv on-disk if it won't
            # be "pip install"-ing. So, there's no build work to do.
            return

        environment = self._virtualenv()
        _create_venv_with_pthfile(
            environment,
            self._pthfile_lines(environment),
            True,
            self._requirements,
            self._metadata,
        )

    def _sys_path(self):
        """Compute the sys.path for this site, ordered by import priority."""
        if self._site_packages_source == SitePackagesSource.SYSTEM:
            stdlib_paths, system_site_paths = self._metadata.original_python.sys_path()
            return [
                *stdlib_paths,
                *self._requirements.pths_as_absolute(self._topsrcdir),
                *system_site_paths,
            ]
        elif self._site_packages_source == SitePackagesSource.NONE:
            stdlib_paths = self._metadata.original_python.sys_path_stdlib()
            return [
                *stdlib_paths,
                *self._requirements.pths_as_absolute(self._topsrcdir),
            ]
        elif self._site_packages_source == SitePackagesSource.VENV:
            stdlib_paths = self._metadata.original_python.sys_path_stdlib()
            return [
                *stdlib_paths,
                # self._requirements will be added as part of the virtualenv activation.
            ]

    def _pthfile_lines(self, environment):
        """Lines for the virtualenv's .pth file, highest priority first."""
        return [
            # Prioritize vendored and first-party modules first.
            *self._requirements.pths_as_absolute(self._topsrcdir),
            # Then, include the virtualenv's site-packages.
            *_deprioritize_venv_packages(
                environment, self._site_packages_source == SitePackagesSource.VENV
            ),
        ]

    def _virtualenv(self):
        """Handle to the on-disk virtualenv; only valid for the VENV source."""
        assert self._site_packages_source == SitePackagesSource.VENV
        return PythonVirtualenv(self._metadata.prefix)
+
+
+class CommandSiteManager:
+ """Activate sites and ad-hoc-install pip packages
+
+ Provides tools to ensure that a command's scope will have expected, compatible
+ packages. Manages prioritization of the import scope, and ensures consistency
+ regardless of how a virtualenv is used (whether via in-process activation, or when
+ used standalone to invoke a script).
+
+ A few notes:
+
+ * The command environment always inherits Mach's import scope. This is
+ because "unloading" packages in Python is error-prone, so in-process activations
+ will always carry Mach's dependencies along with it. Accordingly, compatibility
+ between each command environment and the Mach environment must be maintained
+
+ * Unlike the Mach environment, command environments *always* have an associated
+ physical virtualenv on-disk. This is because some commands invoke child Python
+ processes, and that child process should have the same import scope.
+
+ """
+
    def __init__(
        self,
        topsrcdir: str,
        mach_virtualenv_root: Optional[str],
        virtualenv_root: str,
        site_name: str,
        active_metadata: MozSiteMetadata,
        populate_virtualenv: bool,
        requirements: MachEnvRequirements,
    ):
        """
        Args:
            topsrcdir: The path to the Firefox repo
            mach_virtualenv_root: The path to the Mach virtualenv, if any
            virtualenv_root: The path to the virtualenv associated with this site
            site_name: The name of this site, such as "build"
            active_metadata: The currently-active moz-managed site
            populate_virtualenv: True if packages should be installed to the on-disk
                virtualenv with "pip". False if the virtualenv should only include
                sys.path modifications, and all 3rd-party packages should be imported from
                Mach's site packages source.
            requirements: The requirements associated with this site, parsed from
                the file at python/sites/<site_name>.txt
        """
        self._topsrcdir = topsrcdir
        self._mach_virtualenv_root = mach_virtualenv_root
        self.virtualenv_root = virtualenv_root
        self._site_name = site_name
        self._virtualenv = PythonVirtualenv(self.virtualenv_root)
        # Public handles to this site's interpreter and bin directory, for
        # callers that spawn child processes within the site.
        self.python_path = self._virtualenv.python_path
        self.bin_path = self._virtualenv.bin_path
        self._populate_virtualenv = populate_virtualenv
        self._mach_site_packages_source = active_metadata.mach_site_packages_source
        self._requirements = requirements
        # Metadata describing this command site; note it inherits the Mach
        # site's packages source and original Python from active_metadata.
        self._metadata = MozSiteMetadata(
            sys.hexversion,
            site_name,
            active_metadata.mach_site_packages_source,
            active_metadata.original_python,
            virtualenv_root,
        )
+
    @classmethod
    def from_environment(
        cls,
        topsrcdir: str,
        get_state_dir: Callable[[], Optional[str]],
        site_name: str,
        command_virtualenvs_dir: str,
    ):
        """Build a CommandSiteManager from the currently-active Mach site.

        Args:
            topsrcdir: The path to the Firefox repo
            get_state_dir: A function that resolves the path to the checkout-scoped
                state_dir, generally ~/.mozbuild/srcdirs/<checkout-based-dir>/
            site_name: The name of this site, such as "build"
            command_virtualenvs_dir: The location under which this site's virtualenv
                should be created
        """
        active_metadata = MozSiteMetadata.from_runtime()
        assert (
            active_metadata
        ), "A Mach-managed site must be active before doing work with command sites"

        mach_site_packages_source = active_metadata.mach_site_packages_source
        pip_restricted_site = site_name in PIP_NETWORK_INSTALL_RESTRICTED_VIRTUALENVS
        if (
            not pip_restricted_site
            and mach_site_packages_source == SitePackagesSource.SYSTEM
        ):
            # Sites that aren't pip-network-install-restricted are likely going to be
            # incompatible with the system. Besides, this use case shouldn't exist, since
            # using the system packages is supposed to only be needed to lower risk of
            # important processes like building Firefox.
            raise Exception(
                'Cannot use MACH_BUILD_PYTHON_NATIVE_PACKAGE_SOURCE="system" for any '
                f"sites other than {PIP_NETWORK_INSTALL_RESTRICTED_VIRTUALENVS}. The "
                f'current attempted site is "{site_name}".'
            )

        mach_virtualenv_root = (
            _mach_virtualenv_root(get_state_dir())
            if mach_site_packages_source == SitePackagesSource.VENV
            else None
        )
        # Pip-install into the command venv when Mach itself uses a venv, or
        # when this site is allowed to install over the network.
        populate_virtualenv = (
            mach_site_packages_source == SitePackagesSource.VENV
            or not pip_restricted_site
        )
        return cls(
            topsrcdir,
            mach_virtualenv_root,
            os.path.join(command_virtualenvs_dir, site_name),
            site_name,
            active_metadata,
            populate_virtualenv,
            resolve_requirements(topsrcdir, site_name),
        )
+
    def ensure(self):
        """Ensure that this virtualenv is built, up-to-date, and ready for use
        If using a virtualenv Python binary directly, it's useful to call this function
        first to ensure that the virtualenv doesn't have obsolete references or packages.
        """
        result = self._up_to_date()
        if not result.is_up_to_date:
            print(f"Site not up-to-date reason: {result.reason}")
            active_site = MozSiteMetadata.from_runtime()
            if active_site.site_name == self._site_name:
                # Rebuilding wouldn't help here: the stale site is already
                # active in this process, so surface the problem instead.
                print(result.reason, file=sys.stderr)
                raise Exception(
                    f'The "{self._site_name}" site is out-of-date, even though it has '
                    f"already been activated. Was it modified while this Mach process "
                    f"was running?"
                )

            # Rebuild the on-disk virtualenv from scratch.
            _create_venv_with_pthfile(
                self._virtualenv,
                self._pthfile_lines(),
                self._populate_virtualenv,
                self._requirements,
                self._metadata,
            )
+
    def activate(self):
        """Activate this site in the current Python context.

        If you run a random Python script and wish to "activate" the
        site, you can simply instantiate an instance of this class
        and call .activate() to make the virtualenv active.
        """

        active_site = MozSiteMetadata.from_runtime()
        site_is_already_active = active_site.site_name == self._site_name
        # A command site may be layered on top of the "mach" or "common"
        # sites, but never on top of a different command site.
        if (
            active_site.site_name not in ("mach", "common")
            and not site_is_already_active
        ):
            raise Exception(
                f'Activating from one command site ("{active_site.site_name}") to '
                f'another ("{self._site_name}") is not allowed, because they may '
                "be incompatible."
            )

        self.ensure()

        if site_is_already_active:
            return

        with self._metadata.update_current_site(self._virtualenv.python_path):
            activate_virtualenv(self._virtualenv)
+
    def install_pip_package(self, package):
        """Install a package via pip.

        The supplied package is specified using a pip requirement specifier.
        e.g. 'foo' or 'foo==1.0'.

        If the package is already installed, this is a no-op.
        """
        if Path(sys.prefix) == Path(self.virtualenv_root):
            # If we're already running in this interpreter, we can optimize in
            # the case that the package requirement is already satisfied.
            # NOTE(review): pip._internal is not a stable public API; this may
            # need updating when the vendored/installed pip version changes.
            from pip._internal.req.constructors import install_req_from_line

            req = install_req_from_line(package)
            req.check_if_exists(use_user_site=False)
            if req.satisfied_by is not None:
                return

        self._virtualenv.pip_install_with_constraints([package])
+
    def install_pip_requirements(self, path, require_hashes=True, quiet=False):
        """Install a pip requirements.txt file.

        The supplied path is a text file containing pip requirement
        specifiers.

        If require_hashes is True, each specifier must contain the
        expected hash of the downloaded package. See:
        https://pip.pypa.io/en/stable/reference/pip_install/#hash-checking-mode

        Raises:
            InstallPipRequirementsException: if the install fails, or if the
                resulting environment fails "pip check" validation.
        """

        if not os.path.isabs(path):
            path = os.path.join(self._topsrcdir, path)

        args = ["--requirement", path]

        if require_hashes:
            args.append("--require-hashes")

        install_result = self._virtualenv.pip_install(
            args,
            check=not quiet,
            stdout=subprocess.PIPE if quiet else None,
        )
        if install_result.returncode:
            print(install_result.stdout)
            raise InstallPipRequirementsException(
                f'Failed to install "{path}" into the "{self._site_name}" site.'
            )

        # Validate the resulting environment for mutually-incompatible packages.
        check_result = subprocess.run(
            [self.python_path, "-m", "pip", "check"],
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            universal_newlines=True,
        )

        if not check_result.returncode:
            return

        """
        Some commands may use the "setup.py" script of first-party modules. This causes
        a "*.egg-info" dir to be created for that module (which pip can then detect as
        a package). Since we add all first-party module directories to the .pthfile for
        the "mach" venv, these first-party modules are then detected by all venvs after
        they are created. The problem is that these .egg-info directories can become
        stale (since if the first-party module is updated it's not guaranteed that the
        command that runs the "setup.py" was ran afterwards). This can cause
        incompatibilities with the pip check (since the dependencies can change between
        different versions).

        These .egg-info dirs are in our VCS ignore lists (eg: ".hgignore") because they
        are necessary to run some commands, so we don't want to always purge them, and we
        also don't want to accidentally commit them. Given this, we can leverage our VCS
        to find all the current first-party .egg-info dirs.

        If we're in the case where 'pip check' fails, then we can try purging the
        first-party .egg-info dirs, then run the 'pip check' again afterwards. If it's
        still failing, then we know the .egg-info dirs weren't the problem. If that's
        the case we can just raise the error encountered, which is the same as before.
        """

        def _delete_ignored_egg_info_dirs():
            # Purge VCS-ignored "*.egg-info" dirs; a stale one can make "pip
            # check" report bogus incompatibilities (see the note above).
            from pathlib import Path

            from mozversioncontrol import (
                MissingConfigureInfo,
                MissingVCSInfo,
                get_repository_from_env,
            )

            try:
                with get_repository_from_env() as repo:
                    ignored_file_finder = repo.get_ignored_files_finder().find(
                        "**/*.egg-info"
                    )

                    unique_egg_info_dirs = {
                        Path(found[0]).parent for found in ignored_file_finder
                    }

                    for egg_info_dir in unique_egg_info_dirs:
                        shutil.rmtree(egg_info_dir)

            except (MissingVCSInfo, MissingConfigureInfo):
                # Best-effort: without VCS info we can't locate the dirs.
                pass

        _delete_ignored_egg_info_dirs()

        # Re-run "pip check" now that stale .egg-info dirs are gone.
        check_result = subprocess.run(
            [self.python_path, "-m", "pip", "check"],
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            universal_newlines=True,
        )

        if check_result.returncode:
            if quiet:
                # If "quiet" was specified, then the "pip install" output wasn't printed
                # earlier, and was buffered instead. Print that buffer so that debugging
                # the "pip check" failure is easier.
                print(install_result.stdout)

            subprocess.check_call(
                [self.python_path, "-m", "pip", "list", "-v"], stdout=sys.stderr
            )
            print(check_result.stdout, file=sys.stderr)
            raise InstallPipRequirementsException(
                f'As part of validation after installing "{path}" into the '
                f'"{self._site_name}" site, the site appears to contain installed '
                "packages that are incompatible with each other."
            )
+
    def _pthfile_lines(self):
        """Generate the prioritized import scope to encode in the venv's pthfile

        Returns a list of pthfile lines: absolute directory paths plus the
        executable ``import sys; ...`` lines produced by
        _deprioritize_venv_packages(). The ORDER of the list is the import
        priority, so the steps below must not be reordered.

        The import priority looks like this:
        1. Mach's vendored/first-party modules
        2. Mach's site-package source (the Mach virtualenv, the system Python, or neither)
        3. The command's vendored/first-party modules
        4. The command's site-package source (either the virtualenv or the system Python,
           if it's not already added)

        Note that, when using the system Python, it may either be prioritized before or
        after the command's vendored/first-party modules. This is a symptom of us
        attempting to avoid conflicting with the system packages.

        For example, there's at least one job in CI that operates with an ancient
        environment with a bunch of old packages, many of whom conflict with our vendored
        packages. However, the specific command that we're running for the job doesn't
        need any of the system's packages, so we're safe to insulate ourselves.

        Mach doesn't know the command being run when it's preparing its import scope,
        so it has to be defensive. Therefore:
        1. If Mach needs a system package: system packages are higher priority.
        2. If Mach doesn't need a system package, but the current command does: system
           packages are still in the list, albeit at a lower priority.
        """

        # Prioritize Mach's vendored and first-party modules first.
        lines = resolve_requirements(self._topsrcdir, "mach").pths_as_absolute(
            self._topsrcdir
        )
        mach_site_packages_source = self._mach_site_packages_source
        if mach_site_packages_source == SitePackagesSource.SYSTEM:
            # When Mach is using the system environment, add it next.
            # (sys_path() splits stdlib paths from site paths; only the
            # site paths belong in the pthfile.)
            _, system_site_paths = self._metadata.original_python.sys_path()
            lines.extend(system_site_paths)
        elif mach_site_packages_source == SitePackagesSource.VENV:
            # When Mach is using its on-disk virtualenv, add its site-packages directory.
            assert self._mach_virtualenv_root
            lines.extend(
                PythonVirtualenv(self._mach_virtualenv_root).site_packages_dirs()
            )

        # Add this command's vendored and first-party modules.
        lines.extend(self._requirements.pths_as_absolute(self._topsrcdir))
        # Finally, ensure that pip-installed packages are the lowest-priority
        # source to import from.
        lines.extend(
            _deprioritize_venv_packages(self._virtualenv, self._populate_virtualenv)
        )

        # Note that an on-disk virtualenv is always created for commands, even if they
        # are using the system as their site-packages source. This is to support use
        # cases where a fresh Python process must be created, but it also must have
        # access to <site>'s 1st- and 3rd-party packages.
        return lines
+
+ def _up_to_date(self):
+ pthfile_lines = self._pthfile_lines()
+ if self._mach_site_packages_source == SitePackagesSource.SYSTEM:
+ _assert_pip_check(
+ pthfile_lines,
+ self._site_name,
+ self._requirements if not self._populate_virtualenv else None,
+ )
+
+ return _is_venv_up_to_date(
+ self._virtualenv,
+ pthfile_lines,
+ self._requirements,
+ self._metadata,
+ )
+
+
class PythonVirtualenv:
    """Calculates paths of interest for general python virtual environments"""

    def __init__(self, prefix):
        if _is_windows:
            self.bin_path = os.path.join(prefix, "Scripts")
            self.python_path = os.path.join(self.bin_path, "python.exe")
        else:
            self.bin_path = os.path.join(prefix, "bin")
            self.python_path = os.path.join(self.bin_path, "python")
        self.prefix = os.path.realpath(prefix)
        # Per-instance memo for resolve_sysconfig_packages_path(). A plain
        # dict is used instead of functools.lru_cache on the method because an
        # lru_cache on an instance method keys on "self" and keeps every
        # instance alive for the lifetime of the process (flake8-bugbear B019).
        self._sysconfig_path_cache = {}

    def resolve_sysconfig_packages_path(self, sysconfig_path):
        """Return this venv's directory for a sysconfig path key.

        `sysconfig_path` is a sysconfig key such as "purelib" or "platlib";
        the result is normalized (normpath + normcase) for comparisons.
        """
        cached = self._sysconfig_path_cache.get(sysconfig_path)
        if cached is not None:
            return cached

        # macOS uses a different default sysconfig scheme based on whether it's using the
        # system Python or running in a virtualenv.
        # Manually define the scheme (following the implementation in
        # "sysconfig._get_default_scheme()") so that we're always following the
        # code path for a virtualenv directory structure.
        if os.name == "posix":
            scheme = "posix_prefix"
        else:
            scheme = os.name

        sysconfig_paths = sysconfig.get_paths(scheme)
        data_path = Path(sysconfig_paths["data"])
        path = Path(sysconfig_paths[sysconfig_path])
        relative_path = path.relative_to(data_path)

        # Path to virtualenv's "site-packages" directory for provided sysconfig path
        result = os.path.normpath(os.path.normcase(Path(self.prefix) / relative_path))
        self._sysconfig_path_cache[sysconfig_path] = result
        return result

    def site_packages_dirs(self):
        """Return the list of directories that hold this venv's packages.

        On Windows the venv prefix itself is included (Python adds it to
        sys.path there); "purelib" and "platlib" are de-duplicated since they
        are the same directory on most platforms.
        """
        dirs = []
        if sys.platform.startswith("win"):
            dirs.append(os.path.normpath(os.path.normcase(self.prefix)))
        purelib = self.resolve_sysconfig_packages_path("purelib")
        platlib = self.resolve_sysconfig_packages_path("platlib")

        dirs.append(purelib)
        if platlib != purelib:
            dirs.append(platlib)

        return dirs

    def pip_install_with_constraints(self, pip_args):
        """Create a pip constraints file or existing packages

        When pip installing an incompatible package, pip will follow through with
        the install but raise a warning afterwards.

        To defend our environment from breakage, we run "pip install" but add all
        existing packages to a "constraints file". This ensures that conflicts are
        raised as errors up-front, and the virtual environment doesn't have conflicting
        packages installed.

        Note: pip_args is expected to contain either the requested package or
        requirements file.
        """
        existing_packages = self._resolve_installed_packages()

        with tempfile.TemporaryDirectory() as tempdir:
            constraints_path = os.path.join(tempdir, "site-constraints.txt")
            with open(constraints_path, "w") as file:
                # Pin every already-installed package at its current version.
                file.write(
                    "\n".join(
                        [
                            f"{name}=={version}"
                            for name, version in existing_packages.items()
                        ]
                    )
                )

            return self.pip_install(["--constraint", constraints_path] + pip_args)

    def pip_install(self, pip_install_args, **kwargs):
        """Run "pip install" with this venv's interpreter.

        Extra **kwargs are forwarded to subprocess.run(); by default the call
        checks the return code and merges stderr into stdout.
        """
        # setuptools will use the architecture of the running Python instance when
        # building packages. However, it's possible for the Xcode Python to be a universal
        # binary (x86_64 and arm64) without the associated macOS SDK supporting arm64,
        # thereby causing a build failure. To avoid this, we explicitly influence the
        # build to only target a single architecture - our current architecture.
        kwargs.setdefault("env", os.environ.copy()).setdefault(
            "ARCHFLAGS", "-arch {}".format(platform.machine())
        )
        kwargs.setdefault("check", True)
        kwargs.setdefault("stderr", subprocess.STDOUT)
        kwargs.setdefault("universal_newlines", True)

        # It's tempting to call pip natively via pip.main(). However,
        # the current Python interpreter may not be the virtualenv python.
        # This will confuse pip and cause the package to attempt to install
        # against the executing interpreter. By creating a new process, we
        # force the virtualenv's interpreter to be used and all is well.
        # It /might/ be possible to cheat and set sys.executable to
        # self.python_path. However, this seems more risk than it's worth.
        return subprocess.run(
            [self.python_path, "-m", "pip", "install"] + pip_install_args,
            **kwargs,
        )

    def install_optional_packages(self, optional_requirements):
        """Best-effort install of optional requirements.

        Failures are reported (with the requirement's documented repercussion)
        but never raised, since optional packages must not break the site.
        """
        for requirement in optional_requirements:
            try:
                self.pip_install_with_constraints([str(requirement.requirement)])
            except subprocess.CalledProcessError:
                print(
                    f"Could not install {requirement.requirement.name}, so "
                    f"{requirement.repercussion}. Continuing."
                )

    def _resolve_installed_packages(self):
        # Delegate to the module-level helper, keyed on this venv's interpreter.
        return _resolve_installed_packages(self.python_path)
+
+
class RequirementsValidationResult:
    """Accumulates differences between a site's pypi requirements and the
    packages actually installed in an environment.

    Attributes:
        has_all_packages: True while every mandatory requirement is installed
            at an accepted version.
        provides_any_package: True once at least one mandatory or optional
            requirement is satisfied by an installed package.
    """

    def __init__(self):
        self._package_discrepancies = []
        self.has_all_packages = True
        self.provides_any_package = False

    def add_discrepancy(self, requirement, found):
        """Record an unsatisfied requirement.

        `found` is the installed version string, or falsy when the package
        isn't installed at all.
        """
        self._package_discrepancies.append((requirement, found))
        self.has_all_packages = False

    def report(self):
        """Render one human-readable line per recorded discrepancy."""
        descriptions = []
        for requirement, found in self._package_discrepancies:
            issue = (
                f'Installed with unexpected version "{found}"'
                if found
                else "Not installed"
            )
            descriptions.append(f"{requirement}: {issue}")
        return "\n".join(descriptions)

    @classmethod
    def from_packages(cls, packages, requirements):
        """Compare installed `packages` ({name: version}) against the site's
        pinned and optional pypi requirements."""
        result = cls()

        for pkg in requirements.pypi_requirements:
            version = packages.get(pkg.requirement.name)
            # Mandatory packages must be present *and* at an accepted version.
            if version and pkg.requirement.specifier.contains(version):
                result.provides_any_package = True
            else:
                result.add_discrepancy(pkg.requirement, version)

        for pkg in requirements.pypi_optional_requirements:
            version = packages.get(pkg.requirement.name)
            # Optional packages may be absent, but when installed they still
            # have to be at a compatible version.
            if not version:
                continue
            if pkg.requirement.specifier.contains(version):
                result.provides_any_package = True
            else:
                result.add_discrepancy(pkg.requirement, version)

        return result
+
+
class ExternalPythonSite:
    """Represents the Python site that is executing Mach

    The external Python site could be a virtualenv (created by venv or virtualenv) or
    the system Python itself, so we can't make any significant assumptions on its
    structure.
    """

    def __init__(self, python_executable):
        self._prefix = os.path.dirname(os.path.dirname(python_executable))
        self.python_path = python_executable
        # Memoized result of sys_path(). A plain attribute is used rather
        # than functools.lru_cache on the method: an lru_cache on an instance
        # method keys on "self" and keeps every instance alive for the
        # lifetime of the process (flake8-bugbear B019).
        self._sys_path = None

    def sys_path(self):
        """Return lists of sys.path entries: one for standard library, one for the site

        These two lists are calculated at the same time so that we can interpret them
        in a single Python subprocess, as running a whole Python instance is
        very expensive in the context of Mach initialization. The result is
        computed once per instance and memoized.
        """
        if self._sys_path is not None:
            return self._sys_path

        env = {
            k: v
            for k, v in os.environ.items()
            # Don't include items injected by IDEs into the system path.
            if k not in ("PYTHONPATH", "PYDEVD_LOAD_VALUES_ASYNC")
        }
        stdlib = subprocess.Popen(
            [
                self.python_path,
                # Don't "import site" right away, so we can split the standard library
                # paths from the site paths.
                "-S",
                "-c",
                "import sys; from collections import OrderedDict; "
                # Skip the first item in the sys.path, as it's the working directory
                # of the invoked script (so, in this case, "").
                # Use list(OrderedDict...) to de-dupe items, such as when using
                # pyenv on Linux.
                "print(list(OrderedDict.fromkeys(sys.path[1:])))",
            ],
            universal_newlines=True,
            env=env,
            stdout=subprocess.PIPE,
        )
        system = subprocess.Popen(
            [
                self.python_path,
                "-c",
                "import os; import sys; import site; "
                "packages = site.getsitepackages(); "
                # Only add the "user site packages" if not in a virtualenv (which is
                # identified by the prefix == base_prefix check
                "packages.insert(0, site.getusersitepackages()) if "
                " sys.prefix == sys.base_prefix else None; "
                # When a Python instance launches, it only adds each
                # "site.getsitepackages()" entry if it exists on the file system.
                # Replicate that behaviour to get a more accurate list of system paths.
                "packages = [p for p in packages if os.path.exists(p)]; "
                "print(packages)",
            ],
            universal_newlines=True,
            env=env,
            stdout=subprocess.PIPE,
        )
        # Run python processes in parallel - they take roughly the same time, so this
        # cuts this functions run time in half.
        stdlib_out, _ = stdlib.communicate()
        system_out, _ = system.communicate()
        assert stdlib.returncode == 0
        assert system.returncode == 0
        stdlib = ast.literal_eval(stdlib_out)
        system = ast.literal_eval(system_out)
        # On Windows, some paths are both part of the default sys.path *and* are included
        # in the "site packages" list. Keep the "stdlib" one, and remove the dupe from
        # the "system packages" list.
        system = [path for path in system if path not in stdlib]

        self._sys_path = (stdlib, system)
        return self._sys_path

    def sys_path_stdlib(self):
        """Return list of default sys.path entries for the standard library"""
        stdlib, _ = self.sys_path()
        return stdlib
+
+
@functools.lru_cache(maxsize=None)
def resolve_requirements(topsrcdir, site_name):
    """Load and parse the requirements manifest for the named site.

    Raises a descriptive exception when the manifest is missing, or when a
    network-restricted site contains unpinned pypi requirements. Cached per
    (topsrcdir, site_name) since manifests are consulted repeatedly during
    Mach startup.
    """
    manifest_path = os.path.join(topsrcdir, "python", "sites", f"{site_name}.txt")
    if not os.path.exists(manifest_path):
        raise Exception(
            f'The current command is using the "{site_name}" '
            "site. However, that site is missing its associated "
            f'requirements definition file at "{manifest_path}".'
        )

    # Thunderbird requirements only apply when a populated "comm" dir exists.
    thunderbird_dir = os.path.join(topsrcdir, "comm")
    is_thunderbird = os.path.exists(thunderbird_dir) and bool(
        os.listdir(thunderbird_dir)
    )

    allows_unpinned = site_name not in PIP_NETWORK_INSTALL_RESTRICTED_VIRTUALENVS
    try:
        return MachEnvRequirements.from_requirements_definition(
            topsrcdir,
            is_thunderbird,
            allows_unpinned,
            manifest_path,
        )
    except UnexpectedFlexibleRequirementException as e:
        raise Exception(
            f'The "{site_name}" site does not have all pypi packages pinned '
            f'in the format "package==version" (found "{e.raw_requirement}").\n'
            f"Only the {PIP_NETWORK_INSTALL_RESTRICTED_VIRTUALENVS} sites are "
            "allowed to have unpinned packages."
        )
+
+
+def _resolve_installed_packages(python_executable):
+ pip_json = subprocess.check_output(
+ [
+ python_executable,
+ "-m",
+ "pip",
+ "list",
+ "--format",
+ "json",
+ "--disable-pip-version-check",
+ ],
+ universal_newlines=True,
+ )
+
+ installed_packages = json.loads(pip_json)
+ return {package["name"]: package["version"] for package in installed_packages}
+
+
def _ensure_python_exe(python_exe_root: Path):
    """On some machines in CI venv does not behave consistently. Sometimes
    only a "python3" executable is created, but we expect "python". Since
    they are functionally identical, we can just copy "python3" to "python"
    (and vice-versa) to solve the problem.

    Raises if neither executable exists after the copy attempts, since that
    means venv creation failed in an unexpected way.
    """
    python3_exe_path = python_exe_root / "python3"
    python_exe_path = python_exe_root / "python"

    if _is_windows:
        python3_exe_path = python3_exe_path.with_suffix(".exe")
        python_exe_path = python_exe_path.with_suffix(".exe")

    if python3_exe_path.exists() and not python_exe_path.exists():
        shutil.copy(str(python3_exe_path), str(python_exe_path))

    if python_exe_path.exists() and not python3_exe_path.exists():
        shutil.copy(str(python_exe_path), str(python3_exe_path))

    if not python_exe_path.exists() and not python3_exe_path.exists():
        raise Exception(
            f'Neither a "{python_exe_path.name}" nor a "{python3_exe_path.name}" '
            f"executable was found. This means something unexpected happened "
            f"during the virtual environment creation and we cannot proceed."
        )
+
+
def _ensure_pyvenv_cfg(venv_root: Path):
    """Copy the running venv's 'pyvenv.cfg' into a freshly created venv.

    Works around a bug in some Python 3.6 builds on Windows: pointing the
    new venv's config at the original Python install (rather than at the
    current venv) resolves the issue. Although always copying should be
    harmless, this is restricted to Windows + Python 3.6 to play it safe.
    """
    if not (_is_windows and sys.version_info[:2] == (3, 6)):
        return

    current_venv_dir = Path(sys.executable).parent.parent
    source_config = current_venv_dir / "pyvenv.cfg"
    if source_config.exists():
        destination_config = Path(venv_root) / "pyvenv.cfg"
        shutil.copyfile(str(source_config), str(destination_config))
+
+
def _assert_pip_check(pthfile_lines, virtualenv_name, requirements):
    """Check if the provided pthfile lines have a package incompatibility

    If there's an incompatibility, raise an exception and allow it to bubble up since
    it will require user intervention to resolve.

    If requirements aren't provided (such as when Mach is using SYSTEM, but the command
    site is using VENV), then skip the "pthfile satisfies requirements" step.

    On success, an environment variable is set so that nested Mach invocations
    (subshells) skip this relatively slow check.
    """
    if os.environ.get(
        f"MACH_SYSTEM_ASSERTED_COMPATIBLE_WITH_{virtualenv_name.upper()}_SITE", None
    ):
        # Don't re-assert compatibility against the system python within Mach subshells.
        return

    print(
        'Running "pip check" to verify compatibility between the system Python and the '
        f'"{virtualenv_name}" site.'
    )

    with tempfile.TemporaryDirectory() as check_env_path:
        # Pip detects packages on the "sys.path" that have a ".dist-info" or
        # a ".egg-info" directory. The majority of our Python dependencies are
        # vendored as extracted wheels or sdists, so they are automatically picked up.
        # This gives us sufficient confidence to do a `pip check` with both vendored
        # packages + system packages in scope, and trust the results.
        # Note: rather than just running the system pip with a modified "sys.path",
        # we create a new virtualenv that has our pinned pip version, so that
        # we get consistent results (there's been lots of pip resolver behaviour
        # changes recently).
        process = subprocess.run(
            [sys.executable, "-m", "venv", "--without-pip", check_env_path],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            encoding="UTF-8",
        )

        _ensure_pyvenv_cfg(Path(check_env_path))

        if process.returncode != 0:
            # A missing venv module gets a dedicated exception so callers can
            # give targeted advice; any other failure is re-raised verbatim.
            if "No module named venv" in process.stderr:
                raise VenvModuleNotFoundException()
            else:
                raise subprocess.CalledProcessError(
                    process.returncode,
                    process.args,
                    output=process.stdout,
                    stderr=process.stderr,
                )

        if process.stdout:
            print(process.stdout)

        check_env = PythonVirtualenv(check_env_path)
        _ensure_python_exe(Path(check_env.python_path).parent)

        # Encode the proposed import scope into the scratch venv's pthfile so
        # that its pip sees the same packages the real site would.
        # (NOTE(review): the nested os.path.join() below is redundant but
        # harmless - the inner call has a single argument.)
        with open(
            os.path.join(
                os.path.join(check_env.resolve_sysconfig_packages_path("platlib")),
                PTH_FILENAME,
            ),
            "w",
        ) as f:
            f.write("\n".join(pthfile_lines))

        pip = [check_env.python_path, "-m", "pip"]
        if requirements:
            # First verify that the pinned requirements are satisfied by what
            # the pthfile exposes, then (below) run the general "pip check".
            packages = _resolve_installed_packages(check_env.python_path)
            validation_result = RequirementsValidationResult.from_packages(
                packages, requirements
            )
            if not validation_result.has_all_packages:
                subprocess.check_call(pip + ["list", "-v"], stdout=sys.stderr)
                print(validation_result.report(), file=sys.stderr)
                raise Exception(
                    f'The "{virtualenv_name}" site is not compatible with the installed '
                    "system Python packages."
                )

        check_result = subprocess.run(
            pip + ["check"],
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            universal_newlines=True,
        )
        if check_result.returncode:
            # Dump the package list to stderr to make debugging the
            # incompatibility easier.
            subprocess.check_call(pip + ["list", "-v"], stdout=sys.stderr)
            print(check_result.stdout, file=sys.stderr)
            raise Exception(
                'According to "pip check", the current Python '
                "environment has package-compatibility issues."
            )

        # Success: mark the environment so nested Mach processes skip the check.
        os.environ[
            f"MACH_SYSTEM_ASSERTED_COMPATIBLE_WITH_{virtualenv_name.upper()}_SITE"
        ] = "1"
+
+
+def _deprioritize_venv_packages(virtualenv, populate_virtualenv):
+ # Virtualenvs implicitly add some "site packages" to the sys.path upon being
+ # activated. However, Mach generally wants to prioritize the existing sys.path
+ # (such as vendored packages) over packages installed to virtualenvs.
+ # So, this function moves the virtualenv's site-packages to the bottom of the sys.path
+ # at activation-time.
+
+ return [
+ line
+ for site_packages_dir in virtualenv.site_packages_dirs()
+ # repr(...) is needed to ensure Windows path backslashes aren't mistaken for
+ # escape sequences.
+ # Additionally, when removing the existing "site-packages" folder's entry, we have
+ # to do it in a case-insensitive way because, on Windows:
+ # * Python adds it as <venv>/lib/site-packages
+ # * While sysconfig tells us it's <venv>/Lib/site-packages
+ # * (note: on-disk, it's capitalized, so sysconfig is slightly more accurate).
+ for line in filter(
+ None,
+ (
+ "import sys; sys.path = [p for p in sys.path if "
+ f"p.lower() != {repr(site_packages_dir)}.lower()]",
+ f"import sys; sys.path.append({repr(site_packages_dir)})"
+ if populate_virtualenv
+ else None,
+ ),
+ )
+ ]
+
+
def _create_venv_with_pthfile(
    target_venv,
    pthfile_lines,
    populate_with_pip,
    requirements,
    metadata,
):
    """(Re)create a virtualenv at target_venv.prefix with the given pthfile.

    Any existing venv at that prefix is deleted first. The metadata is
    written twice: once with is_finalized=False before the venv is usable,
    and again with is_finalized=True at the end, so a partially-built venv
    can be detected and rebuilt.
    """
    virtualenv_root = target_venv.prefix
    if os.path.exists(virtualenv_root):
        # Always start from scratch - stale venv contents must not leak in.
        shutil.rmtree(virtualenv_root)

    os.makedirs(virtualenv_root)
    metadata.write(is_finalized=False)

    # "--without-pip": pip is only installed when populate_with_pip is set,
    # via the target venv's own pip_install() machinery.
    process = subprocess.run(
        [sys.executable, "-m", "venv", "--without-pip", virtualenv_root],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        encoding="UTF-8",
    )

    _ensure_pyvenv_cfg(Path(virtualenv_root))

    if process.returncode != 0:
        # A missing venv module gets a dedicated exception so callers can give
        # targeted advice; any other failure is re-raised verbatim.
        if "No module named venv" in process.stderr:
            raise VenvModuleNotFoundException()
        else:
            raise subprocess.CalledProcessError(
                process.returncode,
                process.args,
                output=process.stdout,
                stderr=process.stderr,
            )

    if process.stdout:
        print(process.stdout)

    _ensure_python_exe(Path(target_venv.python_path).parent)

    # Encode the prioritized import scope into the venv's pthfile.
    platlib_site_packages_dir = target_venv.resolve_sysconfig_packages_path("platlib")
    pthfile_contents = "\n".join(pthfile_lines)
    with open(os.path.join(platlib_site_packages_dir, PTH_FILENAME), "w") as f:
        f.write(pthfile_contents)

    if populate_with_pip:
        for requirement in requirements.pypi_requirements:
            target_venv.pip_install([str(requirement.requirement)])
        target_venv.install_optional_packages(requirements.pypi_optional_requirements)

    metadata.write(is_finalized=True)
+
+
def _is_venv_up_to_date(
    target_venv,
    expected_pthfile_lines,
    requirements,
    expected_metadata,
):
    """Determine whether an on-disk virtualenv can be reused as-is.

    Checks, in order: the venv exists; no requirements manifest is newer than
    the venv's metadata file; the on-disk metadata matches the expected
    metadata; and the on-disk pthfile matches the expected lines.
    Returns a SiteUpToDateResult whose reason explains the first failure.
    """
    if not os.path.exists(target_venv.prefix):
        return SiteUpToDateResult(False, f'"{target_venv.prefix}" does not exist')

    # Modifications to any of the requirements manifest files mean the virtualenv should
    # be rebuilt:
    metadata_mtime = os.path.getmtime(
        os.path.join(target_venv.prefix, METADATA_FILENAME)
    )
    for dep_file in requirements.requirements_paths:
        if os.path.getmtime(dep_file) > metadata_mtime:
            return SiteUpToDateResult(
                False, f'"{dep_file}" has changed since the virtualenv was created'
            )

    try:
        existing_metadata = MozSiteMetadata.from_path(target_venv.prefix)
    except MozSiteMetadataOutOfDateError as e:
        # The metadata is missing required fields, so must be out-of-date.
        return SiteUpToDateResult(False, str(e))

    if existing_metadata != expected_metadata:
        # The metadata doesn't exist or some fields have different values.
        # (Bug fix: the expected-metadata parenthesis was previously unclosed.)
        return SiteUpToDateResult(
            False,
            f"The existing metadata on-disk ({vars(existing_metadata)}) does not match "
            f"the expected metadata ({vars(expected_metadata)})",
        )

    platlib_site_packages_dir = target_venv.resolve_sysconfig_packages_path("platlib")
    pthfile_path = os.path.join(platlib_site_packages_dir, PTH_FILENAME)
    try:
        with open(pthfile_path) as file:
            current_pthfile_contents = file.read().strip()
    except FileNotFoundError:
        return SiteUpToDateResult(False, f'No pthfile found at "{pthfile_path}"')

    expected_pthfile_contents = "\n".join(expected_pthfile_lines)
    if current_pthfile_contents != expected_pthfile_contents:
        return SiteUpToDateResult(
            False,
            f'The pthfile at "{pthfile_path}" does not match the expected value.\n'
            f"# --- on-disk pthfile: ---\n"
            f"{current_pthfile_contents}\n"
            f"# --- expected pthfile contents ---\n"
            f"{expected_pthfile_contents}\n"
            f"# ---",
        )

    return SiteUpToDateResult(True)
+
+
def activate_virtualenv(virtualenv: PythonVirtualenv):
    """Make the given virtualenv active for the current process.

    Mirrors what a venv "activate" script does: prepends the venv's bin dir
    to PATH, exports VIRTUAL_ENV, registers the venv's site-packages dirs
    with the site module, and points sys.prefix at the venv.
    """
    existing_path_entries = os.environ.get("PATH", "").split(os.pathsep)
    os.environ["PATH"] = os.pathsep.join([virtualenv.bin_path] + existing_path_entries)
    os.environ["VIRTUAL_ENV"] = virtualenv.prefix

    for packages_dir in virtualenv.site_packages_dirs():
        # addsitedir() also processes any .pth files in the directory.
        site.addsitedir(os.path.realpath(packages_dir))

    sys.prefix = virtualenv.prefix
+
+
+def _mach_virtualenv_root(checkout_scoped_state_dir):
+ workspace = os.environ.get("WORKSPACE")
+ if os.environ.get("MOZ_AUTOMATION") and workspace:
+ # In CI, put Mach virtualenv in the $WORKSPACE dir, which should be cleaned
+ # between jobs.
+ return os.path.join(workspace, "mach_virtualenv")
+ return os.path.join(checkout_scoped_state_dir, "_virtualenvs", "mach")
diff --git a/python/mach/mach/telemetry.py b/python/mach/mach/telemetry.py
new file mode 100644
index 0000000000..233556550d
--- /dev/null
+++ b/python/mach/mach/telemetry.py
@@ -0,0 +1,305 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+import os
+import subprocess
+import sys
+from pathlib import Path
+from textwrap import dedent
+
+import requests
+import six.moves.urllib.parse as urllib_parse
+from mozbuild.base import BuildEnvironmentNotFoundException, MozbuildObject
+from mozbuild.settings import TelemetrySettings
+from mozbuild.telemetry import filter_args
+from mozversioncontrol import InvalidRepoPath, get_repository_object
+from six.moves import configparser, input
+
+from mach.config import ConfigSettings
+from mach.site import MozSiteMetadata
+from mach.telemetry_interface import GleanTelemetry, NoopTelemetry
+from mach.util import get_state_dir
+
+MACH_METRICS_PATH = (Path(__file__) / ".." / ".." / "metrics.yaml").resolve()
+
+
def create_telemetry_from_environment(settings):
    """Creates a Telemetry instance based on system details.

    If telemetry isn't enabled, the current interpreter isn't Python 3, or Glean
    can't be imported, then a "mock" telemetry instance is returned that doesn't
    set or record any data. This allows consumers to optimistically set telemetry
    data without needing to specifically handle the case where the current system
    doesn't support it.
    """

    active_metadata = MozSiteMetadata.from_runtime()
    is_mach_virtualenv = active_metadata and active_metadata.site_name == "mach"

    if not (
        is_applicable_telemetry_environment()
        # Glean is not compatible with Python 2
        and sys.version_info >= (3, 0)
        # If not using the mach virtualenv (e.g.: bootstrap uses native python)
        # then we can't guarantee that the glean package that we import is a
        # compatible version. Therefore, don't use glean.
        and is_mach_virtualenv
    ):
        return NoopTelemetry(False)

    is_enabled = is_telemetry_enabled(settings)

    try:
        from glean import Glean
    except ImportError:
        # Glean isn't importable from this site; fall back to a no-op
        # telemetry object that remembers the import failure.
        return NoopTelemetry(is_enabled)

    # (Path is already imported at module scope; the previous redundant local
    # "from pathlib import Path" has been removed.)
    Glean.initialize(
        "mozilla.mach",
        "Unknown",
        is_enabled,
        data_dir=Path(get_state_dir()) / "glean",
    )
    return GleanTelemetry()
+
+
def report_invocation_metrics(telemetry, command):
    """Record the command name, start its duration timer, and (when possible)
    record the path-filtered argv for this mach invocation."""
    metrics = telemetry.metrics(MACH_METRICS_PATH)
    metrics.mach.command.set(command)
    metrics.mach.duration.start()

    try:
        build_instance = MozbuildObject.from_environment()
    except BuildEnvironmentNotFoundException:
        # Mach may be invoked with the state dir as the current working
        # directory, in which case we're not able to find the topsrcdir (so
        # we can't create a MozbuildObject instance).
        # Without this information, we're unable to filter argv paths, so
        # we skip submitting them to telemetry.
        return

    filtered_argv = filter_args(
        command, sys.argv, build_instance.topsrcdir, build_instance.topobjdir
    )
    metrics.mach.argv.set(filtered_argv)
+
+
def is_applicable_telemetry_environment():
    """Return whether this process should collect telemetry at all.

    Only the top-level mach process (not subshells) outside of CI qualifies.
    """
    # This is a child mach process if the recorded parent pid differs from
    # ours; telemetry is collected by the parent, so don't collect it again.
    if os.environ.get("MACH_MAIN_PID") != str(os.getpid()):
        return False

    # CI environments (automation or Taskcluster) are excluded.
    return not any(var in os.environ for var in ("MOZ_AUTOMATION", "TASK_ID"))
+
+
def is_telemetry_enabled(settings):
    """Return whether telemetry reporting is active for this invocation.

    An explicit environment opt-out (DISABLE_TELEMETRY=1) always wins over
    the persisted settings value.
    """
    disabled_by_env = os.environ.get("DISABLE_TELEMETRY") == "1"
    return False if disabled_by_env else settings.mach_telemetry.is_enabled
+
+
def arcrc_path():
    """Return the expected location of the Arcanist credentials file."""
    if sys.platform.startswith(("win32", "msys")):
        # On Windows, .arcrc lives under %APPDATA%.
        return Path(os.environ.get("APPDATA", "")) / ".arcrc"
    return Path("~/.arcrc").expanduser()
+
+
def resolve_setting_from_arcconfig(topsrcdir: Path, setting):
    """Look up `setting` in the first readable .arcconfig for this checkout.

    Candidates are searched in order: the hg dir, the git common dir
    (resolving worktree-style ".git" files via git itself), and the checkout
    root. Returns the value, or None when the setting isn't found anywhere.
    """
    git_path = topsrcdir / ".git"
    if git_path.is_file():
        # A ".git" *file* indicates a worktree/submodule checkout; ask git
        # where the real common dir lives.
        git_path = Path(
            subprocess.check_output(
                ["git", "rev-parse", "--git-common-dir"],
                cwd=str(topsrcdir),
                universal_newlines=True,
            )
        )

    candidates = (
        topsrcdir / ".hg" / ".arcconfig",
        git_path / ".arcconfig",
        topsrcdir / ".arcconfig",
    )
    for arcconfig_path in candidates:
        try:
            with open(arcconfig_path, "r") as arcconfig_file:
                arcconfig = json.load(arcconfig_file)
        except (json.JSONDecodeError, FileNotFoundError):
            # Missing or malformed config file: try the next candidate.
            continue

        value = arcconfig.get(setting)
        if value:
            return value
+
+
def resolve_is_employee_by_credentials(topsrcdir: Path):
    """Use Phabricator credentials (if present) to query Bugzilla group
    membership.

    Returns True/False for employee status, or None whenever the check can't
    be performed (no phabricator URI, no .arcrc, or no token).
    """
    phabricator_uri = resolve_setting_from_arcconfig(topsrcdir, "phabricator.uri")
    if not phabricator_uri:
        return None

    try:
        with open(arcrc_path(), "r") as arcrc_file:
            arcrc = json.load(arcrc_file)
    except (json.JSONDecodeError, FileNotFoundError):
        return None

    host_key = urllib_parse.urljoin(phabricator_uri, "api/")
    phabricator_token = arcrc.get("hosts", {}).get(host_key, {}).get("token")
    if not phabricator_token:
        return None

    bmo_uri = (
        resolve_setting_from_arcconfig(topsrcdir, "bmo_url")
        or "https://bugzilla.mozilla.org"
    )
    whoami_url = urllib_parse.urljoin(bmo_uri, "rest/whoami")
    # Bugzilla accepts the Phabricator token for identification.
    bmo_result = requests.get(
        whoami_url, headers={"X-PHABRICATOR-TOKEN": phabricator_token}
    )
    return "mozilla-employee-confidential" in bmo_result.json().get("groups", [])
+
+
def resolve_is_employee_by_vcs(topsrcdir: Path):
    """Guess employee status from the VCS-configured email address.

    Returns True/False, or None when there's no usable repository or email.
    """
    try:
        repository = get_repository_object(str(topsrcdir))
    except InvalidRepoPath:
        return None

    user_email = repository.get_user_email()
    if not user_email:
        return None
    return "@mozilla.com" in user_email
+
+
def resolve_is_employee(topsrcdir: Path):
    """Detect whether or not the current user is a Mozilla employee.

    Checks using Bugzilla authentication, if possible. Otherwise falls back to
    checking if the email configured in VCS is "@mozilla.com".

    Returns True if the user could be identified as an employee, and False
    otherwise (including when neither check could identify the user - the
    VCS fallback coerces "unknown" to False).
    """
    by_credentials = resolve_is_employee_by_credentials(topsrcdir)
    if by_credentials is not None:
        return by_credentials

    return resolve_is_employee_by_vcs(topsrcdir) or False
+
+
def record_telemetry_settings(
    main_settings,
    state_dir: Path,
    is_enabled,
):
    """Persist the telemetry opt-in decision to the user's machrc.

    The main settings object may include config loaded from
    "$topsrcdir/machrc", which must not leak into the user-level file. So a
    dedicated, telemetry-only settings object is loaded from (and written
    back to) the state-dir machrc, and the live main settings are updated
    in-memory afterwards since later telemetry code reads from them.
    """
    settings_path = state_dir / "machrc"

    # Build a fresh view of the user's machrc containing only telemetry config.
    file_settings = ConfigSettings()
    file_settings.register_provider(TelemetrySettings)
    try:
        file_settings.load_file(settings_path)
    except configparser.Error as error:
        print(
            f"Your mach configuration file at `{settings_path}` cannot be parsed:\n{error}"
        )
        return

    file_settings.mach_telemetry.is_enabled = is_enabled
    file_settings.mach_telemetry.is_set_up = True
    with open(settings_path, "w") as f:
        file_settings.write(f)

    # Mirror the new values onto the live settings object for the rest of
    # this mach invocation.
    main_settings.mach_telemetry.is_enabled = is_enabled
    main_settings.mach_telemetry.is_set_up = True
+
+
# Preamble shared by every telemetry notice/prompt; callers interpolate it
# into a message template via "%s" (see print_telemetry_message_employee and
# prompt_telemetry_message_contributor).
TELEMETRY_DESCRIPTION_PREAMBLE = """
Mozilla collects data to improve the developer experience.
To learn more about the data we intend to collect, read here:
  https://firefox-source-docs.mozilla.org/build/buildsystem/telemetry.html
If you have questions, please ask in #build on Matrix:
  https://chat.mozilla.org/#/room/#build:mozilla.org
""".strip()
+
+
def print_telemetry_message_employee():
    """Tell a Mozilla employee that telemetry was automatically enabled.

    Always returns True, mirroring the "enabled" result shape of the
    contributor prompt.
    """
    notice = dedent(
        """
        %s

        As a Mozilla employee, telemetry has been automatically enabled.
        """
    ).strip()
    print(notice % TELEMETRY_DESCRIPTION_PREAMBLE)
    return True
+
+
def prompt_telemetry_message_contributor():
    """Ask a contributor whether to enable build system telemetry.

    Re-prompts until the answer is empty (default: yes), "y", or "n"
    (case-insensitive). Returns True to enable telemetry.
    """
    # The prompt text never changes, so build it once outside the loop.
    prompt = (
        dedent(
            """
            %s

            If you'd like to opt out of data collection, select (N) at the prompt.
            Would you like to enable build system telemetry? (Yn): """
        )
        % TELEMETRY_DESCRIPTION_PREAMBLE
    ).strip()

    while True:
        answer = input(prompt).strip().lower()
        if answer == "":
            # Hitting enter accepts the default (yes).
            return True
        if answer in ("y", "n"):
            return answer == "y"
        print("ERROR! Please enter y or n!")
+
+
def initialize_telemetry_setting(settings, topsrcdir: str, state_dir: str):
    """Enables telemetry for employees or prompts the user."""
    topsrcdir = Path(topsrcdir) if topsrcdir is not None else None
    state_dir = Path(state_dir) if state_dir is not None else None

    # If the user doesn't care about telemetry for this invocation, don't
    # make requests to Bugzilla and/or prompt for whether to opt in.
    if os.environ.get("DISABLE_TELEMETRY") == "1":
        return

    try:
        is_employee = resolve_is_employee(topsrcdir)
    except requests.exceptions.RequestException:
        # Network trouble talking to Bugzilla; silently skip setup this run.
        return

    if is_employee:
        print_telemetry_message_employee()
        is_enabled = True
    else:
        is_enabled = prompt_telemetry_message_contributor()

    record_telemetry_settings(settings, state_dir, is_enabled)
diff --git a/python/mach/mach/telemetry_interface.py b/python/mach/mach/telemetry_interface.py
new file mode 100644
index 0000000000..3ed8ce5674
--- /dev/null
+++ b/python/mach/mach/telemetry_interface.py
@@ -0,0 +1,77 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import sys
+from pathlib import Path
+from typing import Union
+from unittest.mock import Mock
+
+from mach.site import MozSiteMetadata, SitePackagesSource
+
+
+class NoopTelemetry(object):
+    """Fallback telemetry implementation used when Glean is unavailable.
+
+    Mirrors the GleanTelemetry interface (metrics()/submit()) so callers
+    never have to check which implementation they hold.
+    """
+
+    def __init__(self, failed_glean_import):
+        # True when the caller tried and failed to import glean; drives the
+        # diagnostic hint printed at submit() time.
+        self._failed_glean_import = failed_glean_import
+
+    def metrics(self, metrics_path: Union[str, Path]):
+        # Return a Mock so metric accesses silently no-op.
+        return Mock()
+
+    def submit(self, is_bootstrap):
+        # During bootstrap a missing glean is expected, so stay quiet then.
+        if self._failed_glean_import and not is_bootstrap:
+            active_site = MozSiteMetadata.from_runtime()
+            # Tailor the remediation hint to where Mach sources its packages.
+            if active_site.mach_site_packages_source == SitePackagesSource.SYSTEM:
+                hint = (
+                    "Mach is looking for glean in the system packages. This can be "
+                    "resolved by installing it there, or by allowing Mach to run "
+                    "without using the system Python packages."
+                )
+            elif active_site.mach_site_packages_source == SitePackagesSource.NONE:
+                hint = (
+                    "This is because Mach is currently configured without a source "
+                    "for native Python packages."
+                )
+            else:
+                hint = "You may need to run |mach bootstrap|."
+
+            print(
+                f"Glean could not be found, so telemetry will not be reported. {hint}",
+                file=sys.stderr,
+            )
+
+
+class GleanTelemetry(object):
+    """Records and sends Telemetry using Glean.
+
+    Metrics are defined in python/mozbuild/metrics.yaml.
+    Pings are defined in python/mozbuild/pings.yaml.
+
+    The "metrics" and "pings" properties may be replaced with no-op implementations if
+    Glean isn't available. This allows consumers to report telemetry without having
+    to guard against incompatible environments.
+
+    Also tracks whether an employee was just automatically opted into telemetry
+    during this mach invocation.
+    """
+
+    def __init__(
+        self,
+    ):
+        # Cache of metrics_path -> loaded metrics object so each yaml file
+        # is only parsed once per process.
+        self._metrics_cache = {}
+
+    def metrics(self, metrics_path: Union[str, Path]):
+        """Load (and cache) the Glean metrics defined at *metrics_path*."""
+        if metrics_path not in self._metrics_cache:
+            # Imported lazily so this module can be imported when glean is
+            # absent (NoopTelemetry covers that case).
+            from glean import load_metrics
+
+            metrics = load_metrics(metrics_path)
+            self._metrics_cache[metrics_path] = metrics
+
+        return self._metrics_cache[metrics_path]
+
+    def submit(self, _):
+        from pathlib import Path
+
+        from glean import load_pings
+
+        # NOTE(review): the class docstring says pings live in
+        # python/mozbuild/pings.yaml, but this path resolves relative to the
+        # mach package root — confirm which is current.
+        pings = load_pings(Path(__file__).parent.parent / "pings.yaml")
+        pings.usage.submit()
diff --git a/python/mach/mach/terminal.py b/python/mach/mach/terminal.py
new file mode 100644
index 0000000000..a0c8d0a6ed
--- /dev/null
+++ b/python/mach/mach/terminal.py
@@ -0,0 +1,76 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""This file contains code for interacting with terminals.
+
+All the terminal interaction code is consolidated so the complexity can be in
+one place, away from code that is commonly looked at.
+"""
+
+import logging
+import sys
+
+from six.moves import range
+
+
+class LoggingHandler(logging.Handler):
+    """Custom logging handler that works with terminal window dressing.
+
+    This is alternative terminal logging handler which contains smarts for
+    emitting terminal control characters properly. Currently, it has generic
+    support for "footer" elements at the bottom of the screen. Functionality
+    can be added when needed.
+    """
+
+    def __init__(self):
+        logging.Handler.__init__(self)
+
+        # Output stream; callers may swap in a different file handle.
+        self.fh = sys.stdout
+        # Optional TerminalFooter-like object (needs clear()/draw()).
+        self.footer = None
+
+    def flush(self):
+        # Serialize against other handler operations via the handler lock.
+        self.acquire()
+
+        try:
+            self.fh.flush()
+        finally:
+            self.release()
+
+    def emit(self, record):
+        msg = self.format(record)
+
+        # Erase the footer, write the record, then redraw the footer so it
+        # stays pinned to the bottom of the screen.
+        if self.footer:
+            self.footer.clear()
+
+        self.fh.write(msg)
+        self.fh.write("\n")
+
+        if self.footer:
+            self.footer.draw()
+
+        # If we don't flush, the footer may not get drawn.
+        self.flush()
+
+
+class TerminalFooter(object):
+    """Represents something drawn on the bottom of a terminal.
+
+    Abstract base: subclasses must implement clear() and draw().
+    """
+
+    def __init__(self, terminal):
+        # *terminal* is expected to expose move_x/clear_eol/move_up/move_down
+        # (a blessed/blessings-style Terminal) — presumably; see _clear_lines.
+        self.t = terminal
+        self.fh = sys.stdout
+
+    def _clear_lines(self, n):
+        # Wipe the bottom n lines, then reposition the cursor at column 0.
+        for i in range(n):
+            self.fh.write(self.t.move_x(0))
+            self.fh.write(self.t.clear_eol())
+            self.fh.write(self.t.move_up())
+
+        self.fh.write(self.t.move_down())
+        self.fh.write(self.t.move_x(0))
+
+    def clear(self):
+        raise Exception("clear() must be implemented.")
+
+    def draw(self):
+        raise Exception("draw() must be implemented.")
diff --git a/python/mach/mach/test/__init__.py b/python/mach/mach/test/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mach/mach/test/__init__.py
diff --git a/python/mach/mach/test/conftest.py b/python/mach/mach/test/conftest.py
new file mode 100644
index 0000000000..78129acb58
--- /dev/null
+++ b/python/mach/mach/test/conftest.py
@@ -0,0 +1,84 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import sys
+import unittest
+from collections.abc import Iterable
+from pathlib import Path
+from typing import List, Optional, Union
+
+import pytest
+from buildconfig import topsrcdir
+
+try:
+ from StringIO import StringIO
+except ImportError:
+ # TODO io.StringIO causes failures with Python 2 (needs to be sorted out)
+ from io import StringIO
+
+from mach.main import Mach
+
+PROVIDER_DIR = Path(__file__).resolve().parent / "providers"
+
+
+@pytest.fixture(scope="class")
+def get_mach(request):
+    """Class-scoped fixture yielding a factory that builds Mach instances.
+
+    When requested from a unittest.TestCase subclass, the factory is also
+    bound onto the class as ``get_mach`` so tests can call self.get_mach().
+    """
+
+    def _populate_context(key):
+        # Default context handler: only "topdir" is resolvable.
+        if key == "topdir":
+            return topsrcdir
+
+    def inner(
+        provider_files: Optional[Union[Path, List[Path]]] = None,
+        entry_point=None,
+        context_handler=None,
+    ):
+        m = Mach(str(Path.cwd()))
+        m.define_category("testing", "Mach unittest", "Testing for mach core", 10)
+        m.define_category("misc", "Mach misc", "Testing for mach core", 20)
+        m.populate_context_handler = context_handler or _populate_context
+
+        if provider_files:
+            # A bare Path is not Iterable, so a single file gets wrapped.
+            if not isinstance(provider_files, Iterable):
+                provider_files = [provider_files]
+
+            for path in provider_files:
+                m.load_commands_from_file(PROVIDER_DIR / path)
+
+        if entry_point:
+            m.load_commands_from_entry_point(entry_point)
+
+        return m
+
+    if request.cls and issubclass(request.cls, unittest.TestCase):
+        request.cls.get_mach = lambda cls, *args, **kwargs: inner(*args, **kwargs)
+    return inner
+
+
+@pytest.fixture(scope="class")
+def run_mach(request, get_mach):
+    """Class-scoped fixture yielding a runner that invokes mach with argv.
+
+    The runner returns (exit_code, stdout_text, stderr_text); SystemExit is
+    swallowed and reported as a None exit code.
+    """
+
+    def inner(argv, *args, **kwargs):
+        m = get_mach(*args, **kwargs)
+
+        stdout = StringIO()
+        stderr = StringIO()
+
+        # Python 2 StringIO objects have no encoding attribute; presumably
+        # mach's output layer expects one — set it explicitly there.
+        if sys.version_info < (3, 0):
+            stdout.encoding = "UTF-8"
+            stderr.encoding = "UTF-8"
+
+        try:
+            result = m.run(argv, stdout=stdout, stderr=stderr)
+        except SystemExit:
+            # Map an explicit interpreter exit to "no result".
+            result = None
+
+        return (result, stdout.getvalue(), stderr.getvalue())
+
+    if request.cls and issubclass(request.cls, unittest.TestCase):
+        request.cls._run_mach = lambda cls, *args, **kwargs: inner(*args, **kwargs)
+    return inner
+
+
+@pytest.mark.usefixtures("get_mach", "run_mach")
+class TestBase(unittest.TestCase):
+    # Base class whose subclasses get self.get_mach / self._run_mach bound
+    # onto them by the class-scoped fixtures in this module.
+    pass
diff --git a/python/mach/mach/test/invoke_mach_command.py b/python/mach/mach/test/invoke_mach_command.py
new file mode 100644
index 0000000000..1efa102ef5
--- /dev/null
+++ b/python/mach/mach/test/invoke_mach_command.py
@@ -0,0 +1,4 @@
+import subprocess
+import sys
+
+subprocess.check_call([sys.executable] + sys.argv[1:])
diff --git a/python/mach/mach/test/providers/__init__.py b/python/mach/mach/test/providers/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mach/mach/test/providers/__init__.py
diff --git a/python/mach/mach/test/providers/basic.py b/python/mach/mach/test/providers/basic.py
new file mode 100644
index 0000000000..26cdfdf588
--- /dev/null
+++ b/python/mach/mach/test/providers/basic.py
@@ -0,0 +1,15 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from mach.decorators import Command, CommandArgument
+
+
+# Minimal no-op commands used by tests that only need commands to exist.
+@Command("cmd_foo", category="testing")
+def run_foo(command_context):
+    pass
+
+
+@Command("cmd_bar", category="testing")
+@CommandArgument("--baz", action="store_true", help="Run with baz")
+def run_bar(command_context, baz=None):
+    pass
diff --git a/python/mach/mach/test/providers/commands.py b/python/mach/mach/test/providers/commands.py
new file mode 100644
index 0000000000..6b8210c513
--- /dev/null
+++ b/python/mach/mach/test/providers/commands.py
@@ -0,0 +1,33 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from functools import partial
+
+from mach.decorators import Command, CommandArgument
+
+
+# Condition helpers: each receives the command class/context-like object.
+def is_foo(cls):
+    """Foo must be true"""
+    return cls.foo
+
+
+def is_bar(val, cls):
+    """Bar must equal val"""
+    return cls.bar == val
+
+
+# NOTE(review): run_foo declares --arg but takes no matching parameter;
+# presumably these commands are only listed (completion tests), never
+# dispatched — confirm before dispatching them.
+@Command("cmd_foo", category="testing")
+@CommandArgument("--arg", default=None, help="Argument help.")
+def run_foo(command_context):
+    pass
+
+
+@Command("cmd_bar", category="testing", conditions=[partial(is_bar, False)])
+def run_bar(command_context):
+    pass
+
+
+@Command("cmd_foobar", category="testing", conditions=[is_foo, partial(is_bar, True)])
+def run_foobar(command_context):
+    pass
diff --git a/python/mach/mach/test/providers/conditions.py b/python/mach/mach/test/providers/conditions.py
new file mode 100644
index 0000000000..db2f3f8123
--- /dev/null
+++ b/python/mach/mach/test/providers/conditions.py
@@ -0,0 +1,55 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mach.decorators import Command
+
+
+# Trivial conditions for exercising the condition-filtering machinery.
+def is_true(cls):
+    return True
+
+
+def is_false(cls):
+    return False
+
+
+# NOTE(review): unlike providers/basic.py, these handlers take
+# (self, command_context) — confirm this signature is intentional.
+@Command("cmd_condition_true", category="testing", conditions=[is_true])
+def run_condition_true(self, command_context):
+    pass
+
+
+@Command("cmd_condition_false", category="testing", conditions=[is_false])
+def run_condition_false(self, command_context):
+    pass
+
+
+@Command(
+    "cmd_condition_true_and_false", category="testing", conditions=[is_true, is_false]
+)
+def run_condition_true_and_false(self, command_context):
+    pass
+
+
+# Conditions that read values injected via the mach context.
+def is_ctx_foo(cls):
+    """Foo must be true"""
+    return cls._mach_context.foo
+
+
+def is_ctx_bar(cls):
+    """Bar must be true"""
+    return cls._mach_context.bar
+
+
+@Command("cmd_foo_ctx", category="testing", conditions=[is_ctx_foo])
+def run_foo_ctx(self, command_context):
+    pass
+
+
+@Command("cmd_bar_ctx", category="testing", conditions=[is_ctx_bar])
+def run_bar_ctx(self, command_context):
+    pass
+
+
+@Command("cmd_foobar_ctx", category="testing", conditions=[is_ctx_foo, is_ctx_bar])
+def run_foobar_ctx(self, command_context):
+    pass
diff --git a/python/mach/mach/test/providers/conditions_invalid.py b/python/mach/mach/test/providers/conditions_invalid.py
new file mode 100644
index 0000000000..228c56f0bf
--- /dev/null
+++ b/python/mach/mach/test/providers/conditions_invalid.py
@@ -0,0 +1,10 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mach.decorators import Command
+
+
+# Deliberately invalid: conditions must be callables, not strings. Loading
+# this provider is expected to raise MachError.
+@Command("cmd_foo", category="testing", conditions=["invalid"])
+def run_foo(command_context):
+    pass
diff --git a/python/mach/mach/test/providers/throw.py b/python/mach/mach/test/providers/throw.py
new file mode 100644
index 0000000000..9ddc7653c0
--- /dev/null
+++ b/python/mach/mach/test/providers/throw.py
@@ -0,0 +1,18 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mach.decorators import Command, CommandArgument
+from mach.test.providers import throw2
+
+
+@Command("throw", category="testing")
+@CommandArgument("--message", "-m", default="General Error")
+def throw(command_context, message):
+    # Raises directly inside the command module (contrast with throw_deep).
+    raise Exception(message)
+
+
+@Command("throw_deep", category="testing")
+@CommandArgument("--message", "-m", default="General Error")
+def throw_deep(command_context, message):
+    # Raises from a helper module to exercise deep-stack error reporting.
+    throw2.throw_deep(message)
diff --git a/python/mach/mach/test/providers/throw2.py b/python/mach/mach/test/providers/throw2.py
new file mode 100644
index 0000000000..9ff7f2798e
--- /dev/null
+++ b/python/mach/mach/test/providers/throw2.py
@@ -0,0 +1,15 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This file exists to trigger the differences in mach error reporting between
+# exceptions that occur in mach command modules themselves and in the things
+# they call.
+
+
+def throw_deep(message):
+    """Raise Exception(message) from one stack frame deeper."""
+    return throw_real(message)
+
+
+def throw_real(message):
+    """Always raise an Exception carrying *message*."""
+    raise Exception(message)
diff --git a/python/mach/mach/test/python.ini b/python/mach/mach/test/python.ini
new file mode 100644
index 0000000000..de09924b67
--- /dev/null
+++ b/python/mach/mach/test/python.ini
@@ -0,0 +1,22 @@
+[DEFAULT]
+subsuite = mach
+
+[test_commands.py]
+[test_conditions.py]
+skip-if = python == 3
+[test_config.py]
+[test_decorators.py]
+[test_dispatcher.py]
+[test_entry_point.py]
+[test_error_output.py]
+skip-if = python == 3
+[test_logger.py]
+[test_mach.py]
+[test_site.py]
+[test_site_activation.py]
+[test_site_compatibility.py]
+# The Windows and Mac workers only use the internal PyPI mirror,
+# which will be missing packages required for this test.
+skip-if =
+ os == "win"
+ os == "mac"
diff --git a/python/mach/mach/test/script_site_activation.py b/python/mach/mach/test/script_site_activation.py
new file mode 100644
index 0000000000..8c23f1a19c
--- /dev/null
+++ b/python/mach/mach/test/script_site_activation.py
@@ -0,0 +1,67 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This script is used by "test_site_activation.py" to verify how site activations
+# affect the sys.path.
+# The sys.path is printed in three stages:
+# 1. Once at the beginning
+# 2. Once after Mach site activation
+# 3. Once after the command site activation
+# The output of this script should be an ast-parsable list with three nested lists: one
+# for each sys.path state.
+# Note that virtualenv-creation output may need to be filtered out - it can be done by
+# only ast-parsing the last line of text outputted by this script.
+
+import os
+import sys
+from unittest.mock import patch
+
+from mach.requirements import MachEnvRequirements, PthSpecifier
+from mach.site import CommandSiteManager, MachSiteManager
+
+
+def main():
+    """Activate the Mach and command sites, printing sys.path at each stage.
+
+    Output is a single printed list of three sys.path snapshots:
+    [initial, after-mach-activation, after-command-activation].
+    """
+    # Should be set by calling test
+    topsrcdir = os.environ["TOPSRCDIR"]
+    command_site = os.environ["COMMAND_SITE"]
+    mach_site_requirements = os.environ["MACH_SITE_PTH_REQUIREMENTS"]
+    command_site_requirements = os.environ["COMMAND_SITE_PTH_REQUIREMENTS"]
+    work_dir = os.environ["WORK_DIR"]
+
+    def resolve_requirements(topsrcdir, site_name):
+        # Test stub substituted for mach.site.resolve_requirements: builds
+        # pth-only requirements from the env vars above.
+        req = MachEnvRequirements()
+        if site_name == "mach":
+            req.pth_requirements = [
+                PthSpecifier(path) for path in mach_site_requirements.split(os.pathsep)
+            ]
+        else:
+            req.pth_requirements = [PthSpecifier(command_site_requirements)]
+        return req
+
+    with patch("mach.site.resolve_requirements", resolve_requirements):
+        initial_sys_path = sys.path.copy()
+
+        mach_site = MachSiteManager.from_environment(
+            topsrcdir,
+            lambda: work_dir,
+        )
+        mach_site.activate()
+        mach_sys_path = sys.path.copy()
+
+        # Note: command_site is rebound here from the site-name string to
+        # the CommandSiteManager instance.
+        command_site = CommandSiteManager.from_environment(
+            topsrcdir, lambda: work_dir, command_site, work_dir
+        )
+        command_site.activate()
+        command_sys_path = sys.path.copy()
+        # Callers ast-parse the last printed line; keep this a single print.
+        print(
+            [
+                initial_sys_path,
+                mach_sys_path,
+                command_sys_path,
+            ]
+        )
diff --git a/python/mach/mach/test/test_commands.py b/python/mach/mach/test/test_commands.py
new file mode 100644
index 0000000000..38191b0898
--- /dev/null
+++ b/python/mach/mach/test/test_commands.py
@@ -0,0 +1,79 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import sys
+from pathlib import Path
+
+import pytest
+from buildconfig import topsrcdir
+from mozunit import main
+
+import mach
+
+ALL_COMMANDS = [
+ "cmd_bar",
+ "cmd_foo",
+ "cmd_foobar",
+ "mach-commands",
+ "mach-completion",
+ "mach-debug-commands",
+]
+
+
+@pytest.fixture
+def run_completion(run_mach):
+    """Fixture returning a runner for the ``mach-completion`` command."""
+
+    def inner(args=[]):
+        # The default list is never mutated (args is rebound below), so the
+        # shared-mutable-default pitfall does not bite here.
+        mach_dir = Path(mach.__file__).parent
+        providers = [
+            Path("commands.py"),
+            mach_dir / "commands" / "commandinfo.py",
+        ]
+
+        def context_handler(key):
+            if key == "topdir":
+                return topsrcdir
+
+        args = ["mach-completion"] + args
+        return run_mach(args, providers, context_handler=context_handler)
+
+    return inner
+
+
+def format(targets):
+    # Join names with a trailing newline to match command output.
+    # (Shadows the builtin ``format``; local to this test module.)
+    return "\n".join(targets) + "\n"
+
+
+def test_mach_completion(run_completion):
+    """mach-completion lists all commands, then flags for a full match."""
+    result, stdout, stderr = run_completion()
+    assert result == 0
+    assert stdout == format(ALL_COMMANDS)
+
+    result, stdout, stderr = run_completion(["cmd_f"])
+    assert result == 0
+    # While it seems like this should return only commands that have
+    # 'cmd_f' as a prefix, the completion script will handle this case
+    # properly.
+    assert stdout == format(ALL_COMMANDS)
+
+    result, stdout, stderr = run_completion(["cmd_foo"])
+    assert result == 0
+    assert stdout == format(["help", "--arg"])
+
+
+@pytest.mark.parametrize("shell", ("bash", "fish", "zsh"))
+def test_generate_mach_completion_script(run_completion, shell):
+    """Each supported shell script embeds the known commands and args."""
+    rv, out, err = run_completion([shell])
+    # Echo output so failures show the generated script in the pytest log.
+    print(out)
+    print(err, file=sys.stderr)
+    assert rv == 0
+    assert err == ""
+
+    assert "cmd_foo" in out
+    assert "arg" in out
+    assert "cmd_foobar" in out
+    assert "cmd_bar" in out
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mach/mach/test/test_conditions.py b/python/mach/mach/test/test_conditions.py
new file mode 100644
index 0000000000..5775790e69
--- /dev/null
+++ b/python/mach/mach/test/test_conditions.py
@@ -0,0 +1,101 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from pathlib import Path
+
+from buildconfig import topsrcdir
+from mozunit import main
+
+from mach.base import MachError
+from mach.main import Mach
+from mach.registrar import Registrar
+from mach.test.conftest import PROVIDER_DIR, TestBase
+
+
+def _make_populate_context(include_extra_attributes):
+    """Build a mach context handler exposing "topdir" (and optionally
+    foo=True / bar=False when *include_extra_attributes* is set)."""
+
+    def _populate_context(key=None):
+        if key is None:
+            return
+
+        attributes = {
+            "topdir": topsrcdir,
+        }
+        if include_extra_attributes:
+            attributes["foo"] = True
+            attributes["bar"] = False
+
+        try:
+            return attributes[key]
+        except KeyError:
+            # Mach context lookups expect AttributeError for unknown keys.
+            raise AttributeError(key)
+
+    return _populate_context
+
+
+_populate_bare_context = _make_populate_context(False)
+_populate_context = _make_populate_context(True)
+
+
+class TestConditions(TestBase):
+    """Tests for conditionally filtering commands."""
+
+    def _run(self, args, context_handler=_populate_bare_context):
+        # Run mach against the conditions.py provider with the given context.
+        return self._run_mach(
+            args, Path("conditions.py"), context_handler=context_handler
+        )
+
+    def test_conditions_pass(self):
+        """Test that a command which passes its conditions is runnable."""
+
+        self.assertEqual((0, "", ""), self._run(["cmd_condition_true"]))
+        self.assertEqual((0, "", ""), self._run(["cmd_foo_ctx"], _populate_context))
+
+    def test_invalid_context_message(self):
+        """Test that commands which do not pass all their conditions
+        print the proper failure message."""
+
+        def is_bar():
+            """Bar must be true"""
+
+        # Only the docstring matters: the failure message is built from the
+        # failed condition's docstring.
+        fail_conditions = [is_bar]
+
+        for name in ("cmd_condition_false", "cmd_condition_true_and_false"):
+            result, stdout, stderr = self._run([name])
+            self.assertEqual(1, result)
+
+            fail_msg = Registrar._condition_failed_message(name, fail_conditions)
+            self.assertEqual(fail_msg.rstrip(), stdout.rstrip())
+
+        for name in ("cmd_bar_ctx", "cmd_foobar_ctx"):
+            result, stdout, stderr = self._run([name], _populate_context)
+            self.assertEqual(1, result)
+
+            fail_msg = Registrar._condition_failed_message(name, fail_conditions)
+            self.assertEqual(fail_msg.rstrip(), stdout.rstrip())
+
+    def test_invalid_type(self):
+        """Test that a condition which is not callable raises an exception."""
+
+        m = Mach(str(Path.cwd()))
+        m.define_category("testing", "Mach unittest", "Testing for mach core", 10)
+        self.assertRaises(
+            MachError,
+            m.load_commands_from_file,
+            PROVIDER_DIR / "conditions_invalid.py",
+        )
+
+    def test_help_message(self):
+        """Test that commands that are not runnable do not show up in help."""
+
+        result, stdout, stderr = self._run(["help"], _populate_context)
+        self.assertIn("cmd_condition_true", stdout)
+        self.assertNotIn("cmd_condition_false", stdout)
+        self.assertNotIn("cmd_condition_true_and_false", stdout)
+        self.assertIn("cmd_foo_ctx", stdout)
+        self.assertNotIn("cmd_bar_ctx", stdout)
+        self.assertNotIn("cmd_foobar_ctx", stdout)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mach/mach/test/test_config.py b/python/mach/mach/test/test_config.py
new file mode 100644
index 0000000000..25b75c8685
--- /dev/null
+++ b/python/mach/mach/test/test_config.py
@@ -0,0 +1,292 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+import sys
+import unittest
+from pathlib import Path
+
+from mozfile.mozfile import NamedTemporaryFile
+from mozunit import main
+from six import string_types
+
+from mach.config import (
+ BooleanType,
+ ConfigException,
+ ConfigSettings,
+ IntegerType,
+ PathType,
+ PositiveIntegerType,
+ StringType,
+)
+from mach.decorators import SettingsProvider
+
+CONFIG1 = r"""
+[foo]
+
+bar = bar_value
+baz = /baz/foo.c
+"""
+
+CONFIG2 = r"""
+[foo]
+
+bar = value2
+"""
+
+
+# Simple string/path settings used by most tests below.
+@SettingsProvider
+class Provider1(object):
+    config_settings = [
+        ("foo.bar", StringType, "desc"),
+        ("foo.baz", PathType, "desc"),
+    ]
+
+
+# Registers the same option twice; registering it must raise.
+@SettingsProvider
+class ProviderDuplicate(object):
+    config_settings = [
+        ("dupesect.foo", StringType, "desc"),
+        ("dupesect.foo", StringType, "desc"),
+    ]
+
+
+# One setting of each supported type, declared via type classes.
+@SettingsProvider
+class Provider2(object):
+    config_settings = [
+        ("a.string", StringType, "desc"),
+        ("a.boolean", BooleanType, "desc"),
+        ("a.pos_int", PositiveIntegerType, "desc"),
+        ("a.int", IntegerType, "desc"),
+        ("a.path", PathType, "desc"),
+    ]
+
+
+# Same settings as Provider2, but declared via string type names and a
+# classmethod instead of a class attribute.
+@SettingsProvider
+class Provider3(object):
+    @classmethod
+    def config_settings(cls):
+        return [
+            ("a.string", "string", "desc"),
+            ("a.boolean", "boolean", "desc"),
+            ("a.pos_int", "pos_int", "desc"),
+            ("a.int", "int", "desc"),
+            ("a.path", "path", "desc"),
+        ]
+
+
+# Settings with defaults and restricted choice sets; foo.xyz's default "w"
+# is deliberately outside its choices.
+@SettingsProvider
+class Provider4(object):
+    config_settings = [
+        ("foo.abc", StringType, "desc", "a", {"choices": set("abc")}),
+        ("foo.xyz", StringType, "desc", "w", {"choices": set("xyz")}),
+    ]
+
+
+# Wildcard option alongside an explicit one in the same section.
+@SettingsProvider
+class Provider5(object):
+    config_settings = [
+        ("foo.*", "string", "desc"),
+        ("foo.bar", "string", "desc"),
+    ]
+
+
+class TestConfigSettings(unittest.TestCase):
+    """Unit tests for mach.config.ConfigSettings registration, validation,
+    and file round-tripping."""
+
+    def test_empty(self):
+        s = ConfigSettings()
+
+        self.assertEqual(len(s), 0)
+        self.assertNotIn("foo", s)
+
+    def test_duplicate_option(self):
+        s = ConfigSettings()
+
+        with self.assertRaises(ConfigException):
+            s.register_provider(ProviderDuplicate)
+
+    def test_simple(self):
+        s = ConfigSettings()
+        s.register_provider(Provider1)
+
+        self.assertEqual(len(s), 1)
+        self.assertIn("foo", s)
+
+        # Sections are reachable both by item and by attribute access.
+        foo = s["foo"]
+        foo = s.foo
+
+        # No values are set yet, but both settings are declared.
+        self.assertEqual(len(foo), 0)
+        self.assertEqual(len(foo._settings), 2)
+
+        self.assertIn("bar", foo._settings)
+        self.assertIn("baz", foo._settings)
+
+        self.assertNotIn("bar", foo)
+        foo["bar"] = "value1"
+        self.assertIn("bar", foo)
+
+        self.assertEqual(foo["bar"], "value1")
+        self.assertEqual(foo.bar, "value1")
+
+    def test_assignment_validation(self):
+        s = ConfigSettings()
+        s.register_provider(Provider2)
+
+        a = s.a
+
+        # Assigning an undeclared setting raises.
+        exc_type = AttributeError if sys.version_info < (3, 0) else KeyError
+        with self.assertRaises(exc_type):
+            a.undefined = True
+
+        with self.assertRaises(KeyError):
+            a["undefined"] = True
+
+        # Basic type validation.
+        a.string = "foo"
+        a.string = "foo"
+
+        with self.assertRaises(TypeError):
+            a.string = False
+
+        a.boolean = True
+        a.boolean = False
+
+        with self.assertRaises(TypeError):
+            a.boolean = "foo"
+
+        a.pos_int = 5
+        a.pos_int = 0
+
+        with self.assertRaises(ValueError):
+            a.pos_int = -1
+
+        with self.assertRaises(TypeError):
+            a.pos_int = "foo"
+
+        a.int = 5
+        a.int = 0
+        a.int = -5
+
+        with self.assertRaises(TypeError):
+            a.int = 1.24
+
+        with self.assertRaises(TypeError):
+            a.int = "foo"
+
+        # Path settings accept absolute and relative string paths.
+        a.path = "/home/gps"
+        a.path = "foo.c"
+        a.path = "foo/bar"
+        a.path = "./foo"
+
+    def retrieval_type_helper(self, provider):
+        # Shared body for test_retrieval_type: values read back with the
+        # expected Python types regardless of how settings were declared.
+        s = ConfigSettings()
+        s.register_provider(provider)
+
+        a = s.a
+
+        a.string = "foo"
+        a.boolean = True
+        a.pos_int = 12
+        a.int = -4
+        a.path = "./foo/bar"
+
+        self.assertIsInstance(a.string, string_types)
+        self.assertIsInstance(a.boolean, bool)
+        self.assertIsInstance(a.pos_int, int)
+        self.assertIsInstance(a.int, int)
+        self.assertIsInstance(a.path, string_types)
+
+    def test_retrieval_type(self):
+        self.retrieval_type_helper(Provider2)
+        self.retrieval_type_helper(Provider3)
+
+    def test_choices_validation(self):
+        s = ConfigSettings()
+        s.register_provider(Provider4)
+
+        foo = s.foo
+        # Default "a" is within choices; default "w" for xyz is not, so
+        # reading it raises until a valid value is assigned.
+        foo.abc
+        with self.assertRaises(ValueError):
+            foo.xyz
+
+        with self.assertRaises(ValueError):
+            foo.abc = "e"
+
+        foo.abc = "b"
+        foo.xyz = "y"
+
+    def test_wildcard_options(self):
+        s = ConfigSettings()
+        s.register_provider(Provider5)
+
+        foo = s.foo
+
+        self.assertIn("*", foo._settings)
+        self.assertNotIn("*", foo)
+
+        # "baz" is only declared via the wildcard; "bar" explicitly.
+        foo.baz = "value1"
+        foo.bar = "value2"
+
+        self.assertIn("baz", foo)
+        self.assertEqual(foo.baz, "value1")
+
+        self.assertIn("bar", foo)
+        self.assertEqual(foo.bar, "value2")
+
+    def test_file_reading_single(self):
+        temp = NamedTemporaryFile(mode="wt")
+        temp.write(CONFIG1)
+        temp.flush()
+
+        s = ConfigSettings()
+        s.register_provider(Provider1)
+
+        s.load_file(Path(temp.name))
+
+        self.assertEqual(s.foo.bar, "bar_value")
+
+    def test_file_reading_multiple(self):
+        """Loading multiple files has proper overwrite behavior."""
+        temp1 = NamedTemporaryFile(mode="wt")
+        temp1.write(CONFIG1)
+        temp1.flush()
+
+        temp2 = NamedTemporaryFile(mode="wt")
+        temp2.write(CONFIG2)
+        temp2.flush()
+
+        s = ConfigSettings()
+        s.register_provider(Provider1)
+
+        # Later files win: CONFIG2's foo.bar overrides CONFIG1's.
+        s.load_files([Path(temp1.name), Path(temp2.name)])
+
+        self.assertEqual(s.foo.bar, "value2")
+
+    def test_file_reading_missing(self):
+        """Missing files should silently be ignored."""
+
+        s = ConfigSettings()
+
+        s.load_file("/tmp/foo.ini")
+
+    def test_file_writing(self):
+        # Round-trip: write settings out, read them back into a fresh
+        # ConfigSettings, and compare.
+        s = ConfigSettings()
+        s.register_provider(Provider2)
+
+        s.a.string = "foo"
+        s.a.boolean = False
+
+        temp = NamedTemporaryFile("wt")
+        s.write(temp)
+        temp.flush()
+
+        s2 = ConfigSettings()
+        s2.register_provider(Provider2)
+
+        s2.load_file(temp.name)
+
+        self.assertEqual(s.a.string, s2.a.string)
+        self.assertEqual(s.a.boolean, s2.a.boolean)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mach/mach/test/test_decorators.py b/python/mach/mach/test/test_decorators.py
new file mode 100644
index 0000000000..f33b6e7d8f
--- /dev/null
+++ b/python/mach/mach/test/test_decorators.py
@@ -0,0 +1,133 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from pathlib import Path
+from unittest import mock
+from unittest.mock import Mock, patch
+
+import pytest
+from mozbuild.base import MachCommandBase
+from mozunit import main
+
+import mach.decorators
+import mach.registrar
+from mach.base import MachError
+from mach.decorators import Command, CommandArgument, SubCommand
+from mach.requirements import MachEnvRequirements
+from mach.site import CommandSiteManager, MozSiteMetadata, SitePackagesSource
+
+
+@pytest.fixture
+def registrar(monkeypatch):
+    """Provide a fresh MachRegistrar, patched in as mach.decorators.Registrar
+    so @Command registrations in each test stay isolated."""
+    test_registrar = mach.registrar.MachRegistrar()
+    test_registrar.register_category(
+        "testing", "Mach unittest", "Testing for mach decorators"
+    )
+    monkeypatch.setattr(mach.decorators, "Registrar", test_registrar)
+    return test_registrar
+
+
+def test_register_command_with_argument(registrar):
+    """Dispatch forwards a CommandArgument value to the command body."""
+    inner_function = Mock()
+    context = Mock()
+    context.cwd = "."
+
+    @Command("cmd_foo", category="testing")
+    @CommandArgument("--arg", default=None, help="Argument help.")
+    def run_foo(command_context, arg):
+        inner_function(arg)
+
+    registrar.dispatch("cmd_foo", context, arg="argument")
+
+    inner_function.assert_called_with("argument")
+
+
+def test_register_command_with_metrics_path(registrar):
+    """metrics_path flows from the decorator to context.telemetry.metrics
+    and onto the dispatched handler, for commands and subcommands."""
+    context = Mock()
+    context.cwd = "."
+
+    metrics_path = "metrics/path"
+    metrics_mock = Mock()
+    context.telemetry.metrics.return_value = metrics_mock
+
+    @Command("cmd_foo", category="testing", metrics_path=metrics_path)
+    def run_foo(command_context):
+        assert command_context.metrics == metrics_mock
+
+    @SubCommand("cmd_foo", "sub_foo", metrics_path=metrics_path + "2")
+    def run_subfoo(command_context):
+        assert command_context.metrics == metrics_mock
+
+    registrar.dispatch("cmd_foo", context)
+
+    context.telemetry.metrics.assert_called_with(metrics_path)
+    assert context.handler.metrics_path == metrics_path
+
+    # The subcommand carries its own, distinct metrics path.
+    registrar.dispatch("cmd_foo", context, subcommand="sub_foo")
+    assert context.handler.metrics_path == metrics_path + "2"
+
+
+def test_register_command_sets_up_class_at_runtime(registrar):
+    """Each command gets a virtualenv named after its virtualenv_name."""
+    inner_function = Mock()
+
+    context = Mock()
+    context.cwd = "."
+
+    # We test that the virtualenv is set up properly dynamically on
+    # the instance that actually runs the command.
+    @Command("cmd_foo", category="testing", virtualenv_name="env_foo")
+    def run_foo(command_context):
+        assert (
+            Path(command_context.virtualenv_manager.virtualenv_root).name == "env_foo"
+        )
+        inner_function("foo")
+
+    @Command("cmd_bar", category="testing", virtualenv_name="env_bar")
+    def run_bar(command_context):
+        assert (
+            Path(command_context.virtualenv_manager.virtualenv_root).name == "env_bar"
+        )
+        inner_function("bar")
+
+    # Stub that builds a CommandSiteManager directly instead of consulting
+    # the real environment/state directory.
+    def from_environment_patch(
+        topsrcdir: str, state_dir: str, virtualenv_name, directory: str
+    ):
+        return CommandSiteManager(
+            "",
+            "",
+            virtualenv_name,
+            virtualenv_name,
+            MozSiteMetadata(0, "mach", SitePackagesSource.VENV, "", ""),
+            True,
+            MachEnvRequirements(),
+        )
+
+    with mock.patch.object(
+        CommandSiteManager, "from_environment", from_environment_patch
+    ):
+        # Also stub out activation so no real virtualenv is touched.
+        with patch.object(MachCommandBase, "activate_virtualenv"):
+            registrar.dispatch("cmd_foo", context)
+            inner_function.assert_called_with("foo")
+            registrar.dispatch("cmd_bar", context)
+            inner_function.assert_called_with("bar")
+
+
+def test_cannot_create_command_nonexisting_category(registrar):
+    # Registering into an unknown category must raise at decoration time.
+    with pytest.raises(MachError):
+
+        @Command("cmd_foo", category="bar")
+        def run_foo(command_context):
+            pass
+
+
+def test_subcommand_requires_parent_to_exist(registrar):
+    # A SubCommand whose parent command was never registered must raise.
+    with pytest.raises(MachError):
+
+        @SubCommand("sub_foo", "foo")
+        def run_foo(command_context):
+            pass
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mach/mach/test/test_dispatcher.py b/python/mach/mach/test/test_dispatcher.py
new file mode 100644
index 0000000000..85c2e9a847
--- /dev/null
+++ b/python/mach/mach/test/test_dispatcher.py
@@ -0,0 +1,60 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+from io import StringIO
+from pathlib import Path
+
+import pytest
+from mozunit import main
+from six import string_types
+
+from mach.base import CommandContext
+from mach.registrar import Registrar
+
+
+@pytest.mark.usefixtures("get_mach", "run_mach")
+class TestDispatcher(unittest.TestCase):
+ """Tests dispatch related code"""
+
+ def get_parser(self, config=None):
+ mach = self.get_mach(Path("basic.py"))
+
+ for provider in Registrar.settings_providers:
+ mach.settings.register_provider(provider)
+
+ if config:
+ if isinstance(config, string_types):
+ config = StringIO(config)
+ mach.settings.load_fps([config])
+
+ context = CommandContext(cwd="", settings=mach.settings)
+ return mach.get_argument_parser(context)
+
+ def test_command_aliases(self):
+ config = """
+[alias]
+foo = cmd_foo
+bar = cmd_bar
+baz = cmd_bar --baz
+cmd_bar = cmd_bar --baz
+"""
+ parser = self.get_parser(config=config)
+
+ args = parser.parse_args(["foo"])
+ self.assertEqual(args.command, "cmd_foo")
+
+ def assert_bar_baz(argv):
+ args = parser.parse_args(argv)
+ self.assertEqual(args.command, "cmd_bar")
+ self.assertTrue(args.command_args.baz)
+
+ # The following should all result in |cmd_bar --baz|
+ assert_bar_baz(["bar", "--baz"])
+ assert_bar_baz(["baz"])
+ assert_bar_baz(["cmd_bar"])
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mach/mach/test/test_entry_point.py b/python/mach/mach/test/test_entry_point.py
new file mode 100644
index 0000000000..1129eba476
--- /dev/null
+++ b/python/mach/mach/test/test_entry_point.py
@@ -0,0 +1,59 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import imp
+import sys
+from pathlib import Path
+from unittest.mock import patch
+
+from mozunit import main
+
+from mach.base import MachError
+from mach.test.conftest import TestBase
+
+
+class Entry:
+ """Stub replacement for pkg_resources.EntryPoint"""
+
+ def __init__(self, providers):
+ self.providers = providers
+
+ def load(self):
+ def _providers():
+ return self.providers
+
+ return _providers
+
+
+class TestEntryPoints(TestBase):
+ """Test integrating with setuptools entry points"""
+
+ provider_dir = Path(__file__).parent.resolve() / "providers"
+
+ def _run_help(self):
+ return self._run_mach(["help"], entry_point="mach.providers")
+
+ @patch("pkg_resources.iter_entry_points")
+ def test_load_entry_point_from_directory(self, mock):
+ # Ensure parent module is present otherwise we'll (likely) get
+ # an error due to unknown parent.
+ if "mach.commands" not in sys.modules:
+ mod = imp.new_module("mach.commands")
+ sys.modules["mach.commands"] = mod
+
+ mock.return_value = [Entry([self.provider_dir])]
+ # Mach error raised due to conditions_invalid.py
+ with self.assertRaises(MachError):
+ self._run_help()
+
+ @patch("pkg_resources.iter_entry_points")
+ def test_load_entry_point_from_file(self, mock):
+ mock.return_value = [Entry([self.provider_dir / "basic.py"])]
+
+ result, stdout, stderr = self._run_help()
+ self.assertIsNone(result)
+ self.assertIn("cmd_foo", stdout)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mach/mach/test/test_error_output.py b/python/mach/mach/test/test_error_output.py
new file mode 100644
index 0000000000..12eab65856
--- /dev/null
+++ b/python/mach/mach/test/test_error_output.py
@@ -0,0 +1,29 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from pathlib import Path
+
+from mozunit import main
+
+from mach.main import COMMAND_ERROR_TEMPLATE, MODULE_ERROR_TEMPLATE
+
+
+def test_command_error(run_mach):
+ result, stdout, stderr = run_mach(
+ ["throw", "--message", "Command Error"], provider_files=Path("throw.py")
+ )
+ assert result == 1
+ assert COMMAND_ERROR_TEMPLATE % "throw" in stdout
+
+
+def test_invoked_error(run_mach):
+ result, stdout, stderr = run_mach(
+ ["throw_deep", "--message", "Deep stack"], provider_files=Path("throw.py")
+ )
+ assert result == 1
+ assert MODULE_ERROR_TEMPLATE % "throw_deep" in stdout
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mach/mach/test/test_logger.py b/python/mach/mach/test/test_logger.py
new file mode 100644
index 0000000000..643d890de8
--- /dev/null
+++ b/python/mach/mach/test/test_logger.py
@@ -0,0 +1,48 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import logging
+import time
+import unittest
+
+from mozunit import main
+
+from mach.logging import StructuredHumanFormatter
+
+
+class DummyLogger(logging.Logger):
+ def __init__(self, cb):
+ logging.Logger.__init__(self, "test")
+
+ self._cb = cb
+
+ def handle(self, record):
+ self._cb(record)
+
+
+class TestStructuredHumanFormatter(unittest.TestCase):
+ def test_non_ascii_logging(self):
+ # Ensures the formatter doesn't choke when non-ASCII characters are
+ # present in printed parameters.
+ formatter = StructuredHumanFormatter(time.time())
+
+ def on_record(record):
+ result = formatter.format(record)
+ relevant = result[9:]
+
+ self.assertEqual(relevant, "Test: s\xe9curit\xe9")
+
+ logger = DummyLogger(on_record)
+
+ value = "s\xe9curit\xe9"
+
+ logger.log(
+ logging.INFO,
+ "Test: {utf}",
+ extra={"action": "action", "params": {"utf": value}},
+ )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mach/mach/test/test_mach.py b/python/mach/mach/test/test_mach.py
new file mode 100644
index 0000000000..38379d1b49
--- /dev/null
+++ b/python/mach/mach/test/test_mach.py
@@ -0,0 +1,31 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+
+from mozunit import main
+
+
+def test_set_isatty_environ(monkeypatch, get_mach):
+ # Make sure the 'MACH_STDOUT_ISATTY' variable gets set.
+ monkeypatch.delenv("MACH_STDOUT_ISATTY", raising=False)
+ monkeypatch.setattr(os, "isatty", lambda fd: True)
+
+ m = get_mach()
+ orig_run = m._run
+ env_is_set = []
+
+ def wrap_run(*args, **kwargs):
+ env_is_set.append("MACH_STDOUT_ISATTY" in os.environ)
+ return orig_run(*args, **kwargs)
+
+ monkeypatch.setattr(m, "_run", wrap_run)
+
+ ret = m.run([])
+ assert ret == 0
+ assert env_is_set[0]
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mach/mach/test/test_site.py b/python/mach/mach/test/test_site.py
new file mode 100644
index 0000000000..d7c3d8c489
--- /dev/null
+++ b/python/mach/mach/test/test_site.py
@@ -0,0 +1,56 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+from unittest import mock
+
+import pytest
+from buildconfig import topsrcdir
+from mozunit import main
+
+from mach.site import (
+ PIP_NETWORK_INSTALL_RESTRICTED_VIRTUALENVS,
+ SitePackagesSource,
+ resolve_requirements,
+)
+
+
+@pytest.mark.parametrize(
+ "env_native_package_source,env_use_system_python,env_moz_automation,expected",
+ [
+ ("system", False, False, SitePackagesSource.SYSTEM),
+ ("pip", False, False, SitePackagesSource.VENV),
+ ("none", False, False, SitePackagesSource.NONE),
+ (None, False, False, SitePackagesSource.VENV),
+ (None, False, True, SitePackagesSource.NONE),
+ (None, True, False, SitePackagesSource.NONE),
+ (None, True, True, SitePackagesSource.NONE),
+ ],
+)
+def test_resolve_package_source(
+ env_native_package_source, env_use_system_python, env_moz_automation, expected
+):
+ with mock.patch.dict(
+ os.environ,
+ {
+ "MACH_BUILD_PYTHON_NATIVE_PACKAGE_SOURCE": env_native_package_source or "",
+ "MACH_USE_SYSTEM_PYTHON": "1" if env_use_system_python else "",
+ "MOZ_AUTOMATION": "1" if env_moz_automation else "",
+ },
+ ):
+ assert SitePackagesSource.for_mach() == expected
+
+
+def test_all_restricted_sites_dont_have_pypi_requirements():
+ for site_name in PIP_NETWORK_INSTALL_RESTRICTED_VIRTUALENVS:
+ requirements = resolve_requirements(topsrcdir, site_name)
+ assert not requirements.pypi_requirements, (
+ 'Sites that must be able to operate without "pip install" must not have any '
+ f'mandatory "pypi requirements". However, the "{site_name}" site depends on: '
+ f"{requirements.pypi_requirements}"
+ )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mach/mach/test/test_site_activation.py b/python/mach/mach/test/test_site_activation.py
new file mode 100644
index 0000000000..e034a27b76
--- /dev/null
+++ b/python/mach/mach/test/test_site_activation.py
@@ -0,0 +1,463 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import ast
+import functools
+import os
+import subprocess
+import sys
+import tempfile
+from pathlib import Path
+from subprocess import CompletedProcess
+from typing import List
+
+import buildconfig
+import mozunit
+import pkg_resources
+import pytest
+
+from mach.site import MozSiteMetadata, PythonVirtualenv, activate_virtualenv
+
+
+class ActivationContext:
+ def __init__(
+ self,
+ topsrcdir: Path,
+ work_dir: Path,
+ original_python_path: str,
+ stdlib_paths: List[Path],
+ system_paths: List[Path],
+ required_mach_sys_paths: List[Path],
+ mach_requirement_paths: List[Path],
+ command_requirement_path: Path,
+ ):
+ self.topsrcdir = topsrcdir
+ self.work_dir = work_dir
+ self.original_python_path = original_python_path
+ self.stdlib_paths = stdlib_paths
+ self.system_paths = system_paths
+ self.required_moz_init_sys_paths = required_mach_sys_paths
+ self.mach_requirement_paths = mach_requirement_paths
+ self.command_requirement_path = command_requirement_path
+
+ def virtualenv(self, name: str) -> PythonVirtualenv:
+ base_path = self.work_dir
+
+ if name == "mach":
+ base_path = base_path / "_virtualenvs"
+ return PythonVirtualenv(str(base_path / name))
+
+
+def test_new_package_appears_in_pkg_resources():
+ try:
+ # "carrot" was chosen as the package to use because:
+ # * It has to be a package that doesn't exist in-scope at the start (so,
+        #      all vendored modules included in the test virtualenv aren't usable).
+ # * It must be on our internal PyPI mirror.
+ # Of the options, "carrot" is a small install that fits these requirements.
+ pkg_resources.get_distribution("carrot")
+ assert False, "Expected to not find 'carrot' as the initial state of the test"
+ except pkg_resources.DistributionNotFound:
+ pass
+
+ with tempfile.TemporaryDirectory() as venv_dir:
+ subprocess.check_call(
+ [
+ sys.executable,
+ "-m",
+ "venv",
+ venv_dir,
+ ]
+ )
+
+ venv = PythonVirtualenv(venv_dir)
+ venv.pip_install(["carrot==0.10.7"])
+
+ initial_metadata = MozSiteMetadata.from_runtime()
+ try:
+ metadata = MozSiteMetadata(None, None, None, None, venv.prefix)
+ with metadata.update_current_site(venv.python_path):
+ activate_virtualenv(venv)
+
+ assert pkg_resources.get_distribution("carrot").version == "0.10.7"
+ finally:
+ MozSiteMetadata.current = initial_metadata
+
+
+def test_sys_path_source_none_build(context):
+ original, mach, command = _run_activation_script_for_paths(context, "none", "build")
+ _assert_original_python_sys_path(context, original)
+
+ assert not os.path.exists(context.virtualenv("mach").prefix)
+ assert mach == [
+ *context.stdlib_paths,
+ *context.mach_requirement_paths,
+ ]
+
+ expected_command_paths = [
+ *context.stdlib_paths,
+ *context.mach_requirement_paths,
+ context.command_requirement_path,
+ ]
+ assert command == expected_command_paths
+
+ command_venv = _sys_path_of_virtualenv(context.virtualenv("build"))
+ assert command_venv == [Path(""), *expected_command_paths]
+
+
+def test_sys_path_source_none_other(context):
+ original, mach, command = _run_activation_script_for_paths(context, "none", "other")
+ _assert_original_python_sys_path(context, original)
+
+ assert not os.path.exists(context.virtualenv("mach").prefix)
+ assert mach == [
+ *context.stdlib_paths,
+ *context.mach_requirement_paths,
+ ]
+
+ command_virtualenv = PythonVirtualenv(str(context.work_dir / "other"))
+ expected_command_paths = [
+ *context.stdlib_paths,
+ *context.mach_requirement_paths,
+ context.command_requirement_path,
+ *(Path(p) for p in command_virtualenv.site_packages_dirs()),
+ ]
+ assert command == expected_command_paths
+
+ command_venv = _sys_path_of_virtualenv(context.virtualenv("other"))
+ assert command_venv == [
+ Path(""),
+ *expected_command_paths,
+ ]
+
+
+def test_sys_path_source_venv_build(context):
+ original, mach, command = _run_activation_script_for_paths(context, "pip", "build")
+ _assert_original_python_sys_path(context, original)
+
+ mach_virtualenv = context.virtualenv("mach")
+ expected_mach_paths = [
+ *context.stdlib_paths,
+ *context.mach_requirement_paths,
+ *(Path(p) for p in mach_virtualenv.site_packages_dirs()),
+ ]
+ assert mach == expected_mach_paths
+
+ command_virtualenv = context.virtualenv("build")
+ expected_command_paths = [
+ *context.stdlib_paths,
+ *context.mach_requirement_paths,
+ *(Path(p) for p in mach_virtualenv.site_packages_dirs()),
+ context.command_requirement_path,
+ *(Path(p) for p in command_virtualenv.site_packages_dirs()),
+ ]
+ assert command == expected_command_paths
+
+ mach_venv = _sys_path_of_virtualenv(mach_virtualenv)
+ assert mach_venv == [
+ Path(""),
+ *expected_mach_paths,
+ ]
+
+ command_venv = _sys_path_of_virtualenv(command_virtualenv)
+ assert command_venv == [
+ Path(""),
+ *expected_command_paths,
+ ]
+
+
+def test_sys_path_source_venv_other(context):
+ original, mach, command = _run_activation_script_for_paths(context, "pip", "other")
+ _assert_original_python_sys_path(context, original)
+
+ mach_virtualenv = context.virtualenv("mach")
+ expected_mach_paths = [
+ *context.stdlib_paths,
+ *context.mach_requirement_paths,
+ *(Path(p) for p in mach_virtualenv.site_packages_dirs()),
+ ]
+ assert mach == expected_mach_paths
+
+ command_virtualenv = context.virtualenv("other")
+ expected_command_paths = [
+ *context.stdlib_paths,
+ *context.mach_requirement_paths,
+ *(Path(p) for p in mach_virtualenv.site_packages_dirs()),
+ context.command_requirement_path,
+ *(Path(p) for p in command_virtualenv.site_packages_dirs()),
+ ]
+ assert command == expected_command_paths
+
+ mach_venv = _sys_path_of_virtualenv(mach_virtualenv)
+ assert mach_venv == [
+ Path(""),
+ *expected_mach_paths,
+ ]
+
+ command_venv = _sys_path_of_virtualenv(command_virtualenv)
+ assert command_venv == [
+ Path(""),
+ *expected_command_paths,
+ ]
+
+
+def test_sys_path_source_system_build(context):
+ original, mach, command = _run_activation_script_for_paths(
+ context, "system", "build"
+ )
+ _assert_original_python_sys_path(context, original)
+
+ assert not os.path.exists(context.virtualenv("mach").prefix)
+ expected_mach_paths = [
+ *context.stdlib_paths,
+ *context.mach_requirement_paths,
+ *context.system_paths,
+ ]
+ assert mach == expected_mach_paths
+
+ command_virtualenv = context.virtualenv("build")
+ expected_command_paths = [
+ *context.stdlib_paths,
+ *context.mach_requirement_paths,
+ *context.system_paths,
+ context.command_requirement_path,
+ ]
+ assert command == expected_command_paths
+
+ command_venv = _sys_path_of_virtualenv(command_virtualenv)
+ assert command_venv == [
+ Path(""),
+ *expected_command_paths,
+ ]
+
+
+def test_sys_path_source_system_other(context):
+ result = _run_activation_script(
+ context,
+ "system",
+ "other",
+ context.original_python_path,
+ stderr=subprocess.PIPE,
+ )
+ assert result.returncode != 0
+ assert (
+ 'Cannot use MACH_BUILD_PYTHON_NATIVE_PACKAGE_SOURCE="system" for any sites '
+ "other than" in result.stderr
+ )
+
+
+def test_sys_path_source_venvsystem_build(context):
+ venv_system_python = _create_venv_system_python(
+ context.work_dir, context.original_python_path
+ )
+ venv_system_site_packages_dirs = [
+ Path(p) for p in venv_system_python.site_packages_dirs()
+ ]
+ original, mach, command = _run_activation_script_for_paths(
+ context, "system", "build", venv_system_python.python_path
+ )
+
+ assert original == [
+ Path(__file__).parent,
+ *context.required_moz_init_sys_paths,
+ *context.stdlib_paths,
+ *venv_system_site_packages_dirs,
+ ]
+
+ assert not os.path.exists(context.virtualenv("mach").prefix)
+ expected_mach_paths = [
+ *context.stdlib_paths,
+ *context.mach_requirement_paths,
+ *venv_system_site_packages_dirs,
+ ]
+ assert mach == expected_mach_paths
+
+ command_virtualenv = context.virtualenv("build")
+ expected_command_paths = [
+ *context.stdlib_paths,
+ *context.mach_requirement_paths,
+ *venv_system_site_packages_dirs,
+ context.command_requirement_path,
+ ]
+ assert command == expected_command_paths
+
+ command_venv = _sys_path_of_virtualenv(command_virtualenv)
+ assert command_venv == [
+ Path(""),
+ *expected_command_paths,
+ ]
+
+
+def test_sys_path_source_venvsystem_other(context):
+ venv_system_python = _create_venv_system_python(
+ context.work_dir, context.original_python_path
+ )
+ result = _run_activation_script(
+ context,
+ "system",
+ "other",
+ venv_system_python.python_path,
+ stderr=subprocess.PIPE,
+ )
+ assert result.returncode != 0
+ assert (
+ 'Cannot use MACH_BUILD_PYTHON_NATIVE_PACKAGE_SOURCE="system" for any sites '
+ "other than" in result.stderr
+ )
+
+
+@pytest.fixture(name="context")
+def _activation_context():
+ original_python_path, stdlib_paths, system_paths = _original_python()
+ topsrcdir = Path(buildconfig.topsrcdir)
+ required_mach_sys_paths = [
+ topsrcdir / "python" / "mach",
+ topsrcdir / "third_party" / "python" / "packaging",
+ topsrcdir / "third_party" / "python" / "pyparsing",
+ topsrcdir / "third_party" / "python" / "pip",
+ ]
+
+ with tempfile.TemporaryDirectory() as work_dir:
+ # Get "resolved" version of path to ease comparison against "site"-added sys.path
+ # entries, as "site" calculates the realpath of provided locations.
+ work_dir = Path(work_dir).resolve()
+ mach_requirement_paths = [
+ *required_mach_sys_paths,
+ work_dir / "mach_site_path",
+ ]
+ command_requirement_path = work_dir / "command_site_path"
+ (work_dir / "mach_site_path").touch()
+ command_requirement_path.touch()
+ yield ActivationContext(
+ topsrcdir,
+ work_dir,
+ original_python_path,
+ stdlib_paths,
+ system_paths,
+ required_mach_sys_paths,
+ mach_requirement_paths,
+ command_requirement_path,
+ )
+
+
+@functools.lru_cache(maxsize=None)
+def _original_python():
+ current_site = MozSiteMetadata.from_runtime()
+ stdlib_paths, system_paths = current_site.original_python.sys_path()
+ stdlib_paths = [Path(path) for path in _filter_pydev_from_paths(stdlib_paths)]
+ system_paths = [Path(path) for path in system_paths]
+ return current_site.original_python.python_path, stdlib_paths, system_paths
+
+
+def _run_activation_script(
+ context: ActivationContext,
+ source: str,
+ site_name: str,
+ invoking_python: str,
+ **kwargs
+) -> CompletedProcess:
+ return subprocess.run(
+ [
+ invoking_python,
+ str(Path(__file__).parent / "script_site_activation.py"),
+ ],
+ stdout=subprocess.PIPE,
+ universal_newlines=True,
+ env={
+ "TOPSRCDIR": str(context.topsrcdir),
+ "COMMAND_SITE": site_name,
+ "PYTHONPATH": os.pathsep.join(
+ str(p) for p in context.required_moz_init_sys_paths
+ ),
+ "MACH_SITE_PTH_REQUIREMENTS": os.pathsep.join(
+ str(p) for p in context.mach_requirement_paths
+ ),
+ "COMMAND_SITE_PTH_REQUIREMENTS": str(context.command_requirement_path),
+ "MACH_BUILD_PYTHON_NATIVE_PACKAGE_SOURCE": source,
+ "WORK_DIR": str(context.work_dir),
+ # These two variables are needed on Windows so that Python initializes
+ # properly and adds the "user site packages" to the sys.path like normal.
+ "SYSTEMROOT": os.environ.get("SYSTEMROOT", ""),
+ "APPDATA": os.environ.get("APPDATA", ""),
+ },
+ **kwargs,
+ )
+
+
+def _run_activation_script_for_paths(
+ context: ActivationContext, source: str, site_name: str, invoking_python: str = None
+) -> List[List[Path]]:
+ """Return the states of the sys.path when activating Mach-managed sites
+
+ Three sys.path states are returned:
+    * The initial sys.path, equivalent to 'path_to_python -c "import sys; print(sys.path)"'
+ * The sys.path after activating the Mach site
+ * The sys.path after activating the command site
+ """
+
+ output = _run_activation_script(
+ context,
+ source,
+ site_name,
+ invoking_python or context.original_python_path,
+ check=True,
+ ).stdout
+ # Filter to the last line, which will have our nested list that we want to
+ # parse. This will avoid unrelated output, such as from virtualenv creation
+ output = output.splitlines()[-1]
+ return [
+ [Path(path) for path in _filter_pydev_from_paths(paths)]
+ for paths in ast.literal_eval(output)
+ ]
+
+
+def _assert_original_python_sys_path(context: ActivationContext, original: List[Path]):
+ # Assert that initial sys.path (prior to any activations) matches expectations.
+ assert original == [
+ Path(__file__).parent,
+ *context.required_moz_init_sys_paths,
+ *context.stdlib_paths,
+ *context.system_paths,
+ ]
+
+
+def _sys_path_of_virtualenv(virtualenv: PythonVirtualenv) -> List[Path]:
+ output = subprocess.run(
+ [virtualenv.python_path, "-c", "import sys; print(sys.path)"],
+ stdout=subprocess.PIPE,
+ universal_newlines=True,
+ env={
+ # Needed for python to initialize properly
+ "SYSTEMROOT": os.environ.get("SYSTEMROOT", ""),
+ },
+ check=True,
+ ).stdout
+ return [Path(path) for path in _filter_pydev_from_paths(ast.literal_eval(output))]
+
+
+def _filter_pydev_from_paths(paths: List[str]) -> List[str]:
+ # Filter out injected "pydev" debugging tool if running within a JetBrains
+ # debugging context.
+ return [path for path in paths if "pydev" not in path and "JetBrains" not in path]
+
+
+def _create_venv_system_python(
+ work_dir: Path, invoking_python: str
+) -> PythonVirtualenv:
+ virtualenv = PythonVirtualenv(str(work_dir / "system_python"))
+ subprocess.run(
+ [
+ invoking_python,
+ "-m",
+ "venv",
+ virtualenv.prefix,
+ "--without-pip",
+ ],
+ check=True,
+ )
+ return virtualenv
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mach/mach/test/test_site_compatibility.py b/python/mach/mach/test/test_site_compatibility.py
new file mode 100644
index 0000000000..4c1b6d5efa
--- /dev/null
+++ b/python/mach/mach/test/test_site_compatibility.py
@@ -0,0 +1,189 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import os
+import shutil
+import subprocess
+import sys
+from pathlib import Path
+from textwrap import dedent
+
+import mozunit
+from buildconfig import topsrcdir
+
+from mach.requirements import MachEnvRequirements
+from mach.site import PythonVirtualenv
+
+
+def _resolve_command_site_names():
+ site_names = []
+ for child in (Path(topsrcdir) / "python" / "sites").iterdir():
+ if not child.is_file():
+ continue
+
+ if child.suffix != ".txt":
+ continue
+
+ if child.name == "mach.txt":
+ continue
+
+ site_names.append(child.stem)
+ return site_names
+
+
+def _requirement_definition_to_pip_format(site_name, cache, is_mach_or_build_env):
+ """Convert from parsed requirements object to pip-consumable format"""
+ requirements_path = Path(topsrcdir) / "python" / "sites" / f"{site_name}.txt"
+ requirements = MachEnvRequirements.from_requirements_definition(
+ topsrcdir, False, not is_mach_or_build_env, requirements_path
+ )
+
+ lines = []
+ for pypi in (
+ requirements.pypi_requirements + requirements.pypi_optional_requirements
+ ):
+ lines.append(str(pypi.requirement))
+
+ for vendored in requirements.vendored_requirements:
+ lines.append(str(cache.package_for_vendor_dir(Path(vendored.path))))
+
+ for pth in requirements.pth_requirements:
+ path = Path(pth.path)
+
+ if "third_party" not in (p.name for p in path.parents):
+ continue
+
+ for child in path.iterdir():
+ if child.name.endswith(".dist-info"):
+ raise Exception(
+ f'In {requirements_path}, the "pth:" pointing to "{path}" has a '
+ '".dist-info" file.\n'
+ 'Perhaps it should change to start with "vendored:" instead of '
+ '"pth:".'
+ )
+ if child.name.endswith(".egg-info"):
+ raise Exception(
+ f'In {requirements_path}, the "pth:" pointing to "{path}" has an '
+ '".egg-info" file.\n'
+ 'Perhaps it should change to start with "vendored:" instead of '
+ '"pth:".'
+ )
+
+ return "\n".join(lines)
+
+
+class PackageCache:
+ def __init__(self, storage_dir: Path):
+ self._cache = {}
+ self._storage_dir = storage_dir
+
+ def package_for_vendor_dir(self, vendor_path: Path):
+ if vendor_path in self._cache:
+ return self._cache[vendor_path]
+
+ if not any((p for p in vendor_path.iterdir() if p.name.endswith(".dist-info"))):
+ # This vendored package is not a wheel. It may be a source package (with
+ # a setup.py), or just some Python code that was manually copied into the
+ # tree. If it's a source package, the setup.py file may be up a few levels
+ # from the referenced Python module path.
+ package_dir = vendor_path
+ while True:
+ if (package_dir / "setup.py").exists():
+ break
+ elif package_dir.parent == package_dir:
+ raise Exception(
+ f'Package "{vendor_path}" is not a wheel and does not have a '
+ 'setup.py file. Perhaps it should be "pth:" instead of '
+ '"vendored:"?'
+ )
+ package_dir = package_dir.parent
+
+ self._cache[vendor_path] = package_dir
+ return package_dir
+
+ # Pip requires that wheels have a version number in their name, even if
+ # it ignores it. We should parse out the version and put it in here
+ # so that failure debugging is easier, but that's non-trivial work.
+ # So, this "0" satisfies pip's naming requirement while being relatively
+ # obvious that it's a placeholder.
+ output_path = self._storage_dir / f"{vendor_path.name}-0-py3-none-any"
+ shutil.make_archive(str(output_path), "zip", vendor_path)
+
+ whl_path = output_path.parent / (output_path.name + ".whl")
+ (output_path.parent / (output_path.name + ".zip")).rename(whl_path)
+ self._cache[vendor_path] = whl_path
+
+ return whl_path
+
+
+def test_sites_compatible(tmpdir: str):
+ command_site_names = _resolve_command_site_names()
+ work_dir = Path(tmpdir)
+ cache = PackageCache(work_dir)
+ mach_requirements = _requirement_definition_to_pip_format("mach", cache, True)
+
+ # Create virtualenv to try to install all dependencies into.
+ virtualenv = PythonVirtualenv(str(work_dir / "env"))
+ subprocess.check_call(
+ [
+ sys.executable,
+ "-m",
+ "venv",
+ "--without-pip",
+ virtualenv.prefix,
+ ]
+ )
+ platlib_dir = virtualenv.resolve_sysconfig_packages_path("platlib")
+ third_party = Path(topsrcdir) / "third_party" / "python"
+ with open(os.path.join(platlib_dir, "site.pth"), "w") as pthfile:
+ pthfile.write(
+ "\n".join(
+ [
+ str(third_party / "pip"),
+ str(third_party / "wheel"),
+ str(third_party / "setuptools"),
+ ]
+ )
+ )
+
+ for name in command_site_names:
+ print(f'Checking compatibility of "{name}" site')
+ command_requirements = _requirement_definition_to_pip_format(
+ name, cache, name == "build"
+ )
+ with open(work_dir / "requirements.txt", "w") as requirements_txt:
+ requirements_txt.write(mach_requirements)
+ requirements_txt.write("\n")
+ requirements_txt.write(command_requirements)
+
+ # Attempt to install combined set of dependencies (global Mach + current
+ # command)
+ proc = subprocess.run(
+ [
+ virtualenv.python_path,
+ "-m",
+ "pip",
+ "install",
+ "-r",
+ str(work_dir / "requirements.txt"),
+ ],
+ cwd=topsrcdir,
+ )
+ if proc.returncode != 0:
+ print(
+ dedent(
+ f"""
+ Error: The '{name}' site contains dependencies that are not
+ compatible with the 'mach' site. Check the following files for
+ any conflicting packages mentioned in the prior error message:
+
+ python/sites/mach.txt
+ python/sites/{name}.txt
+ """
+ )
+ )
+ assert False
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mach/mach/test/zero_microseconds.py b/python/mach/mach/test/zero_microseconds.py
new file mode 100644
index 0000000000..b1d523071f
--- /dev/null
+++ b/python/mach/mach/test/zero_microseconds.py
@@ -0,0 +1,12 @@
+# This code is loaded via `mach python --exec-file`, so it runs in the scope of
+# the `mach python` command.
+old = self._mach_context.post_dispatch_handler # noqa: F821
+
+
+def handler(context, handler, instance, result, start_time, end_time, depth, args):
+ global old
+ # Round off sub-second precision.
+ old(context, handler, instance, result, int(start_time), end_time, depth, args)
+
+
+self._mach_context.post_dispatch_handler = handler # noqa: F821
diff --git a/python/mach/mach/util.py b/python/mach/mach/util.py
new file mode 100644
index 0000000000..4ed303cf3b
--- /dev/null
+++ b/python/mach/mach/util.py
@@ -0,0 +1,110 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import hashlib
+import os
+import sys
+from pathlib import Path, PurePosixPath
+from typing import Optional, Union
+
+
+class UserError(Exception):
+ """Represents an error caused by something the user did wrong rather than
+ an internal `mach` failure. Exceptions that are subclasses of this class
+ will not be reported as failures to Sentry.
+ """
+
+
+def setenv(key, value):
+ """Compatibility shim to ensure the proper string type is used with
+ os.environ for the version of Python being used.
+ """
+ from six import text_type
+
+ encoding = "mbcs" if sys.platform == "win32" else "utf-8"
+
+ if sys.version_info[0] == 2:
+ if isinstance(key, text_type):
+ key = key.encode(encoding)
+ if isinstance(value, text_type):
+ value = value.encode(encoding)
+ else:
+ if isinstance(key, bytes):
+ key = key.decode(encoding)
+ if isinstance(value, bytes):
+ value = value.decode(encoding)
+
+ os.environ[key] = value
+
+
+def get_state_dir(
+ specific_to_topsrcdir=False, topsrcdir: Optional[Union[str, Path]] = None
+):
+ """Obtain path to a directory to hold state.
+
+ Args:
+ specific_to_topsrcdir (bool): If True, return a state dir specific to the current
+ srcdir instead of the global state dir (default: False)
+
+ Returns:
+ A path to the state dir (str)
+ """
+ state_dir = Path(os.environ.get("MOZBUILD_STATE_PATH", Path.home() / ".mozbuild"))
+ if not specific_to_topsrcdir:
+ return str(state_dir)
+
+ if not topsrcdir:
+ # Only import MozbuildObject if topsrcdir isn't provided. This is to cover
+ # the Mach initialization stage, where "mozbuild" isn't in the import scope.
+ from mozbuild.base import MozbuildObject
+
+ topsrcdir = Path(
+ MozbuildObject.from_environment(cwd=str(Path(__file__).parent)).topsrcdir
+ )
+
+ # Ensure that the topsrcdir is a consistent string before hashing it.
+ topsrcdir = Path(topsrcdir).resolve()
+
+ # Shortening to 12 characters makes these directories a bit more manageable
+ # in a terminal and is more than good enough for this purpose.
+ srcdir_hash = hashlib.sha256(str(topsrcdir).encode("utf-8")).hexdigest()[:12]
+
+ state_dir = state_dir / "srcdirs" / f"{topsrcdir.name}-{srcdir_hash}"
+
+ if not state_dir.is_dir():
+ # We create the srcdir here rather than 'mach_initialize.py' so direct
+ # consumers of this function don't create the directory inconsistently.
+ print(f"Creating local state directory: {state_dir}")
+ state_dir.mkdir(mode=0o770, parents=True)
+ # Save the topsrcdir that this state dir corresponds to so we can clean
+ # it up in the event its srcdir was deleted.
+ with (state_dir / "topsrcdir.txt").open(mode="w") as fh:
+ fh.write(str(topsrcdir))
+
+ return str(state_dir)
+
+
+def win_to_msys_path(path: Path):
+ """Convert a windows-style path to msys-style."""
+ drive, path = os.path.splitdrive(path)
+ path = "/".join(path.split("\\"))
+ if drive:
+ if path[0] == "/":
+ path = path[1:]
+ path = f"/{drive[:-1]}/{path}"
+ return PurePosixPath(path)
+
+
+def to_optional_path(path: Optional[Path]):
+ if path:
+ return Path(path)
+ else:
+ return None
+
+
+def to_optional_str(path: Optional[Path]):
+ if path:
+ return str(path)
+ else:
+ return None
diff --git a/python/mach/metrics.yaml b/python/mach/metrics.yaml
new file mode 100644
index 0000000000..16b2aa2877
--- /dev/null
+++ b/python/mach/metrics.yaml
@@ -0,0 +1,206 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# If this file is changed, update the generated docs:
+# https://firefox-source-docs.mozilla.org/mach/telemetry.html#updating-generated-metrics-docs
+
+# Adding a new metric? We have docs for that!
+# https://mozilla.github.io/glean/book/user/metrics/adding-new-metrics.html
+---
+$schema: moz://mozilla.org/schemas/glean/metrics/2-0-0
+
+mach:
+ command:
+ type: string
+ description: >
+ The name of the mach command that was invoked, such as "build",
+ "doc", or "try".
+ lifetime: application
+ bugs:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053
+ data_reviews:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34
+ notification_emails:
+ - build-telemetry@mozilla.com
+ - mhentges@mozilla.com
+ expires: never
+ send_in_pings:
+ - usage
+ argv:
+ type: string_list
+ description: >
+ Parameters provided to mach. Absolute paths are sanitized to be relative
+ to one of a few key base paths, such as the "$topsrcdir", "$topobjdir",
+ or "$HOME". For example: "/home/mozilla/dev/firefox/python/mozbuild"
+ would be replaced with "$topsrcdir/python/mozbuild".
+ If a valid replacement base path cannot be found, the path is replaced
+ with "<path omitted>".
+ lifetime: application
+ bugs:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053
+ data_reviews:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34
+ notification_emails:
+ - build-telemetry@mozilla.com
+ - mhentges@mozilla.com
+ expires: never
+ send_in_pings:
+ - usage
+ success:
+ type: boolean
+ description: True if the mach invocation succeeded.
+ lifetime: application
+ bugs:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053
+ data_reviews:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34
+ notification_emails:
+ - build-telemetry@mozilla.com
+ - mhentges@mozilla.com
+ expires: never
+ send_in_pings:
+ - usage
+ duration:
+ type: timespan
+ description: How long it took for the command to complete.
+ lifetime: application
+ bugs:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053
+ data_reviews:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34
+ notification_emails:
+ - build-telemetry@mozilla.com
+ - mhentges@mozilla.com
+ expires: never
+ send_in_pings:
+ - usage
+
+mach.system:
+ cpu_brand:
+ type: string
+ description: CPU brand string from CPUID.
+ lifetime: application
+ bugs:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053
+ data_reviews:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34
+ notification_emails:
+ - build-telemetry@mozilla.com
+ - mhentges@mozilla.com
+ expires: never
+ send_in_pings:
+ - usage
+ distro:
+ type: string
+ description: The name of the operating system distribution.
+ lifetime: application
+ bugs:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1655845
+ data_reviews:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1655845#c3
+ notification_emails:
+ - build-telemetry@mozilla.com
+ - mhentges@mozilla.com
+ expires: never
+ send_in_pings:
+ - usage
+ distro_version:
+ type: string
+ description: The high-level OS version.
+ lifetime: application
+ bugs:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1655845
+ data_reviews:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1655845#c3
+ notification_emails:
+ - build-telemetry@mozilla.com
+ - mhentges@mozilla.com
+ expires: never
+ send_in_pings:
+ - usage
+ logical_cores:
+ type: counter
+ description: Number of logical CPU cores present.
+ lifetime: application
+ bugs:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053
+ data_reviews:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34
+ notification_emails:
+ - build-telemetry@mozilla.com
+ - mhentges@mozilla.com
+ expires: never
+ send_in_pings:
+ - usage
+ physical_cores:
+ type: counter
+ description: Number of physical CPU cores present.
+ lifetime: application
+ bugs:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053
+ data_reviews:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34
+ notification_emails:
+ - build-telemetry@mozilla.com
+ - mhentges@mozilla.com
+ expires: never
+ send_in_pings:
+ - usage
+ memory:
+ type: memory_distribution
+ memory_unit: gigabyte
+ description: Amount of system memory.
+ lifetime: application
+ bugs:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053
+ data_reviews:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34
+ notification_emails:
+ - build-telemetry@mozilla.com
+ - mhentges@mozilla.com
+ expires: never
+ send_in_pings:
+ - usage
+ vscode_terminal:
+ type: boolean
+ description: True if the current terminal is opened via Visual Studio Code.
+ lifetime: application
+ bugs:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1702172
+ data_reviews:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1702172#c4
+ notification_emails:
+ - build-telemetry@mozilla.com
+ - andi@mozilla.com
+ expires: never
+ send_in_pings:
+ - usage
+ ssh_connection:
+ type: boolean
+ description: True if the current shell is a remote SSH connection.
+ lifetime: application
+ bugs:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1702172
+ data_reviews:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1702172#c4
+ notification_emails:
+ - build-telemetry@mozilla.com
+ - andi@mozilla.com
+ expires: never
+ send_in_pings:
+ - usage
+ vscode_running:
+ type: boolean
+ description: True if there is an instance of vscode running.
+ lifetime: application
+ bugs:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1717801
+ data_reviews:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1717801#c1
+ notification_emails:
+ - build-telemetry@mozilla.com
+ - andi@mozilla.com
+ expires: never
+ send_in_pings:
+ - usage
diff --git a/python/mach/pings.yaml b/python/mach/pings.yaml
new file mode 100644
index 0000000000..c975437237
--- /dev/null
+++ b/python/mach/pings.yaml
@@ -0,0 +1,22 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# If this file is changed, update the generated docs:
+# https://firefox-source-docs.mozilla.org/mach/telemetry.html#updating-generated-metrics-docs
+---
+$schema: moz://mozilla.org/schemas/glean/pings/2-0-0
+
+usage:
+ description: >
+ Sent when the mach invocation is completed (regardless of result).
+ Contains information about the mach invocation that was made, its result,
+ and some details about the current environment and hardware.
+ bugs:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053
+ data_reviews:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34
+ include_client_id: true
+ notification_emails:
+ - build-telemetry@mozilla.com
+ - mhentges@mozilla.com
diff --git a/python/mach/setup.cfg b/python/mach/setup.cfg
new file mode 100644
index 0000000000..3c6e79cf31
--- /dev/null
+++ b/python/mach/setup.cfg
@@ -0,0 +1,2 @@
+[bdist_wheel]
+universal=1
diff --git a/python/mach/setup.py b/python/mach/setup.py
new file mode 100644
index 0000000000..80426b6e00
--- /dev/null
+++ b/python/mach/setup.py
@@ -0,0 +1,42 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
import os

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup


VERSION = "1.0.0"
HERE = os.path.dirname(__file__)
# Read the long description with an explicit encoding, and close the file
# promptly instead of leaking the handle until interpreter exit.
with open(os.path.join(HERE, "README.rst"), encoding="utf-8") as f:
    README = f.read()

setup(
    name="mach",
    description="Generic command line command dispatching framework.",
    long_description=README,
    license="MPL 2.0",
    author="Gregory Szorc",
    author_email="gregory.szorc@gmail.com",
    url="https://developer.mozilla.org/en-US/docs/Developer_Guide/mach",
    packages=["mach", "mach.mixin"],
    version=VERSION,
    classifiers=[
        "Environment :: Console",
        "Development Status :: 5 - Production/Stable",
        "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
        "Natural Language :: English",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3.5",
    ],
    install_requires=[
        "blessed",
        "mozfile",
        "mozprocess",
        "six",
    ],
    tests_require=["mock"],
)
diff --git a/python/mach_commands.py b/python/mach_commands.py
new file mode 100644
index 0000000000..d4f1f67efe
--- /dev/null
+++ b/python/mach_commands.py
@@ -0,0 +1,366 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import logging
+import os
+import subprocess
+import tempfile
+from concurrent.futures import ThreadPoolExecutor, as_completed, thread
+from multiprocessing import cpu_count
+
+import mozinfo
+from mach.decorators import Command, CommandArgument
+from manifestparser import TestManifest
+from manifestparser import filters as mpf
+from mozfile import which
+from tqdm import tqdm
+
+
+@Command("python", category="devenv", description="Run Python.")
+@CommandArgument(
+ "--exec-file", default=None, help="Execute this Python file using `exec`"
+)
+@CommandArgument(
+ "--ipython",
+ action="store_true",
+ default=False,
+ help="Use ipython instead of the default Python REPL.",
+)
+@CommandArgument(
+ "--virtualenv",
+ default=None,
+ help="Prepare and use the virtualenv with the provided name. If not specified, "
+ "then the Mach context is used instead.",
+)
+@CommandArgument("args", nargs=argparse.REMAINDER)
def python(
    command_context,
    exec_file,
    ipython,
    virtualenv,
    args,
):
    """Run Python (or exec a Python file) inside the mach virtualenv.

    Returns the subprocess exit code when launching an interpreter, or 0
    after a successful --exec-file run.
    """
    # Avoid logging the command
    command_context.log_manager.terminal_handler.setLevel(logging.CRITICAL)

    # Note: subprocess requires native strings in os.environ on Windows.
    append_env = {"PYTHONDONTWRITEBYTECODE": "1"}

    if virtualenv:
        command_context._virtualenv_name = virtualenv

    if exec_file:
        command_context.activate_virtualenv()
        # Use a context manager so the file handle is closed promptly
        # rather than leaked until interpreter exit.
        with open(exec_file) as fh:
            exec(fh.read())
        return 0

    if ipython:
        if virtualenv:
            command_context.virtualenv_manager.ensure()
            python_path = which(
                "ipython", path=command_context.virtualenv_manager.bin_path
            )
            if not python_path:
                raise Exception(
                    "--ipython was specified, but the provided "
                    '--virtualenv doesn\'t have "ipython" installed.'
                )
        else:
            # No explicit virtualenv: fall back to the dedicated "ipython"
            # virtualenv that ships ipython.
            command_context._virtualenv_name = "ipython"
            command_context.virtualenv_manager.ensure()
            python_path = which(
                "ipython", path=command_context.virtualenv_manager.bin_path
            )
    else:
        command_context.virtualenv_manager.ensure()
        python_path = command_context.virtualenv_manager.python_path

    return command_context.run_process(
        [python_path] + args,
        pass_thru=True,  # Allow user to run Python interactively.
        ensure_exit_code=False,  # Don't throw on non-zero exit code.
        python_unbuffered=False,  # Leave input buffered.
        append_env=append_env,
    )
+
+
+@Command(
+ "python-test",
+ category="testing",
+ virtualenv_name="python-test",
+ description="Run Python unit tests with pytest.",
+)
+@CommandArgument(
+ "-v", "--verbose", default=False, action="store_true", help="Verbose output."
+)
+@CommandArgument(
+ "-j",
+ "--jobs",
+ default=None,
+ type=int,
+ help="Number of concurrent jobs to run. Default is the number of CPUs "
+ "in the system.",
+)
+@CommandArgument(
+ "-x",
+ "--exitfirst",
+ default=False,
+ action="store_true",
+ help="Runs all tests sequentially and breaks at the first failure.",
+)
+@CommandArgument(
+ "--subsuite",
+ default=None,
+ help=(
+ "Python subsuite to run. If not specified, all subsuites are run. "
+ "Use the string `default` to only run tests without a subsuite."
+ ),
+)
+@CommandArgument(
+ "tests",
+ nargs="*",
+ metavar="TEST",
+ help=(
+ "Tests to run. Each test can be a single file or a directory. "
+ "Default test resolution relies on PYTHON_UNITTEST_MANIFESTS."
+ ),
+)
+@CommandArgument(
+ "extra",
+ nargs=argparse.REMAINDER,
+ metavar="PYTEST ARGS",
+ help=(
+ "Arguments that aren't recognized by mach. These will be "
+ "passed as it is to pytest"
+ ),
+)
def python_test(command_context, *args, **kwargs):
    """Entry point for `mach python-test`.

    Creates a scratch directory for the test run (exposed to tests via
    the PYTHON_TEST_TMP environment variable) and guarantees it is
    removed afterwards regardless of the test outcome.
    """
    # Create the temp dir *before* entering the try block: if mkdtemp()
    # itself failed inside the try, the finally clause would raise a
    # NameError on `tempdir` and mask the real error.
    tempdir = tempfile.mkdtemp(suffix="-python-test")
    try:
        os.environ["PYTHON_TEST_TMP"] = tempdir
        return run_python_tests(command_context, *args, **kwargs)
    finally:
        import mozfile

        mozfile.remove(tempdir)
+
+
def run_python_tests(
    command_context,
    tests=None,
    test_objects=None,
    subsuite=None,
    verbose=False,
    jobs=None,
    exitfirst=False,
    extra=None,
    **kwargs,
):
    """Resolve, filter and run python unit tests, in parallel where allowed.

    Returns 0 on success, otherwise the first non-zero test return code
    (or 1 when no tests were collected at all).
    """
    if test_objects is None:
        from moztest.resolve import TestResolver

        resolver = command_context._spawn(TestResolver)
        # If we were given test paths, try to find tests matching them.
        test_objects = resolver.resolve_tests(paths=tests, flavor="python")
    else:
        # We've received test_objects from |mach test|. We need to ignore
        # the subsuite because python-tests don't use this key like other
        # harnesses do and |mach test| doesn't realize this.
        subsuite = None

    mp = TestManifest()
    mp.tests.extend(test_objects)

    # Build manifestparser filters from the requested subsuite; the string
    # "default" selects only tests that declare no subsuite at all.
    filters = []
    if subsuite == "default":
        filters.append(mpf.subsuite(None))
    elif subsuite:
        filters.append(mpf.subsuite(subsuite))

    tests = mp.active_tests(filters=filters, disabled=False, python=3, **mozinfo.info)

    if not tests:
        submsg = "for subsuite '{}' ".format(subsuite) if subsuite else ""
        message = (
            "TEST-UNEXPECTED-FAIL | No tests collected "
            + "{}(Not in PYTHON_UNITTEST_MANIFESTS?)".format(submsg)
        )
        command_context.log(logging.WARN, "python-test", {}, message)
        return 1

    parallel = []
    sequential = []
    os.environ.setdefault("PYTEST_ADDOPTS", "")

    # Extra (unrecognized) CLI args are forwarded to pytest via its
    # environment-variable mechanism rather than the command line.
    if extra:
        os.environ["PYTEST_ADDOPTS"] += " " + " ".join(extra)

    # Install each distinct pip requirements file at most once per run.
    installed_requirements = set()
    for test in tests:
        if (
            test.get("requirements")
            and test["requirements"] not in installed_requirements
        ):
            command_context.virtualenv_manager.install_pip_requirements(
                test["requirements"], quiet=True
            )
            installed_requirements.add(test["requirements"])

    if exitfirst:
        # -x implies strictly sequential execution so we can stop on the
        # first failure.
        sequential = tests
        os.environ["PYTEST_ADDOPTS"] += " -x"
    else:
        for test in tests:
            if test.get("sequential"):
                sequential.append(test)
            else:
                parallel.append(test)

    jobs = jobs or cpu_count()

    return_code = 0
    failure_output = []

    def on_test_finished(result):
        # Fold one test's (output, returncode, path) result into the
        # overall return code; only the first failure's code is reported.
        output, ret, test_path = result

        if ret:
            # Log the output of failed tests at the end so it's easy to find.
            failure_output.extend(output)

            if not return_code:
                command_context.log(
                    logging.ERROR,
                    "python-test",
                    {"test_path": test_path, "ret": ret},
                    "Setting retcode to {ret} from {test_path}",
                )
        else:
            for line in output:
                command_context.log(
                    logging.INFO, "python-test", {"line": line.rstrip()}, "{line}"
                )

        # Reads return_code from the enclosing scope at call time; the
        # caller rebinds it with this return value.
        return return_code or ret

    with tqdm(
        total=(len(parallel) + len(sequential)),
        unit="Test",
        desc="Tests Completed",
        initial=0,
    ) as progress_bar:
        try:
            with ThreadPoolExecutor(max_workers=jobs) as executor:
                futures = []

                for test in parallel:
                    command_context.log(
                        logging.DEBUG,
                        "python-test",
                        {"line": f"Launching thread for test {test['file_relpath']}"},
                        "{line}",
                    )
                    futures.append(
                        executor.submit(
                            _run_python_test, command_context, test, jobs, verbose
                        )
                    )

                try:
                    for future in as_completed(futures):
                        progress_bar.clear()
                        return_code = on_test_finished(future.result())
                        progress_bar.update(1)
                except KeyboardInterrupt:
                    # Hack to force stop currently running threads.
                    # https://gist.github.com/clchiou/f2608cbe54403edb0b13
                    executor._threads.clear()
                    thread._threads_queues.clear()
                    raise

            # Sequential tests run after all parallel ones have finished.
            for test in sequential:
                test_result = _run_python_test(command_context, test, jobs, verbose)

                progress_bar.clear()
                return_code = on_test_finished(test_result)
                if return_code and exitfirst:
                    break

                progress_bar.update(1)
        finally:
            progress_bar.clear()
            # Now log all failures (even if there was a KeyboardInterrupt or other exception).
            for line in failure_output:
                command_context.log(
                    logging.INFO, "python-test", {"line": line.rstrip()}, "{line}"
                )

            command_context.log(
                logging.INFO,
                "python-test",
                {"return_code": return_code},
                "Return code from mach python-test: {return_code}",
            )

    return return_code
+
+
+def _run_python_test(command_context, test, jobs, verbose):
+ output = []
+
+ def _log(line):
+ # Buffer messages if more than one worker to avoid interleaving
+ if jobs > 1:
+ output.append(line)
+ else:
+ command_context.log(
+ logging.INFO, "python-test", {"line": line.rstrip()}, "{line}"
+ )
+
+ _log(test["path"])
+ python = command_context.virtualenv_manager.python_path
+ cmd = [python, test["path"]]
+ env = os.environ.copy()
+ env["PYTHONDONTWRITEBYTECODE"] = "1"
+
+ result = subprocess.run(
+ cmd,
+ env=env,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ universal_newlines=True,
+ encoding="UTF-8",
+ )
+
+ return_code = result.returncode
+
+ file_displayed_test = False
+
+ for line in result.stdout.split(os.linesep):
+ if not file_displayed_test:
+ test_ran = "Ran" in line or "collected" in line or line.startswith("TEST-")
+ if test_ran:
+ file_displayed_test = True
+
+ # Hack to make sure treeherder highlights pytest failures
+ if "FAILED" in line.rsplit(" ", 1)[-1]:
+ line = line.replace("FAILED", "TEST-UNEXPECTED-FAIL")
+
+ _log(line)
+
+ if not file_displayed_test:
+ return_code = 1
+ _log(
+ "TEST-UNEXPECTED-FAIL | No test output (missing mozunit.main() "
+ "call?): {}".format(test["path"])
+ )
+
+ if verbose:
+ if return_code != 0:
+ _log("Test failed: {}".format(test["path"]))
+ else:
+ _log("Test passed: {}".format(test["path"]))
+
+ return output, return_code, test["path"]
diff --git a/python/moz.build b/python/moz.build
new file mode 100644
index 0000000000..cd4885aba5
--- /dev/null
+++ b/python/moz.build
@@ -0,0 +1,78 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
# Default extra components to build config
with Files("**"):
    BUG_COMPONENT = ("Firefox Build System", "General")

# More specific path patterns below override the default above.
with Files("devtools/**"):
    BUG_COMPONENT = ("DevTools", "General")

with Files("gdbpp/**"):
    BUG_COMPONENT = ("Firefox Build System", "General")

with Files("lldbutils/**"):
    BUG_COMPONENT = ("Core", "General")

with Files("mach/**"):
    BUG_COMPONENT = ("Firefox Build System", "Mach Core")

with Files("mozboot/**"):
    BUG_COMPONENT = ("Firefox Build System", "Bootstrap Configuration")

with Files("mozbuild/**"):
    BUG_COMPONENT = ("Firefox Build System", "General")

with Files("mozlint/**"):
    BUG_COMPONENT = ("Developer Infrastructure", "Lint and Formatting")

with Files("mozversioncontrol/**"):
    BUG_COMPONENT = ("Firefox Build System", "General")

with Files("l10n/**"):
    BUG_COMPONENT = ("Firefox Build System", "General")

with Files("mozrelease/**"):
    BUG_COMPONENT = ("Release Engineering", "General")

with Files("mach_commands.py"):
    BUG_COMPONENT = ("Testing", "Python Test")

with Files("mozperftest/**"):
    BUG_COMPONENT = ("Testing", "mozperftest")

with Files("sites/**"):
    BUG_COMPONENT = ("Firefox Build System", "Mach Core")


# Packages whose API docs are extracted by Sphinx.
SPHINX_PYTHON_PACKAGE_DIRS += [
    "mach",
    "mozbuild/mozbuild",
    "mozbuild/mozpack",
    "mozlint/mozlint",
    "mozversioncontrol/mozversioncontrol",
]

# Python changes anywhere under this directory can affect the docs build.
with Files("**.py"):
    SCHEDULES.inclusive += ["docs"]

SPHINX_TREES["/mach"] = "mach/docs"
SPHINX_TREES["/python"] = "docs"

with Files("mach/docs/**"):
    SCHEDULES.exclusive = ["docs"]

# Manifests of python unit tests run by `mach python-test`.
PYTHON_UNITTEST_MANIFESTS += [
    "mach/mach/test/python.ini",
    "mozboot/mozboot/test/python.ini",
    "mozbuild/mozbuild/repackaging/test/python.ini",
    "mozbuild/mozbuild/test/python.ini",
    "mozbuild/mozpack/test/python.ini",
    "mozlint/test/python.ini",
    "mozrelease/test/python.ini",
    "mozterm/test/python.ini",
    "mozversioncontrol/test/python.ini",
]
diff --git a/python/mozboot/.ruff.toml b/python/mozboot/.ruff.toml
new file mode 100644
index 0000000000..648a1255cc
--- /dev/null
+++ b/python/mozboot/.ruff.toml
@@ -0,0 +1,4 @@
+extend = "../../pyproject.toml"
+
+[isort]
+known-first-party = ["mozboot"]
diff --git a/python/mozboot/README.rst b/python/mozboot/README.rst
new file mode 100644
index 0000000000..97dc3c97b2
--- /dev/null
+++ b/python/mozboot/README.rst
@@ -0,0 +1,20 @@
+mozboot - Bootstrap your system to build Mozilla projects
+=========================================================
+
+This package contains code used for bootstrapping a system to build
+mozilla-central.
+
+This code is not part of the build system per se. Instead, it is related
+to everything up to invoking the actual build system.
+
+If you have a copy of the source tree, you can run:
+
+ python bin/bootstrap.py
+
+If you don't have a copy of the source tree, you can run:
+
+ curl https://hg.mozilla.org/mozilla-central/raw-file/default/python/mozboot/bin/bootstrap.py -o bootstrap.py
+ python bootstrap.py
+
+The bootstrap script will download everything it needs from hg.mozilla.org
+automatically!
diff --git a/python/mozboot/bin/bootstrap.py b/python/mozboot/bin/bootstrap.py
new file mode 100755
index 0000000000..733042bbc5
--- /dev/null
+++ b/python/mozboot/bin/bootstrap.py
@@ -0,0 +1,439 @@
+#!/usr/bin/env python3
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This script provides one-line bootstrap support to configure systems to build
+# the tree. It does so by cloning the repo before calling directly into `mach
+# bootstrap`.
+
+# Note that this script can't assume anything in particular about the host
+# Python environment (except that it's run with a sufficiently recent version of
+# Python 3), so we are restricted to stdlib modules.
+
import sys

# Fail fast on interpreters older than 3.6: the rest of this script uses
# f-strings and pathlib APIs that require it.
major, minor = sys.version_info[:2]
if (major < 3) or (major == 3 and minor < 6):
    print(
        # Trailing space added: the original adjacent literals ran the
        # sentences together as "3.6+.Please".
        "Bootstrap currently only runs on Python 3.6+. "
        "Please try re-running with python3.6+."
    )
    sys.exit(1)
+
+import ctypes
+import os
+import shutil
+import subprocess
+import tempfile
+from optparse import OptionParser
+from pathlib import Path
+
# Error text printed when `hg pull` from mozilla-unified fails; the %s
# placeholder is filled with the clone destination directory.
CLONE_MERCURIAL_PULL_FAIL = """
Failed to pull from hg.mozilla.org.

This is most likely because of unstable network connection.
Try running `cd %s && hg pull https://hg.mozilla.org/mozilla-unified` manually,
or download a mercurial bundle and use it:
https://firefox-source-docs.mozilla.org/contributing/vcs/mercurial_bundles.html"""

# True on both native win32 Python and MSYS-hosted Python.
WINDOWS = sys.platform.startswith("win32") or sys.platform.startswith("msys")
# Maps the --vcs option value to a display name for user-facing messages.
VCS_HUMAN_READABLE = {
    "hg": "Mercurial",
    "git": "Git",
}
+
+
def which(name):
    """Python implementation of which.

    It returns the path of an executable or None if it couldn't be found.
    """
    # On Windows, prefer the ".exe"-suffixed name over the bare one.
    candidates = [name + ".exe", name] if WINDOWS else [name]

    for directory in os.environ["PATH"].split(os.pathsep):
        for candidate in candidates:
            exe = Path(directory) / candidate
            if exe.is_file() and os.access(exe, os.X_OK):
                return exe

    return None
+
+
def validate_clone_dest(dest: Path):
    """Return the resolved destination if usable for a fresh clone, else None.

    A destination is usable when it either does not exist yet or is an
    empty directory; anything else prints an explanation and returns None.
    """
    dest = dest.resolve()

    if not dest.exists():
        return dest

    if not dest.is_dir():
        print(f"ERROR! Destination {dest} exists but is not a directory.")
        return None

    if any(dest.iterdir()):
        print(f"ERROR! Destination directory {dest} exists but is nonempty.")
        print(
            f"To re-bootstrap the existing checkout, go into '{dest}' and run './mach bootstrap'."
        )
        return None

    return dest
+
+
def input_clone_dest(vcs, no_interactive):
    """Prompt for (or default) the directory to clone into.

    Loops until a usable destination is chosen.  Returns a resolved Path,
    or None when running non-interactively and the default destination is
    unusable.
    """
    repo_name = "mozilla-unified"
    print(f"Cloning into {repo_name} using {VCS_HUMAN_READABLE[vcs]}...")
    while True:
        dest = None
        if not no_interactive:
            # Empty input falls through to the repo_name default below.
            dest = input(
                f"Destination directory for clone (leave empty to use "
                f"default destination of {repo_name}): "
            ).strip()
        if not dest:
            dest = repo_name
        dest = validate_clone_dest(Path(dest).expanduser())
        if dest:
            return dest
        if no_interactive:
            # Can't re-prompt without a user; give up instead of looping.
            return None
+
+
def hg_clone_firefox(hg: Path, dest: Path, head_repo, head_rev):
    """Clone mozilla-unified with Mercurial into *dest*.

    Returns *dest* on success (even if the final `hg update` failed, since
    the data is present), or None when repo creation or the pull failed.
    """
    # We create an empty repo then modify the config before adding data.
    # This is necessary to ensure storage settings are optimally
    # configured.
    args = [
        str(hg),
        # The unified repo is generaldelta, so ensure the client is as
        # well.
        "--config",
        "format.generaldelta=true",
        "init",
        str(dest),
    ]
    res = subprocess.call(args)
    if res:
        print("unable to create destination repo; please try cloning manually")
        return None

    # Strictly speaking, this could overwrite a config based on a template
    # the user has installed. Let's pretend this problem doesn't exist
    # unless someone complains about it.
    with open(dest / ".hg" / "hgrc", "a") as fh:
        fh.write("[paths]\n")
        fh.write("default = https://hg.mozilla.org/mozilla-unified\n")
        fh.write("\n")

        # The server uses aggressivemergedeltas which can blow up delta chain
        # length. This can cause performance to tank due to delta chains being
        # too long. Limit the delta chain length to something reasonable
        # to bound revlog read time.
        fh.write("[format]\n")
        fh.write("# This is necessary to keep performance in check\n")
        fh.write("maxchainlen = 10000\n")

    # Pulling a specific revision into an empty repository induces a lot of
    # load on the Mercurial server, so we always pull from mozilla-unified (which,
    # when done from an empty repository, is equivalent to a clone), and then pull
    # the specific revision we want (if we want a specific one, otherwise we just
    # use the "central" bookmark), at which point it will be an incremental pull,
    # that the server can process more easily.
    # This is the same thing that robustcheckout does on automation.
    res = subprocess.call(
        [str(hg), "pull", "https://hg.mozilla.org/mozilla-unified"], cwd=str(dest)
    )
    if not res and head_repo:
        res = subprocess.call(
            [str(hg), "pull", head_repo, "-r", head_rev], cwd=str(dest)
        )
    print("")
    if res:
        print(CLONE_MERCURIAL_PULL_FAIL % dest)
        return None

    head_rev = head_rev or "central"
    print(f'updating to "{head_rev}" - the development head of Gecko and Firefox')
    res = subprocess.call([str(hg), "update", "-r", head_rev], cwd=str(dest))
    if res:
        # Non-fatal: the pull succeeded, so the user can update manually.
        print(
            f"error updating; you will need to `cd {dest} && hg update -r central` "
            "manually"
        )
    return dest
+
+
def git_clone_firefox(git: Path, dest: Path, watchman: Path, head_repo, head_rev):
    """Clone mozilla-unified with git (via git-cinnabar) into *dest*.

    Downloads a temporary copy of git-cinnabar when it isn't already on
    PATH, optionally configures the watchman fsmonitor hook, and returns
    *dest* on success.  Raises CalledProcessError if any git step fails.
    """
    tempdir = None
    cinnabar = None
    env = dict(os.environ)
    try:
        cinnabar = which("git-cinnabar")
        if not cinnabar:
            from urllib.request import urlopen

            cinnabar_url = "https://github.com/glandium/git-cinnabar/"
            # If git-cinnabar isn't installed already, that's fine; we can
            # download a temporary copy. `mach bootstrap` will install a copy
            # in the state dir; we don't want to copy all that logic to this
            # tiny bootstrapping script.
            tempdir = Path(tempfile.mkdtemp())
            with open(tempdir / "download.py", "wb") as fh:
                shutil.copyfileobj(
                    urlopen(f"{cinnabar_url}/raw/master/download.py"), fh
                )

            subprocess.check_call(
                [sys.executable, str(tempdir / "download.py")],
                cwd=str(tempdir),
            )
            # Make the freshly-downloaded git-cinnabar visible to the git
            # subprocesses below via a prepended PATH entry.
            env["PATH"] = str(tempdir) + os.pathsep + env["PATH"]
            print(
                "WARNING! git-cinnabar is required for Firefox development "
                "with git. After the clone is complete, the bootstrapper "
                "will ask if you would like to configure git; answer yes, "
                "and be sure to add git-cinnabar to your PATH according to "
                "the bootstrapper output."
            )

        # We're guaranteed to have `git-cinnabar` installed now.
        # Configure git per the git-cinnabar requirements.
        subprocess.check_call(
            [
                str(git),
                "clone",
                "--no-checkout",
                "hg::https://hg.mozilla.org/mozilla-unified",
                str(dest),
            ],
            env=env,
        )
        subprocess.check_call(
            [str(git), "config", "fetch.prune", "true"], cwd=str(dest), env=env
        )
        subprocess.check_call(
            [str(git), "config", "pull.ff", "only"], cwd=str(dest), env=env
        )

        # Fetch the specific CI revision (if requested) before checkout.
        if head_repo:
            subprocess.check_call(
                [str(git), "cinnabar", "fetch", f"hg::{head_repo}", head_rev],
                cwd=str(dest),
                env=env,
            )

        subprocess.check_call(
            [str(git), "checkout", "FETCH_HEAD" if head_rev else "bookmarks/central"],
            cwd=str(dest),
            env=env,
        )

        watchman_sample = dest / ".git/hooks/fsmonitor-watchman.sample"
        # Older versions of git didn't include fsmonitor-watchman.sample.
        if watchman and watchman_sample.exists():
            print("Configuring watchman")
            watchman_config = dest / ".git/hooks/query-watchman"
            if not watchman_config.exists():
                print(f"Copying {watchman_sample} to {watchman_config}")
                copy_args = [
                    "cp",
                    ".git/hooks/fsmonitor-watchman.sample",
                    ".git/hooks/query-watchman",
                ]
                subprocess.check_call(copy_args, cwd=str(dest))

            config_args = [
                str(git),
                "config",
                "core.fsmonitor",
                ".git/hooks/query-watchman",
            ]
            subprocess.check_call(config_args, cwd=str(dest), env=env)
        return dest
    finally:
        # Clean up the temporary git-cinnabar download, if any.
        if tempdir:
            shutil.rmtree(str(tempdir))
+
+
def add_microsoft_defender_antivirus_exclusions(dest, no_system_changes):
    """Ask Windows Defender to exclude build-related directories from scanning.

    No-op on non-Windows systems, when --no-system-changes was passed, or
    when powershell isn't available.  Triggers a UAC elevation prompt; if
    the user declines, no exclusions are added.
    """
    if no_system_changes:
        return

    if not WINDOWS:
        return

    powershell_exe = which("powershell")

    if not powershell_exe:
        return

    def print_attempt_exclusion(path):
        print(
            f"Attempting to add exclusion path to Microsoft Defender Antivirus for: {path}"
        )

    powershell_exe = str(powershell_exe)
    paths = []

    # mozilla-unified / clone dest
    repo_dir = Path.cwd() / dest
    paths.append(repo_dir)
    print_attempt_exclusion(repo_dir)

    # MOZILLABUILD
    mozillabuild_dir = os.getenv("MOZILLABUILD")
    if mozillabuild_dir:
        paths.append(mozillabuild_dir)
        print_attempt_exclusion(mozillabuild_dir)

    # .mozbuild
    mozbuild_dir = Path.home() / ".mozbuild"
    paths.append(mozbuild_dir)
    print_attempt_exclusion(mozbuild_dir)

    # Run all Add-MpPreference calls in a single powershell invocation.
    args = ";".join(f"Add-MpPreference -ExclusionPath '{path}'" for path in paths)
    command = f'-Command "{args}"'

    # This will attempt to run as administrator by triggering a UAC prompt
    # for admin credentials. If "No" is selected, no exclusions are added.
    ctypes.windll.shell32.ShellExecuteW(None, "runas", powershell_exe, command, None, 0)
+
+
def clone(options):
    """Locate the VCS binary, pick a destination, and clone the source tree.

    Returns the clone destination Path on success, or None when the VCS
    binary is missing or no usable destination could be determined.
    """
    vcs = options.vcs
    no_interactive = options.no_interactive
    no_system_changes = options.no_system_changes

    if vcs == "hg":
        hg = which("hg")
        if not hg:
            print("Mercurial is not installed. Mercurial is required to clone Firefox.")
            try:
                # We're going to recommend people install the Mercurial package with
                # pip3. That will work if `pip3` installs binaries to a location
                # that's in the PATH, but it might not be. To help out, if we CAN
                # import "mercurial" (in which case it's already been installed),
                # offer that as a solution.
                import mercurial  # noqa: F401

                print(
                    "Hint: have you made sure that Mercurial is installed to a "
                    "location in your PATH?"
                )
            except ImportError:
                print("Try installing hg with `pip3 install Mercurial`.")
            return None
        binary = hg
    else:
        binary = which(vcs)
        if not binary:
            print("Git is not installed.")
            print("Try installing git using your system package manager.")
            return None

    dest = input_clone_dest(vcs, no_interactive)
    if not dest:
        return None

    # Best-effort: exclude the new checkout from Defender scanning (Windows).
    add_microsoft_defender_antivirus_exclusions(dest, no_system_changes)

    print(f"Cloning Firefox {VCS_HUMAN_READABLE[vcs]} repository to {dest}")

    # CI may pin a specific upstream repo/revision via these variables.
    head_repo = os.environ.get("GECKO_HEAD_REPOSITORY")
    head_rev = os.environ.get("GECKO_HEAD_REV")

    if vcs == "hg":
        return hg_clone_firefox(binary, dest, head_repo, head_rev)
    else:
        watchman = which("watchman")
        return git_clone_firefox(binary, dest, watchman, head_repo, head_rev)
+
+
def bootstrap(srcdir: Path, application_choice, no_interactive, no_system_changes):
    """Run `./mach bootstrap` inside the cloned *srcdir*; return its exit code."""
    args = [sys.executable, "mach"]

    if no_interactive:
        # --no-interactive is a global argument, not a command argument,
        # so it needs to be specified before "bootstrap" is appended.
        args.append("--no-interactive")

    args.append("bootstrap")

    if application_choice:
        args.extend(["--application-choice", application_choice])
    if no_system_changes:
        args.append("--no-system-changes")

    print("Running `%s`" % " ".join(args))
    return subprocess.call(args, cwd=str(srcdir))
+
+
def main(args):
    """Parse options, clone the tree, offer to delete this script, then bootstrap.

    Returns the bootstrap exit code, or 1 when the clone failed.
    """
    parser = OptionParser()
    parser.add_option(
        "--application-choice",
        dest="application_choice",
        help='Pass in an application choice (see "APPLICATIONS" in '
        "python/mozboot/mozboot/bootstrap.py) instead of using the "
        "default interactive prompt.",
    )
    parser.add_option(
        "--vcs",
        dest="vcs",
        default="hg",
        choices=["git", "hg"],
        help="VCS (hg or git) to use for downloading the source code, "
        "instead of using the default interactive prompt.",
    )
    parser.add_option(
        "--no-interactive",
        dest="no_interactive",
        action="store_true",
        help="Answer yes to any (Y/n) interactive prompts.",
    )
    parser.add_option(
        "--no-system-changes",
        dest="no_system_changes",
        action="store_true",
        help="Only executes actions that leave the system " "configuration alone.",
    )

    options, leftover = parser.parse_args(args)
    try:
        srcdir = clone(options)
        if not srcdir:
            return 1
        print("Clone complete.")
        print(
            "If you need to run the tooling bootstrapping again, "
            "then consider running './mach bootstrap' instead."
        )
        if not options.no_interactive:
            remove_bootstrap_file = input(
                "Unless you are going to have more local copies of Firefox source code, "
                "this 'bootstrap.py' file is no longer needed and can be deleted. "
                "Clean up the bootstrap.py file? (Y/n)"
            )
            if not remove_bootstrap_file:
                remove_bootstrap_file = "y"
        # Normalize the answer: the prompt advertises "(Y/n)", so accept
        # "Y"/"y " etc. rather than only an exact lowercase "y" (the
        # original comparison silently treated "Y" as "no").
        if options.no_interactive or remove_bootstrap_file.strip().lower() == "y":
            try:
                Path(sys.argv[0]).unlink()
            except FileNotFoundError:
                print("File could not be found !")
        return bootstrap(
            srcdir,
            options.application_choice,
            options.no_interactive,
            options.no_system_changes,
        )
    except Exception:
        print("Could not bootstrap Firefox! Consider filing a bug.")
        raise


if __name__ == "__main__":
    sys.exit(main(sys.argv))
diff --git a/python/mozboot/mozboot/__init__.py b/python/mozboot/mozboot/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozboot/mozboot/__init__.py
diff --git a/python/mozboot/mozboot/android-avds/arm.json b/python/mozboot/mozboot/android-avds/arm.json
new file mode 100644
index 0000000000..756018631c
--- /dev/null
+++ b/python/mozboot/mozboot/android-avds/arm.json
@@ -0,0 +1,27 @@
+{
+ "emulator_package": "system-images;android-24;default;armeabi-v7a",
+ "emulator_avd_name": "mozemulator-armeabi-v7a",
+ "emulator_extra_args": [
+ "-skip-adb-auth",
+ "-verbose",
+ "-show-kernel",
+ "-ranchu",
+ "-selinux", "permissive",
+ "-memory", "3072",
+ "-cores", "4",
+ "-skin", "800x1280",
+ "-gpu", "on",
+ "-no-snapstorage",
+ "-no-snapshot",
+ "-no-window",
+ "-no-accel",
+ "-prop", "ro.test_harness=true"
+ ],
+ "emulator_extra_config": {
+ "hw.keyboard": "yes",
+ "hw.lcd.density": "320",
+ "disk.dataPartition.size": "4000MB",
+ "sdcard.size": "600M"
+ },
+ "emulator_prewarm": false
+}
diff --git a/python/mozboot/mozboot/android-avds/arm64.json b/python/mozboot/mozboot/android-avds/arm64.json
new file mode 100644
index 0000000000..767f9299d4
--- /dev/null
+++ b/python/mozboot/mozboot/android-avds/arm64.json
@@ -0,0 +1,27 @@
+{
+ "emulator_package": "system-images;android-30;default;arm64-v8a",
+ "emulator_avd_name": "mozemulator-arm64",
+ "emulator_extra_args": [
+ "-skip-adb-auth",
+ "-verbose",
+ "-show-kernel",
+ "-ranchu",
+ "-selinux", "permissive",
+ "-memory", "3072",
+ "-cores", "4",
+ "-skin", "800x1280",
+ "-gpu", "on",
+ "-no-snapstorage",
+ "-no-snapshot",
+ "-no-window",
+ "-no-accel",
+ "-prop", "ro.test_harness=true"
+ ],
+ "emulator_extra_config": {
+ "hw.keyboard": "yes",
+ "hw.lcd.density": "320",
+ "disk.dataPartition.size": "4000MB",
+ "sdcard.size": "600M"
+ },
+ "emulator_prewarm": false
+}
diff --git a/python/mozboot/mozboot/android-avds/x86_64.json b/python/mozboot/mozboot/android-avds/x86_64.json
new file mode 100644
index 0000000000..68f99a9937
--- /dev/null
+++ b/python/mozboot/mozboot/android-avds/x86_64.json
@@ -0,0 +1,26 @@
+{
+ "emulator_package": "system-images;android-24;default;x86_64",
+ "emulator_avd_name": "mozemulator-x86_64",
+ "emulator_extra_args": [
+ "-skip-adb-auth",
+ "-verbose",
+ "-show-kernel",
+ "-ranchu",
+ "-selinux", "permissive",
+ "-memory", "3072",
+ "-cores", "4",
+ "-skin", "800x1280",
+ "-gpu", "on",
+ "-no-snapstorage",
+ "-no-snapshot",
+ "-no-window",
+ "-no-accel",
+ "-prop", "ro.test_harness=true"
+ ],
+ "emulator_extra_config": {
+ "hw.keyboard": "yes",
+ "hw.lcd.density": "320",
+ "disk.dataPartition.size": "4000MB",
+ "sdcard.size": "600M"
+ }
+}
diff --git a/python/mozboot/mozboot/android-emulator-packages.txt b/python/mozboot/mozboot/android-emulator-packages.txt
new file mode 100644
index 0000000000..3e782df670
--- /dev/null
+++ b/python/mozboot/mozboot/android-emulator-packages.txt
@@ -0,0 +1,2 @@
+platform-tools
+emulator
diff --git a/python/mozboot/mozboot/android-packages.txt b/python/mozboot/mozboot/android-packages.txt
new file mode 100644
index 0000000000..e36a300d83
--- /dev/null
+++ b/python/mozboot/mozboot/android-packages.txt
@@ -0,0 +1,4 @@
+emulator
+platform-tools
+build-tools;33.0.1
+platforms;android-33
diff --git a/python/mozboot/mozboot/android-system-images-packages.txt b/python/mozboot/mozboot/android-system-images-packages.txt
new file mode 100644
index 0000000000..86069f7680
--- /dev/null
+++ b/python/mozboot/mozboot/android-system-images-packages.txt
@@ -0,0 +1 @@
+emulator
diff --git a/python/mozboot/mozboot/android.py b/python/mozboot/mozboot/android.py
new file mode 100644
index 0000000000..26929da696
--- /dev/null
+++ b/python/mozboot/mozboot/android.py
@@ -0,0 +1,886 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import errno
+import json
+import os
+import stat
+import subprocess
+import sys
+import time
+from pathlib import Path
+from typing import Optional, Union
+
+import requests
+from tqdm import tqdm
+
+# We need the NDK version in multiple different places, and it's inconvenient
+# to pass down the NDK version to all relevant places, so we have this global
+# variable.
+from mozboot.bootstrap import MOZCONFIG_SUGGESTION_TEMPLATE
+
+NDK_VERSION = "r23c"
+CMDLINE_TOOLS_VERSION_STRING = "9.0"
+CMDLINE_TOOLS_VERSION = "9477386"
+
+BUNDLETOOL_VERSION = "1.14.1"
+
+# We expect the emulator AVD definitions to be platform agnostic
+LINUX_X86_64_ANDROID_AVD = "linux64-android-avd-x86_64-repack"
+LINUX_ARM_ANDROID_AVD = "linux64-android-avd-arm-repack"
+
+MACOS_X86_64_ANDROID_AVD = "linux64-android-avd-x86_64-repack"
+MACOS_ARM_ANDROID_AVD = "linux64-android-avd-arm-repack"
+MACOS_ARM64_ANDROID_AVD = "linux64-android-avd-arm64-repack"
+
+WINDOWS_X86_64_ANDROID_AVD = "linux64-android-avd-x86_64-repack"
+WINDOWS_ARM_ANDROID_AVD = "linux64-android-avd-arm-repack"
+
+AVD_MANIFEST_X86_64 = Path(__file__).resolve().parent / "android-avds/x86_64.json"
+AVD_MANIFEST_ARM = Path(__file__).resolve().parent / "android-avds/arm.json"
+AVD_MANIFEST_ARM64 = Path(__file__).resolve().parent / "android-avds/arm64.json"
+
+JAVA_VERSION_MAJOR = "17"
+JAVA_VERSION_MINOR = "0.7"
+JAVA_VERSION_PATCH = "7"
+
+ANDROID_NDK_EXISTS = """
+Looks like you have the correct version of the Android NDK installed at:
+%s
+"""
+
+ANDROID_SDK_EXISTS = """
+Looks like you have the Android SDK installed at:
+%s
+We will install all required Android packages.
+"""
+
+ANDROID_SDK_TOO_OLD = """
+Looks like you have an outdated Android SDK installed at:
+%s
+I can't update outdated Android SDKs to have the required 'sdkmanager'
+tool. Move it out of the way (or remove it entirely) and then run
+bootstrap again.
+"""
+
+INSTALLING_ANDROID_PACKAGES = """
+We are now installing the following Android packages:
+%s
+You may be prompted to agree to the Android license. You may see some
+output as packages are downloaded and installed.
+"""
+
+MOBILE_ANDROID_MOZCONFIG_TEMPLATE = """
+# Build GeckoView/Firefox for Android:
+ac_add_options --enable-project=mobile/android
+
+# Targeting the following architecture.
+# For regular phones, no --target is needed.
+# For x86 emulators (and x86 devices, which are uncommon):
+# ac_add_options --target=i686
+# For newer phones or Apple silicon
+# ac_add_options --target=aarch64
+# For x86_64 emulators (and x86_64 devices, which are even less common):
+# ac_add_options --target=x86_64
+
+{extra_lines}
+"""
+
+MOBILE_ANDROID_ARTIFACT_MODE_MOZCONFIG_TEMPLATE = """
+# Build GeckoView/Firefox for Android Artifact Mode:
+ac_add_options --enable-project=mobile/android
+ac_add_options --enable-artifact-builds
+
+{extra_lines}
+# Write build artifacts to:
+mk_add_options MOZ_OBJDIR=./objdir-frontend
+"""
+
+
+class GetNdkVersionError(Exception):
+ pass
+
+
+def install_mobile_android_sdk_or_ndk(url, path: Path):
+ """
+ Fetch an Android SDK or NDK from |url| and unpack it into the given |path|.
+
+ We use, and 'requests' respects, https. We could also include SHAs for a
+ small improvement in the integrity guarantee we give. But this script is
+ bootstrapped over https anyway, so it's a really minor improvement.
+
+ We keep a cache of the downloaded artifacts, writing into |path|/mozboot.
+ We don't yet clean the cache; it's better to waste some disk space and
+ not require a long re-download than to wipe the cache prematurely.
+ """
+
+ download_path = path / "mozboot"
+ try:
+ download_path.mkdir(parents=True)
+ except OSError as e:
+ if e.errno == errno.EEXIST and download_path.is_dir():
+ pass
+ else:
+ raise
+
+ file_name = url.split("/")[-1]
+ download_file_path = download_path / file_name
+ download(url, download_file_path)
+
+ if file_name.endswith(".tar.gz") or file_name.endswith(".tgz"):
+ cmd = ["tar", "zxf", str(download_file_path)]
+ elif file_name.endswith(".tar.bz2"):
+ cmd = ["tar", "jxf", str(download_file_path)]
+ elif file_name.endswith(".zip"):
+ cmd = ["unzip", "-q", str(download_file_path)]
+ elif file_name.endswith(".bin"):
+ # Execute the .bin file, which unpacks the content.
+ mode = os.stat(path).st_mode
+ download_file_path.chmod(mode | stat.S_IXUSR)
+ cmd = [str(download_file_path)]
+ else:
+ raise NotImplementedError(f"Don't know how to unpack file: {file_name}")
+
+ print(f"Unpacking {download_file_path}...")
+
+ with open(os.devnull, "w") as stdout:
+ # These unpack commands produce a ton of output; ignore it. The
+ # .bin files are 7z archives; there's no command line flag to quiet
+ # output, so we use this hammer.
+ subprocess.check_call(cmd, stdout=stdout, cwd=str(path))
+
+ print(f"Unpacking {download_file_path}... DONE")
+ # Now delete the archive
+ download_file_path.unlink()
+
+
+def download(
+ url,
+ download_file_path: Path,
+):
+ with requests.Session() as session:
+ request = session.head(url, allow_redirects=True)
+ request.raise_for_status()
+ remote_file_size = int(request.headers["content-length"])
+
+ if download_file_path.is_file():
+ local_file_size = download_file_path.stat().st_size
+
+ if local_file_size == remote_file_size:
+ print(
+ f"{download_file_path.name} already downloaded. Skipping download..."
+ )
+ else:
+ print(f"Partial download detected. Resuming download of {url}...")
+ download_internal(
+ download_file_path,
+ session,
+ url,
+ remote_file_size,
+ local_file_size,
+ )
+ else:
+ print(f"Downloading {url}...")
+ download_internal(download_file_path, session, url, remote_file_size)
+
+
+def download_internal(
+ download_file_path: Path,
+ session,
+ url,
+ remote_file_size,
+ resume_from_byte_pos: int = None,
+):
+ """
+ Handles both a fresh SDK/NDK download, as well as resuming a partial one
+ """
+ # "ab" will behave same as "wb" if file does not exist
+ with open(download_file_path, "ab") as file:
+ # 64 KB/s should be fine on even the slowest internet connections
+ chunk_size = 1024 * 64
+ # https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Range#directives
+ resume_header = (
+ {"Range": f"bytes={resume_from_byte_pos}-"}
+ if resume_from_byte_pos
+ else None
+ )
+
+ request = session.get(
+ url, stream=True, allow_redirects=True, headers=resume_header
+ )
+
+ with tqdm(
+ total=int(remote_file_size),
+ unit="B",
+ unit_scale=True,
+ unit_divisor=1024,
+ desc=download_file_path.name,
+ initial=resume_from_byte_pos if resume_from_byte_pos else 0,
+ ) as progress_bar:
+ for chunk in request.iter_content(chunk_size):
+ file.write(chunk)
+ progress_bar.update(len(chunk))
+
+
+def get_ndk_version(ndk_path: Union[str, Path]):
+ """Given the path to the NDK, return the version as a 3-tuple of (major,
+ minor, human).
+ """
+ ndk_path = Path(ndk_path)
+ with open(ndk_path / "source.properties", "r") as f:
+ revision = [line for line in f if line.startswith("Pkg.Revision")]
+ if not revision:
+ raise GetNdkVersionError(
+ "Cannot determine NDK version from source.properties"
+ )
+ if len(revision) != 1:
+ raise GetNdkVersionError("Too many Pkg.Revision lines in source.properties")
+
+ (_, version) = revision[0].split("=")
+ if not version:
+ raise GetNdkVersionError(
+ "Unexpected Pkg.Revision line in source.properties"
+ )
+
+ (major, minor, revision) = version.strip().split(".")
+ if not major or not minor:
+ raise GetNdkVersionError("Unexpected NDK version string: " + version)
+
+ # source.properties contains a $MAJOR.$MINOR.$PATCH revision number,
+ # but the more common nomenclature that Google uses is alphanumeric
+ # version strings like "r20" or "r19c". Convert the source.properties
+ # notation into an alphanumeric string.
+ int_minor = int(minor)
+ alphas = "abcdefghijklmnop"
+ ascii_minor = alphas[int_minor] if int_minor > 0 else ""
+ human = "r%s%s" % (major, ascii_minor)
+ return (major, minor, human)
+
+
+def get_paths(os_name):
+ mozbuild_path = Path(
+ os.environ.get("MOZBUILD_STATE_PATH", Path("~/.mozbuild").expanduser())
+ )
+ sdk_path = Path(
+ os.environ.get("ANDROID_SDK_HOME", mozbuild_path / f"android-sdk-{os_name}"),
+ )
+ ndk_path = Path(
+ os.environ.get(
+ "ANDROID_NDK_HOME", mozbuild_path / f"android-ndk-{NDK_VERSION}"
+ ),
+ )
+ avd_home_path = Path(
+ os.environ.get("ANDROID_AVD_HOME", mozbuild_path / "android-device" / "avd")
+ )
+ return mozbuild_path, sdk_path, ndk_path, avd_home_path
+
+
+def sdkmanager_tool(sdk_path: Path):
+ # sys.platform is win32 even if Python/Win64.
+ sdkmanager = "sdkmanager.bat" if sys.platform.startswith("win") else "sdkmanager"
+ return (
+ sdk_path / "cmdline-tools" / CMDLINE_TOOLS_VERSION_STRING / "bin" / sdkmanager
+ )
+
+
+def avdmanager_tool(sdk_path: Path):
+ # sys.platform is win32 even if Python/Win64.
+ sdkmanager = "avdmanager.bat" if sys.platform.startswith("win") else "avdmanager"
+ return (
+ sdk_path / "cmdline-tools" / CMDLINE_TOOLS_VERSION_STRING / "bin" / sdkmanager
+ )
+
+
+def adb_tool(sdk_path: Path):
+ adb = "adb.bat" if sys.platform.startswith("win") else "adb"
+ return sdk_path / "platform-tools" / adb
+
+
+def emulator_tool(sdk_path: Path):
+ emulator = "emulator.bat" if sys.platform.startswith("win") else "emulator"
+ return sdk_path / "emulator" / emulator
+
+
+def ensure_android(
+ os_name,
+ os_arch,
+ artifact_mode=False,
+ ndk_only=False,
+ system_images_only=False,
+ emulator_only=False,
+ avd_manifest_path: Optional[Path] = None,
+ prewarm_avd=False,
+ no_interactive=False,
+ list_packages=False,
+):
+ """
+ Ensure the Android SDK (and NDK, if `artifact_mode` is falsy) are
+ installed. If not, fetch and unpack the SDK and/or NDK from the
+ given URLs. Ensure the required Android SDK packages are
+ installed.
+
+ `os_name` can be 'linux', 'macosx' or 'windows'.
+ """
+ # The user may have an external Android SDK (in which case we
+ # save them a lengthy download), or they may have already
+ # completed the download. We unpack to
+ # ~/.mozbuild/{android-sdk-$OS_NAME, android-ndk-$VER}.
+ mozbuild_path, sdk_path, ndk_path, avd_home_path = get_paths(os_name)
+
+ if os_name == "macosx":
+ os_tag = "mac"
+ elif os_name == "windows":
+ os_tag = "win"
+ else:
+ os_tag = os_name
+
+ sdk_url = "https://dl.google.com/android/repository/commandlinetools-{0}-{1}_latest.zip".format( # NOQA: E501
+ os_tag, CMDLINE_TOOLS_VERSION
+ )
+ ndk_url = android_ndk_url(os_name)
+ bundletool_url = "https://github.com/google/bundletool/releases/download/{v}/bundletool-all-{v}.jar".format( # NOQA: E501
+ v=BUNDLETOOL_VERSION
+ )
+
+ ensure_android_sdk_and_ndk(
+ mozbuild_path,
+ os_name,
+ sdk_path=sdk_path,
+ sdk_url=sdk_url,
+ ndk_path=ndk_path,
+ ndk_url=ndk_url,
+ bundletool_url=bundletool_url,
+ artifact_mode=artifact_mode,
+ ndk_only=ndk_only,
+ emulator_only=emulator_only,
+ )
+
+ if ndk_only:
+ return
+
+ avd_manifest = None
+ if avd_manifest_path is not None:
+ with open(avd_manifest_path) as f:
+ avd_manifest = json.load(f)
+ # Some AVDs cannot be prewarmed in CI because they cannot run on linux64
+ # (like the arm64 AVD).
+ if "emulator_prewarm" in avd_manifest:
+ prewarm_avd = prewarm_avd and avd_manifest["emulator_prewarm"]
+
+ # We expect the |sdkmanager| tool to be at
+ # ~/.mozbuild/android-sdk-$OS_NAME/tools/cmdline-tools/$CMDLINE_TOOLS_VERSION_STRING/bin/sdkmanager. # NOQA: E501
+ ensure_android_packages(
+ os_name,
+ os_arch,
+ sdkmanager_tool=sdkmanager_tool(sdk_path),
+ emulator_only=emulator_only,
+ system_images_only=system_images_only,
+ avd_manifest=avd_manifest,
+ no_interactive=no_interactive,
+ list_packages=list_packages,
+ )
+
+ if emulator_only or system_images_only:
+ return
+
+ ensure_android_avd(
+ avdmanager_tool=avdmanager_tool(sdk_path),
+ adb_tool=adb_tool(sdk_path),
+ emulator_tool=emulator_tool(sdk_path),
+ avd_home_path=avd_home_path,
+ sdk_path=sdk_path,
+ no_interactive=no_interactive,
+ avd_manifest=avd_manifest,
+ prewarm_avd=prewarm_avd,
+ )
+
+
+def ensure_android_sdk_and_ndk(
+ mozbuild_path: Path,
+ os_name,
+ sdk_path: Path,
+ sdk_url,
+ ndk_path: Path,
+ ndk_url,
+ bundletool_url,
+ artifact_mode,
+ ndk_only,
+ emulator_only,
+):
+ """
+ Ensure the Android SDK and NDK are found at the given paths. If not, fetch
+ and unpack the SDK and/or NDK from the given URLs into
+ |mozbuild_path/{android-sdk-$OS_NAME,android-ndk-$VER}|.
+ """
+
+ # It's not particularly bad to overwrite the NDK toolchain, but it does take
+ # a while to unpack, so let's avoid the disk activity if possible. The SDK
+ # may prompt about licensing, so we do this first.
+ # Check for Android NDK only if we are not in artifact mode.
+ if not artifact_mode and not emulator_only:
+ install_ndk = True
+ if ndk_path.is_dir():
+ try:
+ _, _, human = get_ndk_version(ndk_path)
+ if human == NDK_VERSION:
+ print(ANDROID_NDK_EXISTS % ndk_path)
+ install_ndk = False
+ except GetNdkVersionError:
+ pass # Just do the install.
+ if install_ndk:
+ # The NDK archive unpacks into a top-level android-ndk-$VER directory.
+ install_mobile_android_sdk_or_ndk(ndk_url, mozbuild_path)
+
+ if ndk_only:
+ return
+
+ # We don't want to blindly overwrite, since we use the
+ # |sdkmanager| tool to install additional parts of the Android
+ # toolchain. If we overwrite, we lose whatever Android packages
+ # the user may have already installed.
+ if sdkmanager_tool(sdk_path).is_file():
+ print(ANDROID_SDK_EXISTS % sdk_path)
+ elif sdk_path.is_dir():
+ raise NotImplementedError(ANDROID_SDK_TOO_OLD % sdk_path)
+ else:
+ # The SDK archive used to include a top-level
+ # android-sdk-$OS_NAME directory; it no longer does so. We
+ # preserve the old convention to smooth detecting existing SDK
+ # installations.
+ cmdline_tools_path = mozbuild_path / f"android-sdk-{os_name}" / "cmdline-tools"
+ install_mobile_android_sdk_or_ndk(sdk_url, cmdline_tools_path)
+ # The tools package *really* wants to be in
+ # <sdk>/cmdline-tools/$CMDLINE_TOOLS_VERSION_STRING
+ (cmdline_tools_path / "cmdline-tools").rename(
+ cmdline_tools_path / CMDLINE_TOOLS_VERSION_STRING
+ )
+ download(bundletool_url, mozbuild_path / "bundletool.jar")
+
+
+def get_packages_to_install(packages_file_content, avd_manifest):
+ packages = []
+ packages += map(lambda package: package.strip(), packages_file_content)
+ if avd_manifest is not None:
+ packages += [avd_manifest["emulator_package"]]
+ return packages
+
+
+def ensure_android_avd(
+ avdmanager_tool: Path,
+ adb_tool: Path,
+ emulator_tool: Path,
+ avd_home_path: Path,
+ sdk_path: Path,
+ no_interactive=False,
+ avd_manifest=None,
+ prewarm_avd=False,
+):
+    """
+    Use the given avdmanager tool to create the Android virtual device
+    described by the AVD manifest.
+    """
+ if avd_manifest is None:
+ return
+
+ avd_home_path.mkdir(parents=True, exist_ok=True)
+ # The AVD needs this folder to boot, so make sure it exists here.
+ (sdk_path / "platforms").mkdir(parents=True, exist_ok=True)
+
+ avd_name = avd_manifest["emulator_avd_name"]
+ args = [
+ str(avdmanager_tool),
+ "--verbose",
+ "create",
+ "avd",
+ "--force",
+ "--name",
+ avd_name,
+ "--package",
+ avd_manifest["emulator_package"],
+ ]
+
+ if not no_interactive:
+ subprocess.check_call(args)
+ return
+
+ # Flush outputs before running sdkmanager.
+ sys.stdout.flush()
+ env = os.environ.copy()
+ env["ANDROID_AVD_HOME"] = str(avd_home_path)
+ proc = subprocess.Popen(args, stdin=subprocess.PIPE, env=env)
+ proc.communicate("no\n".encode("UTF-8"))
+
+ retcode = proc.poll()
+ if retcode:
+ cmd = args[0]
+ e = subprocess.CalledProcessError(retcode, cmd)
+ raise e
+
+ avd_path = avd_home_path / (str(avd_name) + ".avd")
+ config_file_name = avd_path / "config.ini"
+
+ print(f"Writing config at {config_file_name}")
+
+ if config_file_name.is_file():
+ with open(config_file_name, "a") as config:
+ for key, value in avd_manifest["emulator_extra_config"].items():
+ config.write("%s=%s\n" % (key, value))
+ else:
+ raise NotImplementedError(
+ f"Could not find config file at {config_file_name}, something went wrong"
+ )
+ if prewarm_avd:
+ run_prewarm_avd(adb_tool, emulator_tool, env, avd_name, avd_manifest)
+ # When running in headless mode, the emulator does not run the cleanup
+ # step, and thus doesn't delete lock files. On some platforms, left-over
+ # lock files can cause the emulator to not start, so we remove them here.
+ for lock_file in ["hardware-qemu.ini.lock", "multiinstance.lock"]:
+ lock_file_path = avd_path / lock_file
+ try:
+ lock_file_path.unlink()
+ print(f"Removed lock file {lock_file_path}")
+ except OSError:
+ # The lock file is not there, nothing to do.
+ pass
+
+
+def run_prewarm_avd(
+ adb_tool: Path,
+ emulator_tool: Path,
+ env,
+ avd_name,
+ avd_manifest,
+):
+ """
+ Ensures the emulator is fully booted to save time on future iterations.
+ """
+ args = [str(emulator_tool), "-avd", avd_name] + avd_manifest["emulator_extra_args"]
+
+ # Flush outputs before running emulator.
+ sys.stdout.flush()
+ proc = subprocess.Popen(args, env=env)
+
+ booted = False
+ for i in range(100):
+ boot_completed_cmd = [str(adb_tool), "shell", "getprop", "sys.boot_completed"]
+ completed_proc = subprocess.Popen(
+ boot_completed_cmd, env=env, stdout=subprocess.PIPE
+ )
+ try:
+ out, err = completed_proc.communicate(timeout=30)
+ boot_completed = out.decode("UTF-8").strip()
+ print("sys.boot_completed = %s" % boot_completed)
+ time.sleep(30)
+ if boot_completed == "1":
+ booted = True
+ break
+ except subprocess.TimeoutExpired:
+ # Sometimes the adb command hangs, that's ok
+ print("sys.boot_completed = Timeout")
+
+ if not booted:
+ raise NotImplementedError("Could not prewarm emulator")
+
+ # Wait until the emulator completely shuts down
+ subprocess.Popen([str(adb_tool), "emu", "kill"], env=env).wait()
+ proc.wait()
+
+
+def ensure_android_packages(
+ os_name,
+ os_arch,
+ sdkmanager_tool: Path,
+ emulator_only=False,
+ system_images_only=False,
+ avd_manifest=None,
+ no_interactive=False,
+ list_packages=False,
+):
+ """
+ Use the given sdkmanager tool (like 'sdkmanager') to install required
+ Android packages.
+ """
+
+ # This tries to install all the required Android packages. The user
+ # may be prompted to agree to the Android license.
+ if system_images_only:
+ packages_file_name = "android-system-images-packages.txt"
+ elif emulator_only:
+ packages_file_name = "android-emulator-packages.txt"
+ else:
+ packages_file_name = "android-packages.txt"
+
+ packages_file_path = (Path(__file__).parent / packages_file_name).resolve()
+
+ with open(packages_file_path) as packages_file:
+ packages_file_content = packages_file.readlines()
+
+ packages = get_packages_to_install(packages_file_content, avd_manifest)
+ print(INSTALLING_ANDROID_PACKAGES % "\n".join(packages))
+
+ args = [str(sdkmanager_tool)]
+ if os_name == "macosx" and os_arch == "arm64":
+ # Support for Apple Silicon is still in nightly
+ args.append("--channel=3")
+ args.extend(packages)
+
+ # sdkmanager needs JAVA_HOME
+ java_bin_path = ensure_java(os_name, os_arch)
+ env = os.environ.copy()
+ env["JAVA_HOME"] = str(java_bin_path.parent)
+
+ if not no_interactive:
+ subprocess.check_call(args, env=env)
+ return
+
+ # Flush outputs before running sdkmanager.
+ sys.stdout.flush()
+ sys.stderr.flush()
+ # Emulate yes. For a discussion of passing input to check_output,
+ # see https://stackoverflow.com/q/10103551.
+ yes = "\n".join(["y"] * 100).encode("UTF-8")
+ proc = subprocess.Popen(args, stdin=subprocess.PIPE, env=env)
+ proc.communicate(yes)
+
+ retcode = proc.poll()
+ if retcode:
+ cmd = args[0]
+ e = subprocess.CalledProcessError(retcode, cmd)
+ raise e
+ if list_packages:
+ subprocess.check_call([str(sdkmanager_tool), "--list"])
+
+
+def generate_mozconfig(os_name, artifact_mode=False):
+ moz_state_dir, sdk_path, ndk_path, avd_home_path = get_paths(os_name)
+
+ extra_lines = []
+ if extra_lines:
+ extra_lines.append("")
+
+ if artifact_mode:
+ template = MOBILE_ANDROID_ARTIFACT_MODE_MOZCONFIG_TEMPLATE
+ else:
+ template = MOBILE_ANDROID_MOZCONFIG_TEMPLATE
+
+ kwargs = dict(
+ sdk_path=sdk_path,
+ ndk_path=ndk_path,
+ avd_home_path=avd_home_path,
+ moz_state_dir=moz_state_dir,
+ extra_lines="\n".join(extra_lines),
+ )
+ return template.format(**kwargs).strip()
+
+
+def android_ndk_url(os_name, ver=NDK_VERSION):
+    # Produce a URL like
+    # 'https://dl.google.com/android/repository/android-ndk-$VER-linux.zip'
+ base_url = "https://dl.google.com/android/repository/android-ndk"
+
+ if os_name == "macosx":
+ # |mach bootstrap| uses 'macosx', but Google uses 'darwin'.
+ os_name = "darwin"
+
+ return "%s-%s-%s.zip" % (base_url, ver, os_name)
+
+
+def main(argv):
+ import optparse # No argparse, which is new in Python 2.7.
+ import platform
+
+ parser = optparse.OptionParser()
+ parser.add_option(
+ "-a",
+ "--artifact-mode",
+ dest="artifact_mode",
+ action="store_true",
+ help="If true, install only the Android SDK (and not the Android NDK).",
+ )
+ parser.add_option(
+ "--jdk-only",
+ dest="jdk_only",
+ action="store_true",
+ help="If true, install only the Java JDK.",
+ )
+ parser.add_option(
+ "--ndk-only",
+ dest="ndk_only",
+ action="store_true",
+ help="If true, install only the Android NDK (and not the Android SDK).",
+ )
+ parser.add_option(
+ "--system-images-only",
+ dest="system_images_only",
+ action="store_true",
+ help="If true, install only the system images for the AVDs.",
+ )
+ parser.add_option(
+ "--no-interactive",
+ dest="no_interactive",
+ action="store_true",
+ help="Accept the Android SDK licenses without user interaction.",
+ )
+ parser.add_option(
+ "--emulator-only",
+ dest="emulator_only",
+ action="store_true",
+ help="If true, install only the Android emulator (and not the SDK or NDK).",
+ )
+ parser.add_option(
+ "--avd-manifest",
+ dest="avd_manifest_path",
+ help="If present, generate AVD from the manifest pointed by this argument.",
+ )
+ parser.add_option(
+ "--prewarm-avd",
+ dest="prewarm_avd",
+ action="store_true",
+ help="If true, boot the AVD and wait until completed to speed up subsequent boots.",
+ )
+ parser.add_option(
+ "--list-packages",
+ dest="list_packages",
+ action="store_true",
+ help="If true, list installed packages.",
+ )
+
+ options, _ = parser.parse_args(argv)
+
+ if options.artifact_mode and options.ndk_only:
+ raise NotImplementedError("Use no options to install the NDK and the SDK.")
+
+ if options.artifact_mode and options.emulator_only:
+ raise NotImplementedError("Use no options to install the SDK and emulators.")
+
+ os_name = None
+ if platform.system() == "Darwin":
+ os_name = "macosx"
+ elif platform.system() == "Linux":
+ os_name = "linux"
+ elif platform.system() == "Windows":
+ os_name = "windows"
+ else:
+ raise NotImplementedError(
+ "We don't support bootstrapping the Android SDK (or Android "
+ "NDK) on {0} yet!".format(platform.system())
+ )
+
+ os_arch = platform.machine()
+
+ if options.jdk_only:
+ ensure_java(os_name, os_arch)
+ return 0
+
+ avd_manifest_path = (
+ Path(options.avd_manifest_path) if options.avd_manifest_path else None
+ )
+
+ ensure_android(
+ os_name,
+ os_arch,
+ artifact_mode=options.artifact_mode,
+ ndk_only=options.ndk_only,
+ system_images_only=options.system_images_only,
+ emulator_only=options.emulator_only,
+ avd_manifest_path=avd_manifest_path,
+ prewarm_avd=options.prewarm_avd,
+ no_interactive=options.no_interactive,
+ list_packages=options.list_packages,
+ )
+ mozconfig = generate_mozconfig(os_name, options.artifact_mode)
+
+ # |./mach bootstrap| automatically creates a mozconfig file for you if it doesn't
+ # exist. However, here, we don't know where the "topsrcdir" is, and it's not worth
+ # pulling in CommandContext (and its dependencies) to find out.
+ # So, instead, we'll politely ask users to create (or update) the file themselves.
+ suggestion = MOZCONFIG_SUGGESTION_TEMPLATE % ("$topsrcdir/mozconfig", mozconfig)
+ print("\n" + suggestion)
+
+ return 0
+
+
+def ensure_java(os_name, os_arch):
+ mozbuild_path, _, _, _ = get_paths(os_name)
+
+ if os_name == "macosx":
+ os_tag = "mac"
+ else:
+ os_tag = os_name
+
+ if os_arch == "x86_64":
+ arch = "x64"
+ elif os_arch == "arm64":
+ arch = "aarch64"
+ else:
+ arch = os_arch
+
+ ext = "zip" if os_name == "windows" else "tar.gz"
+
+ java_path = java_bin_path(os_name, mozbuild_path)
+ if not java_path:
+ raise NotImplementedError(f"Could not bootstrap java for {os_name}.")
+
+ if not java_path.exists():
+ # e.g. https://github.com/adoptium/temurin17-binaries/releases/
+ # download/jdk-17.0.7%2B7/OpenJDK17U-jre_x64_linux_hotspot_17.0.7_7.tar.gz
+ java_url = (
+ "https://github.com/adoptium/temurin{major}-binaries/releases/"
+ "download/jdk-{major}.{minor}%2B{patch}/"
+ "OpenJDK{major}U-jdk_{arch}_{os}_hotspot_{major}.{minor}_{patch}.{ext}"
+ ).format(
+ major=JAVA_VERSION_MAJOR,
+ minor=JAVA_VERSION_MINOR,
+ patch=JAVA_VERSION_PATCH,
+ os=os_tag,
+ arch=arch,
+ ext=ext,
+ )
+ install_mobile_android_sdk_or_ndk(java_url, mozbuild_path / "jdk")
+ return java_path
+
+
+def java_bin_path(os_name, toolchain_path: Path):
+ # Like jdk-17.0.7+7
+ jdk_folder = "jdk-{major}.{minor}+{patch}".format(
+ major=JAVA_VERSION_MAJOR, minor=JAVA_VERSION_MINOR, patch=JAVA_VERSION_PATCH
+ )
+
+ java_path = toolchain_path / "jdk" / jdk_folder
+
+ if os_name == "macosx":
+ return java_path / "Contents" / "Home" / "bin"
+ elif os_name == "linux":
+ return java_path / "bin"
+ elif os_name == "windows":
+ return java_path / "bin"
+ else:
+ return None
+
+
+def locate_java_bin_path(host_kernel, toolchain_path: Union[str, Path]):
+ if host_kernel == "WINNT":
+ os_name = "windows"
+ elif host_kernel == "Darwin":
+ os_name = "macosx"
+ elif host_kernel == "Linux":
+ os_name = "linux"
+ else:
+ # Default to Linux
+ os_name = "linux"
+ path = java_bin_path(os_name, Path(toolchain_path))
+ if not path.is_dir():
+ raise JavaLocationFailedException(
+ f"Could not locate Java at {path}, please run "
+ "./mach bootstrap --no-system-changes"
+ )
+ return str(path)
+
+
+class JavaLocationFailedException(Exception):
+ pass
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv))
diff --git a/python/mozboot/mozboot/archlinux.py b/python/mozboot/mozboot/archlinux.py
new file mode 100644
index 0000000000..391bd3e3ff
--- /dev/null
+++ b/python/mozboot/mozboot/archlinux.py
@@ -0,0 +1,33 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import sys
+
+from mozboot.base import BaseBootstrapper
+from mozboot.linux_common import LinuxBootstrapper
+
+
+class ArchlinuxBootstrapper(LinuxBootstrapper, BaseBootstrapper):
+ """Archlinux experimental bootstrapper."""
+
+ def __init__(self, version, dist_id, **kwargs):
+ print("Using an experimental bootstrapper for Archlinux.", file=sys.stderr)
+ BaseBootstrapper.__init__(self, **kwargs)
+
+ def install_packages(self, packages):
+ # watchman is not available via pacman
+ packages = [p for p in packages if p != "watchman"]
+ self.pacman_install(*packages)
+
+ def upgrade_mercurial(self, current):
+ self.pacman_install("mercurial")
+
+ def pacman_install(self, *packages):
+ command = ["pacman", "-S", "--needed"]
+ if self.no_interactive:
+ command.append("--noconfirm")
+
+ command.extend(packages)
+
+ self.run_as_root(command)
diff --git a/python/mozboot/mozboot/base.py b/python/mozboot/mozboot/base.py
new file mode 100644
index 0000000000..c32946c4eb
--- /dev/null
+++ b/python/mozboot/mozboot/base.py
@@ -0,0 +1,733 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
import os
import re
import shlex
import subprocess
import sys
from pathlib import Path

from mach.util import to_optional_path, win_to_msys_path
from mozbuild.bootstrap import bootstrap_all_toolchains_for, bootstrap_toolchain
from mozfile import which
from packaging.version import Version

from mozboot import rust
from mozboot.util import (
    MINIMUM_RUST_VERSION,
    get_mach_virtualenv_binary,
    http_download_and_save,
)
+
# User-facing message templates.  %s placeholders are filled in with
# version numbers (or paths/commands for the %(...)s dict-style ones).
NO_MERCURIAL = """
Could not find Mercurial (hg) in the current shell's path. Try starting a new
shell and running the bootstrapper again.
"""

MERCURIAL_UNABLE_UPGRADE = """
You are currently running Mercurial %s. Running %s or newer is
recommended for performance and stability reasons.

Unfortunately, this bootstrapper currently does not know how to automatically
upgrade Mercurial on your machine.

You can usually install Mercurial through your package manager or by
downloading a package from http://mercurial.selenic.com/.
"""

MERCURIAL_UPGRADE_FAILED = """
We attempted to upgrade Mercurial to a modern version (%s or newer).
However, you appear to have version %s still.

It's possible your package manager doesn't support a modern version of
Mercurial. It's also possible Mercurial is not being installed in the search
path for this shell. Try creating a new shell and run this bootstrapper again.

If it continues to fail, consider installing Mercurial by following the
instructions at http://mercurial.selenic.com/.
"""

# Interactive menu; the trailing "Your choice: " is the input() prompt.
MERCURIAL_INSTALL_PROMPT = """
Mercurial releases a new version every 3 months and your distro's package
may become out of date. This may cause incompatibility with some
Mercurial extensions that rely on new Mercurial features. As a result,
you may not have an optimal version control experience.

To have the best Mercurial experience possible, we recommend installing
Mercurial via the "pip" Python packaging utility. This will likely result
in files being placed in /usr/local/bin and /usr/local/lib.

How would you like to continue?
  1. Install a modern Mercurial via pip [default]
  2. Install a legacy Mercurial via the distro package manager
  3. Do not install Mercurial
Your choice: """

PYTHON_UNABLE_UPGRADE = """
You are currently running Python %s. Running %s or newer (but
not 3.x) is required.

Unfortunately, this bootstrapper does not currently know how to automatically
upgrade Python on your machine.

Please search the Internet for how to upgrade your Python and try running this
bootstrapper again to ensure your machine is up to date.
"""

# Dict-style templates: filled with {"cargo_bin": ..., "cmd": ...} by
# print_rust_path_advice().
RUST_INSTALL_COMPLETE = """
Rust installation complete. You should now have rustc and cargo
in %(cargo_bin)s

The installer tries to add these to your default shell PATH, so
restarting your shell and running this script again may work.
If it doesn't, you'll need to add the new command location
manually.

If restarting doesn't work, edit your shell initialization
script, which may be called ~/.bashrc or ~/.bash_profile or
~/.profile, and add the following line:

    %(cmd)s

Then restart your shell and run the bootstrap script again.
"""

RUST_NOT_IN_PATH = """
You have some rust files in %(cargo_bin)s
but they're not part of this shell's PATH.

To add these to the PATH, edit your shell initialization
script, which may be called ~/.bashrc or ~/.bash_profile or
~/.profile, and add the following line:

    %(cmd)s

Then restart your shell and run the bootstrap script again.
"""

RUSTUP_OLD = """
We found an executable called `rustup` which we normally use to install
and upgrade Rust programming language support, but we didn't understand
its output. It may be an old version, or not be the installer from
https://rustup.rs/

Please move it out of the way and run the bootstrap script again.
Or if you prefer and know how, use the current rustup to install
a compatible version of the Rust programming language yourself.
"""

RUST_UPGRADE_FAILED = """
We attempted to upgrade Rust to a modern version (%s or newer).
However, you appear to still have version %s.

It's possible rustup failed. It's also possible the new Rust is not being
installed in the search path for this shell. Try creating a new shell and
run this bootstrapper again.

If this continues to fail and you are sure you have a modern Rust on your
system, ensure it is on the $PATH and try again. If that fails, you'll need to
install Rust manually.

We recommend the installer from https://rustup.rs/ for installing Rust,
but you may be able to get a recent enough version from a software install
tool or package manager on your system, or directly from https://rust-lang.org/
"""

# Mozconfig snippets suggested to the user after bootstrapping.
BROWSER_ARTIFACT_MODE_MOZCONFIG = """
# Automatically download and use compiled C++ components:
ac_add_options --enable-artifact-builds
""".strip()

JS_MOZCONFIG_TEMPLATE = """\
# Build only the SpiderMonkey JS test shell
ac_add_options --enable-project=js
"""

# Upgrade Mercurial older than this.
# This should match the OLDEST_NON_LEGACY_VERSION in
# version-control-tools/hgext/configwizard/__init__.py.
MODERN_MERCURIAL_VERSION = Version("4.9")

# Upgrade rust older than this.
MODERN_RUST_VERSION = Version(MINIMUM_RUST_VERSION)
+
+
class BaseBootstrapper(object):
    """Base class for system bootstrappers."""

    def __init__(self, no_interactive=False, no_system_changes=False):
        """Hold state shared by every platform bootstrapper.

        :param no_interactive: when True, prompts select their default answer.
        :param no_system_changes: when True, skip installing system packages.
        """
        # Whether _update_package_manager() has run yet this session.
        self.package_manager_updated = False
        self.no_interactive = no_interactive
        self.no_system_changes = no_system_changes
        # Populated by the driver (Bootstrapper) before the methods that
        # need them are called.
        self.state_dir = None
        self.srcdir = None

    def validate_environment(self):
        """
        Called once the current firefox checkout has been detected.
        Platform-specific implementations should check the environment and offer advice/warnings
        to the user, if necessary.
        """

    def suggest_install_distutils(self):
        """Called if distutils.{sysconfig,spawn} can't be imported."""
        print(
            "Does your distro require installing another package for distutils?",
            file=sys.stderr,
        )

    def suggest_install_pip3(self):
        """Called if pip3 can't be found."""
        print(
            "Try installing pip3 with your system's package manager.", file=sys.stderr
        )

    def install_system_packages(self):
        """
        Install packages shared by all applications. These are usually
        packages required by the development (like mercurial) or the
        build system (like autoconf).
        """
        # NOTE(review): __name__ here (and below) is the *module* name, not
        # the subclass name; consider type(self).__name__ — left as-is to
        # keep messages identical.
        raise NotImplementedError(
            "%s must implement install_system_packages()" % __name__
        )

    def install_browser_packages(self, mozconfig_builder):
        """
        Install packages required to build Firefox for Desktop (application
        'browser').
        """
        raise NotImplementedError(
            "Cannot bootstrap Firefox for Desktop: "
            "%s does not yet implement install_browser_packages()" % __name__
        )

    def ensure_browser_packages(self):
        """
        Install pre-built packages needed to build Firefox for Desktop (application 'browser')

        Currently this is not needed and kept for compatibility with Firefox for Android.
        """
        pass

    def ensure_js_packages(self):
        """
        Install pre-built packages needed to build SpiderMonkey JavaScript Engine

        Currently this is not needed and kept for compatibility with Firefox for Android.
        """
        pass

    def ensure_browser_artifact_mode_packages(self):
        """
        Install pre-built packages needed to build Firefox for Desktop (application 'browser')

        Currently this is not needed and kept for compatibility with Firefox for Android.
        """
        pass

    def generate_browser_mozconfig(self):
        """
        Print a message to the console detailing what the user's mozconfig
        should contain.

        Firefox for Desktop can in simple cases determine its build environment
        entirely from configure.
        """
        pass

    def install_js_packages(self, mozconfig_builder):
        """
        Install packages required to build SpiderMonkey JavaScript Engine
        (application 'js').
        """
        return self.install_browser_packages(mozconfig_builder)

    def generate_js_mozconfig(self):
        """
        Create a reasonable starting point for a JS shell build.
        """
        return JS_MOZCONFIG_TEMPLATE

    def install_browser_artifact_mode_packages(self, mozconfig_builder):
        """
        Install packages required to build Firefox for Desktop (application
        'browser') in Artifact Mode.
        """
        raise NotImplementedError(
            "Cannot bootstrap Firefox for Desktop Artifact Mode: "
            "%s does not yet implement install_browser_artifact_mode_packages()"
            % __name__
        )

    def generate_browser_artifact_mode_mozconfig(self):
        """
        Print a message to the console detailing what the user's mozconfig
        should contain.

        Firefox for Desktop Artifact Mode needs to enable artifact builds and
        a path where the build artifacts will be written to.
        """
        return BROWSER_ARTIFACT_MODE_MOZCONFIG

    def install_mobile_android_packages(self, mozconfig_builder):
        """
        Install packages required to build GeckoView (application
        'mobile/android').
        """
        raise NotImplementedError(
            "Cannot bootstrap GeckoView/Firefox for Android: "
            "%s does not yet implement install_mobile_android_packages()" % __name__
        )

    def ensure_mobile_android_packages(self):
        """
        Install pre-built packages required to run GeckoView (application 'mobile/android')
        """
        raise NotImplementedError(
            "Cannot bootstrap GeckoView/Firefox for Android: "
            "%s does not yet implement ensure_mobile_android_packages()" % __name__
        )

    def ensure_mobile_android_artifact_mode_packages(self):
        """
        Install pre-built packages required to run GeckoView Artifact Build
        (application 'mobile/android')
        """
        self.ensure_mobile_android_packages()

    def generate_mobile_android_mozconfig(self):
        """
        Print a message to the console detailing what the user's mozconfig
        should contain.

        GeckoView/Firefox for Android needs an application and an ABI set, and it needs
        paths to the Android SDK and NDK.
        """
        raise NotImplementedError(
            "%s does not yet implement generate_mobile_android_mozconfig()" % __name__
        )

    def install_mobile_android_artifact_mode_packages(self, mozconfig_builder):
        """
        Install packages required to build GeckoView/Firefox for Android (application
        'mobile/android', also known as Fennec) in Artifact Mode.
        """
        raise NotImplementedError(
            "Cannot bootstrap GeckoView/Firefox for Android Artifact Mode: "
            "%s does not yet implement install_mobile_android_artifact_mode_packages()"
            % __name__
        )

    def generate_mobile_android_artifact_mode_mozconfig(self):
        """
        Print a message to the console detailing what the user's mozconfig
        should contain.

        GeckoView/Firefox for Android Artifact Mode needs an application and an ABI set,
        and it needs paths to the Android SDK.
        """
        raise NotImplementedError(
            "%s does not yet implement generate_mobile_android_artifact_mode_mozconfig()"
            % __name__
        )

    def ensure_sccache_packages(self):
        """
        Install sccache.
        """
        pass

    def install_toolchain_artifact(self, toolchain_job, no_unpack=False):
        """Install a single prebuilt toolchain artifact.

        With no_unpack=True, download the raw artifact into state_dir via
        `mach artifact toolchain`; otherwise use the standard bootstrap
        machinery (which unpacks).
        """
        if no_unpack:
            return self.install_toolchain_artifact_impl(
                self.state_dir, toolchain_job, no_unpack
            )
        bootstrap_toolchain(toolchain_job)

    def install_toolchain_artifact_impl(
        self, install_dir: Path, toolchain_job, no_unpack=False
    ):
        """Invoke `mach artifact toolchain` for *toolchain_job* in *install_dir*.

        Requires self.srcdir (for mach) and self.state_dir to be set.
        Raises ValueError when preconditions are not met.
        """
        # idiom: isinstance instead of type(...) is str
        if isinstance(self.srcdir, str):
            mach_binary = Path(self.srcdir) / "mach"
        else:
            mach_binary = (self.srcdir / "mach").resolve()
        if not mach_binary.exists():
            raise ValueError(f"mach not found at {mach_binary}")

        if not self.state_dir:
            raise ValueError(
                "Need a state directory (e.g. ~/.mozbuild) to download " "artifacts"
            )
        python_location = get_mach_virtualenv_binary()
        if not python_location.exists():
            raise ValueError(f"python not found at {python_location}")

        cmd = [
            str(python_location),
            str(mach_binary),
            "artifact",
            "toolchain",
            "--bootstrap",
            "--from-build",
            toolchain_job,
        ]

        if no_unpack:
            cmd += ["--no-unpack"]

        subprocess.check_call(cmd, cwd=str(install_dir))

    def auto_bootstrap(self, application, exclude=None):
        """Bootstrap all toolchains for *application* via configure.

        :param exclude: optional iterable of bootstrap targets to skip.

        Fix: the default used to be a shared mutable list (``exclude=[]``);
        ``None`` keeps behavior identical without the shared-state hazard.
        """
        args = ["--with-ccache=sccache"]
        if application.endswith("_artifact_mode"):
            args.append("--enable-artifact-builds")
            application = application[: -len("_artifact_mode")]
        args.append("--enable-project={}".format(application.replace("_", "/")))
        if exclude:
            args.append(
                "--enable-bootstrap={}".format(",".join(f"-{x}" for x in exclude))
            )
        bootstrap_all_toolchains_for(args)

    def run_as_root(self, command, may_use_sudo=True):
        """Run *command* (an argument list) with root privileges.

        Prefers sudo when allowed and available; otherwise falls back to
        `su root -c`.
        """
        if os.geteuid() != 0:
            if may_use_sudo and which("sudo"):
                command.insert(0, "sudo")
            else:
                # `su -c` takes a single shell string; quote each argument so
                # arguments containing spaces or shell metacharacters survive
                # (a bare " ".join would split/expand them).
                command = [
                    "su",
                    "root",
                    "-c",
                    " ".join(shlex.quote(c) for c in command),
                ]

        print("Executing as root:", subprocess.list2cmdline(command))

        subprocess.check_call(command, stdin=sys.stdin)

    def prompt_int(self, prompt, low, high, default=None):
        """Prompts the user with prompt and requires an integer between low and high.

        If the user doesn't select an option and a default isn't provided, then
        the lowest option is used. This is because some option must be implicitly
        selected if mach is invoked with "--no-interactive"
        """
        if default is not None:
            assert isinstance(default, int)
            assert low <= default <= high
        else:
            default = low

        if self.no_interactive:
            print(prompt)
            print('Selecting "{}" because context is not interactive.'.format(default))
            return default

        while True:
            choice = input(prompt)
            if choice == "" and default is not None:
                return default
            try:
                choice = int(choice)
                if low <= choice <= high:
                    return choice
            except ValueError:
                pass
            print("ERROR! Please enter a valid option!")

    def prompt_yesno(self, prompt):
        """Prompts the user with prompt and requires a yes/no answer."""
        if self.no_interactive:
            print(prompt)
            print('Selecting "Y" because context is not interactive.')
            return True

        while True:
            # Only the first character of the answer matters; empty means yes.
            choice = input(prompt + " (Yn): ").strip().lower()[:1]
            if choice == "":
                return True
            elif choice in ("y", "n"):
                return choice == "y"

            print("ERROR! Please enter y or n!")

    def _ensure_package_manager_updated(self):
        """Refresh the package manager's metadata at most once per session."""
        if self.package_manager_updated:
            return

        self._update_package_manager()
        self.package_manager_updated = True

    def _update_package_manager(self):
        """Updates the package manager's manifests/package list.

        This should be defined in child classes.
        """

    def _parse_version_impl(self, path: Path, name, env, version_param):
        """Execute the given path, returning the version.

        Invokes the path argument with the --version switch
        and returns a Version representing the output
        if successful. If not, returns None.

        An optional name argument gives the expected program
        name returned as part of the version string, if it's
        different from the basename of the executable.

        An optional env argument allows modifying environment
        variable during the invocation to set options, PATH,
        etc.
        """
        if not name:
            name = path.name
            if name.lower().endswith(".exe"):
                name = name[:-4]

        process = subprocess.run(
            [str(path), version_param],
            env=env,
            universal_newlines=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
        )
        if process.returncode != 0:
            # This can happen e.g. if the user has an inactive pyenv shim in
            # their path. Just silently treat this as a failure to parse the
            # path and move on.
            return None

        # Fix: escape `name` and use a raw string — the old pattern used
        # "\." in a non-raw string (invalid-escape DeprecationWarning) and
        # interpolated `name` unescaped into the regex.
        match = re.search(re.escape(name) + r" ([a-z0-9.]+)", process.stdout)
        if not match:
            print("ERROR! Unable to identify %s version." % name)
            return None

        return Version(match.group(1))

    def _parse_version(self, path: Path, name=None, env=None):
        """Convenience wrapper: parse the version via the --version switch."""
        return self._parse_version_impl(path, name, env, "--version")

    def _hg_cleanenv(self, load_hgrc=False):
        """Returns a copy of the current environment updated with the HGPLAIN
        and HGRCPATH environment variables.

        HGPLAIN prevents Mercurial from applying locale variations to the output
        making it suitable for use in scripts.

        HGRCPATH controls the loading of hgrc files. Setting it to the empty
        string forces that no user or system hgrc file is used.
        """
        env = os.environ.copy()
        env["HGPLAIN"] = "1"
        if not load_hgrc:
            env["HGRCPATH"] = ""

        return env

    def is_mercurial_modern(self):
        """Return (installed, modern, version) for the hg on PATH."""
        hg = to_optional_path(which("hg"))
        if not hg:
            print(NO_MERCURIAL)
            return False, False, None

        # hg prints "... (version X.Y...)", hence name="version".
        our = self._parse_version(hg, "version", self._hg_cleanenv())
        if not our:
            return True, False, None

        return True, our >= MODERN_MERCURIAL_VERSION, our

    def ensure_mercurial_modern(self):
        """Check hg's version and attempt an upgrade if it is too old.

        Returns (installed, modern).
        """
        installed, modern, version = self.is_mercurial_modern()

        if modern:
            print("Your version of Mercurial (%s) is sufficiently modern." % version)
            return installed, modern

        self._ensure_package_manager_updated()

        if installed:
            print("Your version of Mercurial (%s) is not modern enough." % version)
            # Fix: the message opened a parenthesis that was never closed.
            print(
                "(Older versions of Mercurial have known security vulnerabilities. "
                "Unless you are running a patched Mercurial version, you may be "
                "vulnerable.)"
            )
        else:
            print("You do not have Mercurial installed")

        if self.upgrade_mercurial(version) is False:
            return installed, modern

        installed, modern, after = self.is_mercurial_modern()

        if installed and not modern:
            print(MERCURIAL_UPGRADE_FAILED % (MODERN_MERCURIAL_VERSION, after))

        return installed, modern

    def upgrade_mercurial(self, current):
        """Upgrade Mercurial.

        Child classes should reimplement this.

        Return False to not perform a version check after the upgrade is
        performed.
        """
        print(MERCURIAL_UNABLE_UPGRADE % (current, MODERN_MERCURIAL_VERSION))

    def warn_if_pythonpath_is_set(self):
        """Print a warning when PYTHONPATH may interfere with mach."""
        if "PYTHONPATH" in os.environ:
            print(
                "WARNING: Your PYTHONPATH environment variable is set. This can "
                "cause flaky installations of the requirements, and other unexpected "
                "issues with mach. It is recommended to unset this variable."
            )

    def is_rust_modern(self, cargo_bin: Path):
        """Return (modern, version) for the rustc found on PATH or in cargo_bin."""
        rustc = to_optional_path(which("rustc", extra_search_dirs=[str(cargo_bin)]))
        if not rustc:
            print("Could not find a Rust compiler.")
            return False, None

        our = self._parse_version(rustc)
        if not our:
            return False, None

        return our >= MODERN_RUST_VERSION, our

    def cargo_home(self):
        """Return (cargo_home, cargo_bin) Paths, honoring $CARGO_HOME."""
        cargo_home = Path(os.environ.get("CARGO_HOME", Path("~/.cargo").expanduser()))
        cargo_bin = cargo_home / "bin"
        return cargo_home, cargo_bin

    def print_rust_path_advice(self, template, cargo_home: Path, cargo_bin: Path):
        """Tell the user how to put cargo's bin directory on their PATH."""
        # Suggest ~/.cargo/env if it exists.
        if (cargo_home / "env").exists():
            cmd = f"source {cargo_home}/env"
        else:
            # On Windows rustup doesn't write out ~/.cargo/env
            # so fall back to a manual PATH update. Bootstrap
            # only runs under msys, so a unix-style shell command
            # is appropriate there.
            cargo_bin = win_to_msys_path(cargo_bin)
            cmd = f"export PATH={cargo_bin}:$PATH"
        print(template % {"cargo_bin": cargo_bin, "cmd": cmd})

    def ensure_rust_modern(self):
        """Install or upgrade Rust (via rustup) until it meets MODERN_RUST_VERSION."""
        cargo_home, cargo_bin = self.cargo_home()
        modern, version = self.is_rust_modern(cargo_bin)

        rustup = to_optional_path(which("rustup", extra_search_dirs=[str(cargo_bin)]))

        if modern:
            print("Your version of Rust (%s) is new enough." % version)

        elif version:
            print("Your version of Rust (%s) is too old." % version)

        if rustup and not modern:
            rustup_version = self._parse_version(rustup)
            if not rustup_version:
                print(RUSTUP_OLD)
                sys.exit(1)
            print("Found rustup. Will try to upgrade.")
            self.upgrade_rust(rustup)

            modern, after = self.is_rust_modern(cargo_bin)
            if not modern:
                print(RUST_UPGRADE_FAILED % (MODERN_RUST_VERSION, after))
                sys.exit(1)
        elif not rustup:
            # No rustup. Download and run the installer.
            print("Will try to install Rust.")
            self.install_rust()
            modern, version = self.is_rust_modern(cargo_bin)
            rustup = to_optional_path(
                which("rustup", extra_search_dirs=[str(cargo_bin)])
            )

        self.ensure_rust_targets(rustup, version)

    def ensure_rust_targets(self, rustup: Path, rust_version):
        """Make sure appropriate cross target libraries are installed."""
        target_list = subprocess.check_output(
            [str(rustup), "target", "list"], universal_newlines=True
        )
        targets = [
            line.split()[0]
            for line in target_list.splitlines()
            if "installed" in line or "default" in line
        ]
        print("Rust supports %s targets." % ", ".join(targets))

        # Support 32-bit Windows on 64-bit Windows.
        win32 = "i686-pc-windows-msvc"
        win64 = "x86_64-pc-windows-msvc"
        if rust.platform() == win64 and win32 not in targets:
            subprocess.check_call([str(rustup), "target", "add", win32])

        # NOTE(review): self.application is assigned externally by the
        # Bootstrapper driver before this runs — confirm for other callers.
        if "mobile_android" in self.application:
            # Let's add the most common targets.
            if rust_version < Version("1.33"):
                arm_target = "armv7-linux-androideabi"
            else:
                arm_target = "thumbv7neon-linux-androideabi"
            android_targets = (
                arm_target,
                "aarch64-linux-android",
                "i686-linux-android",
                "x86_64-linux-android",
            )
            for target in android_targets:
                if target not in targets:
                    subprocess.check_call([str(rustup), "target", "add", target])

    def upgrade_rust(self, rustup: Path):
        """Upgrade Rust.

        Invoke rustup from the given path to update the rust install."""
        subprocess.check_call([str(rustup), "update"])
        # This installs rustfmt when not already installed, or nothing
        # otherwise, while the update above would have taken care of upgrading
        # it.
        subprocess.check_call([str(rustup), "component", "add", "rustfmt"])

    def install_rust(self):
        """Download and run the rustup installer."""
        import errno
        import stat
        import tempfile

        platform = rust.platform()
        url = rust.rustup_url(platform)
        checksum = rust.rustup_hash(platform)
        if not url or not checksum:
            print("ERROR: Could not download installer.")
            sys.exit(1)
        print("Downloading rustup-init... ", end="")
        fd, rustup_init = tempfile.mkstemp(prefix=Path(url).name)
        rustup_init = Path(rustup_init)
        os.close(fd)
        try:
            http_download_and_save(url, rustup_init, checksum)
            mode = rustup_init.stat().st_mode
            rustup_init.chmod(mode | stat.S_IRWXU)
            print("Ok")
            print("Running rustup-init...")
            subprocess.check_call(
                [
                    str(rustup_init),
                    "-y",
                    "--default-toolchain",
                    "stable",
                    "--default-host",
                    platform,
                    "--component",
                    "rustfmt",
                ]
            )
            cargo_home, cargo_bin = self.cargo_home()
            self.print_rust_path_advice(RUST_INSTALL_COMPLETE, cargo_home, cargo_bin)
        finally:
            # Best-effort cleanup of the temporary installer.
            try:
                rustup_init.unlink()
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise
diff --git a/python/mozboot/mozboot/bootstrap.py b/python/mozboot/mozboot/bootstrap.py
new file mode 100644
index 0000000000..e57f496f29
--- /dev/null
+++ b/python/mozboot/mozboot/bootstrap.py
@@ -0,0 +1,776 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import platform
+import re
+import shutil
+import stat
+import subprocess
+import sys
+import time
+from collections import OrderedDict
+from pathlib import Path
+from typing import Optional
+
+# Use distro package to retrieve linux platform information
+import distro
+from mach.site import MachSiteManager
+from mach.telemetry import initialize_telemetry_setting
+from mach.util import (
+ UserError,
+ get_state_dir,
+ to_optional_path,
+ to_optional_str,
+ win_to_msys_path,
+)
+from mozbuild.base import MozbuildObject
+from mozfile import which
+from packaging.version import Version
+
+from mozboot.archlinux import ArchlinuxBootstrapper
+from mozboot.base import MODERN_RUST_VERSION
+from mozboot.centosfedora import CentOSFedoraBootstrapper
+from mozboot.debian import DebianBootstrapper
+from mozboot.freebsd import FreeBSDBootstrapper
+from mozboot.gentoo import GentooBootstrapper
+from mozboot.mozconfig import MozconfigBuilder
+from mozboot.mozillabuild import MozillaBuildBootstrapper
+from mozboot.openbsd import OpenBSDBootstrapper
+from mozboot.opensuse import OpenSUSEBootstrapper
+from mozboot.osx import OSXBootstrapper, OSXBootstrapperLight
+from mozboot.solus import SolusBootstrapper
+from mozboot.void import VoidBootstrapper
+from mozboot.windows import WindowsBootstrapper
+
# Menu preamble shown when no application was preselected; %s receives the
# numbered list of choices and the trailing text is the input() prompt.
APPLICATION_CHOICE = """
Note on Artifact Mode:

Artifact builds download prebuilt C++ components rather than building
them locally. Artifact builds are faster!

Artifact builds are recommended for people working on Firefox or
Firefox for Android frontends, or the GeckoView Java API. They are unsuitable
for those working on C++ code. For more information see:
https://firefox-source-docs.mozilla.org/contributing/build/artifact_builds.html.

Please choose the version of Firefox you want to build (see note above):
%s
Your choice: """

# Maps human-readable product names to build-target identifiers; insertion
# order determines the numbering of the interactive menu above.
APPLICATIONS = OrderedDict(
    [
        ("Firefox for Desktop Artifact Mode", "browser_artifact_mode"),
        ("Firefox for Desktop", "browser"),
        ("GeckoView/Firefox for Android Artifact Mode", "mobile_android_artifact_mode"),
        ("GeckoView/Firefox for Android", "mobile_android"),
        ("SpiderMonkey JavaScript engine", "js"),
    ]
)

FINISHED = """
Your system should be ready to build %s!
"""

# %s placeholders: mozconfig path, suggested content.
MOZCONFIG_SUGGESTION_TEMPLATE = """
Paste the lines between the chevrons (>>> and <<<) into
%s:

>>>
%s
<<<
"""

# %s placeholders: mozconfig path, current content, expected content.
MOZCONFIG_MISMATCH_WARNING_TEMPLATE = """
WARNING! Mismatch detected between the selected build target and the
mozconfig file %s:

Current config
>>>
%s
<<<

Expected config
>>>
%s
<<<
"""

CONFIGURE_MERCURIAL = """
Mozilla recommends a number of changes to Mercurial to enhance your
experience with it.

Would you like to run a configuration wizard to ensure Mercurial is
optimally configured? (This will also ensure 'version-control-tools' is up-to-date)"""

CONFIGURE_GIT = """
Mozilla recommends using git-cinnabar to work with mozilla-central (or
mozilla-unified).

Would you like to run a few configuration steps to ensure Git is
optimally configured?"""

# Distro ids (as reported by the `distro` package) handled by the
# Debian-family bootstrapper.
DEBIAN_DISTROS = (
    "debian",
    "ubuntu",
    "linuxmint",
    "elementary",
    "neon",
    "pop",
    "kali",
    "devuan",
    "pureos",
    "deepin",
    "tuxedo",
)

# Distro ids handled by the CentOS/Fedora-family bootstrapper.
FEDORA_DISTROS = (
    "centos",
    "fedora",
    "rocky",
    "oracle",
)

# str.format() placeholders: prefix (home dir), cinnabar_dir.
ADD_GIT_CINNABAR_PATH = """
To add git-cinnabar to the PATH, edit your shell initialization script, which
may be called {prefix}/.bash_profile or {prefix}/.profile, and add the following
lines:

    export PATH="{cinnabar_dir}:$PATH"

Then restart your shell.
"""


OLD_REVISION_WARNING = """
WARNING! You appear to be running `mach bootstrap` from an old revision.
bootstrap is meant primarily for getting developer environments up-to-date to
build the latest version of tree. Running bootstrap on old revisions may fail
and is not guaranteed to bring your machine to any working state in particular.
Proceed at your own peril.
"""


# Version 2.24 changes the "core.commitGraph" setting to be "True" by default.
MINIMUM_RECOMMENDED_GIT_VERSION = Version("2.24")
OLD_GIT_WARNING = """
You are running an older version of git ("{old_version}").
We recommend upgrading to at least version "{minimum_recommended_version}" to improve
performance.
""".strip()
+
+
+class Bootstrapper(object):
+ """Main class that performs system bootstrap."""
+
+ def __init__(
+ self,
+ choice=None,
+ no_interactive=False,
+ hg_configure=False,
+ no_system_changes=False,
+ exclude=[],
+ mach_context=None,
+ ):
+ self.instance = None
+ self.choice = choice
+ self.hg_configure = hg_configure
+ self.no_system_changes = no_system_changes
+ self.exclude = exclude
+ self.mach_context = mach_context
+ cls = None
+ args = {
+ "no_interactive": no_interactive,
+ "no_system_changes": no_system_changes,
+ }
+
+ if sys.platform.startswith("linux"):
+ # distro package provides reliable ids for popular distributions so
+ # we use those instead of the full distribution name
+ dist_id, version, codename = distro.linux_distribution(
+ full_distribution_name=False
+ )
+
+ if dist_id in FEDORA_DISTROS:
+ cls = CentOSFedoraBootstrapper
+ args["distro"] = dist_id
+ elif dist_id in DEBIAN_DISTROS:
+ cls = DebianBootstrapper
+ args["distro"] = dist_id
+ args["codename"] = codename
+ elif dist_id in ("gentoo", "funtoo"):
+ cls = GentooBootstrapper
+ elif dist_id in ("solus"):
+ cls = SolusBootstrapper
+ elif dist_id in ("arch") or Path("/etc/arch-release").exists():
+ cls = ArchlinuxBootstrapper
+ elif dist_id in ("void"):
+ cls = VoidBootstrapper
+ elif dist_id in (
+ "opensuse",
+ "opensuse-leap",
+ "opensuse-tumbleweed",
+ "suse",
+ ):
+ cls = OpenSUSEBootstrapper
+ else:
+ raise NotImplementedError(
+ "Bootstrap support for this Linux "
+ "distro not yet available: " + dist_id
+ )
+
+ args["version"] = version
+ args["dist_id"] = dist_id
+
+ elif sys.platform.startswith("darwin"):
+ # TODO Support Darwin platforms that aren't OS X.
+ osx_version = platform.mac_ver()[0]
+ if platform.machine() == "arm64" or _macos_is_running_under_rosetta():
+ cls = OSXBootstrapperLight
+ else:
+ cls = OSXBootstrapper
+ args["version"] = osx_version
+
+ elif sys.platform.startswith("openbsd"):
+ cls = OpenBSDBootstrapper
+ args["version"] = platform.uname()[2]
+
+ elif sys.platform.startswith(("dragonfly", "freebsd", "netbsd")):
+ cls = FreeBSDBootstrapper
+ args["version"] = platform.release()
+ args["flavor"] = platform.system()
+
+ elif sys.platform.startswith("win32") or sys.platform.startswith("msys"):
+ if "MOZILLABUILD" in os.environ:
+ cls = MozillaBuildBootstrapper
+ else:
+ cls = WindowsBootstrapper
+ if cls is None:
+ raise NotImplementedError(
+ "Bootstrap support is not yet available " "for your OS."
+ )
+
+ self.instance = cls(**args)
+
    def maybe_install_private_packages_or_exit(self, application, checkout_type):
        """Fetch prebuilt toolchains and per-application packages.

        ``checkout_type`` is accepted by the interface but is currently
        unused in this method body.
        """
        # Install the clang packages needed for building the style system, as
        # well as the version of NodeJS that we currently support.
        # Also install the clang static-analysis package by default
        # The best place to install our packages is in the state directory
        # we have. We should have created one above in non-interactive mode.
        self.instance.auto_bootstrap(application, self.exclude)
        self.instance.install_toolchain_artifact("fix-stacks")
        self.instance.install_toolchain_artifact("minidump-stackwalk")
        # clang-tidy is only fetched for full (non-artifact) builds.
        if not self.instance.artifact_mode:
            self.instance.install_toolchain_artifact("clang-tools/clang-tidy")
        self.instance.ensure_sccache_packages()
        # Like 'ensure_browser_packages' or 'ensure_mobile_android_packages'
        getattr(self.instance, "ensure_%s_packages" % application)()
+
    def check_code_submission(self, checkout_root: Path):
        """Offer to install moz-phab (Mozilla's code-submission tool).

        No-op when running non-interactively, when moz-phab is already on
        PATH, on Apple Silicon (see bug below), or when the user declines.
        """
        if self.instance.no_interactive or which("moz-phab"):
            return

        # Skip moz-phab install until bug 1696357 is fixed and makes it to a moz-phab
        # release.
        if sys.platform.startswith("darwin") and platform.machine() == "arm64":
            return

        if not self.instance.prompt_yesno("Will you be submitting commits to Mozilla?"):
            return

        mach_binary = checkout_root / "mach"
        subprocess.check_call((sys.executable, str(mach_binary), "install-moz-phab"))
+
    def bootstrap(self, settings):
        """Run the end-to-end bootstrap flow for the selected application.

        Resolves the application (interactive prompt or ``self.choice``),
        validates the environment, installs system/application packages,
        offers VCS configuration, and finally emits a mozconfig.
        Returns 1 early when Python is emulated under Rosetta.
        """
        if self.choice is None:
            applications = APPLICATIONS
            # Like ['1. Firefox for Desktop', '2. Firefox for Android Artifact Mode', ...].
            labels = [
                "%s. %s" % (i, name) for i, name in enumerate(applications.keys(), 1)
            ]
            choices = [" {} [default]".format(labels[0])]
            choices += [" {}".format(label) for label in labels[1:]]
            prompt = APPLICATION_CHOICE % "\n".join(choices)
            prompt_choice = self.instance.prompt_int(
                prompt=prompt, low=1, high=len(applications)
            )
            # prompt_int is 1-based; index into the ordered APPLICATIONS dict.
            name, application = list(applications.items())[prompt_choice - 1]
        elif self.choice in APPLICATIONS.keys():
            name, application = self.choice, APPLICATIONS[self.choice]
        elif self.choice in APPLICATIONS.values():
            name, application = next(
                (k, v) for k, v in APPLICATIONS.items() if v == self.choice
            )
        else:
            raise Exception(
                "Please pick a valid application choice: (%s)"
                % "/".join(APPLICATIONS.keys())
            )

        mozconfig_builder = MozconfigBuilder()
        self.instance.application = application
        self.instance.artifact_mode = "artifact_mode" in application

        self.instance.warn_if_pythonpath_is_set()

        if sys.platform.startswith("darwin") and not os.environ.get(
            "MACH_I_DO_WANT_TO_USE_ROSETTA"
        ):
            # If running on arm64 mac, check whether we're running under
            # Rosetta and advise against it.
            if _macos_is_running_under_rosetta():
                print(
                    "Python is being emulated under Rosetta. Please use a native "
                    "Python instead. If you still really want to go ahead, set "
                    "the MACH_I_DO_WANT_TO_USE_ROSETTA environment variable.",
                    file=sys.stderr,
                )
                return 1

        state_dir = Path(get_state_dir())
        self.instance.state_dir = state_dir

        hg = to_optional_path(which("hg"))

        # We need to enable the loading of hgrc in case extensions are
        # required to open the repo.
        (checkout_type, checkout_root) = current_firefox_checkout(
            env=self.instance._hg_cleanenv(load_hgrc=True),
            hg=hg,
        )
        self.instance.srcdir = checkout_root
        self.instance.validate_environment()
        self._validate_python_environment(checkout_root)

        if self.instance.no_system_changes:
            # Private (pip) packages only; exits without touching the system.
            self.maybe_install_private_packages_or_exit(application, checkout_type)
            self._output_mozconfig(application, mozconfig_builder)
            sys.exit(0)

        self.instance.install_system_packages()

        # Like 'install_browser_packages' or 'install_mobile_android_packages'.
        getattr(self.instance, "install_%s_packages" % application)(mozconfig_builder)

        if not self.instance.artifact_mode:
            self.instance.ensure_rust_modern()

        git = to_optional_path(which("git"))

        # Possibly configure Mercurial, but not if the current checkout or repo
        # type is Git.
        hg_installed = bool(hg)
        if checkout_type == "hg":
            hg_installed, hg_modern = self.instance.ensure_mercurial_modern()

        if hg_installed and checkout_type == "hg":
            if not self.instance.no_interactive:
                configure_hg = self.instance.prompt_yesno(prompt=CONFIGURE_MERCURIAL)
            else:
                configure_hg = self.hg_configure

            if configure_hg:
                configure_mercurial(hg, state_dir)

        # Offer to configure Git, if the current checkout or repo type is Git.
        elif git and checkout_type == "git":
            should_configure_git = False
            if not self.instance.no_interactive:
                should_configure_git = self.instance.prompt_yesno(prompt=CONFIGURE_GIT)
            else:
                # Assuming default configuration setting applies to all VCS.
                should_configure_git = self.hg_configure

            if should_configure_git:
                configure_git(
                    git,
                    to_optional_path(which("git-cinnabar")),
                    state_dir,
                    checkout_root,
                )

        self.maybe_install_private_packages_or_exit(application, checkout_type)
        self.check_code_submission(checkout_root)
        # Wait until after moz-phab setup to check telemetry so that employees
        # will be automatically opted-in.
        if not self.instance.no_interactive and not settings.mach_telemetry.is_set_up:
            initialize_telemetry_setting(settings, str(checkout_root), str(state_dir))

        self._output_mozconfig(application, mozconfig_builder)

        print(FINISHED % name)
        # NOTE(review): Path("rustc") is passed as-is rather than the resolved
        # path from which("rustc") — confirm _parse_version expects a bare name.
        if not (
            which("rustc")
            and self.instance._parse_version(Path("rustc")) >= MODERN_RUST_VERSION
        ):
            print(
                "To build %s, please restart the shell (Start a new terminal window)"
                % name
            )
+
+ def _default_mozconfig_path(self):
+ return Path(self.mach_context.topdir) / "mozconfig"
+
+ def _read_default_mozconfig(self):
+ path = self._default_mozconfig_path()
+ with open(path, "r") as mozconfig_file:
+ return mozconfig_file.read()
+
+ def _write_default_mozconfig(self, raw_mozconfig):
+ path = self._default_mozconfig_path()
+ with open(path, "w") as mozconfig_file:
+ mozconfig_file.write(raw_mozconfig)
+ print(f'Your requested configuration has been written to "{path}".')
+
+ def _show_mozconfig_suggestion(self, raw_mozconfig):
+ suggestion = MOZCONFIG_SUGGESTION_TEMPLATE % (
+ self._default_mozconfig_path(),
+ raw_mozconfig,
+ )
+ print(suggestion, end="")
+
    def _check_default_mozconfig_mismatch(
        self, current_mozconfig_info, expected_application, expected_raw_mozconfig
    ):
        """Compare the existing default mozconfig with what bootstrap expects.

        Same application + same content: nothing to do. Same application but
        different content: print a suggestion only. Different application:
        print a mismatch warning and offer (interactively) to overwrite.
        """
        current_raw_mozconfig = self._read_default_mozconfig()
        # Derive an application key like the APPLICATIONS values, e.g.
        # "browser" or "mobile_android[_artifact_mode]".
        current_application = current_mozconfig_info["project"][0].replace("/", "_")
        if current_mozconfig_info["artifact-builds"]:
            current_application += "_artifact_mode"

        if expected_application == current_application:
            if expected_raw_mozconfig == current_raw_mozconfig:
                return

            # There's minor difference, show the suggestion.
            self._show_mozconfig_suggestion(expected_raw_mozconfig)
            return

        warning = MOZCONFIG_MISMATCH_WARNING_TEMPLATE % (
            self._default_mozconfig_path(),
            current_raw_mozconfig,
            expected_raw_mozconfig,
        )
        print(warning)

        if not self.instance.prompt_yesno("Do you want to overwrite the config?"):
            return

        self._write_default_mozconfig(expected_raw_mozconfig)
+
    def _output_mozconfig(self, application, mozconfig_builder):
        """Emit the generated mozconfig.

        Writes it to the default location when none exists, reconciles it when
        the active mozconfig is the bootstrap-managed one, or merely prints a
        suggestion when the user maintains their own mozconfig.
        """
        # Like 'generate_browser_mozconfig' or 'generate_mobile_android_mozconfig'.
        additional_mozconfig = getattr(
            self.instance, "generate_%s_mozconfig" % application
        )()
        if additional_mozconfig:
            mozconfig_builder.append(additional_mozconfig)
        raw_mozconfig = mozconfig_builder.generate()

        current_mozconfig_info = MozbuildObject.get_base_mozconfig_info(
            self.mach_context.topdir, None, ""
        )
        current_mozconfig_path = current_mozconfig_info["mozconfig"]["path"]

        if current_mozconfig_path:
            # mozconfig file exists
            if self._default_mozconfig_path().exists() and Path.samefile(
                Path(current_mozconfig_path), self._default_mozconfig_path()
            ):
                # This mozconfig file may be created by bootstrap.
                self._check_default_mozconfig_mismatch(
                    current_mozconfig_info, application, raw_mozconfig
                )
            elif raw_mozconfig:
                # The mozconfig file is created by user.
                self._show_mozconfig_suggestion(raw_mozconfig)
        elif raw_mozconfig:
            # No mozconfig file exists yet
            self._write_default_mozconfig(raw_mozconfig)
+
    def _validate_python_environment(self, topsrcdir):
        """Sanity-check the Python installation before bootstrapping.

        Verifies distutils and pip3 are usable, printing hints and exiting
        with status 1 otherwise, then populates mach's optional site packages.
        """
        valid = True
        try:
            # distutils is singled out here because some distros (namely Ubuntu)
            # include it in a separate package outside of the main Python
            # installation.
            # NOTE(review): distutils is removed from the stdlib in newer
            # Python releases — confirm the supported interpreter range here.
            import distutils.spawn
            import distutils.sysconfig

            assert distutils.sysconfig is not None and distutils.spawn is not None
        except ImportError as e:
            print("ERROR: Could not import package %s" % e.name, file=sys.stderr)
            self.instance.suggest_install_distutils()
            valid = False
        except AssertionError:
            print("ERROR: distutils is not behaving as expected.", file=sys.stderr)
            self.instance.suggest_install_distutils()
            valid = False
        pip3 = to_optional_path(which("pip3"))
        if not pip3:
            print("ERROR: Could not find pip3.", file=sys.stderr)
            self.instance.suggest_install_pip3()
            valid = False
        if not valid:
            print(
                "ERROR: Your Python installation will not be able to run "
                "`mach bootstrap`. `mach bootstrap` cannot maintain your "
                "Python environment for you; fix the errors shown here, and "
                "then re-run `mach bootstrap`.",
                file=sys.stderr,
            )
            sys.exit(1)

        mach_site = MachSiteManager.from_environment(
            topsrcdir,
            lambda: os.path.normpath(get_state_dir(True, topsrcdir=topsrcdir)),
        )
        mach_site.attempt_populate_optional_packages()
+
+
def update_vct(hg: Path, root_state_dir: Path):
    """Ensure version-control-tools in the state directory is up to date.

    Returns the directory the v-c-t checkout lives in.
    """
    tools_dir = root_state_dir / "version-control-tools"

    # Clone or pull, then update to the "@" bookmark (latest revision).
    update_mercurial_repo(
        hg,
        "https://hg.mozilla.org/hgcustom/version-control-tools",
        tools_dir,
        "@",
    )

    return tools_dir
+
+
def configure_mercurial(hg: Optional[Path], root_state_dir: Path):
    """Run the Mercurial configuration wizard shipped with v-c-t."""
    vct_dir = update_vct(hg, root_state_dir)

    hg_str = to_optional_str(hg)

    # Run the config wizard from v-c-t.
    subprocess.call(
        [
            hg_str,
            "--config",
            f"extensions.configwizard={vct_dir}/hgext/configwizard",
            "configwizard",
        ]
    )
+
+
def update_mercurial_repo(hg: Path, url, dest: Path, revision):
    """Perform a clone/pull + update of a Mercurial repository."""
    hg_str = str(hg)

    if dest.exists():
        pull_args = [hg_str, "pull", url]
        cwd = dest
    else:
        pull_args = [hg_str, "clone", "--noupdate", url, str(dest)]
        cwd = "/"

    update_args = [hg_str, "update", "-r", revision]

    banner = "=" * 80
    print(banner)
    print(f"Ensuring {url} is up to date at {dest}")

    # HGPLAIN disables user configuration that could change hg's behavior.
    env = dict(os.environ, HGPLAIN="1")

    try:
        subprocess.check_call(pull_args, cwd=str(cwd), env=env)
        subprocess.check_call(update_args, cwd=str(dest), env=env)
    finally:
        print(banner)
+
+
def current_firefox_checkout(env, hg: Optional[Path] = None):
    """Determine whether we're in a Firefox checkout.

    Walks upward from the current directory and returns a
    ``(checkout_type, root_path)`` tuple, where ``checkout_type`` is one of
    ``"hg"``, ``"git"``, or ``"SOURCE"`` (an unpacked source archive).
    Raises UserError when no checkout can be identified.
    """
    HG_ROOT_REVISIONS = set(
        [
            # From mozilla-unified.
            "8ba995b74e18334ab3707f27e9eb8f4e37ba3d29"
        ]
    )

    path = Path.cwd()
    while path:
        hg_dir = path / ".hg"
        git_dir = path / ".git"
        known_file = path / "config" / "milestone.txt"
        if hg and hg_dir.exists():
            # Verify the hg repo is a Firefox repo by looking at rev 0.
            try:
                node = subprocess.check_output(
                    [str(hg), "log", "-r", "0", "--template", "{node}"],
                    cwd=str(path),
                    env=env,
                    universal_newlines=True,
                )
                if node in HG_ROOT_REVISIONS:
                    _warn_if_risky_revision(path)
                    return "hg", path
                # Else the root revision is different. There could be nested
                # repos. So keep traversing the parents.
            except subprocess.CalledProcessError:
                pass

        # Just check for known-good files in the checkout, to prevent attempted
        # foot-shootings. Determining a canonical git checkout of mozilla-unified
        # is...complicated
        elif git_dir.exists() or hg_dir.exists():
            if known_file.exists():
                _warn_if_risky_revision(path)
                return ("git" if git_dir.exists() else "hg"), path
        elif known_file.exists():
            # No VCS metadata, but the milestone file exists: source archive.
            return "SOURCE", path

        # Stop at the filesystem root, whose .parents sequence is empty.
        if not len(path.parents):
            break
        path = path.parent

    raise UserError(
        "Could not identify the root directory of your checkout! "
        "Are you running `mach bootstrap` in an hg or git clone?"
    )
+
+
def update_git_tools(git: Optional[Path], root_state_dir: Path):
    """Update git tools, hooks and extensions.

    Ensures a prebuilt git-cinnabar binary in ``root_state_dir`` is present
    and up to date, and returns the directory containing it.
    """
    # Ensure git-cinnabar is up to date.
    cinnabar_dir = root_state_dir / "git-cinnabar"
    cinnabar_exe = cinnabar_dir / "git-cinnabar"

    if sys.platform.startswith(("win32", "msys")):
        cinnabar_exe = cinnabar_exe.with_suffix(".exe")

    # Previously, this script would do a full clone of the git-cinnabar
    # repository. It now only downloads prebuilt binaries, so if we are
    # updating from an old setup, remove the repository and start over.
    if (cinnabar_dir / ".git").exists():
        # git sets pack files read-only, which causes problems removing
        # them on Windows. To work around that, we use an error handler
        # on rmtree that retries to remove the file after chmod'ing it.
        def onerror(func, path, exc):
            if func == os.unlink:
                os.chmod(path, stat.S_IRWXU)
                func(path)
            else:
                raise

        shutil.rmtree(str(cinnabar_dir), onerror=onerror)

    # If we already have an executable, ask it to update itself.
    exists = cinnabar_exe.exists()
    if exists:
        try:
            subprocess.check_call([str(cinnabar_exe), "self-update"])
        except subprocess.CalledProcessError as e:
            print(e)

    # git-cinnabar 0.6.0rc1 self-update had a bug that could leave an empty
    # file. If that happens, install from scratch.
    if not exists or cinnabar_exe.stat().st_size == 0:
        import ssl
        from urllib.request import urlopen

        import certifi

        if not cinnabar_dir.exists():
            cinnabar_dir.mkdir()

        cinnabar_url = "https://github.com/glandium/git-cinnabar/"
        download_py = cinnabar_dir / "download.py"
        # urlopen()'s `cafile` argument is deprecated and removed in newer
        # Python releases; build an SSLContext from certifi's CA bundle and
        # pass it via `context` instead.
        tls_context = ssl.create_default_context(cafile=certifi.where())
        with open(download_py, "wb") as fh:
            shutil.copyfileobj(
                urlopen(f"{cinnabar_url}/raw/master/download.py", context=tls_context),
                fh,
            )

        try:
            subprocess.check_call(
                [sys.executable, str(download_py)], cwd=str(cinnabar_dir)
            )
        except subprocess.CalledProcessError as e:
            print(e)
        finally:
            # Remove the helper script whether or not it succeeded.
            download_py.unlink()

    return cinnabar_dir
+
+
def configure_git(
    git: Optional[Path],
    cinnabar: Optional[Path],
    root_state_dir: Path,
    top_src_dir: Path,
):
    """Run the Git configuration steps.

    Warns about old git versions, enables core.untrackedCache when supported,
    updates git-cinnabar, and prints PATH setup instructions when no
    git-cinnabar executable was found on PATH (``cinnabar`` is falsy).
    """

    git_str = to_optional_str(git)

    # Extract e.g. "2.39.1" from the "git --version" output.
    match = re.search(
        r"(\d+\.\d+\.\d+)",
        subprocess.check_output([git_str, "--version"], universal_newlines=True),
    )
    if not match:
        raise Exception("Could not find git version")
    git_version = Version(match.group(1))

    if git_version < MINIMUM_RECOMMENDED_GIT_VERSION:
        print(
            OLD_GIT_WARNING.format(
                old_version=git_version,
                minimum_recommended_version=MINIMUM_RECOMMENDED_GIT_VERSION,
            )
        )

    if git_version >= Version("2.17"):
        # "core.untrackedCache" has a bug before 2.17
        subprocess.check_call(
            [git_str, "config", "core.untrackedCache", "true"], cwd=str(top_src_dir)
        )

    cinnabar_dir = str(update_git_tools(git, root_state_dir))

    if not cinnabar:
        if "MOZILLABUILD" in os.environ:
            # Slightly modify the path on Windows to be correct
            # for the copy/paste into the .bash_profile
            cinnabar_dir = win_to_msys_path(cinnabar_dir)

            print(
                ADD_GIT_CINNABAR_PATH.format(
                    prefix="%USERPROFILE%", cinnabar_dir=cinnabar_dir
                )
            )
        else:
            print(ADD_GIT_CINNABAR_PATH.format(prefix="~", cinnabar_dir=cinnabar_dir))
+
+
def _warn_if_risky_revision(path: Path):
    """Print a warning when the checkout's tip commit looks stale.

    "Stale" means the last commit is at least a month old — an approximate
    calculation, but good enough for warning purposes.
    """
    SECONDS_PER_MONTH = 60 * 60 * 24 * 30
    from mozversioncontrol import get_repository_object

    commit_age = time.time() - get_repository_object(path).get_commit_time()
    if commit_age >= SECONDS_PER_MONTH:
        print(OLD_REVISION_WARNING)
+
+
def _macos_is_running_under_rosetta():
    """Return True when the current process is translated by Rosetta.

    Queries the `sysctl.proc_translated` sysctl; a successful call printing
    "1" indicates translation.
    """
    proc = subprocess.run(
        ["sysctl", "-n", "sysctl.proc_translated"],
        stdout=subprocess.PIPE,
        stderr=subprocess.DEVNULL,
    )
    if proc.returncode != 0:
        # The sysctl doesn't exist or failed; treat as not translated.
        return False
    return proc.stdout.decode("ascii", "replace").strip() == "1"
diff --git a/python/mozboot/mozboot/centosfedora.py b/python/mozboot/mozboot/centosfedora.py
new file mode 100644
index 0000000000..37aa0e8eaa
--- /dev/null
+++ b/python/mozboot/mozboot/centosfedora.py
@@ -0,0 +1,80 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import subprocess
+
+from mozfile import which
+
+from mozboot.base import BaseBootstrapper
+from mozboot.linux_common import LinuxBootstrapper
+
+
class CentOSFedoraBootstrapper(LinuxBootstrapper, BaseBootstrapper):
    """Bootstrapper for the CentOS family (centos/rocky/oracle) and Fedora."""

    def __init__(self, distro, version, dist_id, **kwargs):
        BaseBootstrapper.__init__(self, **kwargs)

        # Distribution name, e.g. "centos", "rocky", "oracle", or "fedora".
        self.distro = distro
        # Major release number only (e.g. 36 for "36.1").
        self.version = int(version.split(".")[0])
        self.dist_id = dist_id

    def install_packages(self, packages):
        # perl-FindBin is packaged separately from perl on releases >= 33.
        # NOTE(review): presumably a Fedora version check — confirm it isn't
        # unintentionally hit by CentOS-family version numbers.
        if self.version >= 33 and "perl" in packages:
            packages.append("perl-FindBin")
        # watchman is not available on centos/rocky
        if self.distro in ("centos", "rocky", "oracle"):
            packages = [p for p in packages if p != "watchman"]
        self.dnf_install(*packages)

    def upgrade_mercurial(self, current):
        # `current` is the installed Mercurial version, or None when absent.
        if current is None:
            self.dnf_install("mercurial")
        else:
            self.dnf_update("mercurial")

    def dnf_install(self, *packages):
        """Install *packages* via dnf, falling back to yum when dnf is absent."""
        if which("dnf"):

            def not_installed(package):
                # We could check for "Error: No matching Packages to list", but
                # checking `dnf`s exit code is sufficient.
                # Ideally we'd invoke dnf with '--cacheonly', but there's:
                # https://bugzilla.redhat.com/show_bug.cgi?id=2030255
                is_installed = subprocess.run(
                    ["dnf", "list", "--installed", package],
                    stdout=subprocess.PIPE,
                    stderr=subprocess.STDOUT,
                )
                if is_installed.returncode not in [0, 1]:
                    stdout = is_installed.stdout
                    raise Exception(
                        f'Failed to determine whether package "{package}" is installed: "{stdout}"'
                    )
                return is_installed.returncode != 0

            # Only pass packages that aren't installed yet to dnf.
            packages = list(filter(not_installed, packages))
            if len(packages) == 0:
                # avoid sudo prompt (support unattended re-bootstrapping)
                return

            command = ["dnf", "install"]
        else:
            command = ["yum", "install"]

        if self.no_interactive:
            command.append("-y")
        command.extend(packages)

        self.run_as_root(command)

    def dnf_update(self, *packages):
        """Update *packages* via dnf, falling back to yum when dnf is absent."""
        if which("dnf"):
            command = ["dnf", "update"]
        else:
            command = ["yum", "update"]

        if self.no_interactive:
            command.append("-y")
        command.extend(packages)

        self.run_as_root(command)
diff --git a/python/mozboot/mozboot/debian.py b/python/mozboot/mozboot/debian.py
new file mode 100644
index 0000000000..34e328586e
--- /dev/null
+++ b/python/mozboot/mozboot/debian.py
@@ -0,0 +1,83 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import sys
+
+from mozboot.base import MERCURIAL_INSTALL_PROMPT, BaseBootstrapper
+from mozboot.linux_common import LinuxBootstrapper
+
+
class DebianBootstrapper(LinuxBootstrapper, BaseBootstrapper):
    """Bootstrapper for Debian and derivatives, driving apt-get."""

    def __init__(self, distro, version, dist_id, codename, **kwargs):
        BaseBootstrapper.__init__(self, **kwargs)

        self.distro = distro
        self.version = version
        self.dist_id = dist_id
        self.codename = codename

    def suggest_install_distutils(self):
        # Hint printed when the bootstrapper's distutils import check fails.
        print(
            "HINT: Try installing distutils with "
            "`apt-get install python3-distutils`.",
            file=sys.stderr,
        )

    def suggest_install_pip3(self):
        # Hint printed when pip3 is not found on PATH.
        print(
            "HINT: Try installing pip3 with `apt-get install python3-pip`.",
            file=sys.stderr,
        )

    def install_packages(self, packages):
        try:
            if int(self.version) < 11:
                # watchman is only available starting from Debian 11.
                packages = [p for p in packages if p != "watchman"]
        except ValueError:
            # Non-numeric versions (e.g. "testing") fall through unchanged.
            pass

        self.apt_install(*packages)

    def _update_package_manager(self):
        self.apt_update()

    def upgrade_mercurial(self, current):
        """Install Mercurial from pip because Debian packages typically lag."""
        # NOTE(review): the return value is inconsistent (False vs implicit
        # None across branches) — confirm callers ignore it.
        if self.no_interactive:
            # Install via Apt in non-interactive mode because it is the more
            # conservative option and less likely to make people upset.
            self.apt_install("mercurial")
            return

        res = self.prompt_int(MERCURIAL_INSTALL_PROMPT, 1, 3)

        # Apt.
        if res == 2:
            self.apt_install("mercurial")
            return False

        # No Mercurial.
        if res == 3:
            print("Not installing Mercurial.")
            return False

        # pip.
        assert res == 1
        self.run_as_root(["pip3", "install", "--upgrade", "Mercurial"])

    def apt_install(self, *packages):
        """Install *packages* with apt-get (non-interactive adds -y)."""
        command = ["apt-get", "install"]
        if self.no_interactive:
            command.append("-y")
        command.extend(packages)

        self.run_as_root(command)

    def apt_update(self):
        """Refresh the apt package index."""
        command = ["apt-get", "update"]
        if self.no_interactive:
            command.append("-y")

        self.run_as_root(command)
diff --git a/python/mozboot/mozboot/freebsd.py b/python/mozboot/mozboot/freebsd.py
new file mode 100644
index 0000000000..f4d6d1847b
--- /dev/null
+++ b/python/mozboot/mozboot/freebsd.py
@@ -0,0 +1,70 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import sys
+
+from mozfile import which
+
+from mozboot.base import BaseBootstrapper
+
+
class FreeBSDBootstrapper(BaseBootstrapper):
    """Bootstrapper for FreeBSD (and NetBSD, which uses pkgin)."""

    def __init__(self, version, flavor, **kwargs):
        BaseBootstrapper.__init__(self, **kwargs)
        self.version = int(version.split(".")[0])
        self.flavor = flavor.lower()

        self.packages = [
            "gmake",
            "gtar",
            "m4",
            "npm",
            "pkgconf",
            "py%d%d-sqlite3" % sys.version_info[0:2],
            "rust",
            "watchman",
        ]

        self.browser_packages = [
            "dbus-glib",
            "libXt",
            "nasm",
            "pulseaudio",
        ]

        # Pull in binutils/unzip only when the host is missing the tool.
        for tool, package in (("as", "binutils"), ("unzip", "unzip")):
            if not which(tool):
                self.packages.append(package)

    def pkg_install(self, *packages):
        """Install packages with pkg (FreeBSD) or pkgin (NetBSD)."""
        if sys.platform.startswith("netbsd"):
            command = ["pkgin", "install"]
        else:
            command = ["pkg", "install"]
        if self.no_interactive:
            command.append("-y")

        command.extend(packages)
        self.run_as_root(command)

    def install_system_packages(self):
        self.pkg_install(*self.packages)

    def install_browser_packages(self, mozconfig_builder, artifact_mode=False):
        # TODO: Figure out what not to install for artifact mode
        packages = list(self.browser_packages)
        if not artifact_mode:
            if sys.platform.startswith("netbsd"):
                extra = ["brotli", "gtk3+", "libv4l", "cbindgen"]
            else:
                extra = ["gtk3", "mesa-dri", "v4l_compat", "rust-cbindgen"]
            packages.extend(extra)
        self.pkg_install(*packages)

    def install_browser_artifact_mode_packages(self, mozconfig_builder):
        self.install_browser_packages(mozconfig_builder, artifact_mode=True)

    def upgrade_mercurial(self, current):
        self.pkg_install("mercurial")
diff --git a/python/mozboot/mozboot/gentoo.py b/python/mozboot/mozboot/gentoo.py
new file mode 100644
index 0000000000..4ddf86696f
--- /dev/null
+++ b/python/mozboot/mozboot/gentoo.py
@@ -0,0 +1,29 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozboot.base import BaseBootstrapper
+from mozboot.linux_common import LinuxBootstrapper
+
+
class GentooBootstrapper(LinuxBootstrapper, BaseBootstrapper):
    """Bootstrapper for Gentoo, driving emerge."""

    def __init__(self, version, dist_id, **kwargs):
        BaseBootstrapper.__init__(self, **kwargs)

        self.version = version
        self.dist_id = dist_id

    def install_packages(self, packages):
        # Map ambiguous short names onto fully-qualified Gentoo atoms.
        DISAMBIGUATE = {
            "gzip": "app-arch/gzip",
            "tar": "app-arch/tar",
        }
        # watchman is available but requires messing with USEs.
        atoms = [DISAMBIGUATE.get(p, p) for p in packages if p != "watchman"]
        self.run_as_root(["emerge", "--noreplace"] + atoms)

    def _update_package_manager(self):
        self.run_as_root(["emerge", "--sync"])

    def upgrade_mercurial(self, current):
        self.run_as_root(["emerge", "--update", "mercurial"])
diff --git a/python/mozboot/mozboot/linux_common.py b/python/mozboot/mozboot/linux_common.py
new file mode 100644
index 0000000000..c6751d333b
--- /dev/null
+++ b/python/mozboot/mozboot/linux_common.py
@@ -0,0 +1,93 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# An easy way for distribution-specific bootstrappers to share the code
+# needed to install Stylo and Node dependencies. This class must come before
+# BaseBootstrapper in the inheritance list.
+
+import platform
+
+
def is_non_x86_64():
    """Return True when the host machine is anything other than x86_64."""
    machine = platform.machine()
    return machine != "x86_64"
+
+
class MobileAndroidBootstrapper(object):
    """Mixin providing the Android (GeckoView/Fenix) bootstrap steps on Linux."""

    def __init__(self, **kwargs):
        pass

    def install_mobile_android_packages(self, mozconfig_builder, artifact_mode=False):
        from mozboot import android

        os_arch = platform.machine()
        common_kwargs = dict(
            artifact_mode=artifact_mode,
            no_interactive=self.no_interactive,
        )
        # Base SDK/NDK install, then the x86_64 and ARM emulator images.
        android.ensure_android("linux", os_arch, **common_kwargs)
        android.ensure_android(
            "linux",
            os_arch,
            system_images_only=True,
            avd_manifest_path=android.AVD_MANIFEST_X86_64,
            **common_kwargs,
        )
        android.ensure_android(
            "linux",
            os_arch,
            system_images_only=True,
            avd_manifest_path=android.AVD_MANIFEST_ARM,
            **common_kwargs,
        )

    def install_mobile_android_artifact_mode_packages(self, mozconfig_builder):
        self.install_mobile_android_packages(mozconfig_builder, artifact_mode=True)

    def ensure_mobile_android_packages(self):
        from mozboot import android

        android.ensure_java("linux", platform.machine())
        self.install_toolchain_artifact(android.LINUX_X86_64_ANDROID_AVD)
        self.install_toolchain_artifact(android.LINUX_ARM_ANDROID_AVD)

    def generate_mobile_android_mozconfig(self, artifact_mode=False):
        from mozboot import android

        return android.generate_mozconfig("linux", artifact_mode=artifact_mode)

    def generate_mobile_android_artifact_mode_mozconfig(self):
        return self.generate_mobile_android_mozconfig(artifact_mode=True)
+
+
class LinuxBootstrapper(MobileAndroidBootstrapper):
    """Shared Linux bootstrap behavior layered on the Android mixin."""

    def __init__(self, **kwargs):
        pass

    def ensure_sccache_packages(self):
        # Nothing to install on Linux.
        pass

    def install_system_packages(self):
        base_packages = [
            "bash",
            "findutils",  # contains xargs
            "gzip",
            "libxml2",  # used by bootstrapped clang
            "m4",
            "make",
            "perl",
            "tar",
            "unzip",
            "watchman",
        ]
        self.install_packages(base_packages)

    def install_browser_packages(self, mozconfig_builder, artifact_mode=False):
        pass

    def install_browser_artifact_mode_packages(self, mozconfig_builder):
        pass
diff --git a/python/mozboot/mozboot/mach_commands.py b/python/mozboot/mozboot/mach_commands.py
new file mode 100644
index 0000000000..02cc69f54b
--- /dev/null
+++ b/python/mozboot/mozboot/mach_commands.py
@@ -0,0 +1,119 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import errno
+import sys
+from pathlib import Path
+
+from mach.decorators import Command, CommandArgument
+
+from mozboot.bootstrap import APPLICATIONS
+
+
@Command(
    "bootstrap",
    category="devenv",
    description="Install required system packages for building.",
)
@CommandArgument(
    "--application-choice",
    choices=list(APPLICATIONS.keys()) + list(APPLICATIONS.values()),
    default=None,
    help="Pass in an application choice instead of using the default "
    "interactive prompt.",
)
@CommandArgument(
    "--no-system-changes",
    dest="no_system_changes",
    action="store_true",
    help="Only execute actions that leave the system configuration alone.",
)
@CommandArgument(
    "--exclude",
    nargs="+",
    help="A list of bootstrappable elements not to bootstrap.",
)
def bootstrap(
    command_context, application_choice=None, no_system_changes=False, exclude=None
):
    """Bootstrap system and mach for optimal development experience.

    ``exclude`` defaults to None instead of ``[]`` to avoid the shared
    mutable default-argument pitfall; it is normalized to a fresh list below.
    """
    from mozboot.bootstrap import Bootstrapper

    bootstrapper = Bootstrapper(
        choice=application_choice,
        no_interactive=not command_context._mach_context.is_interactive,
        no_system_changes=no_system_changes,
        exclude=exclude or [],
        mach_context=command_context._mach_context,
    )
    bootstrapper.bootstrap(command_context.settings)
+
+
@Command(
    "vcs-setup",
    category="devenv",
    description="Help configure a VCS for optimal development.",
)
@CommandArgument(
    "-u",
    "--update-only",
    action="store_true",
    help="Only update recommended extensions, don't run the wizard.",
)
def vcs_setup(command_context, update_only=False):
    """Ensure a Version Control System (Mercurial or Git) is optimally
    configured.

    This command will inspect your VCS configuration and
    guide you through an interactive wizard helping you configure the
    VCS for optimal use on Mozilla projects.

    User choice is respected: no changes are made without explicit
    confirmation from you.

    If "--update-only" is used, the interactive wizard is disabled
    and this command only ensures that remote repositories providing
    VCS extensions are up to date.
    """
    import mozversioncontrol
    from mach.util import to_optional_path
    from mozfile import which

    import mozboot.bootstrap as bootstrap

    # Pick the tool matching the repository backing the current checkout.
    repo = mozversioncontrol.get_repository_object(command_context._mach_context.topdir)
    tool = "hg"
    if repo.name == "git":
        tool = "git"

    # "hg" is an executable script with a shebang, which will be found by
    # which. We need to pass a win32 executable to the function because we
    # spawn a process from it.
    if sys.platform in ("win32", "msys"):
        tool += ".exe"

    vcs = to_optional_path(which(tool))
    if not vcs:
        raise OSError(errno.ENOENT, "Could not find {} on $PATH".format(tool))

    if update_only:
        # Refresh tooling only; skip the interactive wizard entirely.
        if repo.name == "git":
            bootstrap.update_git_tools(
                vcs,
                Path(command_context._mach_context.state_dir),
            )
        else:
            bootstrap.update_vct(vcs, Path(command_context._mach_context.state_dir))
    else:
        if repo.name == "git":
            bootstrap.configure_git(
                vcs,
                to_optional_path(which("git-cinnabar")),
                Path(command_context._mach_context.state_dir),
                Path(command_context._mach_context.topdir),
            )
        else:
            bootstrap.configure_mercurial(
                vcs, Path(command_context._mach_context.state_dir)
            )
diff --git a/python/mozboot/mozboot/mozconfig.py b/python/mozboot/mozboot/mozconfig.py
new file mode 100644
index 0000000000..a1ae4c8523
--- /dev/null
+++ b/python/mozboot/mozboot/mozconfig.py
@@ -0,0 +1,156 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import filecmp
+import os
+from pathlib import Path
+from typing import Union
+
# Error shown when the deprecated MOZ_MYCONFIG environment variable is set.
MOZ_MYCONFIG_ERROR = """
The MOZ_MYCONFIG environment variable to define the location of mozconfigs
is deprecated. If you wish to define the mozconfig path via an environment
variable, use MOZCONFIG instead.
""".strip()

# Error shown when a mozconfig sits at a legacy, no-longer-supported path.
# Interpolated with (legacy_path, topsrcdir).
MOZCONFIG_LEGACY_PATH_ERROR = """
You currently have a mozconfig at %s. This implicit location is no longer
supported. Please move it to %s/.mozconfig or set an explicit path
via the $MOZCONFIG environment variable.
""".strip()

# File names probed in the top source directory, in priority order.
DEFAULT_TOPSRCDIR_PATHS = (".mozconfig", "mozconfig")
# Legacy srcdir locations that now trigger MOZCONFIG_LEGACY_PATH_ERROR.
DEPRECATED_TOPSRCDIR_PATHS = ("mozconfig.sh", "myconfig.sh")
# Legacy $HOME locations that now trigger MOZCONFIG_LEGACY_PATH_ERROR.
DEPRECATED_HOME_PATHS = (".mozconfig", ".mozconfig.sh", ".mozmyconfig.sh")
+
+
class MozconfigFindException(Exception):
    """Signals an invalid or ambiguous mozconfig location.

    Raised by find_mozconfig() when the environment or source tree points at
    missing, conflicting, or legacy mozconfig files.
    """
+
+
class MozconfigBuilder(object):
    """Accumulates mozconfig lines and renders them into file content."""

    def __init__(self):
        self._lines = []

    def append(self, block):
        # Split the block into lines, trimming surrounding whitespace and
        # discarding blank lines.
        stripped = (line.strip() for line in block.split("\n"))
        self._lines.extend(line for line in stripped if line)

    def generate(self):
        # One entry per line, each newline-terminated.
        return "".join(f"{line}\n" for line in self._lines)
+
+
def find_mozconfig(topsrcdir: Union[str, Path], env=os.environ):
    """Find the active mozconfig file for the current environment.

    This emulates the logic in mozconfig-find.

    1) If ENV[MOZCONFIG] is set, use that
    2) If $TOPSRCDIR/mozconfig or $TOPSRCDIR/.mozconfig exists, use it.
    3) If both exist or if there are legacy locations detected, error out.

    The absolute path to the found mozconfig will be returned on success.
    None will be returned if no mozconfig could be found. A
    MozconfigFindException will be raised if there is a bad state,
    including conditions from #3 above.
    """
    topsrcdir = Path(topsrcdir)

    # Check for legacy methods first.
    if "MOZ_MYCONFIG" in env:
        raise MozconfigFindException(MOZ_MYCONFIG_ERROR)

    # An empty MOZCONFIG value is treated the same as an unset one.
    env_path = env.get("MOZCONFIG", None) or None

    if env_path is not None:
        env_path = Path(env_path)

    if env_path is not None:
        if not env_path.is_absolute():
            potential_roots = [topsrcdir, Path.cwd()]
            # Attempt to eliminate duplicates for e.g.
            # self.topsrcdir == Path.cwd().
            potential_roots_strings = set(str(p.resolve()) for p in potential_roots)
            existing = [
                root
                for root in potential_roots_strings
                if (Path(root) / env_path).exists()
            ]
            if len(existing) > 1:
                # There are multiple files, but we might have a setup like:
                #
                # somedirectory/
                #  srcdir/
                #  objdir/
                #
                # MOZCONFIG=../srcdir/some/path/to/mozconfig
                #
                # and be configuring from the objdir. So even though we
                # have multiple existing files, they are actually the same
                # file.
                mozconfigs = [root / env_path for root in existing]
                if not all(
                    map(
                        lambda p1, p2: filecmp.cmp(p1, p2, shallow=False),
                        mozconfigs[:-1],
                        mozconfigs[1:],
                    )
                ):
                    raise MozconfigFindException(
                        "MOZCONFIG environment variable refers to a path that "
                        + "exists in more than one of "
                        + ", ".join(potential_roots_strings)
                        + ". Remove all but one."
                    )
            elif not existing:
                raise MozconfigFindException(
                    "MOZCONFIG environment variable refers to a path that "
                    + "does not exist in any of "
                    + ", ".join(potential_roots_strings)
                )

            env_path = existing[0] / env_path
        elif not env_path.exists():  # non-relative path
            raise MozconfigFindException(
                "MOZCONFIG environment variable refers to a path that "
                f"does not exist: {env_path}"
            )

        if not env_path.is_file():
            raise MozconfigFindException(
                "MOZCONFIG environment variable refers to a " f"non-file: {env_path}"
            )

    srcdir_paths = [topsrcdir / p for p in DEFAULT_TOPSRCDIR_PATHS]
    existing = [p for p in srcdir_paths if p.is_file()]

    if env_path is None and len(existing) > 1:
        raise MozconfigFindException(
            "Multiple default mozconfig files "
            "present. Remove all but one. " + ", ".join(str(p) for p in existing)
        )

    path = None

    if env_path is not None:
        path = env_path
    elif len(existing):
        assert len(existing) == 1
        path = existing[0]

    if path is not None:
        # Anchor relative paths at the current directory; absolute paths are
        # returned unchanged by the / operator.
        return Path.cwd() / path

    deprecated_paths = [topsrcdir / s for s in DEPRECATED_TOPSRCDIR_PATHS]

    home = env.get("HOME", None)
    if home is not None:
        home = Path(home)
        deprecated_paths.extend([home / s for s in DEPRECATED_HOME_PATHS])

    for path in deprecated_paths:
        if path.exists():
            raise MozconfigFindException(
                MOZCONFIG_LEGACY_PATH_ERROR % (path, topsrcdir)
            )

    return None
diff --git a/python/mozboot/mozboot/mozillabuild.py b/python/mozboot/mozboot/mozillabuild.py
new file mode 100644
index 0000000000..c783809656
--- /dev/null
+++ b/python/mozboot/mozboot/mozillabuild.py
@@ -0,0 +1,235 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import ctypes
+import os
+import platform
+import subprocess
+import sys
+from pathlib import Path
+
+from mozbuild.util import mozilla_build_version
+from packaging.version import Version
+
+from mozboot.base import BaseBootstrapper
+
+
def is_aarch64_host():
    """Report whether the native Windows machine architecture is ARM64.

    Uses the Win32 ``IsWow64Process2`` API. Whenever the symbol is missing
    or the call fails, conservatively answers False.
    """
    from ctypes import wintypes

    kernel32 = ctypes.windll.kernel32
    IMAGE_FILE_MACHINE_UNKNOWN = 0
    IMAGE_FILE_MACHINE_ARM64 = 0xAA64

    try:
        check_wow64 = kernel32.IsWow64Process2
    except Exception:
        # If we can't access the symbol, we know we're not on aarch64.
        return False

    process_machine = wintypes.USHORT(IMAGE_FILE_MACHINE_UNKNOWN)
    native_machine = wintypes.USHORT(IMAGE_FILE_MACHINE_UNKNOWN)

    succeeded = check_wow64(
        kernel32.GetCurrentProcess(),
        ctypes.byref(process_machine),
        ctypes.byref(native_machine),
    )
    # If this call fails, we have no idea.
    if not succeeded:
        return False

    return native_machine.value == IMAGE_FILE_MACHINE_ARM64
+
+
def get_is_windefender_disabled():
    """Return True when Windows Defender is disabled, or when its registry
    key/value is absent entirely."""
    import winreg

    key_path = r"SOFTWARE\Microsoft\Windows Defender"
    try:
        with winreg.OpenKeyEx(winreg.HKEY_LOCAL_MACHINE, key_path) as defender_key:
            # The value is either 0 (False) or 1 (True).
            disabled_value, _ = winreg.QueryValueEx(
                defender_key, "DisableAntiSpyware"
            )
    except FileNotFoundError:
        # Missing key or missing value: treat Defender as disabled.
        return True
    return bool(disabled_value)
+
+
def get_windefender_exclusion_paths():
    """Return the list of paths excluded from Windows Defender scanning,
    read from the registry. An absent key yields an empty list."""
    import winreg

    key_path = r"SOFTWARE\Microsoft\Windows Defender\Exclusions\Paths"
    exclusions = []
    try:
        with winreg.OpenKeyEx(winreg.HKEY_LOCAL_MACHINE, key_path) as key:
            # Each exclusion is stored as a value name under this key.
            value_count = winreg.QueryInfoKey(key)[1]
            for index in range(value_count):
                value_name = winreg.EnumValue(key, index)[0]
                exclusions.append(Path(value_name))
    except FileNotFoundError:
        # No exclusions key at all: nothing is excluded.
        pass

    return exclusions
+
+
def is_windefender_affecting_srcdir(src_dir: Path):
    """Return True when Windows Defender is enabled and *src_dir* is not
    under any of its exclusion paths.

    Returns False when Defender is disabled, when an exclusion covers the
    srcdir, or when the exclusion list could not be read. (Previously the
    access-denied branch fell through with a bare ``return``, yielding None
    instead of a boolean.)
    """
    if get_is_windefender_disabled():
        return False

    # Path.resolve() canonicalizes the path so the ancestor comparison below
    # isn't fooled by casing/symlink differences between the srcdir and the
    # registry's exclusion entries.
    src_dir = src_dir.resolve()

    try:
        exclusion_paths = get_windefender_exclusion_paths()
    except OSError as e:
        if e.winerror == 5:
            # A version of Windows 10 released in 2021 raises an "Access is denied"
            # error (ERROR_ACCESS_DENIED == 5) to un-elevated processes when they
            # query Windows Defender's exclusions. Skip the exclusion path checking
            # and report "not affected" so callers always get a boolean.
            return False
        raise

    for exclusion_path in exclusion_paths:
        exclusion_path = exclusion_path.resolve()
        try:
            if Path(os.path.commonpath((exclusion_path, src_dir))) == exclusion_path:
                # exclusion_path is an ancestor of srcdir, so Defender skips it.
                return False
        except ValueError:
            # ValueError: Paths don't have the same drive - can't be ours
            pass
    return True
+
+
class MozillaBuildBootstrapper(BaseBootstrapper):
    """Bootstrapper for MozillaBuild to install rustup."""

    def __init__(self, no_interactive=False, no_system_changes=False):
        BaseBootstrapper.__init__(
            self, no_interactive=no_interactive, no_system_changes=no_system_changes
        )

    def validate_environment(self):
        """Warn about environment problems that hurt Windows builds."""
        if self.application.startswith("mobile_android"):
            print(
                "WARNING!!! Building Firefox for Android on Windows is not "
                "fully supported. See https://bugzilla.mozilla.org/show_bug."
                "cgi?id=1169873 for details.",
                file=sys.stderr,
            )

        if is_windefender_affecting_srcdir(self.srcdir):
            print(
                "Warning: the Firefox checkout directory is currently not in the "
                "Windows Defender exclusion list. This can cause the build process "
                "to be dramatically slowed or broken. To resolve this, follow the "
                "directions here: "
                "https://firefox-source-docs.mozilla.org/setup/windows_build.html"
                "#antivirus-performance",
                file=sys.stderr,
            )

    def install_system_packages(self):
        pass

    def upgrade_mercurial(self, current):
        # Mercurial upstream sometimes doesn't upload wheels, and building
        # from source requires MS Visual C++ 9.0. So we force pip to install
        # the last version that comes with wheels.
        python_subdir = (
            "python3" if mozilla_build_version() >= Version("4.0") else "python"
        )
        pip_exe = (
            Path(os.environ["MOZILLABUILD"]) / python_subdir / "Scripts" / "pip.exe"
        )
        self.run(
            [
                str(pip_exe),
                "install",
                "--upgrade",
                "mercurial",
                "--only-binary",
                "mercurial",
            ]
        )

    def install_browser_packages(self, mozconfig_builder):
        pass

    def install_browser_artifact_mode_packages(self, mozconfig_builder):
        pass

    def _os_arch(self):
        machine = platform.machine()
        # On Windows, x86_64 is reported as AMD64 but we use x86_64
        # everywhere else, so normalize it here.
        return "x86_64" if machine == "AMD64" else machine

    def install_mobile_android_packages(self, mozconfig_builder, artifact_mode=False):
        from mozboot import android

        os_arch = self._os_arch()
        android.ensure_android(
            "windows",
            os_arch,
            artifact_mode=artifact_mode,
            no_interactive=self.no_interactive,
        )
        # Fetch system images for both emulator flavors.
        for avd_manifest in (android.AVD_MANIFEST_X86_64, android.AVD_MANIFEST_ARM):
            android.ensure_android(
                "windows",
                os_arch,
                system_images_only=True,
                artifact_mode=artifact_mode,
                no_interactive=self.no_interactive,
                avd_manifest_path=avd_manifest,
            )

    def ensure_mobile_android_packages(self):
        from mozboot import android

        android.ensure_java("windows", self._os_arch())
        for avd in (
            android.WINDOWS_X86_64_ANDROID_AVD,
            android.WINDOWS_ARM_ANDROID_AVD,
        ):
            self.install_toolchain_artifact(avd)

    def install_mobile_android_artifact_mode_packages(self, mozconfig_builder):
        self.install_mobile_android_packages(mozconfig_builder, artifact_mode=True)

    def generate_mobile_android_mozconfig(self, artifact_mode=False):
        from mozboot import android

        return android.generate_mozconfig("windows", artifact_mode=artifact_mode)

    def generate_mobile_android_artifact_mode_mozconfig(self):
        return self.generate_mobile_android_mozconfig(artifact_mode=True)

    def ensure_sccache_packages(self):
        from mozboot import sccache

        for toolchain in (sccache.RUSTC_DIST_TOOLCHAIN, sccache.CLANG_DIST_TOOLCHAIN):
            self.install_toolchain_artifact(toolchain, no_unpack=True)

    def _update_package_manager(self):
        pass

    def run(self, command):
        subprocess.check_call(command, stdin=sys.stdin)
diff --git a/python/mozboot/mozboot/openbsd.py b/python/mozboot/mozboot/openbsd.py
new file mode 100644
index 0000000000..a862525ece
--- /dev/null
+++ b/python/mozboot/mozboot/openbsd.py
@@ -0,0 +1,34 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozboot.base import BaseBootstrapper
+
+
class OpenBSDBootstrapper(BaseBootstrapper):
    """Bootstrapper that installs prerequisites with OpenBSD's pkg_add."""

    def __init__(self, version, **kwargs):
        BaseBootstrapper.__init__(self, **kwargs)

        self.packages = ["gmake", "gtar", "rust", "unzip"]
        self.browser_packages = [
            "llvm",
            "cbindgen",
            "nasm",
            "node",
            "gtk+3",
            "dbus-glib",
            "pulseaudio",
        ]

    def _pkg_add(self, packages):
        # we use -z because there's no other way to say "any autoconf-2.13"
        self.run_as_root(["pkg_add", "-z", *packages])

    def install_system_packages(self):
        self._pkg_add(self.packages)

    def install_browser_packages(self, mozconfig_builder, artifact_mode=False):
        # TODO: Figure out what not to install for artifact mode
        self._pkg_add(self.browser_packages)

    def install_browser_artifact_mode_packages(self, mozconfig_builder):
        self.install_browser_packages(mozconfig_builder, artifact_mode=True)
diff --git a/python/mozboot/mozboot/opensuse.py b/python/mozboot/mozboot/opensuse.py
new file mode 100644
index 0000000000..051ee97f4b
--- /dev/null
+++ b/python/mozboot/mozboot/opensuse.py
@@ -0,0 +1,63 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozboot.base import MERCURIAL_INSTALL_PROMPT, BaseBootstrapper
+from mozboot.linux_common import LinuxBootstrapper
+
+
class OpenSUSEBootstrapper(LinuxBootstrapper, BaseBootstrapper):
    """openSUSE experimental bootstrapper."""

    def __init__(self, version, dist_id, **kwargs):
        print("Using an experimental bootstrapper for openSUSE.")
        BaseBootstrapper.__init__(self, **kwargs)

    def install_packages(self, packages):
        # watchman is not available
        renames = {"libxml2": "libxml2-2"}
        wanted = [renames.get(name, name) for name in packages if name != "watchman"]
        self.zypper_install(*wanted)

    def _update_package_manager(self):
        self.zypper_update()

    def upgrade_mercurial(self, current):
        """Install Mercurial from pip because system packages could lag."""
        if self.no_interactive:
            # Install via zypper in non-interactive mode because it is the more
            # conservative option and less likely to make people upset.
            self.zypper_install("mercurial")
            return

        choice = self.prompt_int(MERCURIAL_INSTALL_PROMPT, 1, 3)

        if choice == 2:
            # zypper.
            self.zypper_install("mercurial")
            return False

        if choice == 3:
            # No Mercurial.
            print("Not installing Mercurial.")
            return False

        # pip.
        assert choice == 1
        self.run_as_root(["pip3", "install", "--upgrade", "Mercurial"])

    def zypper(self, *args):
        # -n puts zypper into non-interactive mode.
        base = ["zypper", "-n"] if self.no_interactive else ["zypper"]
        self.run_as_root(base + list(args))

    def zypper_install(self, *packages):
        self.zypper("install", *packages)

    def zypper_update(self, *packages):
        self.zypper("update", *packages)
diff --git a/python/mozboot/mozboot/osx.py b/python/mozboot/mozboot/osx.py
new file mode 100644
index 0000000000..8cd180f4ab
--- /dev/null
+++ b/python/mozboot/mozboot/osx.py
@@ -0,0 +1,310 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import platform
+import subprocess
+import sys
+import tempfile
+from urllib.request import urlopen
+
+import certifi
+from mach.util import to_optional_path, to_optional_str
+from mozfile import which
+from packaging.version import Version
+
+from mozboot.base import BaseBootstrapper
+
# URL of the upstream Homebrew install script that install_homebrew() runs.
HOMEBREW_BOOTSTRAP = (
    "https://raw.githubusercontent.com/Homebrew/install/master/install.sh"
)

# Message printed before the interactive Homebrew installation starts.
BREW_INSTALL = """
We will install the Homebrew package manager to install required packages.

You will be prompted to install Homebrew with its default settings. If you
would prefer to do this manually, hit CTRL+c, install Homebrew yourself, ensure
"brew" is in your $PATH, and relaunch bootstrap.
"""

# Message printed before packages are installed/upgraded via brew.
BREW_PACKAGES = """
We are now installing all required packages via Homebrew. You will see a lot of
output as packages are built.
"""

# NOTE(review): not referenced in this file's visible code — presumably
# consumed by callers elsewhere; verify before removing.
NO_BREW_INSTALLED = "It seems you don't have Homebrew installed."
+
+
class OSXAndroidBootstrapper(object):
    """Mixin providing the Android bootstrap steps shared by the macOS
    bootstrappers."""

    def install_mobile_android_packages(self, mozconfig_builder, artifact_mode=False):
        machine = platform.machine()
        if machine not in ("x86_64", "arm64"):
            raise Exception(
                "You need a 64-bit version of Mac OS X to build "
                "GeckoView/Firefox for Android."
            )

        from mozboot import android

        android.ensure_android(
            "macosx",
            machine,
            artifact_mode=artifact_mode,
            no_interactive=self.no_interactive,
        )

        # Pick the system images matching the host architecture.
        if machine in ("x86_64", "x86"):
            manifests = (android.AVD_MANIFEST_X86_64, android.AVD_MANIFEST_ARM)
        else:
            manifests = (android.AVD_MANIFEST_ARM64,)

        for avd_manifest in manifests:
            android.ensure_android(
                "macosx",
                machine,
                system_images_only=True,
                artifact_mode=artifact_mode,
                no_interactive=self.no_interactive,
                avd_manifest_path=avd_manifest,
            )

    def ensure_mobile_android_packages(self):
        from mozboot import android

        machine = platform.machine()
        android.ensure_java("macosx", machine)

        if machine in ("x86_64", "x86"):
            self.install_toolchain_artifact(android.MACOS_X86_64_ANDROID_AVD)
            self.install_toolchain_artifact(android.MACOS_ARM_ANDROID_AVD)
        elif machine == "arm64":
            # The only emulator supported on Apple Silicon is the Arm64 one.
            self.install_toolchain_artifact(android.MACOS_ARM64_ANDROID_AVD)

    def install_mobile_android_artifact_mode_packages(self, mozconfig_builder):
        self.install_mobile_android_packages(mozconfig_builder, artifact_mode=True)

    def generate_mobile_android_mozconfig(self):
        return self._generate_mobile_android_mozconfig()

    def generate_mobile_android_artifact_mode_mozconfig(self):
        return self._generate_mobile_android_mozconfig(artifact_mode=True)

    def _generate_mobile_android_mozconfig(self, artifact_mode=False):
        from mozboot import android

        return android.generate_mozconfig("macosx", artifact_mode=artifact_mode)
+
+
def ensure_command_line_tools():
    """Ensure the macOS command line tools or Xcode are present, kicking
    off `xcode-select --install` and exiting when neither is found.

    We need either the command line tools or Xcode (one is sufficient).
    Python 3, required to run this code, is not installed by default on
    macOS as of writing (macOS <= 11.x). There are at least 5 different
    ways to obtain it: macports, homebrew, command line tools, Xcode, or
    python.org. The first two require installing the command line tools,
    so only the python.org case can leave us with neither the command
    line tools nor Xcode available. `xcode-select --print-path` prints the
    path of whichever of the two is installed; when neither is, it prints
    an error to stderr and nothing to stdout, which is what we key off.
    """
    query = subprocess.run(
        ["xcode-select", "--print-path"],
        stdout=subprocess.PIPE,
        stderr=subprocess.DEVNULL,
    )
    if query.stdout:
        # Something printed a path: the tools or Xcode are installed.
        return

    subprocess.run(["xcode-select", "--install"], check=True)
    # xcode-select --install triggers a separate process to be started by
    # launchd, and tracking its successful outcome would require something
    # like figuring its pid and using kqueue to get a notification when it
    # finishes. Considering how unlikely it is that someone would end up
    # here in the first place, we just bail out.
    print("Please follow the command line tools installer instructions")
    print("and rerun `./mach bootstrap` when it's finished.")
    sys.exit(1)
+
+
class OSXBootstrapperLight(OSXAndroidBootstrapper, BaseBootstrapper):
    """Minimal macOS bootstrapper that only ensures the command line tools."""

    def __init__(self, version, **kwargs):
        BaseBootstrapper.__init__(self, **kwargs)

    def install_system_packages(self):
        ensure_command_line_tools()

    # All the installs below are assumed to be handled by mach configure/build by
    # default, which is true for arm64.
    def install_browser_packages(self, mozconfig_builder):
        pass

    def install_browser_artifact_mode_packages(self, mozconfig_builder):
        pass
+
+
class OSXBootstrapper(OSXAndroidBootstrapper, BaseBootstrapper):
    """macOS bootstrapper that installs prerequisites via Homebrew."""

    def __init__(self, version, **kwargs):
        BaseBootstrapper.__init__(self, **kwargs)

        self.os_version = Version(version)

        if self.os_version < Version("10.6"):
            raise Exception("OS X 10.6 or above is required.")

        self.minor_version = version.split(".")[1]

    def install_system_packages(self):
        """Install the command line tools, Homebrew, and core packages."""
        ensure_command_line_tools()

        self.ensure_homebrew_installed()
        _, hg_modern, _ = self.is_mercurial_modern()
        if not hg_modern:
            print(
                "Mercurial wasn't found or is not sufficiently modern. "
                "It will be installed with brew"
            )

        packages = ["git", "gnu-tar", "terminal-notifier", "watchman"]
        if not hg_modern:
            packages.append("mercurial")
        self._ensure_homebrew_packages(packages)

    def install_browser_packages(self, mozconfig_builder):
        pass

    def install_browser_artifact_mode_packages(self, mozconfig_builder):
        pass

    def _ensure_homebrew_found(self):
        """Locate `brew` on PATH; record it and return whether it was found."""
        self.brew = to_optional_path(which("brew"))

        return self.brew is not None

    def _ensure_homebrew_packages(self, packages, is_for_cask=False):
        """Install missing and upgrade outdated Homebrew formulae (or casks)."""
        package_type_flag = "--cask" if is_for_cask else "--formula"
        self.ensure_homebrew_installed()

        def create_homebrew_cmd(*parameters):
            base_cmd = [to_optional_str(self.brew)]
            base_cmd.extend(parameters)
            return base_cmd + [package_type_flag]

        installed = set(
            subprocess.check_output(
                create_homebrew_cmd("list"), universal_newlines=True
            ).split()
        )
        outdated = set(
            subprocess.check_output(
                create_homebrew_cmd("outdated", "--quiet"), universal_newlines=True
            ).split()
        )

        to_install = set(package for package in packages if package not in installed)
        to_upgrade = set(package for package in packages if package in outdated)

        if to_install or to_upgrade:
            print(BREW_PACKAGES)
        if to_install:
            subprocess.check_call(create_homebrew_cmd("install") + list(to_install))
        if to_upgrade:
            subprocess.check_call(create_homebrew_cmd("upgrade") + list(to_upgrade))

    def _ensure_homebrew_casks(self, casks):
        """Install the given casks, normalizing the cask-versions tap first."""
        self._ensure_homebrew_found()

        known_taps = subprocess.check_output([to_optional_str(self.brew), "tap"])

        # Ensure that we can access old versions of packages.
        if b"homebrew/cask-versions" not in known_taps:
            subprocess.check_output(
                [to_optional_str(self.brew), "tap", "homebrew/cask-versions"]
            )

        # "caskroom/versions" has been renamed to "homebrew/cask-versions", so
        # it is safe to remove the old tap. Removing the old tap is necessary
        # to avoid the error "Cask [name of cask] exists in multiple taps".
        # See https://bugzilla.mozilla.org/show_bug.cgi?id=1544981
        if b"caskroom/versions" in known_taps:
            subprocess.check_output(
                [to_optional_str(self.brew), "untap", "caskroom/versions"]
            )

        self._ensure_homebrew_packages(casks, is_for_cask=True)

    def ensure_homebrew_browser_packages(self):
        # TODO: Figure out what not to install for artifact mode
        packages = ["yasm"]
        self._ensure_homebrew_packages(packages)

    def ensure_homebrew_installed(self):
        """
        Search for Homebrew on PATH; if not found, prompt the user to install it.
        """
        homebrew_found = self._ensure_homebrew_found()
        if not homebrew_found:
            self.install_homebrew()

    def ensure_sccache_packages(self):
        from mozboot import sccache

        self.install_toolchain_artifact(sccache.RUSTC_DIST_TOOLCHAIN, no_unpack=True)
        self.install_toolchain_artifact(sccache.CLANG_DIST_TOOLCHAIN, no_unpack=True)

    def install_homebrew(self):
        """Download and run the upstream Homebrew install script, then verify
        that `brew` landed on PATH."""
        print(BREW_INSTALL)
        bootstrap = urlopen(
            url=HOMEBREW_BOOTSTRAP, cafile=certifi.where(), timeout=20
        ).read()
        with tempfile.NamedTemporaryFile() as tf:
            tf.write(bootstrap)
            tf.flush()

            subprocess.check_call(["bash", tf.name])

        homebrew_found = self._ensure_homebrew_found()
        if not homebrew_found:
            print(
                "Homebrew was just installed but can't be found on PATH. "
                "Please file a bug."
            )
            sys.exit(1)

    def _update_package_manager(self):
        subprocess.check_call([to_optional_str(self.brew), "-v", "update"])

    def _upgrade_package(self, package):
        # Bug fix: this previously called self._ensure_homebrew_installed(),
        # a method that does not exist (it is named ensure_homebrew_installed),
        # so any upgrade attempt raised AttributeError.
        self.ensure_homebrew_installed()

        try:
            subprocess.check_output(
                [to_optional_str(self.brew), "-v", "upgrade", package],
                stderr=subprocess.STDOUT,
            )
        except subprocess.CalledProcessError as e:
            # brew exits non-zero when the package is already current;
            # only that specific failure is benign.
            if b"already installed" not in e.output:
                raise

    def upgrade_mercurial(self, current):
        self._upgrade_package("mercurial")
diff --git a/python/mozboot/mozboot/rust.py b/python/mozboot/mozboot/rust.py
new file mode 100644
index 0000000000..90607fccff
--- /dev/null
+++ b/python/mozboot/mozboot/rust.py
@@ -0,0 +1,185 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this,
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import platform as platform_mod
+import sys
+
+# Base url for pulling the rustup installer.
# Base url for pulling the rustup installer.
RUSTUP_URL_BASE = "https://static.rust-lang.org/rustup"

# Pull this to get the latest stable version number.
RUSTUP_MANIFEST = RUSTUP_URL_BASE + "/release-stable.toml"

# We bake in a known version number so we can verify a checksum.
RUSTUP_VERSION = "1.23.1"

# SHA-256 checksums of the installers, per platform.
# Regenerate with `python rust.py --update` (see the __main__ block below).
RUSTUP_HASHES = {
    "x86_64-unknown-freebsd": "3fb56018ec6009c5a3e345f07d7ea2fbc67d4c6768e528c6d990c7ebe2388d09",
    "aarch64-apple-darwin": "6d56735284181b2eb804ed7f57f76cf5ff924251e8ab69d9b5822c3be1ca1dc7",
    "x86_64-apple-darwin": "39101feb178a7e3e4443b09b36338e794a9e00385e5f44a2f7789aefb91354a9",
    "x86_64-unknown-linux-gnu": "ed7773edaf1d289656bdec2aacad12413b38ad0193fff54b2231f5140a4b07c5",
    "x86_64-pc-windows-msvc": "a586cf9de3e4aa791fd5796b6a5f99ca05591ccef8bb94e53af5b69f0261fb03",
    "x86_64-unknown-netbsd": "8b29918e765f2cec3b81a911652b164471c42f8f31241f7401bb89582d6a3ed5",
}

# Message printed when the host has no configured installer.
NO_PLATFORM = """
Sorry, we have no installer configured for your platform.

Please try installing rust for your system from https://rustup.rs/
or from https://rust-lang.org/ or from your package manager.
"""
+
+
def rustup_url(host, version=RUSTUP_VERSION):
    """Download url for a particular version of the installer."""
    suffix = exe_suffix(host)
    return f"{RUSTUP_URL_BASE}/archive/{version}/{host}/rustup-init{suffix}"
+
+
def rustup_hash(host):
    """Look up the checksum for the given installer, or None if unknown."""
    try:
        return RUSTUP_HASHES[host]
    except KeyError:
        return None
+
+
def platform():
    """Determine the appropriate rust platform string for the current host"""
    current = sys.platform
    if current.startswith("darwin"):
        # Distinguish Apple Silicon from Intel Macs.
        arm = platform_mod.machine() == "arm64"
        return "aarch64-apple-darwin" if arm else "x86_64-apple-darwin"
    if current.startswith(("win32", "msys")):
        # Bravely assume we'll be building 64-bit Firefox.
        return "x86_64-pc-windows-msvc"

    triples = {
        "linux": "x86_64-unknown-linux-gnu",
        "freebsd": "x86_64-unknown-freebsd",
        "netbsd": "x86_64-unknown-netbsd",
    }
    for prefix, triple in triples.items():
        if current.startswith(prefix):
            return triple

    return None
+
+
def exe_suffix(host=None):
    """Return the installer executable suffix for *host* (defaulting to the
    current platform): ".exe" on Windows targets, "" elsewhere."""
    target = host or platform()
    return ".exe" if "windows" in target else ""
+
+
# Usage text for running this module directly as a checksum-maintenance tool.
# (Wording fix: "Pass the --update option print info" was missing "to".)
USAGE = """
python rust.py [--update]

Pass the --update option to print info for the latest release of rustup-init.

When invoked without the --update option, it queries the latest version
and verifies the current stored checksums against the distribution server,
but doesn't update the version installed by `mach bootstrap`.
"""
+
+
def unquote(s):
    """Strip outer quotation marks from a string."""
    # Strip single quotes first, then double quotes, matching the original
    # two-step behavior.
    for quote in ("'", '"'):
        s = s.strip(quote)
    return s
+
+
def rustup_latest_version():
    """Query the latest version of the rustup installer.

    Returns the version string from the release manifest, or None when no
    ``version`` key is found. Exits the process on an unknown schema.
    """
    import requests

    r = requests.get(RUSTUP_MANIFEST)
    # The manifest is toml, but we might not have the toml python module
    # available, so use ad-hoc parsing to obtain the current release version.
    #
    # The manifest looks like:
    #
    # schema-version = '1'
    # version = '0.6.5'
    #
    for line in r.iter_lines():
        line = line.decode("utf-8")
        # Robustness fix: the old `key, value = line.split("=", 2)` raised
        # ValueError on lines without "=" (e.g. blank lines) and on values
        # containing an "=". partition() splits on the first "=" only and
        # lets us skip non key/value lines.
        key, sep, value = line.partition("=")
        if not sep:
            continue
        key = key.strip()
        value = value.strip()
        if key == "schema-version":
            schema = int(unquote(value))
            if schema != 1:
                print("ERROR: Unknown manifest schema %s" % value)
                sys.exit(1)
        elif key == "version":
            return unquote(value)
    return None
+
+
def http_download_and_hash(url):
    """Stream *url* and return the SHA-256 hex digest of its content."""
    import hashlib

    import requests

    digest = hashlib.sha256()
    response = requests.get(url, stream=True)
    for chunk in response.iter_content(4096):
        digest.update(chunk)
    return digest.hexdigest()
+
+
def make_checksums(version, validate=False):
    """Download each platform's installer for *version* and return a list of
    (platform, sha256) pairs.

    With validate=True, compare every computed checksum against the value
    baked into RUSTUP_HASHES and report mismatches.
    """
    results = []
    # Use `host` for the loop variable so we don't shadow the platform()
    # helper defined above.
    for host in RUSTUP_HASHES.keys():
        action = "Checking" if validate else "Fetching"
        print("%s %s... " % (action, host), end="", flush=True)
        checksum = http_download_and_hash(rustup_url(host, version))
        if validate and checksum != rustup_hash(host):
            print(
                "mismatch:\n script: %s\n server: %s"
                % (RUSTUP_HASHES[host], checksum)
            )
        else:
            print("OK")
        results.append((host, checksum))
    return results
+
+
if __name__ == "__main__":
    """Allow invoking the module as a utility to update checksums."""

    # The only recognized argument is --update; anything else prints usage.
    args = sys.argv[1:]
    if args and args[0] != "--update":
        print(USAGE)
        sys.exit(1)
    update = bool(args)

    print("Checking latest installer version... ", end="", flush=True)
    version = rustup_latest_version()
    if not version:
        print("ERROR: Could not query current rustup installer version.")
        sys.exit(1)
    print(version)

    if version == RUSTUP_VERSION:
        print("We're up to date. Validating checksums.")
        make_checksums(version, validate=True)
        exit()

    if not update:
        print("Out of date. We use %s. Validating checksums." % RUSTUP_VERSION)
        make_checksums(RUSTUP_VERSION, validate=True)
        exit()

    print("Out of date. We use %s. Calculating checksums." % RUSTUP_VERSION)
    checksums = make_checksums(version)
    # Emit ready-to-paste replacement constants.
    print("")
    print("RUSTUP_VERSION = '%s'" % version)
    print("RUSTUP_HASHES = {")
    for entry in checksums:
        print(" '%s':\n '%s'," % entry)
    print("}")
diff --git a/python/mozboot/mozboot/sccache.py b/python/mozboot/mozboot/sccache.py
new file mode 100644
index 0000000000..b3cc9fbae9
--- /dev/null
+++ b/python/mozboot/mozboot/sccache.py
@@ -0,0 +1,9 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
# sccache-dist currently expects clients to provide toolchains when
# distributing from macOS or Windows, so we download linux binaries capable
# of cross-compiling for these cases.
# Toolchain artifact names consumed by the bootstrappers'
# ensure_sccache_packages() implementations.
RUSTC_DIST_TOOLCHAIN = "rustc-dist-toolchain"
CLANG_DIST_TOOLCHAIN = "clang-dist-toolchain"
diff --git a/python/mozboot/mozboot/solus.py b/python/mozboot/mozboot/solus.py
new file mode 100644
index 0000000000..664b5285aa
--- /dev/null
+++ b/python/mozboot/mozboot/solus.py
@@ -0,0 +1,32 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozboot.base import BaseBootstrapper
+from mozboot.linux_common import LinuxBootstrapper
+
+
class SolusBootstrapper(LinuxBootstrapper, BaseBootstrapper):
    """Solus experimental bootstrapper."""

    def __init__(self, version, dist_id, **kwargs):
        print("Using an experimental bootstrapper for Solus.")
        BaseBootstrapper.__init__(self, **kwargs)

    def install_packages(self, packages):
        self.package_install(*packages)

    def _update_package_manager(self):
        pass

    def upgrade_mercurial(self, current):
        self.package_install("mercurial")

    def package_install(self, *packages):
        command = ["eopkg", "install"]
        if self.no_interactive:
            # Auto-answer prompts when running unattended.
            command.append("--yes-all")
        command += packages

        self.run_as_root(command)
diff --git a/python/mozboot/mozboot/test/python.ini b/python/mozboot/mozboot/test/python.ini
new file mode 100644
index 0000000000..4947f160f5
--- /dev/null
+++ b/python/mozboot/mozboot/test/python.ini
@@ -0,0 +1,4 @@
+[DEFAULT]
+subsuite = mozbuild
+
+[test_mozconfig.py]
diff --git a/python/mozboot/mozboot/test/test_mozconfig.py b/python/mozboot/mozboot/test/test_mozconfig.py
new file mode 100644
index 0000000000..b7375e8529
--- /dev/null
+++ b/python/mozboot/mozboot/test/test_mozconfig.py
@@ -0,0 +1,229 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+import unittest
+from pathlib import Path
+from shutil import rmtree
+from tempfile import gettempdir, mkdtemp
+
+import pytest
+from mozunit import main
+
+from mozboot.mozconfig import (
+ DEFAULT_TOPSRCDIR_PATHS,
+ DEPRECATED_HOME_PATHS,
+ DEPRECATED_TOPSRCDIR_PATHS,
+ MozconfigFindException,
+ find_mozconfig,
+)
+
+
class TestFindMozconfig(unittest.TestCase):
    """Unit tests for mozboot.mozconfig.find_mozconfig()."""

    def setUp(self):
        # Snapshot the environment and clear variables that influence
        # mozconfig discovery so each test starts from a clean slate.
        self._old_env = dict(os.environ)
        os.environ.pop("MOZCONFIG", None)
        os.environ.pop("MOZ_OBJDIR", None)
        os.environ.pop("CC", None)
        os.environ.pop("CXX", None)
        self._temp_dirs = set()

    def tearDown(self):
        # Restore the saved environment and delete every temp dir created
        # through get_temp_dir().
        os.environ.clear()
        os.environ.update(self._old_env)

        for temp_dir in self._temp_dirs:
            rmtree(str(temp_dir))

    def get_temp_dir(self):
        # Create a fresh temp dir and register it for removal in tearDown().
        new_temp_dir = Path(mkdtemp())
        self._temp_dirs.add(new_temp_dir)

        return new_temp_dir

    def test_find_legacy_env(self):
        """Ensure legacy mozconfig path definitions result in error."""

        os.environ["MOZ_MYCONFIG"] = "/foo"

        with self.assertRaises(MozconfigFindException) as e:
            find_mozconfig(self.get_temp_dir())

        self.assertTrue(str(e.exception).startswith("The MOZ_MYCONFIG"))

    def test_find_multiple_configs(self):
        """Ensure multiple relative-path MOZCONFIGs result in error."""
        relative_mozconfig = ".mconfig"
        os.environ["MOZCONFIG"] = relative_mozconfig

        src_dir = self.get_temp_dir()
        cur_dir = self.get_temp_dir()
        dirs = [src_dir, cur_dir]
        for iter_dir in dirs:
            path = iter_dir / relative_mozconfig
            with open(path, "w") as file:
                # Write distinct content so the two files don't compare equal.
                file.write(str(path))

        orig_dir = Path.cwd()
        try:
            # chdir so the relative MOZCONFIG resolves from cur_dir too.
            os.chdir(cur_dir)
            with self.assertRaises(MozconfigFindException) as e:
                find_mozconfig(src_dir)
        finally:
            os.chdir(orig_dir)

        self.assertIn("exists in more than one of", str(e.exception))
        for iter_dir in dirs:
            self.assertIn(str(iter_dir.resolve()), str(e.exception))

    def test_find_multiple_but_identical_configs(self):
        """Ensure multiple relative-path MOZCONFIGs pointing at the same file are OK."""
        relative_mozconfig = "../src/.mconfig"
        os.environ["MOZCONFIG"] = relative_mozconfig

        top_dir = self.get_temp_dir()
        src_dir = top_dir / "src"
        src_dir.mkdir()
        cur_dir = top_dir / "obj"
        cur_dir.mkdir()

        # The relative path resolves to the same file from both src and obj.
        path = src_dir / relative_mozconfig
        with open(path, "w"):
            pass

        orig_dir = Path.cwd()
        try:
            os.chdir(cur_dir)
            self.assertEqual(Path(find_mozconfig(src_dir)).resolve(), path.resolve())
        finally:
            os.chdir(orig_dir)

    def test_find_no_relative_configs(self):
        """Ensure a missing relative-path MOZCONFIG is detected."""
        relative_mozconfig = ".mconfig"
        os.environ["MOZCONFIG"] = relative_mozconfig

        src_dir = self.get_temp_dir()
        cur_dir = self.get_temp_dir()
        dirs = [src_dir, cur_dir]

        orig_dir = Path.cwd()
        try:
            os.chdir(cur_dir)
            with self.assertRaises(MozconfigFindException) as e:
                find_mozconfig(src_dir)
        finally:
            os.chdir(orig_dir)

        self.assertIn("does not exist in any of", str(e.exception))
        for iter_dir in dirs:
            self.assertIn(str(iter_dir.resolve()), str(e.exception))

    def test_find_relative_mozconfig(self):
        """Ensure a relative MOZCONFIG can be found in the srcdir."""
        relative_mozconfig = ".mconfig"
        os.environ["MOZCONFIG"] = relative_mozconfig

        src_dir = Path(self.get_temp_dir())
        cur_dir = Path(self.get_temp_dir())

        # Only the srcdir copy exists; it should be the one found.
        path = src_dir / relative_mozconfig
        with open(path, "w"):
            pass

        orig_dir = Path.cwd()
        try:
            os.chdir(cur_dir)
            self.assertEqual(
                str(Path(find_mozconfig(src_dir)).resolve()), str(path.resolve())
            )
        finally:
            os.chdir(orig_dir)

    @pytest.mark.skipif(
        sys.platform.startswith("win"),
        reason="This test uses unix-style absolute paths, since we now use Pathlib, and "
        "`is_absolute()` always returns `False` on Windows if there isn't a drive"
        " letter, this test is invalid for Windows.",
    )
    def test_find_abs_path_not_exist(self):
        """Ensure a missing absolute path is detected."""
        os.environ["MOZCONFIG"] = "/foo/bar/does/not/exist"

        with self.assertRaises(MozconfigFindException) as e:
            find_mozconfig(self.get_temp_dir())

        self.assertIn("path that does not exist", str(e.exception))
        self.assertIn("/foo/bar/does/not/exist", str(e.exception))

    def test_find_path_not_file(self):
        """Ensure non-file paths are detected."""

        # The system temp dir is a directory, not a file.
        os.environ["MOZCONFIG"] = gettempdir()

        with self.assertRaises(MozconfigFindException) as e:
            find_mozconfig(self.get_temp_dir())

        self.assertIn("refers to a non-file", str(e.exception))
        self.assertTrue(str(e.exception).endswith(gettempdir()))

    def test_find_default_files(self):
        """Ensure default paths are used when present."""
        for default_dir in DEFAULT_TOPSRCDIR_PATHS:
            temp_dir = self.get_temp_dir()
            path = temp_dir / default_dir

            with open(path, "w"):
                pass

            self.assertEqual(Path(find_mozconfig(temp_dir)), path)

    def test_find_multiple_defaults(self):
        """Ensure we error when multiple default files are present."""
        # Sanity-check that there are at least two defaults to collide.
        self.assertGreater(len(DEFAULT_TOPSRCDIR_PATHS), 1)

        temp_dir = self.get_temp_dir()
        for default_dir in DEFAULT_TOPSRCDIR_PATHS:
            with open(temp_dir / default_dir, "w"):
                pass

        with self.assertRaises(MozconfigFindException) as e:
            find_mozconfig(temp_dir)

        self.assertIn("Multiple default mozconfig files present", str(e.exception))

    def test_find_deprecated_path_srcdir(self):
        """Ensure we error when deprecated path locations are present."""
        for deprecated_dir in DEPRECATED_TOPSRCDIR_PATHS:
            temp_dir = self.get_temp_dir()
            with open(temp_dir / deprecated_dir, "w"):
                pass

            with self.assertRaises(MozconfigFindException) as e:
                find_mozconfig(temp_dir)

            self.assertIn("This implicit location is no longer", str(e.exception))
            self.assertIn(str(temp_dir), str(e.exception))

    def test_find_deprecated_home_paths(self):
        """Ensure we error when deprecated home directory paths are present."""

        for deprecated_path in DEPRECATED_HOME_PATHS:
            # Point HOME at a fresh temp dir holding the deprecated file.
            home = self.get_temp_dir()
            os.environ["HOME"] = str(home)
            path = home / deprecated_path

            with open(path, "w"):
                pass

            with self.assertRaises(MozconfigFindException) as e:
                find_mozconfig(self.get_temp_dir())

            self.assertIn("This implicit location is no longer", str(e.exception))
            self.assertIn(str(path), str(e.exception))
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozboot/mozboot/util.py b/python/mozboot/mozboot/util.py
new file mode 100644
index 0000000000..583c08bf76
--- /dev/null
+++ b/python/mozboot/mozboot/util.py
@@ -0,0 +1,49 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
import hashlib
import os
import ssl
from pathlib import Path
from urllib.request import urlopen

import certifi
from mach.site import PythonVirtualenv
from mach.util import get_state_dir
+
+MINIMUM_RUST_VERSION = "1.66.0"
+
+
def get_tools_dir(srcdir=False):
    """Return the directory holding bootstrapped tools.

    In automation (MOZ_AUTOMATION with MOZ_FETCHES_DIR set) tools are
    pre-fetched into MOZ_FETCHES_DIR; otherwise fall back to the mach
    state directory.
    """
    in_automation = os.environ.get("MOZ_AUTOMATION")
    if in_automation and "MOZ_FETCHES_DIR" in os.environ:
        return os.environ["MOZ_FETCHES_DIR"]
    return get_state_dir(srcdir)
+
+
def get_mach_virtualenv_root():
    """Return the path of the virtualenv that mach itself runs in."""
    state_dir = Path(get_state_dir(specific_to_topsrcdir=True))
    return state_dir / "_virtualenvs" / "mach"
+
+
def get_mach_virtualenv_binary():
    """Return the path of the Python interpreter inside mach's virtualenv."""
    venv = PythonVirtualenv(str(get_mach_virtualenv_root()))
    return Path(venv.python_path)
+
+
def http_download_and_save(url, dest: Path, hexhash, digest="sha256"):
    """Download the given url and save it to dest. hexhash is a checksum
    that will be used to validate the downloaded file using the given
    digest algorithm. The value of digest can be any value accepted by
    hashlib.new. The default digest used is 'sha256'.

    Raises ValueError (and removes dest) when the digest of the downloaded
    data does not match hexhash."""
    # urlopen's `cafile` argument was deprecated in Python 3.6 and removed
    # in 3.10; build an SSL context carrying certifi's CA bundle instead.
    context = ssl.create_default_context(cafile=certifi.where())
    h = hashlib.new(digest)
    # Close both the HTTP response and the output file deterministically.
    with urlopen(url, context=context) as f, open(dest, "wb") as out:
        # Stream in chunks so large downloads use bounded memory.
        while True:
            data = f.read(65536)
            if not data:
                break
            out.write(data)
            h.update(data)
    if h.hexdigest() != hexhash:
        dest.unlink()
        raise ValueError("Hash of downloaded file does not match expected hash")
diff --git a/python/mozboot/mozboot/void.py b/python/mozboot/mozboot/void.py
new file mode 100644
index 0000000000..8ae0198ace
--- /dev/null
+++ b/python/mozboot/mozboot/void.py
@@ -0,0 +1,41 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozboot.base import BaseBootstrapper
+from mozboot.linux_common import LinuxBootstrapper
+
+
class VoidBootstrapper(LinuxBootstrapper, BaseBootstrapper):
    """Bootstrapper for Void Linux, driving the xbps package manager."""

    def __init__(self, version, dist_id, **kwargs):
        BaseBootstrapper.__init__(self, **kwargs)

        self.distro = "void"
        self.version = version
        self.dist_id = dist_id

    def run_as_root(self, command):
        # VoidLinux doesn't support users sudo'ing most commands by default because of the group
        # configuration.
        super().run_as_root(command, may_use_sudo=False)

    def xbps_install(self, *packages):
        # Build the install command; -y skips prompts in non-interactive mode.
        cmd = ["xbps-install"]
        if self.no_interactive:
            cmd.append("-y")
        cmd.extend(packages)
        self.run_as_root(cmd)

    def xbps_update(self):
        # Sync repositories and upgrade installed packages.
        cmd = ["xbps-install", "-Su"]
        if self.no_interactive:
            cmd.append("-y")
        self.run_as_root(cmd)

    def install_packages(self, packages):
        self.xbps_install(*packages)

    def _update_package_manager(self):
        self.xbps_update()
diff --git a/python/mozboot/mozboot/windows.py b/python/mozboot/mozboot/windows.py
new file mode 100644
index 0000000000..8f70a70b2f
--- /dev/null
+++ b/python/mozboot/mozboot/windows.py
@@ -0,0 +1,127 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import ctypes
+import os
+import subprocess
+import sys
+
+from mozfile import which
+
+from mozboot.base import BaseBootstrapper
+
+
def is_aarch64_host():
    """Return True when the native Windows machine architecture is ARM64."""
    from ctypes import wintypes

    kernel32 = ctypes.windll.kernel32
    IMAGE_FILE_MACHINE_UNKNOWN = 0
    IMAGE_FILE_MACHINE_ARM64 = 0xAA64

    try:
        iswow64process2 = kernel32.IsWow64Process2
    except Exception:
        # If we can't access the symbol, we know we're not on aarch64.
        return False

    current_process = kernel32.GetCurrentProcess()
    process_machine = wintypes.USHORT(IMAGE_FILE_MACHINE_UNKNOWN)
    native_machine = wintypes.USHORT(IMAGE_FILE_MACHINE_UNKNOWN)

    ok = iswow64process2(
        current_process, ctypes.byref(process_machine), ctypes.byref(native_machine)
    )
    if not ok:
        # If this call fails, we have no idea.
        return False

    return native_machine.value == IMAGE_FILE_MACHINE_ARM64
+
+
class WindowsBootstrapper(BaseBootstrapper):
    """Bootstrapper for msys2 based environments for building in Windows."""

    SYSTEM_PACKAGES = [
        "mingw-w64-x86_64-make",
        "mingw-w64-x86_64-perl",
        "patch",
        "patchutils",
        "diffutils",
        "tar",
        "unzip",
        "mingw-w64-x86_64-toolchain",  # TODO: Remove when Mercurial is installable from a wheel.
        "mingw-w64-i686-toolchain",
    ]

    BROWSER_PACKAGES = ["mingw-w64-x86_64-nasm", "mingw-w64-i686-nsis"]

    def __init__(self, **kwargs):
        # Windows bootstrap is opt-in while it is under development.
        if os.environ.get("MOZ_WINDOWS_BOOTSTRAP") != "1":
            raise NotImplementedError(
                "Bootstrap support for Windows is under development. For "
                "now use MozillaBuild to set up a build environment on "
                "Windows. If you are testing Windows Bootstrap support, "
                "try `export MOZ_WINDOWS_BOOTSTRAP=1`"
            )
        BaseBootstrapper.__init__(self, **kwargs)
        if not which("pacman"):
            raise NotImplementedError(
                "The Windows bootstrapper only works with msys2 with "
                "pacman. Get msys2 at http://msys2.github.io/"
            )
        print("Using an experimental bootstrapper for Windows.")

    def install_system_packages(self):
        self.pacman_install(*self.SYSTEM_PACKAGES)

    def upgrade_mercurial(self, current):
        self.pip_install("mercurial")

    def install_browser_packages(self, mozconfig_builder):
        self.pacman_install(*self.BROWSER_PACKAGES)

    def install_mobile_android_packages(self, mozconfig_builder):
        raise NotImplementedError("We do not support building Android on Windows. Sorry!")

    def ensure_mobile_android_packages(self):
        raise NotImplementedError("We do not support building Android on Windows. Sorry!")

    def install_mobile_android_artifact_mode_packages(self, mozconfig_builder):
        raise NotImplementedError("We do not support building Android on Windows. Sorry!")

    def _update_package_manager(self):
        self.pacman_update()

    def run(self, command):
        # Forward our stdin so pacman can prompt interactively when needed.
        subprocess.check_call(command, stdin=sys.stdin)

    def pacman_update(self):
        self.run(["pacman", "--sync", "--refresh"])

    def pacman_upgrade(self):
        self.run(["pacman", "--sync", "--refresh", "--sysupgrade"])

    def pacman_install(self, *packages):
        cmd = ["pacman", "--sync", "--needed"]
        if self.no_interactive:
            cmd.append("--noconfirm")
        cmd.extend(packages)
        self.run(cmd)

    def pip_install(self, *packages):
        self.run(["pip", "install", "--upgrade", *packages])
diff --git a/python/mozboot/setup.py b/python/mozboot/setup.py
new file mode 100644
index 0000000000..234650dc8a
--- /dev/null
+++ b/python/mozboot/setup.py
@@ -0,0 +1,16 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
# NOTE(review): distutils was deprecated in Python 3.10 and removed from the
# standard library in 3.12 (PEP 632); this script will need to migrate to
# setuptools when run under 3.12+ — confirm the supported Python range.
from distutils.core import setup

# Package version; bump when publishing a new mozboot release.
VERSION = "0.1"

# Minimal packaging metadata: ships the mozboot package plus the
# bin/bootstrap.py entry script.
setup(
    name="mozboot",
    description="System bootstrap for building Mozilla projects.",
    license="MPL 2.0",
    packages=["mozboot"],
    version=VERSION,
    scripts=["bin/bootstrap.py"],
)
diff --git a/python/mozbuild/.ruff.toml b/python/mozbuild/.ruff.toml
new file mode 100644
index 0000000000..ba54f854aa
--- /dev/null
+++ b/python/mozbuild/.ruff.toml
@@ -0,0 +1,9 @@
+extend = "../../pyproject.toml"
+src = [
+ # Treat direct imports in the test modules as first party.
+ "mozpack/test",
+ "mozbuild/test",
+]
+
+[isort]
+known-first-party = ["mozbuild"]
diff --git a/python/mozbuild/metrics.yaml b/python/mozbuild/metrics.yaml
new file mode 100644
index 0000000000..068dd6a389
--- /dev/null
+++ b/python/mozbuild/metrics.yaml
@@ -0,0 +1,140 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# If this file is changed, update the generated docs:
+# https://firefox-source-docs.mozilla.org/mach/telemetry.html#updating-generated-metrics-docs
+
+# Adding a new metric? We have docs for that!
+# https://mozilla.github.io/glean/book/user/metrics/adding-new-metrics.html
+
+---
+$schema: moz://mozilla.org/schemas/glean/metrics/1-0-0
+
+mozbuild:
+ compiler:
+ type: string
+ description: The compiler type in use (CC_TYPE), such as "clang" or "gcc".
+ lifetime: application
+ bugs:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053
+ data_reviews:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34
+ notification_emails:
+ - build-telemetry@mozilla.com
+ - mhentges@mozilla.com
+ expires: never
+ send_in_pings:
+ - usage
+ artifact:
+ type: boolean
+ description: True if `--enable-artifact-builds`.
+ lifetime: application
+ bugs:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053
+ data_reviews:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34
+ notification_emails:
+ - build-telemetry@mozilla.com
+ - mhentges@mozilla.com
+ expires: never
+ send_in_pings:
+ - usage
+ debug:
+ type: boolean
+ description: True if `--enable-debug`.
+ lifetime: application
+ bugs:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053
+ data_reviews:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34
+ notification_emails:
+ - build-telemetry@mozilla.com
+ - mhentges@mozilla.com
+ expires: never
+ send_in_pings:
+ - usage
+ opt:
+ type: boolean
+ description: True if `--enable-optimize`.
+ lifetime: application
+ bugs:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053
+ data_reviews:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34
+ notification_emails:
+ - build-telemetry@mozilla.com
+ - mhentges@mozilla.com
+ expires: never
+ send_in_pings:
+ - usage
+ ccache:
+ type: boolean
+ description: True if `--with-ccache`.
+ lifetime: application
+ bugs:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053
+ data_reviews:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34
+ notification_emails:
+ - build-telemetry@mozilla.com
+ - mhentges@mozilla.com
+ expires: never
+ send_in_pings:
+ - usage
+ sccache:
+ type: boolean
+ description: True if ccache in use is sccache.
+ lifetime: application
+ bugs:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053
+ data_reviews:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34
+ notification_emails:
+ - build-telemetry@mozilla.com
+ - mhentges@mozilla.com
+ expires: never
+ send_in_pings:
+ - usage
+ icecream:
+ type: boolean
+ description: True if icecream in use.
+ lifetime: application
+ bugs:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053
+ data_reviews:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1291053#c34
+ notification_emails:
+ - build-telemetry@mozilla.com
+ - mhentges@mozilla.com
+ expires: never
+ send_in_pings:
+ - usage
+ clobber:
+ type: boolean
+ description: True if the build was a clobber/full build.
+ lifetime: application
+ bugs:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1526072
+ data_reviews:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1526072#c15
+ notification_emails:
+ - build-telemetry@mozilla.com
+ - mhentges@mozilla.com
+ expires: never
+ send_in_pings:
+ - usage
+ project:
+ type: string
+ description: The project being built.
+ lifetime: application
+ bugs:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1654084
+ data_reviews:
+ - https://bugzilla.mozilla.org/show_bug.cgi?id=1654084#c2
+ notification_emails:
+ - build-telemetry@mozilla.com
+ - mhentges@mozilla.com
+ expires: never
+ send_in_pings:
+ - usage
diff --git a/python/mozbuild/mozbuild/__init__.py b/python/mozbuild/mozbuild/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/__init__.py
diff --git a/python/mozbuild/mozbuild/action/__init__.py b/python/mozbuild/mozbuild/action/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/__init__.py
diff --git a/python/mozbuild/mozbuild/action/buildlist.py b/python/mozbuild/mozbuild/action/buildlist.py
new file mode 100644
index 0000000000..ab32ad92cc
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/buildlist.py
@@ -0,0 +1,49 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""A generic script to add entries to a file
+if the entry does not already exist.
+
+Usage: buildlist.py <filename> <entry> [<entry> ...]
+"""
+import io
+import os
+import sys
+
+from mozbuild.action.util import log_build_task
+from mozbuild.util import ensureParentDir, lock_file
+
+
def addEntriesToListFile(listFile, entries):
    """Given a file ``listFile`` containing one entry per line,
    add each entry in ``entries`` to the file, unless it is already
    present."""
    ensureParentDir(listFile)
    # Serialize concurrent writers against each other via a lock file.
    lock = lock_file(listFile + ".lck")
    try:
        if os.path.exists(listFile):
            with io.open(listFile) as fh:
                existing = set(line.strip() for line in fh.readlines())
        else:
            existing = set()
        # Merge in the new entries; duplicates are absorbed by the set.
        existing.update(entries)
        # Rewrite the file sorted, one entry per line.
        with io.open(listFile, "w", newline="\n") as fh:
            fh.write("\n".join(sorted(existing)) + "\n")
    finally:
        del lock  # Explicitly release the lock_file to free it
+
+
def main(args):
    """CLI entry point: ``args`` is ``[listFile, entry, ...]``."""
    if len(args) >= 2:
        return addEntriesToListFile(args[0], args[1:])
    print("Usage: buildlist.py <list file> <entry> [<entry> ...]", file=sys.stderr)
    return 1
+
+
+if __name__ == "__main__":
+ sys.exit(log_build_task(main, sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/check_binary.py b/python/mozbuild/mozbuild/action/check_binary.py
new file mode 100644
index 0000000000..baf39860de
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/check_binary.py
@@ -0,0 +1,343 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import os
+import re
+import subprocess
+import sys
+
+import buildconfig
+from mozpack.executables import ELF, UNKNOWN, get_type
+from packaging.version import Version
+
+from mozbuild.action.util import log_build_task
+from mozbuild.util import memoize
+
# Maximum library symbol versions binaries may depend on; newer versions
# would make the binaries fail to run on older Linux distributions.
STDCXX_MAX_VERSION = Version("3.4.19")
CXXABI_MAX_VERSION = Version("1.3.7")
GLIBC_MAX_VERSION = Version("2.17")
LIBGCC_MAX_VERSION = Version("4.8")

# Descriptors for the build host and build target: the platform name plus
# the readelf executable used to inspect binaries for that side.
HOST = {"platform": buildconfig.substs["HOST_OS_ARCH"], "readelf": "readelf"}

TARGET = {
    "platform": buildconfig.substs["OS_TARGET"],
    "readelf": buildconfig.substs.get("READELF", "readelf"),
}

# 8-16 hex digits — presumably matches addresses in readelf output.
ADDR_RE = re.compile(r"[0-9a-f]{8,16}")

# Pointer size of the build, used as a guess for NSModule entry size.
if buildconfig.substs.get("HAVE_64BIT_BUILD"):
    GUESSED_NSMODULE_SIZE = 8
else:
    GUESSED_NSMODULE_SIZE = 4


# Cache binary-type lookups; the same binary may be inspected several times.
get_type = memoize(get_type)
+
+
@memoize
def get_output(*cmd):
    """Run ``cmd`` and return its output as a list of text lines.

    LC_ALL is forced to "C" so tool output is not localized. Results are
    memoized because the same command may be issued repeatedly.
    """
    env = dict(os.environ)
    # Use str (not bytes) for the override: mixing a bytes key into an
    # otherwise-str environment can produce duplicate/ambiguous LC_ALL
    # entries after encoding on POSIX, and fails outright on Windows.
    env["LC_ALL"] = "C"
    return subprocess.check_output(cmd, env=env, universal_newlines=True).splitlines()
+
+
class Skip(RuntimeError):
    """Raised by a check to signal it does not apply to the given binary."""

    pass
+
+
class Empty(RuntimeError):
    """Raised when an iterator unexpectedly yielded no items at all."""

    pass
+
+
def at_least_one(iter):
    """Yield every item of *iter*, raising ``Empty`` if there were none."""
    produced_any = False
    for item in iter:
        produced_any = True
        yield item
    if not produced_any:
        raise Empty()
+
+
+# Iterates the symbol table on ELF binaries.
+def iter_elf_symbols(target, binary, all=False):
+ ty = get_type(binary)
+ # Static libraries are ar archives. Assume they are ELF.
+ if ty == UNKNOWN and open(binary, "rb").read(8) == b"!<arch>\n":
+ ty = ELF
+ assert ty == ELF
+ for line in get_output(
+ target["readelf"], "--wide", "--syms" if all else "--dyn-syms", binary
+ ):
+ data = line.split()
+ if not (len(data) >= 8 and data[0].endswith(":") and data[0][:-1].isdigit()):
+ continue
+ n, addr, size, type, bind, vis, index, name = data[:8]
+
+ if "@" in name:
+ name, ver = name.rsplit("@", 1)
+ while name.endswith("@"):
+ name = name[:-1]
+ else:
+ ver = None
+ yield {
+ "addr": int(addr, 16),
+ # readelf output may contain decimal values or hexadecimal
+ # values prefixed with 0x for the size. Let python autodetect.
+ "size": int(size, 0),
+ "name": name,
+ "version": ver,
+ }
+
+
def iter_readelf_dynamic(target, binary):
    """Yield (tag, value) pairs from readelf's dynamic-section listing."""
    for line in get_output(target["readelf"], "-d", binary):
        fields = line.split(None, 2)
        # Dynamic entries start with a 0x-prefixed tag number.
        if len(fields) == 3 and fields[0].startswith("0x"):
            yield fields[1].rstrip(")").lstrip("("), fields[2]
+
+
def check_binary_compat(target, binary):
    """Ensure ``binary`` only references symbol versions old enough to run
    on our oldest supported Linux systems.

    Raises ``Skip`` for non-ELF binaries and ``RuntimeError`` when a
    too-new glibc/libstdc++/libgcc symbol version is referenced.
    """
    if get_type(binary) != ELF:
        raise Skip()
    # (library name, symbol-version prefix, maximum allowed version)
    checks = (
        ("libstdc++", "GLIBCXX_", STDCXX_MAX_VERSION),
        ("libstdc++", "CXXABI_", CXXABI_MAX_VERSION),
        ("libgcc", "GCC_", LIBGCC_MAX_VERSION),
        ("libc", "GLIBC_", GLIBC_MAX_VERSION),
    )

    unwanted = {}
    try:
        for sym in at_least_one(iter_elf_symbols(target, binary)):
            # Only check versions on undefined symbols
            if sym["addr"] != 0:
                continue

            # No version to check
            if not sym["version"]:
                continue

            for _, prefix, max_version in checks:
                if sym["version"].startswith(prefix):
                    version = Version(sym["version"][len(prefix) :])
                    if version > max_version:
                        unwanted.setdefault(prefix, []).append(sym)
    except Empty:
        # The symbols come from readelf (not llvm-objdump), so name the
        # right tool when parsing fails.
        raise RuntimeError("Could not parse readelf output?")
    if unwanted:
        error = []
        for lib, prefix, _ in checks:
            if prefix in unwanted:
                error.append(
                    "We do not want these {} symbol versions to be used:".format(lib)
                )
                error.extend(
                    " {} ({})".format(s["name"], s["version"]) for s in unwanted[prefix]
                )
        raise RuntimeError("\n".join(error))
+
+
def check_textrel(target, binary):
    """Fail when a target ELF binary contains text relocations."""
    if target is HOST or get_type(binary) != ELF:
        raise Skip()
    try:
        for tag, value in at_least_one(iter_readelf_dynamic(target, binary)):
            # TEXTREL may appear as its own tag or as a FLAGS bit.
            has_textrel = tag == "TEXTREL" or (tag == "FLAGS" and "TEXTREL" in value)
            if has_textrel:
                raise RuntimeError(
                    "We do not want text relocations in libraries and programs"
                )
    except Empty:
        raise RuntimeError("Could not parse readelf output?")
+
+
def ishex(s):
    """Return True when *s* parses as a base-16 integer."""
    try:
        int(s, 16)
    except ValueError:
        return False
    return True
+
+
def is_libxul(binary):
    """Return True when *binary*'s file name refers to libxul."""
    return "xul" in os.path.basename(binary).lower()
+
+
def check_pt_load(target, binary):
    """Verify that libxul carries more than one PT_LOAD segment."""
    if target is HOST or get_type(binary) != ELF or not is_libxul(binary):
        raise Skip()
    load_segments = 0
    for line in get_output(target["readelf"], "-l", binary):
        fields = line.split()
        if fields and fields[0] == "LOAD":
            load_segments += 1
    if load_segments <= 1:
        raise RuntimeError("Expected more than one PT_LOAD segment")
+
+
def check_mozglue_order(target, binary):
    """On Android targets, verify libmozglue.so is a NEEDED dependency
    listed before libc.so; raises Skip elsewhere."""
    if target is HOST or target["platform"] != "Android":
        raise Skip()
    # While this is very unlikely (libc being added by the compiler at the end
    # of the linker command line), if libmozglue.so ends up after libc.so, all
    # hell breaks loose, so better safe than sorry, and check it's actually the
    # case.
    try:
        mozglue = libc = None
        # Record the index of each NEEDED entry of interest — presumably the
        # dynamic section lists dependencies in link order (TODO confirm).
        for n, (tag, value) in enumerate(
            at_least_one(iter_readelf_dynamic(target, binary))
        ):
            if tag == "NEEDED":
                if "[libmozglue.so]" in value:
                    mozglue = n
                elif "[libc.so]" in value:
                    libc = n
        if libc is None:
            raise RuntimeError("libc.so is not linked?")
        if mozglue is not None and libc < mozglue:
            raise RuntimeError("libmozglue.so must be linked before libc.so")
    except Empty:
        raise RuntimeError("Could not parse readelf output?")
+
+
def check_networking(target, binary):
    """Fail when a Rust static library imports low-level networking symbols.

    Prints a TEST-UNEXPECTED-FAIL line and returns 1 when any known
    networking function is undefined (i.e. imported) in ``binary``;
    returns 0 otherwise.
    """
    retcode = 0
    networking_functions = set(
        [
            # socketpair is not concerning; it is restricted to AF_UNIX
            "connect",
            "accept",
            "listen",
            "getsockname",
            "getsockopt",
            "recv",
            "send",
            # We would be concerned by recvmsg and sendmsg; but we believe
            # they are okay as documented in 1376621#c23
            "gethostbyname",
            "gethostbyaddr",
            "gethostent",
            "sethostent",
            "endhostent",
            "gethostent_r",
            "gethostbyname2",
            "gethostbyaddr_r",
            "gethostbyname_r",
            "gethostbyname2_r",
            "getservent",
            "getservbyname",
            "getservbyport",
            "setservent",
            "getprotoent",
            "getprotobyname",
            "getprotobynumber",
            "setprotoent",
            "endprotoent",
        ]
    )
    bad_occurrence_names = set()

    try:
        for sym in at_least_one(iter_elf_symbols(target, binary, all=True)):
            # addr == 0 means the symbol is undefined, i.e. imported.
            if sym["addr"] == 0 and sym["name"] in networking_functions:
                bad_occurrence_names.add(sym["name"])
    except Empty:
        # The symbols come from readelf, so name the right tool on failure.
        raise RuntimeError("Could not parse readelf output?")

    basename = os.path.basename(binary)
    if bad_occurrence_names:
        s = (
            "TEST-UNEXPECTED-FAIL | check_networking | {} | Identified {} "
            + "networking function(s) being imported in the rust static library ({})"
        )
        print(
            s.format(
                basename,
                len(bad_occurrence_names),
                ",".join(sorted(bad_occurrence_names)),
            ),
            file=sys.stderr,
        )
        retcode = 1
    elif buildconfig.substs.get("MOZ_AUTOMATION"):
        print("TEST-PASS | check_networking | {}".format(basename))
    return retcode
+
+
def checks(target, binary):
    """Run every applicable check against ``binary``; return 0 on success,
    1 if any check failed."""
    # The clang-plugin is built as target but is really a host binary.
    # Cheat and pretend we were passed the right argument.
    if "clang-plugin" in binary:
        target = HOST

    selected = []
    if buildconfig.substs.get("MOZ_STDCXX_COMPAT") and target["platform"] == "Linux":
        selected.append(check_binary_compat)

    # Disabled for local builds because of readelf performance: See bug 1472496
    if not buildconfig.substs.get("DEVELOPER_OPTIONS"):
        selected.append(check_textrel)
        selected.append(check_pt_load)
        selected.append(check_mozglue_order)

    retcode = 0
    basename = os.path.basename(binary)
    for check in selected:
        name = check.__name__
        try:
            check(target, binary)
        except Skip:
            pass
        except RuntimeError as e:
            print(
                "TEST-UNEXPECTED-FAIL | {} | {} | {}".format(name, basename, str(e)),
                file=sys.stderr,
            )
            retcode = 1
        else:
            if buildconfig.substs.get("MOZ_AUTOMATION"):
                print("TEST-PASS | {} | {}".format(name, basename))
    return retcode
+
+
def main(args):
    """Parse arguments and dispatch to the requested binary checks."""
    parser = argparse.ArgumentParser(description="Check built binaries")

    parser.add_argument(
        "--host", action="store_true", help="Perform checks for a host binary"
    )
    parser.add_argument(
        "--target", action="store_true", help="Perform checks for a target binary"
    )
    parser.add_argument(
        "--networking",
        action="store_true",
        help="Perform checks for networking functions",
    )

    parser.add_argument(
        "binary", metavar="PATH", help="Location of the binary to check"
    )

    options = parser.parse_args(args)

    # Guard clauses: exactly one side must be selected, and --networking
    # only makes sense for target binaries.
    if options.host == options.target:
        print("Exactly one of --host or --target must be given", file=sys.stderr)
        return 1

    if options.networking and options.host:
        print("--networking is only valid with --target", file=sys.stderr)
        return 1

    if options.networking:
        return check_networking(TARGET, options.binary)
    if options.host:
        return checks(HOST, options.binary)
    return checks(TARGET, options.binary)
+
+if __name__ == "__main__":
+ sys.exit(log_build_task(main, sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/download_wpt_manifest.py b/python/mozbuild/mozbuild/action/download_wpt_manifest.py
new file mode 100644
index 0000000000..84f4a15d14
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/download_wpt_manifest.py
@@ -0,0 +1,21 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This action is used to generate the wpt manifest
+
+import sys
+
+import buildconfig
+
+
def main():
    """Generate the web-platform-tests manifest; return a shell exit code."""
    print("Downloading wpt manifest")
    # manifestupdate lives in the srcdir, which isn't on sys.path by default.
    sys.path.insert(0, buildconfig.topsrcdir)
    import manifestupdate

    ok = manifestupdate.run(buildconfig.topsrcdir, buildconfig.topobjdir)
    return 0 if ok else 1
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/python/mozbuild/mozbuild/action/dump_env.py b/python/mozbuild/mozbuild/action/dump_env.py
new file mode 100644
index 0000000000..ec178700eb
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/dump_env.py
@@ -0,0 +1,30 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# We invoke a Python program to dump our environment in order to get
+# native paths printed on Windows so that these paths can be incorporated
+# into Python configure's environment.
+import os
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
+
+from shellutil import quote
+
+
def environ():
    """Return the current environment as a list of (key, value) pairs.

    os.environ always yields str keys and values on Python 3, so the old
    Python 2 decoding shim (which referenced the undefined name ``unicode``
    on Python 3) was dead code and has been removed.
    """
    return list(os.environ.items())
+
+
# Emit each variable as NAME=value with the value shell-quoted, so the
# consumer (Python configure) can safely re-parse the output.
for key, value in environ():
    print("%s=%s" % (key, quote(value)))
diff --git a/python/mozbuild/mozbuild/action/dumpsymbols.py b/python/mozbuild/mozbuild/action/dumpsymbols.py
new file mode 100644
index 0000000000..0af2c1c4e5
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/dumpsymbols.py
@@ -0,0 +1,109 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import os
+import shutil
+import subprocess
+import sys
+
+import buildconfig
+
+
def dump_symbols(target, tracking_file, count_ctors=False):
    """Generate crashreporter symbols for ``target`` via symbolstore.py.

    ``tracking_file`` records the paths of symbols produced by the previous
    run so they can be purged before regenerating; ``count_ctors`` forwards
    --count-ctors to symbolstore.py. Raises on symbolstore.py failure.
    """
    # Our tracking file, if present, will contain path(s) to the previously generated
    # symbols. Remove them in this case so we don't simply accumulate old symbols
    # during incremental builds.
    if os.path.isfile(os.path.normpath(tracking_file)):
        with open(tracking_file, "r") as fh:
            files = fh.read().splitlines()
        dirs = set(os.path.dirname(f) for f in files)
        for d in dirs:
            shutil.rmtree(
                os.path.join(buildconfig.topobjdir, "dist", "crashreporter-symbols", d),
                ignore_errors=True,
            )

    # Build default args for symbolstore.py based on platform.
    sym_store_args = []

    dump_syms_bin = buildconfig.substs["DUMP_SYMS"]
    os_arch = buildconfig.substs["OS_ARCH"]
    if os_arch == "WINNT":
        sym_store_args.extend(["-c", "--vcs-info"])
        if "PDBSTR" in buildconfig.substs:
            sym_store_args.append("-i")
    elif os_arch == "Darwin":
        # Map the target CPU to the architecture names used on macOS.
        cpu = {
            "x86": "i386",
            "aarch64": "arm64",
        }.get(buildconfig.substs["TARGET_CPU"], buildconfig.substs["TARGET_CPU"])
        sym_store_args.extend(["-c", "-a", cpu, "--vcs-info"])
    elif os_arch == "Linux":
        sym_store_args.extend(["-c", "--vcs-info"])

    sym_store_args.append(
        "--install-manifest=%s,%s"
        % (
            os.path.join(
                buildconfig.topobjdir, "_build_manifests", "install", "dist_include"
            ),
            os.path.join(buildconfig.topobjdir, "dist", "include"),
        )
    )
    # symbolstore.py reads OBJCOPY from the environment.
    objcopy = buildconfig.substs.get("OBJCOPY")
    if objcopy:
        os.environ["OBJCOPY"] = objcopy

    if buildconfig.substs.get("MOZ_THUNDERBIRD"):
        sym_store_args.extend(["-s", os.path.join(buildconfig.topsrcdir, "comm")])

    args = (
        [
            sys.executable,
            os.path.join(
                buildconfig.topsrcdir,
                "toolkit",
                "crashreporter",
                "tools",
                "symbolstore.py",
            ),
        ]
        + sym_store_args
        + [
            "-s",
            buildconfig.topsrcdir,
            dump_syms_bin,
            os.path.join(buildconfig.topobjdir, "dist", "crashreporter-symbols"),
            os.path.abspath(target),
        ]
    )
    if count_ctors:
        args.append("--count-ctors")
    print("Running: %s" % " ".join(args))
    out_files = subprocess.check_output(args, universal_newlines=True)
    # Record the generated symbol paths so the next run can clean them up.
    with open(tracking_file, "w", encoding="utf-8", newline="\n") as fh:
        fh.write(out_files)
        fh.flush()
+
+
def main(argv):
    """CLI entry point: parse ``argv`` and run dump_symbols."""
    parser = argparse.ArgumentParser(
        usage="Usage: dumpsymbols.py <library or program> <tracking file>"
    )
    parser.add_argument(
        "--count-ctors",
        action="store_true",
        default=False,
        help="Count static initializers",
    )
    parser.add_argument("library_or_program", help="Path to library or program")
    parser.add_argument("tracking_file", help="Tracking file")
    # Parse the argv we were handed rather than the global sys.argv: the
    # original parse_args() call silently ignored this function's argument.
    args = parser.parse_args(argv)

    return dump_symbols(args.library_or_program, args.tracking_file, args.count_ctors)
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/exe_7z_archive.py b/python/mozbuild/mozbuild/action/exe_7z_archive.py
new file mode 100644
index 0000000000..b0d35be2bf
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/exe_7z_archive.py
@@ -0,0 +1,89 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+import buildconfig
+import mozpack.path as mozpath
+
+from mozbuild.base import BuildEnvironmentNotFoundException
+
+
def archive_exe(pkg_dir, tagfile, sfx_package, package, use_upx):
    """Build a self-extracting 7z installer executable at ``package``.

    Concatenates the SFX stub (optionally recompressed with UPX), the tag
    file and a 7z archive into ``package``, then makes it executable.
    NOTE(review): 7z is invoked with no input path argument and pkg_dir is
    temporarily renamed to ./core — presumably 7z picks up the current
    directory's contents; confirm against callers/7z behavior.
    """
    tmpdir = tempfile.mkdtemp(prefix="tmp")
    try:
        if pkg_dir:
            # Temporarily rename the payload directory to the name the
            # installer expects.
            shutil.move(pkg_dir, "core")

        if use_upx:
            final_sfx = mozpath.join(tmpdir, "7zSD.sfx")
            upx = buildconfig.substs.get("UPX", "upx")
            wine = buildconfig.substs.get("WINE")
            # When cross-building with a Windows upx.exe, run it under Wine.
            if wine and upx.lower().endswith(".exe"):
                cmd = [wine, upx]
            else:
                cmd = [upx]
            subprocess.check_call(
                cmd
                + [
                    "--best",
                    "-o",
                    final_sfx,
                    sfx_package,
                ]
            )
        else:
            final_sfx = sfx_package

        try:
            sevenz = buildconfig.config.substs["7Z"]
        except BuildEnvironmentNotFoundException:
            # configure hasn't been run, just use the default
            sevenz = "7z"
        subprocess.check_call(
            [
                sevenz,
                "a",
                "-r",
                "-t7z",
                mozpath.join(tmpdir, "app.7z"),
                "-mx",
                "-m0=BCJ2",
                "-m1=LZMA:d25",
                "-m2=LZMA:d19",
                "-m3=LZMA:d19",
                "-mb0:1",
                "-mb0s1:2",
                "-mb0s2:3",
            ]
        )

        # Final installer = SFX stub + tag file + 7z payload, concatenated.
        with open(package, "wb") as o:
            for i in [final_sfx, tagfile, mozpath.join(tmpdir, "app.7z")]:
                shutil.copyfileobj(open(i, "rb"), o)
        os.chmod(package, 0o0755)
    finally:
        if pkg_dir:
            # Restore the payload directory's original name.
            shutil.move("core", pkg_dir)
        shutil.rmtree(tmpdir)
+
+
def main(args):
    """CLI entry point; expects exactly five arguments (see usage)."""
    # The original check compared against 4, but five arguments are consumed
    # below (args[0] through args[4]), so a 4-argument call crashed with an
    # IndexError instead of printing the usage message.
    if len(args) != 5:
        print(
            "Usage: exe_7z_archive.py <pkg_dir> <tagfile> <sfx_package> <package> <use_upx>",
            file=sys.stderr,
        )
        return 1
    else:
        archive_exe(args[0], args[1], args[2], args[3], args[4])
        return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/fat_aar.py b/python/mozbuild/mozbuild/action/fat_aar.py
new file mode 100644
index 0000000000..d17d4696a0
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/fat_aar.py
@@ -0,0 +1,185 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Fetch and unpack architecture-specific Maven zips, verify cross-architecture
+compatibility, and ready inputs to an Android multi-architecture fat AAR build.
+"""
+
+import argparse
+import sys
+from collections import OrderedDict, defaultdict
+from hashlib import sha1 # We don't need a strong hash to compare inputs.
+from io import BytesIO
+from zipfile import ZipFile
+
+import mozpack.path as mozpath
+import six
+from mozpack.copier import FileCopier
+from mozpack.files import JarFinder
+from mozpack.mozjar import JarReader
+from mozpack.packager.unpack import UnpackFinder
+
+
+def fat_aar(distdir, aars_paths, no_process=False, no_compatibility_check=False):
+ if no_process:
+ print("Not processing architecture-specific artifact Maven AARs.")
+ return 0
+
+ # Map {filename: {fingerprint: [arch1, arch2, ...]}}.
+ diffs = defaultdict(lambda: defaultdict(list))
+ missing_arch_prefs = set()
+ # Collect multi-architecture inputs to the fat AAR.
+ copier = FileCopier()
+
+ for arch, aar_path in aars_paths.items():
+ # Map old non-architecture-specific path to new architecture-specific path.
+ old_rewrite_map = {
+ "greprefs.js": "{}/greprefs.js".format(arch),
+ "defaults/pref/geckoview-prefs.js": "defaults/pref/{}/geckoview-prefs.js".format(
+ arch
+ ),
+ }
+
+ # Architecture-specific preferences files.
+ arch_prefs = set(old_rewrite_map.values())
+ missing_arch_prefs |= set(arch_prefs)
+
+ jar_finder = JarFinder(aar_path, JarReader(aar_path))
+ for path, fileobj in UnpackFinder(jar_finder):
+ # Native libraries go straight through.
+ if mozpath.match(path, "jni/**"):
+ copier.add(path, fileobj)
+
+ elif path in arch_prefs:
+ copier.add(path, fileobj)
+
+ elif path in ("classes.jar", "annotations.zip"):
+ # annotations.zip differs due to timestamps, but the contents should not.
+
+ # `JarReader` fails on the non-standard `classes.jar` produced by Gradle/aapt,
+ # and it's not worth working around, so we use Python's zip functionality
+ # instead.
+ z = ZipFile(BytesIO(fileobj.open().read()))
+ for r in z.namelist():
+ fingerprint = sha1(z.open(r).read()).hexdigest()
+ diffs["{}!/{}".format(path, r)][fingerprint].append(arch)
+
+ else:
+ fingerprint = sha1(six.ensure_binary(fileobj.open().read())).hexdigest()
+ # There's no need to distinguish `target.maven.zip` from `assets/omni.ja` here,
+ # since in practice they will never overlap.
+ diffs[path][fingerprint].append(arch)
+
+ missing_arch_prefs.discard(path)
+
+ # Some differences are allowed across the architecture-specific AARs. We could allow-list
+ # the actual content, but it's not necessary right now.
+ allow_pattern_list = {
+ "AndroidManifest.xml", # Min SDK version is different for 32- and 64-bit builds.
+ "classes.jar!/org/mozilla/gecko/util/HardwareUtils.class", # Min SDK as well.
+ "classes.jar!/org/mozilla/geckoview/BuildConfig.class",
+ # Each input captures its CPU architecture.
+ "chrome/toolkit/content/global/buildconfig.html",
+ # Bug 1556162: localized resources are not deterministic across
+ # per-architecture builds triggered from the same push.
+ "**/*.ftl",
+ "**/*.dtd",
+ "**/*.properties",
+ }
+
+ not_allowed = OrderedDict()
+
+ def format_diffs(ds):
+ # Like ' armeabi-v7a, arm64-v8a -> XXX\n x86, x86_64 -> YYY'.
+ return "\n".join(
+ sorted(
+ " {archs} -> {fingerprint}".format(
+ archs=", ".join(sorted(archs)), fingerprint=fingerprint
+ )
+ for fingerprint, archs in ds.items()
+ )
+ )
+
+ for p, ds in sorted(diffs.items()):
+ if len(ds) <= 1:
+ # Only one hash across all inputs: roll on.
+ continue
+
+ if any(mozpath.match(p, pat) for pat in allow_pattern_list):
+ print(
+ 'Allowed: Path "{path}" has architecture-specific versions:\n{ds_repr}'.format(
+ path=p, ds_repr=format_diffs(ds)
+ )
+ )
+ continue
+
+ not_allowed[p] = ds
+
+ for p, ds in not_allowed.items():
+ print(
+ 'Disallowed: Path "{path}" has architecture-specific versions:\n{ds_repr}'.format(
+ path=p, ds_repr=format_diffs(ds)
+ )
+ )
+
+ for missing in sorted(missing_arch_prefs):
+ print(
+ "Disallowed: Inputs missing expected architecture-specific input: {missing}".format(
+ missing=missing
+ )
+ )
+
+ if not no_compatibility_check and (missing_arch_prefs or not_allowed):
+ return 1
+
+ output_dir = mozpath.join(distdir, "output")
+ copier.copy(output_dir)
+
+ return 0
+
+
+_ALL_ARCHS = ("armeabi-v7a", "arm64-v8a", "x86_64", "x86")
+
+
+def main(argv):
+ description = """Unpack architecture-specific Maven AARs, verify cross-architecture
+compatibility, and ready inputs to an Android multi-architecture fat AAR build."""
+
+ parser = argparse.ArgumentParser(description=description)
+ parser.add_argument(
+ "--no-process", action="store_true", help="Do not process Maven AARs."
+ )
+ parser.add_argument(
+ "--no-compatibility-check",
+ action="store_true",
+ help="Do not fail if Maven AARs are not compatible.",
+ )
+ parser.add_argument("--distdir", required=True)
+
+ for arch in _ALL_ARCHS:
+ command_line_flag = arch.replace("_", "-")
+ parser.add_argument("--{}".format(command_line_flag), dest=arch)
+
+ args = parser.parse_args(argv)
+
+ args_dict = vars(args)
+
+ aars_paths = {
+ arch: args_dict.get(arch) for arch in _ALL_ARCHS if args_dict.get(arch)
+ }
+
+ if not aars_paths:
+ raise ValueError("You must provide at least one AAR file!")
+
+ return fat_aar(
+ args.distdir,
+ aars_paths,
+ no_process=args.no_process,
+ no_compatibility_check=args.no_compatibility_check,
+ )
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/file_generate.py b/python/mozbuild/mozbuild/action/file_generate.py
new file mode 100644
index 0000000000..98dec4e359
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/file_generate.py
@@ -0,0 +1,155 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# Given a Python script and arguments describing the output file, and
+# the arguments that can be used to generate the output file, call the
+# script's |main| method with appropriate arguments.
+
+import argparse
+import importlib.util
+import os
+import sys
+import traceback
+
+import buildconfig
+import six
+
+from mozbuild.action.util import log_build_task
+from mozbuild.makeutil import Makefile
+from mozbuild.pythonutil import iter_modules_in_path
+from mozbuild.util import FileAvoidWrite
+
+
+def main(argv):
+ parser = argparse.ArgumentParser(
+ "Generate a file from a Python script", add_help=False
+ )
+ parser.add_argument(
+ "--locale", metavar="locale", type=six.text_type, help="The locale in use."
+ )
+ parser.add_argument(
+ "python_script",
+ metavar="python-script",
+ type=six.text_type,
+ help="The Python script to run",
+ )
+ parser.add_argument(
+ "method_name",
+ metavar="method-name",
+ type=six.text_type,
+ help="The method of the script to invoke",
+ )
+ parser.add_argument(
+ "output_file",
+ metavar="output-file",
+ type=six.text_type,
+ help="The file to generate",
+ )
+ parser.add_argument(
+ "dep_file",
+ metavar="dep-file",
+ type=six.text_type,
+ help="File to write any additional make dependencies to",
+ )
+ parser.add_argument(
+ "dep_target",
+ metavar="dep-target",
+ type=six.text_type,
+ help="Make target to use in the dependencies file",
+ )
+ parser.add_argument(
+ "additional_arguments",
+ metavar="arg",
+ nargs=argparse.REMAINDER,
+ help="Additional arguments to the script's main() method",
+ )
+
+ args = parser.parse_args(argv)
+
+ kwargs = {}
+ if args.locale:
+ kwargs["locale"] = args.locale
+ script = args.python_script
+ # Permit the script to import modules from the same directory in which it
+ # resides. The justification for doing this is that if we were invoking
+ # the script as:
+ #
+ # python script arg1...
+ #
+ # then importing modules from the script's directory would come for free.
+ # Since we're invoking the script in a roundabout way, we provide this
+ # bit of convenience.
+ sys.path.append(os.path.dirname(script))
+ spec = importlib.util.spec_from_file_location("script", script)
+ module = importlib.util.module_from_spec(spec)
+ spec.loader.exec_module(module)
+ method = args.method_name
+ if not hasattr(module, method):
+ print(
+ 'Error: script "{0}" is missing a {1} method'.format(script, method),
+ file=sys.stderr,
+ )
+ return 1
+
+ ret = 1
+ try:
+ with FileAvoidWrite(args.output_file, readmode="rb") as output:
+ try:
+ ret = module.__dict__[method](
+ output, *args.additional_arguments, **kwargs
+ )
+ except Exception:
+ # Ensure that we don't overwrite the file if the script failed.
+ output.avoid_writing_to_file()
+ raise
+
+ # The following values indicate a statement of success:
+ # - a set() (see below)
+ # - 0
+ # - False
+ # - None
+ #
+ # Everything else is an error (so scripts can conveniently |return
+ # 1| or similar). If a set is returned, the elements of the set
+ # indicate additional dependencies that will be listed in the deps
+ # file. Python module imports are automatically included as
+ # dependencies.
+ if isinstance(ret, set):
+ deps = set(six.ensure_text(s) for s in ret)
+ # The script succeeded, so reset |ret| to indicate that.
+ ret = None
+ else:
+ deps = set()
+
+ # Only write out the dependencies if the script was successful
+ if not ret:
+ # Add dependencies on any python modules that were imported by
+ # the script.
+ deps |= set(
+ six.ensure_text(s)
+ for s in iter_modules_in_path(
+ buildconfig.topsrcdir, buildconfig.topobjdir
+ )
+ )
+ # Add dependencies on any buildconfig items that were accessed
+ # by the script.
+ deps |= set(six.ensure_text(s) for s in buildconfig.get_dependencies())
+
+ mk = Makefile()
+ mk.create_rule([args.dep_target]).add_dependencies(deps)
+ with FileAvoidWrite(args.dep_file) as dep_file:
+ mk.dump(dep_file)
+ else:
+ # Ensure that we don't overwrite the file if the script failed.
+ output.avoid_writing_to_file()
+
+ except IOError as e:
+ print('Error opening file "{0}"'.format(e.filename), file=sys.stderr)
+ traceback.print_exc()
+ return 1
+ return ret
+
+
+if __name__ == "__main__":
+ sys.exit(log_build_task(main, sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/file_generate_wrapper.py b/python/mozbuild/mozbuild/action/file_generate_wrapper.py
new file mode 100644
index 0000000000..b6c030bbf6
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/file_generate_wrapper.py
@@ -0,0 +1,38 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+import os
+import subprocess
+import sys
+from pathlib import Path
+
+import buildconfig
+
+
+def action(fh, script, target_dir, *args):
+ fh.close()
+ os.unlink(fh.name)
+
+ args = list(args)
+ objdir = Path.cwd()
+ topsrcdir = Path(buildconfig.topsrcdir)
+
+ def make_absolute(base_path, p):
+ return Path(base_path) / Path(p.lstrip("/"))
+
+ try:
+ abs_target_dir = str(make_absolute(objdir, target_dir))
+ abs_script = make_absolute(topsrcdir, script)
+ script = [str(abs_script)]
+ if abs_script.suffix == ".py":
+ script = [sys.executable] + script
+ subprocess.check_call(script + args, cwd=abs_target_dir)
+ except Exception:
+ relative = os.path.relpath(__file__, topsrcdir)
+ print(
+ "%s:action caught exception. params=%s\n"
+ % (relative, json.dumps([script, target_dir] + args, indent=2))
+ )
+ raise
diff --git a/python/mozbuild/mozbuild/action/generate_symbols_file.py b/python/mozbuild/mozbuild/action/generate_symbols_file.py
new file mode 100644
index 0000000000..955a676c08
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/generate_symbols_file.py
@@ -0,0 +1,95 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import os
+from io import StringIO
+
+import buildconfig
+
+from mozbuild.preprocessor import Preprocessor
+from mozbuild.util import DefinesAction
+
+
+def generate_symbols_file(output, *args):
+ """ """
+ parser = argparse.ArgumentParser()
+ parser.add_argument("input")
+ parser.add_argument("-D", action=DefinesAction)
+ parser.add_argument("-U", action="append", default=[])
+ args = parser.parse_args(args)
+ input = os.path.abspath(args.input)
+
+ pp = Preprocessor()
+ pp.context.update(buildconfig.defines["ALLDEFINES"])
+ if args.D:
+ pp.context.update(args.D)
+ for undefine in args.U:
+ if undefine in pp.context:
+ del pp.context[undefine]
+ # Hack until MOZ_DEBUG_FLAGS are simply part of buildconfig.defines
+ if buildconfig.substs.get("MOZ_DEBUG"):
+ pp.context["DEBUG"] = "1"
+ # Ensure @DATA@ works as expected (see the Windows section further below)
+ if buildconfig.substs["OS_TARGET"] == "WINNT":
+ pp.context["DATA"] = "DATA"
+ else:
+ pp.context["DATA"] = ""
+ pp.out = StringIO()
+ pp.do_filter("substitution")
+ pp.do_include(input)
+
+ symbols = [s.strip() for s in pp.out.getvalue().splitlines() if s.strip()]
+
+ libname, ext = os.path.splitext(os.path.basename(output.name))
+
+ if buildconfig.substs["OS_TARGET"] == "WINNT":
+ # A def file is generated for MSVC link.exe that looks like the
+ # following:
+ # LIBRARY library.dll
+ # EXPORTS
+ # symbol1
+ # symbol2
+ # ...
+ #
+ # link.exe however requires special markers for data symbols, so in
+ # that case the symbols look like:
+ # data_symbol1 DATA
+ # data_symbol2 DATA
+ # ...
+ #
+ # In the input file, this is just annotated with the following syntax:
+ # data_symbol1 @DATA@
+ # data_symbol2 @DATA@
+ # ...
+ # The DATA variable is "simply" expanded by the preprocessor, to
+ # nothing on non-Windows, such that we only get the symbol name on
+ # those platforms, and to DATA on Windows, so that the "DATA" part
+ # is, in fact, part of the symbol name as far as the symbols variable
+ # is concerned.
+ assert ext == ".def"
+ output.write("LIBRARY %s\nEXPORTS\n %s\n" % (libname, "\n ".join(symbols)))
+ elif (
+ buildconfig.substs.get("GCC_USE_GNU_LD")
+ or buildconfig.substs["OS_TARGET"] == "SunOS"
+ ):
+ # A linker version script is generated for GNU LD that looks like the
+ # following:
+ # liblibrary.so {
+ # global:
+ # symbol1;
+ # symbol2;
+ # ...
+ # local:
+ # *;
+ # };
+ output.write(
+ "%s {\nglobal:\n %s;\nlocal:\n *;\n};" % (libname, ";\n ".join(symbols))
+ )
+ elif buildconfig.substs["OS_TARGET"] == "Darwin":
+ # A list of symbols is generated for Apple ld that simply lists all
+ # symbols, with an underscore prefix.
+ output.write("".join("_%s\n" % s for s in symbols))
+
+ return set(pp.includes)
diff --git a/python/mozbuild/mozbuild/action/html_fragment_preprocesor.py b/python/mozbuild/mozbuild/action/html_fragment_preprocesor.py
new file mode 100644
index 0000000000..f957318a7f
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/html_fragment_preprocesor.py
@@ -0,0 +1,101 @@
+import json
+import re
+import xml.etree.ElementTree as ET
+from pathlib import Path
+
+JS_FILE_TEMPLATE = """\
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+const EXPORTED_SYMBOLS = ["getHTMLFragment"];
+
+const Fragments = {json_string};
+
+/*
+ * Loads HTML fragment strings pulled from fragment documents.
+ * @param key - key identifying HTML fragment
+ *
+ * @return raw HTML/XHTML string
+ */
+const getHTMLFragment = key => Fragments[key];
+"""
+
+RE_COLLAPSE_WHITESPACE = re.compile(r"\s+")
+
+
+def get_fragment_key(path, template_name=None):
+ key = Path(path).stem
+ if template_name:
+ key += "/" + template_name
+ return key
+
+
+def fill_html_fragments_map(fragment_map, path, template, doctype=None):
+ # collape white space
+ for elm in template.iter():
+ if elm.text:
+ elm.text = RE_COLLAPSE_WHITESPACE.sub(" ", elm.text)
+ if elm.tail:
+ elm.tail = RE_COLLAPSE_WHITESPACE.sub(" ", elm.tail)
+ key = get_fragment_key(path, template.attrib.get("name"))
+ xml = "".join(ET.tostring(elm, encoding="unicode") for elm in template).strip()
+ if doctype:
+ xml = doctype + "\n" + xml
+ fragment_map[key] = xml
+
+
+def get_html_fragments_from_file(fragment_map, path):
+ for _, (name, value) in ET.iterparse(path, events=["start-ns"]):
+ ET.register_namespace(name, value)
+ tree = ET.parse(path)
+ root = tree.getroot()
+ sub_templates = root.findall("{http://www.w3.org/1999/xhtml}template")
+ # if all nested nodes are templates then treat as list of templates
+ if len(sub_templates) == len(root):
+ doctype = ""
+ for template in sub_templates:
+ if template.get("doctype") == "true":
+ doctype = template.text.strip()
+ break
+ for template in sub_templates:
+ if template.get("doctype") != "true":
+ fill_html_fragments_map(fragment_map, path, template, doctype)
+ else:
+ fill_html_fragments_map(fragment_map, path, root, None)
+
+
+def generate(output, *inputs):
+ """Builds an html fragments loader JS file from the input xml file(s)
+
+ The xml files are expected to be in the format of:
+ `<template>...xhtml markup...</template>`
+
+ or `<template><template name="fragment_name">...xhtml markup...</template>...</template>`
+ Where there are multiple templates. All markup is expected to be properly namespaced.
+
+ In the JS file, calling getHTMLFragment(key) will return the HTML string from the xml file
+ that matches the key.
+
+ The key format is `filename_without_extension/template_name` for files with
+ multiple templates, or just `filename_without_extension` for files with one template.
+ `filename_without_extension` is the xml filename without the .xml extension
+ and `template_name` is the name attribute of template node containing the xml fragment.
+
+ Arguments:
+ output -- File handle to JS file being generated
+ inputs -- list of xml filenames to include in loader
+
+ Returns:
+ The set of dependencies which should trigger this command to be re-run.
+ This is ultimately returned to the build system for use by the backend
+ to ensure that incremental rebuilds happen when any dependency changes.
+ """
+
+ fragment_map = {}
+ for file in inputs:
+ get_html_fragments_from_file(fragment_map, file)
+ json_string = json.dumps(fragment_map, separators=(",", ":"))
+ contents = JS_FILE_TEMPLATE.format(json_string=json_string)
+ output.write(contents)
+ return set(inputs)
diff --git a/python/mozbuild/mozbuild/action/install.py b/python/mozbuild/mozbuild/action/install.py
new file mode 100644
index 0000000000..02f0f2694a
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/install.py
@@ -0,0 +1,22 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# A simple script to invoke mozinstall from the command line without depending
+# on a build config.
+
+import sys
+
+import mozinstall
+
+
+def main(args):
+ if len(args) != 2:
+ print("Usage: install.py [src] [dest]")
+ return 1
+ src, dest = args
+ mozinstall.install(src, dest)
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/jar_maker.py b/python/mozbuild/mozbuild/action/jar_maker.py
new file mode 100644
index 0000000000..a244b66a52
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/jar_maker.py
@@ -0,0 +1,16 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import sys
+
+import mozbuild.jar
+from mozbuild.action.util import log_build_task
+
+
+def main(args):
+ return mozbuild.jar.main(args)
+
+
+if __name__ == "__main__":
+ sys.exit(log_build_task(main, sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/l10n_merge.py b/python/mozbuild/mozbuild/action/l10n_merge.py
new file mode 100644
index 0000000000..1a04d60107
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/l10n_merge.py
@@ -0,0 +1,42 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import os
+import shutil
+import sys
+
+from mozbuild.util import ensureParentDir
+
+
+def main(argv):
+ parser = argparse.ArgumentParser(description="Merge l10n files.")
+ parser.add_argument("--output", help="Path to write merged output")
+ parser.add_argument("--ref-file", help="Path to reference file (en-US)")
+ parser.add_argument("--l10n-file", help="Path to locale file")
+
+ args = parser.parse_args(argv)
+
+ from compare_locales.compare import ContentComparer, Observer
+ from compare_locales.paths import File
+
+ cc = ContentComparer([Observer()])
+ cc.compare(
+ File(args.ref_file, args.ref_file, ""),
+ File(args.l10n_file, args.l10n_file, ""),
+ args.output,
+ )
+
+ ensureParentDir(args.output)
+ if not os.path.exists(args.output):
+ src = args.l10n_file
+ if not os.path.exists(args.l10n_file):
+ src = args.ref_file
+ shutil.copy(src, args.output)
+
+ return 0
+
+
+if __name__ == "__main__":
+ main(sys.argv[1:])
diff --git a/python/mozbuild/mozbuild/action/langpack_localeNames.json b/python/mozbuild/mozbuild/action/langpack_localeNames.json
new file mode 100644
index 0000000000..9014c7717a
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/langpack_localeNames.json
@@ -0,0 +1,426 @@
+{
+ "ach": {
+ "english": "Acoli",
+ "native": "Acholi"
+ },
+ "af": {
+ "native": "Afrikaans"
+ },
+ "an": {
+ "english": "Aragonese",
+ "native": "Aragonés"
+ },
+ "ar": {
+ "english": "Arabic",
+ "native": "العربية"
+ },
+ "ast": {
+ "english": "Asturian",
+ "native": "Asturianu"
+ },
+ "az": {
+ "english": "Azerbaijani",
+ "native": "Azərbaycanca"
+ },
+ "be": {
+ "english": "Belarusian",
+ "native": "БеларуÑкаÑ"
+ },
+ "bg": {
+ "english": "Bulgarian",
+ "native": "БългарÑки"
+ },
+ "bn": {
+ "english": "Bangla",
+ "native": "বাংলা"
+ },
+ "bo": {
+ "english": "Tibetan",
+ "native": "བོད་སà¾à½‘"
+ },
+ "br": {
+ "english": "Breton",
+ "native": "Brezhoneg"
+ },
+ "brx": {
+ "english": "Bodo",
+ "native": "बड़ो"
+ },
+ "bs": {
+ "english": "Bosnian",
+ "native": "Bosanski"
+ },
+ "ca": {
+ "english": "Catalan",
+ "native": "Català"
+ },
+ "ca-valencia": {
+ "english": "Catalan, Valencian",
+ "native": "Català (Valencià)"
+ },
+ "cak": {
+ "native": "Kaqchikel"
+ },
+ "cs": {
+ "english": "Czech",
+ "native": "Čeština"
+ },
+ "cy": {
+ "english": "Welsh",
+ "native": "Cymraeg"
+ },
+ "da": {
+ "english": "Danish",
+ "native": "Dansk"
+ },
+ "de": {
+ "english": "German",
+ "native": "Deutsch"
+ },
+ "dsb": {
+ "english": "Lower Sorbian",
+ "native": "Dolnoserbšćina"
+ },
+ "el": {
+ "english": "Greek",
+ "native": "Ελληνικά"
+ },
+ "en-CA": {
+ "native": "English (CA)"
+ },
+ "en-GB": {
+ "native": "English (GB)"
+ },
+ "en-US": {
+ "native": "English (US)"
+ },
+ "eo": {
+ "native": "Esperanto"
+ },
+ "es-AR": {
+ "english": "Spanish, Argentina",
+ "native": "Español (AR)"
+ },
+ "es-CL": {
+ "english": "Spanish, Chile",
+ "native": "Español (CL)"
+ },
+ "es-ES": {
+ "english": "Spanish, Spain",
+ "native": "Español (ES)"
+ },
+ "es-MX": {
+ "english": "Spanish, Mexico",
+ "native": "Español (MX)"
+ },
+ "et": {
+ "english": "Estonian",
+ "native": "Eesti"
+ },
+ "eu": {
+ "english": "Basque",
+ "native": "Euskara"
+ },
+ "fa": {
+ "english": "Persian",
+ "native": "Ùارسی"
+ },
+ "ff": {
+ "english": "Fulah",
+ "native": "Pulaar"
+ },
+ "fi": {
+ "english": "Finnish",
+ "native": "Suomi"
+ },
+ "fr": {
+ "english": "French",
+ "native": "Français"
+ },
+ "fur": {
+ "english": "Friulian",
+ "native": "Furlan"
+ },
+ "fy-NL": {
+ "english": "Frisian",
+ "native": "Frysk"
+ },
+ "ga-IE": {
+ "english": "Irish",
+ "native": "Gaeilge"
+ },
+ "gd": {
+ "english": "Scottish Gaelic",
+ "native": "Gàidhlig"
+ },
+ "gl": {
+ "english": "Galician",
+ "native": "Galego"
+ },
+ "gn": {
+ "native": "Guarani"
+ },
+ "gu-IN": {
+ "english": "Gujarati",
+ "native": "ગà«àªœàª°àª¾àª¤à«€"
+ },
+ "he": {
+ "english": "Hebrew",
+ "native": "עברית"
+ },
+ "hi-IN": {
+ "english": "Hindi",
+ "native": "हिनà¥à¤¦à¥€"
+ },
+ "hr": {
+ "english": "Croatian",
+ "native": "Hrvatski"
+ },
+ "hsb": {
+ "english": "Upper Sorbian",
+ "native": "Hornjoserbšćina"
+ },
+ "hu": {
+ "english": "Hungarian",
+ "native": "Magyar"
+ },
+ "hy-AM": {
+ "english": "Armenian",
+ "native": "Õ°Õ¡ÕµÕ¥Ö€Õ¥Õ¶"
+ },
+ "ia": {
+ "native": "Interlingua"
+ },
+ "id": {
+ "english": "Indonesian",
+ "native": "Indonesia"
+ },
+ "is": {
+ "english": "Icelandic",
+ "native": "Islenska"
+ },
+ "it": {
+ "english": "Italian",
+ "native": "Italiano"
+ },
+ "ja": {
+ "english": "Japanese",
+ "native": "日本語"
+ },
+ "ja-JP-mac": {
+ "english": "Japanese",
+ "native": "日本語"
+ },
+ "ka": {
+ "english": "Georgian",
+ "native": "ქáƒáƒ áƒ—ული"
+ },
+ "kab": {
+ "english": "Kabyle",
+ "native": "Taqbaylit"
+ },
+ "kk": {
+ "english": "Kazakh",
+ "native": "қазақ тілі"
+ },
+ "km": {
+ "english": "Khmer",
+ "native": "ážáŸ’មែរ"
+ },
+ "kn": {
+ "english": "Kannada",
+ "native": "ಕನà³à²¨à²¡"
+ },
+ "ko": {
+ "english": "Korean",
+ "native": "한국어"
+ },
+ "lij": {
+ "english": "Ligurian",
+ "native": "Ligure"
+ },
+ "lo": {
+ "english": "Lao",
+ "native": "ລາວ"
+ },
+ "lt": {
+ "english": "Lithuanian",
+ "native": "Lietuvių"
+ },
+ "ltg": {
+ "english": "Latgalian",
+ "native": "Latgalīšu"
+ },
+ "lv": {
+ "english": "Latvian",
+ "native": "Latviešu"
+ },
+ "mk": {
+ "english": "Macedonian",
+ "native": "македонÑки"
+ },
+ "ml": {
+ "english": "Malayalam",
+ "native": "മലയാളം"
+ },
+ "mr": {
+ "english": "Marathi",
+ "native": "मराठी"
+ },
+ "ms": {
+ "english": "Malay",
+ "native": "Melayu"
+ },
+ "my": {
+ "english": "Burmese",
+ "native": "မြန်မာ"
+ },
+ "nb-NO": {
+ "english": "Norwegian Bokmål",
+ "native": "Norsk Bokmål"
+ },
+ "ne-NP": {
+ "english": "Nepali",
+ "native": "नेपाली"
+ },
+ "nl": {
+ "english": "Dutch",
+ "native": "Nederlands"
+ },
+ "nn-NO": {
+ "english": "Norwegian Nynorsk",
+ "native": "Nynorsk"
+ },
+ "oc": {
+ "native": "Occitan"
+ },
+ "or": {
+ "english": "Odia",
+ "native": "ଓଡ଼ିଆ"
+ },
+ "pa-IN": {
+ "english": "Punjabi",
+ "native": "ਪੰਜਾਬੀ"
+ },
+ "pl": {
+ "english": "Polish",
+ "native": "Polski"
+ },
+ "pt-BR": {
+ "english": "Brazilian Portuguese",
+ "native": "Português (BR)"
+ },
+ "pt-PT": {
+ "english": "Portuguese",
+ "native": "Português (PT)"
+ },
+ "rm": {
+ "english": "Romansh",
+ "native": "Rumantsch"
+ },
+ "ro": {
+ "english": "Romanian",
+ "native": "Română"
+ },
+ "ru": {
+ "english": "Russian",
+ "native": "РуÑÑкий"
+ },
+ "sc": {
+ "english": "Sardinian",
+ "native": "Sardu"
+ },
+ "sco": {
+ "native": "Scots"
+ },
+ "si": {
+ "english": "Sinhala",
+ "native": "සිංහල"
+ },
+ "sk": {
+ "english": "Slovak",
+ "native": "SlovenÄina"
+ },
+ "sl": {
+ "english": "Slovenian",
+ "native": "SlovenÅ¡Äina"
+ },
+ "son": {
+ "english": "Songhai",
+ "native": "Soŋay"
+ },
+ "sq": {
+ "english": "Albanian",
+ "native": "Shqip"
+ },
+ "sr": {
+ "english": "Serbian",
+ "native": "CрпÑки"
+ },
+ "sv-SE": {
+ "english": "Swedish",
+ "native": "Svenska"
+ },
+ "szl": {
+ "english": "Silesian",
+ "native": "ÅšlÅnsko"
+ },
+ "ta": {
+ "english": "Tamil",
+ "native": "தமிழà¯"
+ },
+ "te": {
+ "english": "Telugu",
+ "native": "తెలà±à°—à±"
+ },
+ "tg": {
+ "english": "Tajik",
+ "native": "Тоҷикӣ"
+ },
+ "th": {
+ "english": "Thai",
+ "native": "ไทย"
+ },
+ "tl": {
+ "english": "Filipino",
+ "native": "Tagalog"
+ },
+ "tr": {
+ "english": "Turkish",
+ "native": "Türkçe"
+ },
+ "trs": {
+ "native": "Triqui"
+ },
+ "uk": {
+ "english": "Ukrainian",
+ "native": "УкраїнÑька"
+ },
+ "ur": {
+ "english": "Urdu",
+ "native": "اردو"
+ },
+ "uz": {
+ "english": "Uzbek",
+ "native": "O‘zbek"
+ },
+ "vi": {
+ "english": "Vietnamese",
+ "native": "Tiếng Việt"
+ },
+ "wo": {
+ "native": "Wolof"
+ },
+ "xh": {
+ "english": "Xhosa",
+ "native": "IsiXhosa"
+ },
+ "zh-CN": {
+ "english": "Simplified Chinese",
+ "native": "简体中文"
+ },
+ "zh-TW": {
+ "english": "Traditional Chinese",
+ "native": "正體中文"
+ }
+}
diff --git a/python/mozbuild/mozbuild/action/langpack_manifest.py b/python/mozbuild/mozbuild/action/langpack_manifest.py
new file mode 100644
index 0000000000..c79539cbce
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/langpack_manifest.py
@@ -0,0 +1,587 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+###
+# This script generates a web manifest JSON file based on the xpi-stage
+# directory structure. It extracts data necessary to produce the complete
+# manifest file for a language pack:
+# from the `langpack-manifest.ftl` file in the locale directory;
+# from chrome registry entries;
+# and from other information in the `xpi-stage` directory.
+###
+
+import argparse
+import datetime
+import io
+import json
+import logging
+import os
+import re
+import sys
+import time
+
+import fluent.syntax.ast as FTL
+import mozpack.path as mozpath
+import mozversioncontrol
+import requests
+from fluent.syntax.parser import FluentParser
+from mozpack.chrome.manifest import Manifest, ManifestLocale, parse_manifest
+
+from mozbuild.configure.util import Version
+
+
def write_file(path, content):
    """Write *content* to *path* as UTF-8 text, appending a trailing newline."""
    text = content + "\n"
    with io.open(path, "w", encoding="utf-8") as handle:
        handle.write(text)
+
+
# Template for the pushlog JSON API URL: {0} is the repository URL and
# {1} is the changeset node (formatted in get_dt_from_hg).
pushlog_api_url = "{0}/json-rev/{1}"
+
+
def get_build_date():
    """Return the build date as a UTC datetime.

    Honors the SOURCE_DATE_EPOCH environment variable (seconds since the
    epoch) for reproducible builds; otherwise uses the current time.
    """
    epoch = os.environ.get("SOURCE_DATE_EPOCH", time.time())
    return datetime.datetime.utcfromtimestamp(int(epoch))
+
+
+###
+# Retrieves a UTC datetime of the push for the current commit from a
+# mercurial clone directory. The SOURCE_DATE_EPOCH environment
+# variable is honored, for reproducibility.
+#
+# Args:
+# path (str) - path to a directory
+#
+# Returns:
+# (datetime) - a datetime object
+#
+# Example:
+# dt = get_dt_from_hg("/var/vcs/l10n-central/pl")
+# dt == datetime(2017, 10, 11, 23, 31, 54, 0)
+###
def get_dt_from_hg(path):
    # Only public (pushed) commits have a pushlog entry; for anything else
    # fall back to the build date (which honors SOURCE_DATE_EPOCH).
    with mozversioncontrol.get_repository_object(path=path) as repo:
        phase = repo._run("log", "-r", ".", "-T" "{phase}")
        if phase.strip() != "public":
            return get_build_date()
        repo_url = repo._run("paths", "default")
        # Normalize the remote URL so the pushlog JSON API is reachable
        # over https regardless of the configured scheme.
        repo_url = repo_url.strip().replace("ssh://", "https://")
        repo_url = repo_url.replace("hg://", "https://")
        cs = repo._run("log", "-r", ".", "-T" "{node}")

    url = pushlog_api_url.format(repo_url, cs)
    session = requests.Session()
    try:
        response = session.get(url)
    except Exception as e:
        # Wrap network failures so the failing URL is part of the message.
        msg = "Failed to retrieve push timestamp using {}\nError: {}".format(url, e)
        raise Exception(msg)

    data = response.json()

    try:
        # pushdate's first element is a unix timestamp (fed to
        # utcfromtimestamp below).
        date = data["pushdate"][0]
    except KeyError as exc:
        # Include the full payload in the error to ease debugging when the
        # API response shape changes.
        msg = "{}\ndata is: {}".format(
            str(exc), json.dumps(data, indent=2, sort_keys=True)
        )
        raise KeyError(msg)

    return datetime.datetime.utcfromtimestamp(date)
+
+
+###
+# Generates timestamp for a locale based on its path.
+# If possible, will use the commit timestamp from HG repository,
+# and if that fails, will generate the timestamp for `now`.
+#
+# The timestamp format is "{year}{month}{day}{hour}{minute}{second}" and
+# the datetime stored in it is using UTC timezone.
+#
+# Args:
+# path (str) - path to the locale directory
+#
+# Returns:
+# (str) - a timestamp string
+#
+# Example:
+# ts = get_timestamp_for_locale("/var/vcs/l10n-central/pl")
+# ts == "20170914215617"
+###
def get_timestamp_for_locale(path):
    """Return a "%Y%m%d%H%M%S" UTC timestamp string for the locale at *path*.

    Prefers the push timestamp from the locale's mercurial clone; falls
    back to the build date when *path* is not an hg checkout.
    """
    stamp = None
    if os.path.isdir(os.path.join(path, ".hg")):
        stamp = get_dt_from_hg(path)
    if stamp is None:
        stamp = get_build_date()
    return stamp.replace(microsecond=0).strftime("%Y%m%d%H%M%S")
+
+
+###
+# Parses an FTL file into a key-value pair object.
+# Does not support attributes, terms, variables, functions or selectors;
+# only messages with values consisting of text elements and literals.
+#
+# Args:
+# path (str) - a path to an FTL file
+#
+# Returns:
+# (dict) - A mapping of message keys to formatted string values.
+# Empty if the file at `path` was not found.
+#
+# Example:
+# res = parse_flat_ftl('./browser/langpack-metadata.ftl')
+# res == {
+# 'langpack-title': 'Polski',
+# 'langpack-creator': 'mozilla.org',
+# 'langpack-contributors': 'Joe Solon, Suzy Solon'
+# }
+###
def parse_flat_ftl(path):
    """Parse the FTL file at *path* into a flat {message-id: string} dict.

    Only messages whose patterns consist of text elements and literals are
    supported; any other element (selector, variable reference, ...) raises.
    Returns an empty dict when the file does not exist.
    """
    parser = FluentParser(with_spans=False)
    try:
        with open(path, encoding="utf-8") as file:
            resource = parser.parse(file.read())
    except FileNotFoundError as err:
        logging.warning(err)
        return {}

    messages = {}
    for entry in resource.body:
        if not (isinstance(entry, FTL.Message) and isinstance(entry.value, FTL.Pattern)):
            continue
        pieces = []
        for elem in entry.value.elements:
            if isinstance(elem, FTL.TextElement):
                pieces.append(elem.value)
            elif isinstance(elem.expression, FTL.Literal):
                pieces.append(elem.expression.parse()["value"])
            else:
                name = type(elem.expression).__name__
                raise Exception(f"Unsupported {name} for {entry.id.name} in {path}")
        messages[entry.id.name] = "".join(pieces).strip()
    return messages
+
+
+##
+# Generates the title and description for the langpack.
+#
+# Uses data stored in a JSON file next to this source,
+# which is expected to have the following format:
+# Record<string, { native: string, english?: string }>
+#
+# If an English name is given and is different from the native one,
+# it will be included in the description and, if within the character limits,
+# also in the name.
+#
+# Length limit for names is 45 characters, for descriptions is 132,
+# return values are truncated if needed.
+#
+# NOTE: If you're updating the native locale names,
+# you should also update the data in
+# toolkit/components/mozintl/mozIntl.sys.mjs.
+#
+# Args:
+# app (str) - Application name
+# locale (str) - Locale identifier
+#
+# Returns:
+# (str, str) - Tuple of title and description
+#
+###
def get_title_and_description(app, locale):
    """Return a (title, description) pair for the langpack manifest.

    Looks up the locale's native (and optional English) name in
    langpack_localeNames.json next to this module. When the English name
    differs from the native one it is added to the description and, if the
    result still fits the name length limit, to the title as well. Both
    values are truncated to the manifest limits (45 chars for the name,
    132 for the description).

    Args:
        app (str) - Application name
        locale (str) - Locale identifier

    Returns:
        (str, str) - Tuple of title and description
    """
    # `base_dir` instead of `dir`: avoid shadowing the builtin.
    base_dir = os.path.dirname(__file__)
    with open(
        os.path.join(base_dir, "langpack_localeNames.json"), encoding="utf-8"
    ) as nf:
        names = json.load(nf)

    nameCharLimit = 45
    descCharLimit = 132
    nameTemplate = "Language: {}"
    descTemplate = "{} Language Pack for {}"

    data = names.get(locale)
    if data is not None:
        native = data["native"]
        english = data.get("english", native)

        if english != native:
            title = nameTemplate.format(f"{native} ({english})")
            if len(title) > nameCharLimit:
                # Combined form is too long; fall back to the native name.
                title = nameTemplate.format(native)
            description = descTemplate.format(app, f"{native} ({locale}) – {english}")
        else:
            title = nameTemplate.format(native)
            description = descTemplate.format(app, f"{native} ({locale})")
    else:
        # Unknown locale: use the raw identifier as a best effort.
        title = nameTemplate.format(locale)
        description = descTemplate.format(app, locale)

    return title[:nameCharLimit], description[:descCharLimit]
+
+
+###
+# Build the manifest author string based on the author string
+# and optionally adding the list of contributors, if provided.
+#
+# Args:
+# ftl (dict) - a key-value mapping of locale-specific strings
+#
+# Returns:
+# (str) - a string to be placed in the author field of the manifest.json
+#
+# Example:
+# s = get_author({
+# 'langpack-creator': 'mozilla.org',
+# 'langpack-contributors': 'Joe Solon, Suzy Solon'
+# })
+# s == 'mozilla.org (contributors: Joe Solon, Suzy Solon)'
+###
def get_author(ftl):
    """Return the manifest ``author`` string.

    Uses the ``langpack-creator`` message (defaulting to "mozilla.org") and
    appends the ``langpack-contributors`` list when present and non-empty.

    Args:
        ftl (dict) - a key-value mapping of locale-specific strings

    Returns:
        (str) - a string to be placed in the author field of manifest.json

    Example:
        s = get_author({
            'langpack-creator': 'mozilla.org',
            'langpack-contributors': 'Joe Solon, Suzy Solon'
        })
        s == 'mozilla.org (contributors: Joe Solon, Suzy Solon)'
    """
    # dict.get with a default replaces the `x if k in d else y` dance.
    author = ftl.get("langpack-creator", "mozilla.org")
    contrib = ftl.get("langpack-contributors", "")
    if contrib:
        return f"{author} (contributors: {contrib})"
    return author
+
+
+##
+# Converts the list of chrome manifest entry flags to the list of platforms
+# for the langpack manifest.
+#
+# The list of result platforms is taken from AppConstants.platform.
+#
+# Args:
+# flags (FlagList) - a list of Chrome Manifest entry flags
+#
+# Returns:
+# (list) - a list of platform the entry applies to
+#
+# Example:
+# str(flags) == "os==MacOS os==Windows"
+# platforms = convert_entry_flags_to_platform_codes(flags)
+# platforms == ['macosx', 'win']
+#
+# The method supports only `os` flag name and equality operator.
+# It will throw if tried with other flags or operators.
+###
def convert_entry_flags_to_platform_codes(flags):
    """Convert chrome manifest entry flags into langpack platform codes.

    Only the `os` flag name with the equality operator is supported; any
    other flag name or operator raises.

    Args:
        flags (FlagList) - a list of Chrome Manifest entry flags

    Returns:
        (list) - platform codes the entry applies to, or None when *flags*
        is empty
    """
    if not flags:
        return None

    # chrome manifest `os` values -> AppConstants.platform codes
    os_to_platform = {
        "Android": "android",
        "LikeUnix": "linux",
        "Darwin": "macosx",
        "WINNT": "win",
    }

    ret = []
    for key in flags:
        if key != "os":
            raise Exception("Unknown flag name")

        for value in flags[key].values:
            if value[0] != "==":
                raise Exception("Inequality flag cannot be converted")

            try:
                ret.append(os_to_platform[value[1]])
            except KeyError:
                raise Exception("Unknown flag value {0}".format(value[1]))

    return ret
+
+
+###
+# Recursively parse a chrome manifest file appending new entries
+# to the result list
+#
+# The function can handle two entry types: 'locale' and 'manifest'
+#
+# Args:
+# path (str) - a path to a chrome manifest
+# base_path (str) - a path to the base directory all chrome registry
+# entries will be relative to
+# chrome_entries (list) - a list to which entries will be appended to
+#
+# Example:
+#
+# chrome_entries = {}
+# parse_manifest('./chrome.manifest', './', chrome_entries)
+#
+# chrome_entries == [
+# {
+# 'type': 'locale',
+# 'alias': 'devtools',
+# 'locale': 'pl',
+# 'platforms': null,
+# 'path': 'chrome/pl/locale/pl/devtools/'
+# },
+# {
+# 'type': 'locale',
+# 'alias': 'autoconfig',
+# 'locale': 'pl',
+# 'platforms': ['win', 'mac'],
+# 'path': 'chrome/pl/locale/pl/autoconfig/'
+# },
+# ]
+###
def parse_chrome_manifest(path, base_path, chrome_entries):
    # parse_manifest yields typed entry objects; recurse into included
    # sub-manifests, collect `locale` entries, and reject anything else.
    for entry in parse_manifest(None, path):
        if isinstance(entry, Manifest):
            # A `manifest` entry points at another manifest file to include.
            parse_chrome_manifest(
                os.path.join(os.path.dirname(path), entry.relpath),
                base_path,
                chrome_entries,
            )
        elif isinstance(entry, ManifestLocale):
            # Store the path relative to base_path so the generated web
            # manifest is independent of where the stage dir lives.
            entry_path = os.path.join(
                os.path.relpath(os.path.dirname(path), base_path), entry.relpath
            )
            chrome_entries.append(
                {
                    "type": "locale",
                    "alias": entry.name,
                    "locale": entry.id,
                    "platforms": convert_entry_flags_to_platform_codes(entry.flags),
                    # normsep: always forward slashes in the manifest.
                    "path": mozpath.normsep(entry_path),
                }
            )
        else:
            raise Exception("Unknown type {0}".format(entry.name))
+
+
+###
+# Gets the version to use in the langpack.
+#
+# This uses the env variable MOZ_BUILD_DATE if it exists to expand the version
+# to be unique in automation.
+#
+# Args:
+# app_version - Application version
+#
+# Returns:
+# str - Version to use
+#
+###
def get_version_maybe_buildid(app_version):
    """Return the version string to use in the langpack manifest.

    When MOZ_BUILD_DATE (a 14-char YYYYMMDDHHmmss buildid) is set, the
    version is expanded to <major>.<minor>.<YYYYMMDD>.<HHmmss> so automation
    builds get unique versions; otherwise the numeric parts of *app_version*
    are used as-is.

    Args:
        app_version - Application version

    Returns:
        str - Version to use
    """
    # Raw string (was "[^\\d]") avoids an invalid-escape SyntaxWarning on
    # modern Python; compiled once instead of per part.
    non_numeric = re.compile(r"[^\d]")

    def _extract_numeric_part(part):
        # Keep only the leading digits ("0a1" -> "0"); an empty result
        # becomes "0" so every version part stays numeric.
        matches = non_numeric.search(part)
        if matches:
            part = part[0 : matches.start()]
        if len(part) == 0:
            return "0"
        return part

    parts = [_extract_numeric_part(part) for part in app_version.split(".")]

    buildid = os.environ.get("MOZ_BUILD_DATE")
    if buildid and len(buildid) != 14:
        print("Ignoring invalid MOZ_BUILD_DATE: %s" % buildid, file=sys.stderr)
        buildid = None

    if buildid:
        # Use simple versioning format, see: Bug 1793925 - The version string
        # should start with: <firefox major>.<firefox minor>
        version = ".".join(parts[0:2])
        # We then break the buildid into two version parts so that the full
        # version looks like: <firefox major>.<firefox minor>.YYYYMMDD.HHmmss
        # (renamed from `date, time` so the imported `time` module is not
        # shadowed).
        date_part, time_part = buildid[:8], buildid[8:]
        # Leading zeros are not allowed in manifest version parts.
        time_part = time_part.lstrip("0")
        if len(time_part) == 0:
            time_part = "0"
        version = f"{version}.{date_part}.{time_part}"
    else:
        version = ".".join(parts)

    return version
+
+
+###
+# Generates a new web manifest dict with values specific for a language pack.
+#
+# Args:
+# locstr (str) - A string with a comma separated list of locales
+# for which resources are embedded in the
+# language pack
+# min_app_ver (str) - A minimum version of the application the language
+# resources are for
+# max_app_ver (str) - A maximum version of the application the language
+# resources are for
+# app_name (str) - The name of the application the language
+# resources are for
+# ftl (dict) - A dictionary of locale-specific strings
+# chrome_entries (dict) - A dictionary of chrome registry entries
+#
+# Returns:
+# (dict) - a web manifest
+#
+# Example:
+# manifest = create_webmanifest(
+# 'pl',
+# '57.0',
+# '57.0.*',
+# 'Firefox',
+# '/var/vcs/l10n-central',
+# {'langpack-title': 'Polski'},
+# chrome_entries
+# )
+# manifest == {
+# 'languages': {
+# 'pl': {
+# 'version': '201709121481',
+# 'chrome_resources': {
+# 'alert': 'chrome/pl/locale/pl/alert/',
+# 'branding': 'browser/chrome/pl/locale/global/',
+# 'global-platform': {
+# 'macosx': 'chrome/pl/locale/pl/global-platform/mac/',
+# 'win': 'chrome/pl/locale/pl/global-platform/win/',
+# 'linux': 'chrome/pl/locale/pl/global-platform/unix/',
+# 'android': 'chrome/pl/locale/pl/global-platform/unix/',
+# },
+# 'forms': 'browser/chrome/pl/locale/forms/',
+# ...
+# }
+# }
+# },
+# 'sources': {
+# 'browser': {
+# 'base_path': 'browser/'
+# }
+# },
+# 'browser_specific_settings': {
+# 'gecko': {
+# 'strict_min_version': '57.0',
+# 'strict_max_version': '57.0.*',
+# 'id': 'langpack-pl@mozilla.org',
+# }
+# },
+# 'version': '57.0',
+# 'name': 'Polski Language Pack',
+# ...
+# }
+###
def create_webmanifest(
    locstr,
    version,
    min_app_ver,
    max_app_ver,
    app_name,
    l10n_basedir,
    langpack_eid,
    ftl,
    chrome_entries,
):
    """Build the langpack web manifest and return it as a JSON string."""
    # The first locale in the comma-separated list is the "main" one and
    # drives langpack_id, title and description.
    locales = list(map(lambda loc: loc.strip(), locstr.split(",")))
    main_locale = locales[0]
    title, description = get_title_and_description(app_name, main_locale)
    author = get_author(ftl)

    manifest = {
        "langpack_id": main_locale,
        "manifest_version": 2,
        "browser_specific_settings": {
            "gecko": {
                "id": langpack_eid,
                "strict_min_version": min_app_ver,
                "strict_max_version": max_app_ver,
            }
        },
        "name": title,
        "description": description,
        "version": get_version_maybe_buildid(version),
        "languages": {},
        "sources": {"browser": {"base_path": "browser/"}},
        "author": author,
    }

    # Fold chrome registry entries into a single chrome_resources mapping;
    # platform-restricted entries become {platform: path} sub-dicts.
    cr = {}
    for entry in chrome_entries:
        if entry["type"] == "locale":
            platforms = entry["platforms"]
            if platforms:
                if entry["alias"] not in cr:
                    cr[entry["alias"]] = {}
                for platform in platforms:
                    cr[entry["alias"]][platform] = entry["path"]
            else:
                # A platform-neutral alias must not be registered twice.
                assert entry["alias"] not in cr
                cr[entry["alias"]] = entry["path"]
        else:
            raise Exception("Unknown type {0}".format(entry["type"]))

    # NOTE(review): every locale shares the same `cr` dict object; harmless
    # as long as the manifest is serialized immediately (it is, below).
    for loc in locales:
        manifest["languages"][loc] = {
            "version": get_timestamp_for_locale(os.path.join(l10n_basedir, loc)),
            "chrome_resources": cr,
        }

    return json.dumps(manifest, indent=2, ensure_ascii=False)
+
+
def main(args):
    """Command-line entry point: write manifest.json into the langpack dir."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--locales", help="List of language codes provided by the langpack"
    )
    parser.add_argument("--app-version", help="Version of the application")
    parser.add_argument(
        "--max-app-ver", help="Max version of the application the langpack is for"
    )
    parser.add_argument(
        "--app-name", help="Name of the application the langpack is for"
    )
    parser.add_argument(
        "--l10n-basedir", help="Base directory for locales used in the language pack"
    )
    parser.add_argument(
        "--langpack-eid", help="Language pack id to use for this locale"
    )
    parser.add_argument(
        "--metadata",
        help="FTL file defining langpack metadata",
    )
    parser.add_argument("--input", help="Langpack directory.")

    args = parser.parse_args(args)

    # Collect all locale entries from the staged chrome.manifest (and any
    # manifests it includes).
    chrome_entries = []
    parse_chrome_manifest(
        os.path.join(args.input, "chrome.manifest"), args.input, chrome_entries
    )

    ftl = parse_flat_ftl(args.metadata)

    # Mangle the app version to set min version (remove patch level)
    min_app_version = args.app_version
    if "a" not in min_app_version:  # Don't mangle alpha versions
        v = Version(min_app_version)
        if args.app_name == "SeaMonkey":
            # SeaMonkey is odd in that <major> hasn't changed for many years.
            # So min is <major>.<minor>.0
            min_app_version = "{}.{}.0".format(v.major, v.minor)
        else:
            # Language packs should be minversion of {major}.0
            min_app_version = "{}.0".format(v.major)

    res = create_webmanifest(
        args.locales,
        args.app_version,
        min_app_version,
        args.max_app_ver,
        args.app_name,
        args.l10n_basedir,
        args.langpack_eid,
        ftl,
        chrome_entries,
    )
    write_file(os.path.join(args.input, "manifest.json"), res)
diff --git a/python/mozbuild/mozbuild/action/make_dmg.py b/python/mozbuild/mozbuild/action/make_dmg.py
new file mode 100644
index 0000000000..6dc19450fb
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/make_dmg.py
@@ -0,0 +1,67 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import platform
+import sys
+from pathlib import Path
+
+from mozpack import dmg
+
+from mozbuild.bootstrap import bootstrap_toolchain
+from mozbuild.repackaging.application_ini import get_application_ini_value
+
# Whether the packaging host is Linux.
# NOTE(review): not referenced elsewhere in this file's visible code —
# possibly consumed by importers; confirm before removing.
is_linux = platform.system() == "Linux"
+
+
def main(args):
    """Pack a directory of application files into a DMG.

    Resolves the dmg/hfsplus toolchain binaries via bootstrap, derives the
    volume name from application.ini when --volume-name is not given, and
    delegates the actual packaging to mozpack.dmg.create_dmg.
    """
    parser = argparse.ArgumentParser(
        # The previous description ("Explode a DMG into its relevant files")
        # described the unpacking direction; this action *creates* a DMG.
        description="Pack a directory into a DMG file"
    )

    parser.add_argument("--dsstore", help="DSStore file from")
    parser.add_argument("--background", help="Background file from")
    parser.add_argument("--icon", help="Icon file from")
    parser.add_argument("--volume-name", help="Disk image volume name")

    parser.add_argument("inpath", metavar="PATH_IN", help="Location of files to pack")
    parser.add_argument("dmgfile", metavar="DMG_OUT", help="DMG File to create")

    options = parser.parse_args(args)

    # Optional cosmetic files placed at well-known paths inside the volume.
    extra_files = []
    if options.dsstore:
        extra_files.append((options.dsstore, ".DS_Store"))
    if options.background:
        extra_files.append((options.background, ".background/background.png"))
    if options.icon:
        extra_files.append((options.icon, ".VolumeIcon.icns"))

    if options.volume_name:
        volume_name = options.volume_name
    else:
        # Prefer the CodeName from application.ini, falling back to Name.
        volume_name = get_application_ini_value(
            options.inpath, "App", "CodeName", fallback="Name"
        )

    # Resolve required tools
    dmg_tool = bootstrap_toolchain("dmg/dmg")
    hfs_tool = bootstrap_toolchain("dmg/hfsplus")
    mkfshfs_tool = bootstrap_toolchain("hfsplus/newfs_hfs")

    dmg.create_dmg(
        source_directory=Path(options.inpath),
        output_dmg=Path(options.dmgfile),
        volume_name=volume_name,
        extra_files=extra_files,
        dmg_tool=dmg_tool,
        hfs_tool=hfs_tool,
        mkfshfs_tool=mkfshfs_tool,
    )

    return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/make_unzip.py b/python/mozbuild/mozbuild/action/make_unzip.py
new file mode 100644
index 0000000000..e4d2902f53
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/make_unzip.py
@@ -0,0 +1,25 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import subprocess
+import sys
+
+import buildconfig
+
+
def make_unzip(package):
    """Run the build's configured UNZIP tool on *package*; raises on failure."""
    subprocess.check_call([buildconfig.substs["UNZIP"], package])
+
+
def main(args):
    """Entry point: expects exactly one argument, the package to unzip."""
    if len(args) == 1:
        make_unzip(args[0])
        return 0
    print("Usage: make_unzip.py <package>", file=sys.stderr)
    return 1
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/node.py b/python/mozbuild/mozbuild/action/node.py
new file mode 100644
index 0000000000..fca0745b80
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/node.py
@@ -0,0 +1,137 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import pipes
+import subprocess
+import sys
+
+import buildconfig
+import six
+
# Node scripts runnable from moz.build must be explicitly allowlisted here
# and reviewed by a build peer (see ALLOWLIST_ERROR below).
SCRIPT_ALLOWLIST = [buildconfig.topsrcdir + "/devtools/client/shared/build/build.js"]

# Error message printed when a non-allowlisted script is requested; %s is
# filled with the offending script path.
ALLOWLIST_ERROR = """
%s is not
in SCRIPT_ALLOWLIST in python/mozbuild/mozbuild/action/node.py.
Using NodeJS from moz.build is currently in beta, and node
scripts to be executed need to be added to the allowlist and
reviewed by a build peer so that we can get a better sense of
how support should evolve. (To consult a build peer, raise a
question in the #build channel at https://chat.mozilla.org.)
"""
+
+
def is_script_in_allowlist(script_path):
    """Return True when *script_path* is one of the allowlisted node scripts."""
    # Return the membership test directly instead of an if/return-True/
    # return-False ladder.
    return script_path in SCRIPT_ALLOWLIST
+
+
def execute_node_cmd(node_cmd_list):
    """Execute the given node command list.

    Arguments:
    node_cmd_list -- a list of the command and arguments to be executed

    Returns:
    The set of dependencies which should trigger this command to be re-run.
    This is ultimately returned to the build system for use by the backend
    to ensure that incremental rebuilds happen when any dependency changes.

    The node script is expected to output lines for all of the dependencies
    to stdout, each prefixed by the string "dep:". These lines will make up
    the returned set of dependencies. Any line not so-prefixed will simply be
    printed to stderr instead.
    """

    try:
        printable_cmd = " ".join(pipes.quote(arg) for arg in node_cmd_list)
        print('Executing "{}"'.format(printable_cmd), file=sys.stderr)
        sys.stderr.flush()

        # We need to redirect stderr to a pipe because
        # https://github.com/nodejs/node/issues/14752 causes issues with make.
        proc = subprocess.Popen(
            node_cmd_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE
        )

        stdout, stderr = proc.communicate()
        # communicate() has already waited for the process to exit, so the
        # return code is available without a redundant wait().
        retcode = proc.returncode

        if retcode != 0:
            # Decode before printing so the error output is readable rather
            # than a bytes repr with escaped newlines.
            print(six.ensure_text(stderr), file=sys.stderr)
            sys.stderr.flush()
            sys.exit(retcode)

        # Process the node script output
        #
        # XXX Starting with an empty list means that node scripts can
        # (intentionally or inadvertently) remove deps. Do we want this?
        deps = []
        for line in stdout.splitlines():
            line = six.ensure_text(line)
            if "dep:" in line:
                deps.append(line.replace("dep:", ""))
            else:
                print(line, file=sys.stderr)
                sys.stderr.flush()

        return set(deps)

    except (OSError, subprocess.CalledProcessError) as err:
        # OSError ("[Errno 13] Permission denied", FileNotFoundError) is what
        # actually surfaces when the node executable is missing or not
        # executable; Popen/communicate never raise CalledProcessError, so
        # catching only that (as before) left this friendly message
        # unreachable.
        print(
            """Failed with %s. Be sure to check that your mozconfig doesn't
            have --disable-nodejs in it. If it does, try removing that line and
            building again."""
            % str(err),
            file=sys.stderr,
        )
        sys.exit(1)
+
+
def generate(output, node_script, *files):
    """Call the given node_script to transform the given modules.

    Arguments:
    output -- a dummy file, used by the build system. Can be ignored.
    node_script -- the script to be executed. Must be in the SCRIPT_ALLOWLIST
    files -- files to be transformed, will be passed to the script as arguments

    Returns:
    The set of dependencies which should trigger this command to be re-run.
    This is ultimately returned to the build system for use by the backend
    to ensure that incremental rebuilds happen when any dependency changes.
    """

    node_interpreter = buildconfig.substs.get("NODEJS")
    if not node_interpreter:
        print(
            """NODEJS not set. Be sure to check that your mozconfig doesn't
            have --disable-nodejs in it. If it does, try removing that line
            and building again.""",
            file=sys.stderr,
        )
        sys.exit(1)

    node_script = six.ensure_text(node_script)
    # NOTE(review): ensure_text raises for non-str/bytes inputs, so this
    # isinstance check looks unreachable — confirm before removing.
    if not isinstance(node_script, six.text_type):
        print(
            "moz.build file didn't pass a valid node script name to execute",
            file=sys.stderr,
        )
        sys.exit(1)

    if not is_script_in_allowlist(node_script):
        print(ALLOWLIST_ERROR % (node_script), file=sys.stderr)
        sys.exit(1)

    node_cmd_list = [node_interpreter, node_script]
    node_cmd_list.extend(files)

    return execute_node_cmd(node_cmd_list)
diff --git a/python/mozbuild/mozbuild/action/package_generated_sources.py b/python/mozbuild/mozbuild/action/package_generated_sources.py
new file mode 100644
index 0000000000..d87a75fc6f
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/package_generated_sources.py
@@ -0,0 +1,42 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import sys
+
+import buildconfig
+import mozpack.path as mozpath
+from mozpack.archive import create_tar_gz_from_files
+from mozpack.files import BaseFile
+
+from mozbuild.generated_sources import get_generated_sources
+
+
def main(argv):
    """Archive all in-objdir generated sources into a compressed tarball."""
    parser = argparse.ArgumentParser(description="Produce archive of generated sources")
    parser.add_argument("outputfile", help="File to write output to")
    args = parser.parse_args(argv)

    objdir_abspath = mozpath.abspath(buildconfig.topobjdir)

    def is_valid_entry(entry):
        # Entries may be (name, BaseFile) or (name, path) pairs.
        if isinstance(entry[1], BaseFile):
            entry_abspath = mozpath.abspath(entry[1].path)
        else:
            entry_abspath = mozpath.abspath(entry[1])
        # Only package files that actually live inside the objdir.
        if not entry_abspath.startswith(objdir_abspath):
            print(
                "Warning: omitting generated source [%s] from archive" % entry_abspath,
                file=sys.stderr,
            )
            return False
        return True

    files = dict(filter(is_valid_entry, get_generated_sources()))
    with open(args.outputfile, "wb") as fh:
        create_tar_gz_from_files(fh, files, compresslevel=5)
diff --git a/python/mozbuild/mozbuild/action/preprocessor.py b/python/mozbuild/mozbuild/action/preprocessor.py
new file mode 100644
index 0000000000..c59a05a90b
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/preprocessor.py
@@ -0,0 +1,24 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import sys
+
+from mozbuild.action.util import log_build_task
+from mozbuild.preprocessor import Preprocessor
+
+
def generate(output, *args):
    """Preprocess according to the command line in *args*, writing to *output*.

    Returns the set of files included during preprocessing, used by the
    build backend as dependencies for incremental rebuilds.
    """
    pp = Preprocessor()
    pp.out = output
    pp.handleCommandLine(list(args), True)
    return set(pp.includes)
+
+
def main(args):
    """Command-line entry point: run the Preprocessor on the given arguments."""
    pp = Preprocessor()
    pp.handleCommandLine(args, True)
+
+
+if __name__ == "__main__":
+ log_build_task(main, sys.argv[1:])
diff --git a/python/mozbuild/mozbuild/action/process_define_files.py b/python/mozbuild/mozbuild/action/process_define_files.py
new file mode 100644
index 0000000000..d775b52b57
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/process_define_files.py
@@ -0,0 +1,115 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import os
+import re
+import sys
+
+import mozpack.path as mozpath
+from buildconfig import topobjdir, topsrcdir
+
+from mozbuild.backend.configenvironment import PartialConfigEnvironment
+
+
def process_define_file(output, input):
    """Creates the given config header. A config header is generated by
    taking the corresponding source file and replacing some *#define/#undef*
    occurences:

    - "#undef NAME" is turned into "#define NAME VALUE"
    - "#define NAME" is unchanged
    - "#define NAME ORIGINAL_VALUE" is turned into "#define NAME VALUE"
    - "#undef UNKNOWN_NAME" is turned into "/* #undef UNKNOWN_NAME */"
    - Whitespaces are preserved.

    As a special rule, "#undef ALLDEFINES" is turned into "#define NAME
    VALUE" for all the defined variables.

    Returns the set of files this output depends on.
    """

    path = os.path.abspath(input)

    config = PartialConfigEnvironment(topobjdir)

    # Non-standalone js/src builds use their own config environment.
    if mozpath.basedir(
        path, [mozpath.join(topsrcdir, "js/src")]
    ) and not config.substs.get("JS_STANDALONE"):
        config = PartialConfigEnvironment(mozpath.join(topobjdir, "js", "src"))

    # `input_file` rather than rebinding the `input` parameter (which also
    # shadows the builtin).
    with open(path, "r") as input_file:
        # Raw string: avoids invalid-escape warnings for \s and \S.
        r = re.compile(
            r"^\s*#\s*(?P<cmd>[a-z]+)(?:\s+(?P<name>\S+)(?:\s+(?P<value>\S+))?)?", re.U
        )
        for line in input_file:
            m = r.match(line)
            if m:
                cmd = m.group("cmd")
                name = m.group("name")
                value = m.group("value")
                if name:
                    if name == "ALLDEFINES":
                        if cmd == "define":
                            raise Exception(
                                "`#define ALLDEFINES` is not allowed in a "
                                "CONFIGURE_DEFINE_FILE"
                            )

                        def define_for_name(name, val):
                            """WebRTC files like to define WINVER and _WIN32_WINNT
                            via the command line, which raises a mass of macro
                            redefinition warnings. Just handle those macros
                            specially here."""
                            define = "#define {name} {val}".format(name=name, val=val)
                            if name in ("_WIN32_IE", "_WIN32_WINNT", "WIN32", "WINVER"):
                                return "#if !defined({name})\n{define}\n#endif".format(
                                    name=name, define=define
                                )
                            return define

                        defines = "\n".join(
                            sorted(
                                define_for_name(name, val)
                                for name, val in config.defines["ALLDEFINES"].items()
                            )
                        )
                        # Splice the expanded defines over "#undef ALLDEFINES".
                        line = (
                            line[: m.start("cmd") - 1] + defines + line[m.end("name") :]
                        )
                    elif cmd == "define":
                        if value and name in config.defines:
                            line = (
                                line[: m.start("value")]
                                + str(config.defines[name])
                                + line[m.end("value") :]
                            )
                    elif cmd == "undef":
                        if name in config.defines:
                            # "#undef NAME" -> "#define NAME VALUE".
                            line = (
                                line[: m.start("cmd")]
                                + "define"
                                + line[m.end("cmd") : m.end("name")]
                                + " "
                                + str(config.defines[name])
                                + line[m.end("name") :]
                            )
                        else:
                            # Unknown names are commented out.
                            line = (
                                "/* " + line[: m.end("name")] + " */" + line[m.end("name") :]
                            )

            output.write(line)

    deps = {path}
    deps.update(config.get_dependencies())
    return deps
+
+
def main(argv):
    """Generate a config header from the given define file onto stdout."""
    parser = argparse.ArgumentParser(description="Process define files.")
    parser.add_argument("input", help="Input define file.")
    opts = parser.parse_args(argv)
    return process_define_file(sys.stdout, opts.input)
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/process_install_manifest.py b/python/mozbuild/mozbuild/action/process_install_manifest.py
new file mode 100644
index 0000000000..faf1376dba
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/process_install_manifest.py
@@ -0,0 +1,125 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import os
+import sys
+import time
+
+from mozpack.copier import FileCopier, FileRegistry
+from mozpack.errors import errors
+from mozpack.files import BaseFile, FileFinder
+from mozpack.manifests import InstallManifest
+
+from mozbuild.action.util import log_build_task
+from mozbuild.util import DefinesAction
+
# Template for the summary line printed by main() after a copy completes.
COMPLETE = (
    "Elapsed: {elapsed:.2f}s; From {dest}: Kept {existing} existing; "
    "Added/updated {updated}; "
    "Removed {rm_files} files and {rm_dirs} directories."
)
+
+
def process_manifest(destdir, paths, track, no_symlinks=False, defines={}):
    # NOTE(review): mutable default for `defines` — safe only as long as
    # nothing mutates it downstream; consider a None sentinel.

    if os.path.exists(track):
        # We use the same format as install manifests for the tracking
        # data.
        manifest = InstallManifest(path=track)
        remove_unaccounted = FileRegistry()
        dummy_file = BaseFile()

        # Register everything previously installed so stale files can be
        # removed by the copy below.
        finder = FileFinder(destdir, find_dotfiles=True)
        for dest in manifest._dests:
            for p, f in finder.find(dest):
                remove_unaccounted.add(p, dummy_file)

        remove_empty_directories = True
        remove_all_directory_symlinks = True

    else:
        # If tracking is enabled and there is no file, we don't want to
        # be removing anything.
        remove_unaccounted = False
        remove_empty_directories = False
        remove_all_directory_symlinks = False

    # Merge all given manifests into one.
    manifest = InstallManifest()
    for path in paths:
        manifest |= InstallManifest(path=path)

    copier = FileCopier()
    link_policy = "copy" if no_symlinks else "symlink"
    manifest.populate_registry(
        copier, defines_override=defines, link_policy=link_policy
    )
    with errors.accumulate():
        result = copier.copy(
            destdir,
            remove_unaccounted=remove_unaccounted,
            remove_all_directory_symlinks=remove_all_directory_symlinks,
            remove_empty_directories=remove_empty_directories,
        )

    if track:
        # We should record files that we actually copied.
        # It is too late to expand wildcards when the track file is read.
        manifest.write(path=track, expand_pattern=True)

    return result
+
+
def main(argv):
    """Copy install manifests into a destination directory and print a
    summary of what changed."""
    parser = argparse.ArgumentParser(description="Process install manifest files.")

    parser.add_argument("destdir", help="Destination directory.")
    parser.add_argument("manifests", nargs="+", help="Path to manifest file(s).")
    parser.add_argument(
        "--no-symlinks",
        action="store_true",
        help="Do not install symbolic links. Always copy files",
    )
    parser.add_argument(
        "--track",
        metavar="PATH",
        required=True,
        help="Use installed files tracking information from the given path.",
    )
    parser.add_argument(
        "-D",
        action=DefinesAction,
        dest="defines",
        metavar="VAR[=VAL]",
        help="Define a variable to override what is specified in the manifest",
    )

    opts = parser.parse_args(argv)

    started = time.monotonic()
    outcome = process_manifest(
        opts.destdir,
        opts.manifests,
        track=opts.track,
        no_symlinks=opts.no_symlinks,
        defines=opts.defines,
    )

    print(
        COMPLETE.format(
            elapsed=time.monotonic() - started,
            dest=opts.destdir,
            existing=outcome.existing_files_count,
            updated=outcome.updated_files_count,
            rm_files=outcome.removed_files_count,
            rm_dirs=outcome.removed_directories_count,
        )
    )
diff --git a/python/mozbuild/mozbuild/action/symbols_archive.py b/python/mozbuild/mozbuild/action/symbols_archive.py
new file mode 100644
index 0000000000..75ecb71d17
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/symbols_archive.py
@@ -0,0 +1,89 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import os
+import sys
+
+import mozpack.path as mozpath
+from mozpack.files import FileFinder
+
+
def make_archive(archive_name, base, exclude, include):
    """Package files found under *base* into the archive at *archive_name*.

    *include* and *exclude* are lists of match patterns; an empty *include*
    means "everything".  The archive format is chosen from the file name:
    ``.zip`` or ``.tar.zst``; anything else raises.
    """
    # Only symbol files are worth compressing inside a .zip archive.
    compress_patterns = ["**/*.sym"]
    finder = FileFinder(base, ignore=exclude)
    patterns = include or ["*"]
    basename = os.path.basename(archive_name)

    def emit_entries(sink):
        # Walk every matching file once and hand it to the writer callback.
        for pattern in patterns:
            for relpath, fileobj in finder.find(pattern):
                print(' Adding to "%s":\n\t"%s"' % (basename, relpath))
                sink(relpath, fileobj)

    with open(archive_name, "wb") as fh:
        if basename.endswith(".zip"):
            from mozpack.mozjar import JarWriter

            with JarWriter(fileobj=fh, compress_level=5) as writer:

                def sink(relpath, fileobj):
                    wants_compression = any(
                        mozpath.match(relpath, pattern)
                        for pattern in compress_patterns
                    )
                    writer.add(
                        relpath.encode("utf-8"),
                        fileobj,
                        mode=fileobj.mode,
                        compress=wants_compression,
                        skip_duplicates=True,
                    )

                emit_entries(sink)
        elif basename.endswith(".tar.zst"):
            import tarfile

            import zstandard

            # Stream the tar through a multi-threaded zstd compressor.
            ctx = zstandard.ZstdCompressor(threads=-1)
            with ctx.stream_writer(fh) as zstdwriter:
                with tarfile.open(
                    mode="w|", fileobj=zstdwriter, bufsize=1024 * 1024
                ) as tar:

                    def sink(relpath, fileobj):
                        info = tar.gettarinfo(os.path.join(base, relpath), relpath)
                        tar.addfile(info, fileobj.open())

                    emit_entries(sink)
        else:
            raise Exception(
                "Unsupported archive format for {}".format(basename)
            )
+
+
def main(argv):
    """Parse command-line options and produce the requested symbols archive."""
    parser = argparse.ArgumentParser(description="Produce a symbols archive")
    parser.add_argument("archive", help="Which archive to generate")
    parser.add_argument("base", help="Base directory to package")
    parser.add_argument(
        "--full-archive", action="store_true", help="Generate a full symbol archive"
    )

    options = parser.parse_args(argv)

    if options.full_archive:
        includes = []
        # We allow symbols for tests to be included when building on try
        on_try = os.environ.get("MH_BRANCH", "unknown") == "try"
        excludes = [] if on_try else ["*test*", "*Test*"]
    else:
        # Partial archive: only the .sym files themselves.
        excludes = []
        includes = ["**/*.sym"]

    make_archive(options.archive, options.base, excludes, includes)
+
+
+if __name__ == "__main__":
+ main(sys.argv[1:])
diff --git a/python/mozbuild/mozbuild/action/test_archive.py b/python/mozbuild/mozbuild/action/test_archive.py
new file mode 100644
index 0000000000..06fef60f8d
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/test_archive.py
@@ -0,0 +1,875 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This action is used to produce test archives.
+#
+# Ideally, the data in this file should be defined in moz.build files.
+# It is defined inline because this was easiest to make test archive
+# generation faster.
+
+import argparse
+import itertools
+import os
+import sys
+import time
+
+import buildconfig
+import mozpack.path as mozpath
+from manifestparser import TestManifest
+from mozpack.archive import create_tar_gz_from_files
+from mozpack.copier import FileRegistry
+from mozpack.files import ExistingFile, FileFinder
+from mozpack.manifests import InstallManifest
+from mozpack.mozjar import JarWriter
+from reftest import ReftestManifest
+
+from mozbuild.util import ensureParentDir
+
# Staging directory from which test archives are packaged.
STAGE = mozpath.join(buildconfig.topobjdir, "dist", "test-stage")

# Test harness executables shipped from dist/bin; the platform BIN_SUFFIX is
# appended where these names are used below.
TEST_HARNESS_BINS = [
    "BadCertAndPinningServer",
    "DelegatedCredentialsServer",
    "EncryptedClientHelloServer",
    "FaultyServer",
    "GenerateOCSPResponse",
    "OCSPStaplingServer",
    "SanctionsTestServer",
    "SmokeDMD",
    "certutil",
    "crashinject",
    "geckodriver",
    "http3server",
    "minidumpwriter",
    "pk12util",
    "screenshot",
    "screentopng",
    "ssltunnel",
    "xpcshell",
]

# Shared libraries shipped alongside the harness binaries; DLL_PREFIX and
# DLL_SUFFIX are applied where these names are used below.
TEST_HARNESS_DLLS = ["crashinjectdll", "mozglue"]

# Fake GMP (media plugin) directories packaged for tests.
GMP_TEST_PLUGIN_DIRS = ["gmp-fake/**", "gmp-fakeopenh264/**"]

# These entries will be used by artifact builds to re-construct an
# objdir with the appropriate generated support files.
OBJDIR_TEST_FILES = {
    "xpcshell": {
        "source": buildconfig.topobjdir,
        "base": "_tests/xpcshell",
        "pattern": "**",
        "dest": "xpcshell/tests",
    },
    "mochitest": {
        "source": buildconfig.topobjdir,
        "base": "_tests/testing",
        "pattern": "mochitest/**",
    },
}
+
+
+ARCHIVE_FILES = {
+ "common": [
+ {
+ "source": STAGE,
+ "base": "",
+ "pattern": "**",
+ "ignore": [
+ "cppunittest/**",
+ "condprof/**",
+ "gtest/**",
+ "mochitest/**",
+ "reftest/**",
+ "talos/**",
+ "raptor/**",
+ "awsy/**",
+ "web-platform/**",
+ "xpcshell/**",
+ "updater-dep/**",
+ "jsreftest/**",
+ "jit-test/**",
+ "jittest/**", # To make the ignore checker happy
+ "perftests/**",
+ "fuzztest/**",
+ ],
+ },
+ {"source": buildconfig.topobjdir, "base": "_tests", "pattern": "modules/**"},
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/marionette",
+ "patterns": ["client/**", "harness/**", "mach_test_package_commands.py"],
+ "dest": "marionette",
+ "ignore": ["client/docs", "harness/marionette_harness/tests"],
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "",
+ "manifests": [
+ "testing/marionette/harness/marionette_harness/tests/unit-tests.ini"
+ ],
+ # We also need the manifests and harness_unit tests
+ "pattern": "testing/marionette/harness/marionette_harness/tests/**",
+ "dest": "marionette/tests",
+ },
+ {"source": buildconfig.topobjdir, "base": "_tests", "pattern": "mozbase/**"},
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing",
+ "pattern": "firefox-ui/**",
+ "ignore": ["firefox-ui/tests"],
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "",
+ "pattern": "testing/firefox-ui/tests",
+ "dest": "firefox-ui/tests",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "toolkit/components/telemetry/tests/marionette",
+ "pattern": "/**",
+ "dest": "telemetry/marionette",
+ },
+ {"source": buildconfig.topsrcdir, "base": "testing", "pattern": "tps/**"},
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "services/sync/",
+ "pattern": "tps/**",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "services/sync/tests/tps",
+ "pattern": "**",
+ "dest": "tps/tests",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/web-platform/tests/tools/wptserve",
+ "pattern": "**",
+ "dest": "tools/wptserve",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/web-platform/tests/tools/third_party",
+ "pattern": "**",
+ "dest": "tools/wpt_third_party",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "python/mozterm",
+ "pattern": "**",
+ "dest": "tools/mozterm",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "xpcom/geckoprocesstypes_generator",
+ "pattern": "**",
+ "dest": "tools/geckoprocesstypes_generator",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "third_party/python/six",
+ "pattern": "**",
+ "dest": "tools/six",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "third_party/python/distro",
+ "pattern": "**",
+ "dest": "tools/distro",
+ },
+ {"source": buildconfig.topobjdir, "base": "", "pattern": "mozinfo.json"},
+ {
+ "source": buildconfig.topobjdir,
+ "base": "dist/bin",
+ "patterns": [
+ "%s%s" % (f, buildconfig.substs["BIN_SUFFIX"])
+ for f in TEST_HARNESS_BINS
+ ]
+ + [
+ "%s%s%s"
+ % (
+ buildconfig.substs["DLL_PREFIX"],
+ f,
+ buildconfig.substs["DLL_SUFFIX"],
+ )
+ for f in TEST_HARNESS_DLLS
+ ],
+ "dest": "bin",
+ },
+ {
+ "source": buildconfig.topobjdir,
+ "base": "dist/bin",
+ "patterns": GMP_TEST_PLUGIN_DIRS,
+ "dest": "bin/plugins",
+ },
+ {
+ "source": buildconfig.topobjdir,
+ "base": "dist/bin",
+ "patterns": ["dmd.py", "fix_stacks.py"],
+ "dest": "bin",
+ },
+ {
+ "source": buildconfig.topobjdir,
+ "base": "dist/bin/components",
+ "patterns": ["httpd.js"],
+ "dest": "bin/components",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "build/pgo/certs",
+ "pattern": "**",
+ "dest": "certs",
+ },
+ ],
+ "cppunittest": [
+ {"source": STAGE, "base": "", "pattern": "cppunittest/**"},
+ # We don't ship these files if startup cache is disabled, which is
+ # rare. But it shouldn't matter for test archives.
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "startupcache/test",
+ "pattern": "TestStartupCacheTelemetry.*",
+ "dest": "cppunittest",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing",
+ "pattern": "runcppunittests.py",
+ "dest": "cppunittest",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing",
+ "pattern": "remotecppunittests.py",
+ "dest": "cppunittest",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing",
+ "pattern": "cppunittest.ini",
+ "dest": "cppunittest",
+ },
+ {
+ "source": buildconfig.topobjdir,
+ "base": "",
+ "pattern": "mozinfo.json",
+ "dest": "cppunittest",
+ },
+ ],
+ "gtest": [{"source": STAGE, "base": "", "pattern": "gtest/**"}],
+ "mochitest": [
+ OBJDIR_TEST_FILES["mochitest"],
+ {
+ "source": buildconfig.topobjdir,
+ "base": "_tests/testing",
+ "pattern": "mochitest/**",
+ },
+ {"source": STAGE, "base": "", "pattern": "mochitest/**"},
+ {
+ "source": buildconfig.topobjdir,
+ "base": "",
+ "pattern": "mozinfo.json",
+ "dest": "mochitest",
+ },
+ {
+ "source": buildconfig.topobjdir,
+ "base": "dist/xpi-stage",
+ "pattern": "mochijar/**",
+ "dest": "mochitest",
+ },
+ {
+ "source": buildconfig.topobjdir,
+ "base": "dist/xpi-stage",
+ "pattern": "specialpowers/**",
+ "dest": "mochitest/extensions",
+ },
+ ],
+ "mozharness": [
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/mozharness",
+ "pattern": "**",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "",
+ "pattern": "third_party/python/_venv/**",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/mozbase/manifestparser",
+ "pattern": "manifestparser/**",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/mozbase/mozfile",
+ "pattern": "mozfile/**",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/mozbase/mozinfo",
+ "pattern": "mozinfo/**",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/mozbase/mozprocess",
+ "pattern": "mozprocess/**",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "third_party/python/six",
+ "pattern": "six.py",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "third_party/python/distro",
+ "pattern": "distro.py",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "third_party/python/packaging",
+ "pattern": "**",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "python/mozbuild/mozbuild/action",
+ "pattern": "tooltool.py",
+ "dest": "external_tools",
+ },
+ ],
+ "reftest": [
+ {"source": buildconfig.topobjdir, "base": "_tests", "pattern": "reftest/**"},
+ {
+ "source": buildconfig.topobjdir,
+ "base": "",
+ "pattern": "mozinfo.json",
+ "dest": "reftest",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "",
+ "manifests": [
+ "layout/reftests/reftest.list",
+ "layout/reftests/reftest-qr.list",
+ "testing/crashtest/crashtests.list",
+ ],
+ "dest": "reftest/tests",
+ },
+ {
+ "source": buildconfig.topobjdir,
+ "base": "dist/xpi-stage",
+ "pattern": "reftest/**",
+ "dest": "reftest",
+ },
+ {
+ "source": buildconfig.topobjdir,
+ "base": "dist/xpi-stage",
+ "pattern": "specialpowers/**",
+ "dest": "reftest",
+ },
+ ],
+ "talos": [
+ {"source": buildconfig.topsrcdir, "base": "testing", "pattern": "talos/**"},
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/profiles",
+ "pattern": "**",
+ "dest": "talos/talos/profile_data",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "third_party/webkit/PerformanceTests",
+ "pattern": "**",
+ "dest": "talos/talos/tests/webkit/PerformanceTests/",
+ },
+ ],
+ "perftests": [
+ {"source": buildconfig.topsrcdir, "pattern": "testing/mozbase/**"},
+ {"source": buildconfig.topsrcdir, "pattern": "testing/condprofile/**"},
+ {"source": buildconfig.topsrcdir, "pattern": "testing/performance/**"},
+ {"source": buildconfig.topsrcdir, "pattern": "third_party/python/**"},
+ {"source": buildconfig.topsrcdir, "pattern": "tools/lint/eslint/**"},
+ {"source": buildconfig.topsrcdir, "pattern": "**/perftest_*.js"},
+ {"source": buildconfig.topsrcdir, "pattern": "**/hooks_*py"},
+ {"source": buildconfig.topsrcdir, "pattern": "build/autoconf/**"},
+ {"source": buildconfig.topsrcdir, "pattern": "build/moz.configure/**"},
+ {"source": buildconfig.topsrcdir, "pattern": "python/**"},
+ {"source": buildconfig.topsrcdir, "pattern": "build/mach_initialize.py"},
+ {
+ "source": buildconfig.topsrcdir,
+ "pattern": "python/sites/build.txt",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "pattern": "python/sites/common.txt",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "pattern": "python/sites/mach.txt",
+ },
+ {"source": buildconfig.topsrcdir, "pattern": "mach/**"},
+ {
+ "source": buildconfig.topsrcdir,
+ "pattern": "testing/web-platform/tests/tools/third_party/certifi/**",
+ },
+ {"source": buildconfig.topsrcdir, "pattern": "testing/mozharness/**"},
+ {"source": buildconfig.topsrcdir, "pattern": "browser/config/**"},
+ {
+ "source": buildconfig.topobjdir,
+ "base": "_tests/modules",
+ "pattern": "**",
+ "dest": "bin/modules",
+ },
+ {
+ "source": buildconfig.topobjdir,
+ "base": "dist/bin",
+ "patterns": [
+ "browser/**",
+ "chrome/**",
+ "chrome.manifest",
+ "components/**",
+ "http3server",
+ "*.ini",
+ "localization/**",
+ "modules/**",
+ "update.locale",
+ "greprefs.js",
+ ],
+ "dest": "bin",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "netwerk/test/http3serverDB",
+ "pattern": "**",
+ "dest": "netwerk/test/http3serverDB",
+ },
+ ],
+ "condprof": [
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing",
+ "pattern": "condprofile/**",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/mozbase/mozfile",
+ "pattern": "**",
+ "dest": "condprofile/mozfile",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/mozbase/mozprofile",
+ "pattern": "**",
+ "dest": "condprofile/mozprofile",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/mozbase/mozdevice",
+ "pattern": "**",
+ "dest": "condprofile/mozdevice",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/mozbase/mozlog",
+ "pattern": "**",
+ "dest": "condprofile/mozlog",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "third_party/python/virtualenv",
+ "pattern": "**",
+ "dest": "condprofile/virtualenv",
+ },
+ ],
+ "raptor": [
+ {"source": buildconfig.topsrcdir, "base": "testing", "pattern": "raptor/**"},
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/profiles",
+ "pattern": "**",
+ "dest": "raptor/raptor/profile_data",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "third_party/webkit/PerformanceTests",
+ "pattern": "**",
+ "dest": "raptor/raptor/tests/webkit/PerformanceTests/",
+ },
+ ],
+ "awsy": [
+ {"source": buildconfig.topsrcdir, "base": "testing", "pattern": "awsy/**"}
+ ],
+ "web-platform": [
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing",
+ "pattern": "web-platform/meta/**",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing",
+ "pattern": "web-platform/mozilla/**",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing",
+ "pattern": "web-platform/tests/**",
+ "ignore": ["web-platform/tests/tools/wpt_third_party"],
+ },
+ {
+ "source": buildconfig.topobjdir,
+ "base": "_tests",
+ "pattern": "web-platform/**",
+ },
+ {
+ "source": buildconfig.topobjdir,
+ "base": "",
+ "pattern": "mozinfo.json",
+ "dest": "web-platform",
+ },
+ ],
+ "xpcshell": [
+ OBJDIR_TEST_FILES["xpcshell"],
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/xpcshell",
+ "patterns": [
+ "head.js",
+ "mach_test_package_commands.py",
+ "moz-http2/**",
+ "node-http2/**",
+ "node_ip/**",
+ "node-ws/**",
+ "dns-packet/**",
+ "remotexpcshelltests.py",
+ "runxpcshelltests.py",
+ "selftest.py",
+ "xpcshellcommandline.py",
+ ],
+ "dest": "xpcshell",
+ },
+ {"source": STAGE, "base": "", "pattern": "xpcshell/**"},
+ {
+ "source": buildconfig.topobjdir,
+ "base": "",
+ "pattern": "mozinfo.json",
+ "dest": "xpcshell",
+ },
+ {
+ "source": buildconfig.topobjdir,
+ "base": "build",
+ "pattern": "automation.py",
+ "dest": "xpcshell",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "testing/profiles",
+ "pattern": "**",
+ "dest": "xpcshell/profile_data",
+ },
+ {
+ "source": buildconfig.topobjdir,
+ "base": "dist/bin",
+ "pattern": "http3server%s" % buildconfig.substs["BIN_SUFFIX"],
+ "dest": "xpcshell/http3server",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "netwerk/test/http3serverDB",
+ "pattern": "**",
+ "dest": "xpcshell/http3server/http3serverDB",
+ },
+ ],
+ "updater-dep": [
+ {
+ "source": buildconfig.topobjdir,
+ "base": "_tests/updater-dep",
+ "pattern": "**",
+ "dest": "updater-dep",
+ },
+ # Required by the updater on Linux
+ {
+ "source": buildconfig.topobjdir,
+ "base": "config/external/sqlite",
+ "pattern": "libmozsqlite3.so",
+ "dest": "updater-dep",
+ },
+ ],
+ "jsreftest": [{"source": STAGE, "base": "", "pattern": "jsreftest/**"}],
+ "fuzztest": [
+ {"source": buildconfig.topsrcdir, "pattern": "tools/fuzzing/smoke/**"}
+ ],
+ "jittest": [
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "js/src",
+ "pattern": "jit-test/**",
+ "dest": "jit-test",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "js/src/tests",
+ "pattern": "non262/shell.js",
+ "dest": "jit-test/tests",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "js/src/tests",
+ "pattern": "non262/Math/shell.js",
+ "dest": "jit-test/tests",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "js/src/tests",
+ "pattern": "non262/reflect-parse/Match.js",
+ "dest": "jit-test/tests",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "js/src/tests",
+ "pattern": "lib/**",
+ "dest": "jit-test/tests",
+ },
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "js/src",
+ "pattern": "jsapi.h",
+ "dest": "jit-test",
+ },
+ ],
+}
+
+if buildconfig.substs.get("MOZ_CODE_COVERAGE"):
+ ARCHIVE_FILES["common"].append(
+ {
+ "source": buildconfig.topsrcdir,
+ "base": "python/mozbuild/",
+ "patterns": ["mozpack/**", "mozbuild/codecoverage/**"],
+ }
+ )
+
+
+if buildconfig.substs.get("MOZ_ASAN") and buildconfig.substs.get("CLANG_CL"):
+ asan_dll = {
+ "source": buildconfig.topobjdir,
+ "base": "dist/bin",
+ "pattern": os.path.basename(buildconfig.substs["MOZ_CLANG_RT_ASAN_LIB_PATH"]),
+ "dest": "bin",
+ }
+ ARCHIVE_FILES["common"].append(asan_dll)
+
+
+if buildconfig.substs.get("commtopsrcdir"):
+ commtopsrcdir = buildconfig.substs.get("commtopsrcdir")
+ mozharness_comm = {
+ "source": commtopsrcdir,
+ "base": "testing/mozharness",
+ "pattern": "**",
+ }
+ ARCHIVE_FILES["mozharness"].append(mozharness_comm)
+ marionette_comm = {
+ "source": commtopsrcdir,
+ "base": "",
+ "manifest": "testing/marionette/unit-tests.ini",
+ "dest": "marionette/tests/comm",
+ }
+ ARCHIVE_FILES["common"].append(marionette_comm)
+ thunderbirdinstance = {
+ "source": commtopsrcdir,
+ "base": "testing/marionette",
+ "pattern": "thunderbirdinstance.py",
+ "dest": "marionette/client/marionette_driver",
+ }
+ ARCHIVE_FILES["common"].append(thunderbirdinstance)
+
+
+# "common" is our catch all archive and it ignores things from other archives.
+# Verify nothing sneaks into ARCHIVE_FILES without a corresponding exclusion
+# rule in the "common" archive.
+for k, v in ARCHIVE_FILES.items():
+ # Skip mozharness because it isn't staged.
+ if k in ("common", "mozharness"):
+ continue
+
+ ignores = set(
+ itertools.chain(*(e.get("ignore", []) for e in ARCHIVE_FILES["common"]))
+ )
+
+ if not any(p.startswith("%s/" % k) for p in ignores):
+ raise Exception('"common" ignore list probably should contain %s' % k)
+
+
def find_generated_harness_files():
    """Return objdir-relative paths (under ``_tests``) of the generated
    TEST_HARNESS_FILES recorded in the _tests install manifest."""
    # TEST_HARNESS_FILES end up in an install manifest at
    # $topsrcdir/_build_manifests/install/_tests.
    manifest_path = mozpath.join(
        buildconfig.topobjdir, "_build_manifests", "install", "_tests"
    )
    registry = FileRegistry()
    InstallManifest(manifest_path).populate_registry(registry)
    # Conveniently, the generated files we care about will already
    # exist in the objdir, so we can identify relevant files if
    # they're an `ExistingFile` instance.
    generated = []
    for path in registry.paths():
        if isinstance(registry[path], ExistingFile):
            generated.append(mozpath.join("_tests", path))
    return generated
+
+
def find_files(archive):
    """Yield (archive-relative path, File) pairs for every file belonging to
    *archive*, driven by the ARCHIVE_FILES entries (plus extra entries for
    generated harness files when building "common").

    NOTE(review): for "common" this mutates the module-level
    OBJDIR_TEST_FILES entries in place (``del entry["pattern"]`` and adding
    ``entry["patterns"]``), so it appears to assume one call per process —
    confirm before calling repeatedly.
    """
    extra_entries = []
    generated_harness_files = find_generated_harness_files()

    if archive == "common":
        # Construct entries ensuring all our generated harness files are
        # packaged in the common tests archive.
        packaged_paths = set()
        for entry in OBJDIR_TEST_FILES.values():
            pat = mozpath.join(entry["base"], entry["pattern"])
            del entry["pattern"]
            patterns = []
            for path in generated_harness_files:
                if mozpath.match(path, pat):
                    # Strip the entry's base (plus separator) from the path.
                    patterns.append(path[len(entry["base"]) + 1 :])
                    packaged_paths.add(path)
            if patterns:
                entry["patterns"] = patterns
                extra_entries.append(entry)
        # Catch-all entry for generated files not claimed by any
        # OBJDIR_TEST_FILES entry above.
        entry = {"source": buildconfig.topobjdir, "base": "_tests", "patterns": []}
        for path in set(generated_harness_files) - packaged_paths:
            entry["patterns"].append(path[len("_tests") + 1 :])
        extra_entries.append(entry)

    for entry in ARCHIVE_FILES[archive] + extra_entries:
        source = entry["source"]
        dest = entry.get("dest")
        base = entry.get("base", "")

        # Singular "pattern" and plural "patterns" are both accepted.
        pattern = entry.get("pattern")
        patterns = entry.get("patterns", [])
        if pattern:
            patterns.append(pattern)

        # Likewise "manifest"/"manifests": expand each manifest into
        # directory patterns covering the tests it references.
        manifest = entry.get("manifest")
        manifests = entry.get("manifests", [])
        if manifest:
            manifests.append(manifest)
        if manifests:
            dirs = find_manifest_dirs(os.path.join(source, base), manifests)
            patterns.extend({"{}/**".format(d) for d in dirs})

        ignore = list(entry.get("ignore", []))
        ignore.extend(["**/.flake8", "**/.mkdir.done", "**/*.pyc"])

        if archive not in ("common", "updater-dep") and base.startswith("_tests"):
            # We may have generated_harness_files to exclude from this entry.
            for path in generated_harness_files:
                if path.startswith(base):
                    ignore.append(path[len(base) + 1 :])

        common_kwargs = {"find_dotfiles": True, "ignore": ignore}

        finder = FileFinder(os.path.join(source, base), **common_kwargs)

        for pattern in patterns:
            for p, f in finder.find(pattern):
                if dest:
                    p = mozpath.join(dest, p)
                yield p, f
+
+
def find_manifest_dirs(topsrcdir, manifests):
    """Routine to retrieve directories specified in a manifest, relative to topsrcdir.

    It does not recurse into manifests, as we currently have no need for that.
    Supports manifestparser ``.ini`` manifests and reftest ``.list`` manifests.
    """
    dirs = set()

    for relpath in manifests:
        full = os.path.join(topsrcdir, relpath)

        if full.endswith(".ini"):
            test_manifest = TestManifest()
            test_manifest.read(full)
            dirs.update(os.path.dirname(m) for m in test_manifest.manifests())
        elif full.endswith(".list"):
            reftest = ReftestManifest()
            reftest.load(full)
            dirs |= reftest.dirs
        else:
            raise Exception(
                '"{}" is not a supported manifest format.'.format(
                    os.path.splitext(full)[1]
                )
            )

    # Make every directory topsrcdir-relative.
    dirs = {mozpath.normpath(d[len(topsrcdir) :]).lstrip("/") for d in dirs}

    # Filter out children captured by parent directories because duplicates
    # will confuse things later on.
    def ancestors(path):
        while True:
            path = mozpath.dirname(path)
            if not path:
                break
            yield path

    kept = set()
    # Shortest paths first guarantees a parent is seen before its children.
    for d in sorted(dirs, key=len):
        if not any(a in kept for a in ancestors(d)):
            kept.add(d)

    return sorted(kept)
+
+
def main(argv):
    """Generate one test archive (.tar.gz or .zip) and print a summary line."""
    parser = argparse.ArgumentParser(description="Produce test archives")
    parser.add_argument("archive", help="Which archive to generate")
    parser.add_argument("outputfile", help="File to write output to")

    args = parser.parse_args(argv)

    out_file = args.outputfile
    if not out_file.endswith((".tar.gz", ".zip")):
        raise Exception("expected tar.gz or zip output file")

    started = time.monotonic()
    ensureParentDir(out_file)
    entries = find_files(args.archive)
    file_count = 0
    with open(out_file, "wb") as fh:
        # Experimentation revealed that level 5 is significantly faster and has
        # marginally larger sizes than higher values and is the sweet spot
        # for optimal compression. Read the detailed commit message that
        # introduced this for raw numbers.
        if out_file.endswith(".tar.gz"):
            files = dict(entries)
            create_tar_gz_from_files(fh, files, compresslevel=5)
            file_count = len(files)
        elif out_file.endswith(".zip"):
            with JarWriter(fileobj=fh, compress_level=5) as writer:
                for relpath, fileobj in entries:
                    writer.add(
                        relpath.encode("utf-8"),
                        fileobj.read(),
                        mode=fileobj.mode,
                        skip_duplicates=True,
                    )
                    file_count += 1
        else:
            raise Exception("unhandled file extension: %s" % out_file)

    duration = time.monotonic() - started
    print(
        "Wrote %d files in %d bytes to %s in %.2fs"
        % (
            file_count,
            os.path.getsize(args.outputfile),
            os.path.basename(args.outputfile),
            duration,
        )
    )
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/tooltool.py b/python/mozbuild/mozbuild/action/tooltool.py
new file mode 100755
index 0000000000..002360cd65
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/tooltool.py
@@ -0,0 +1,1714 @@
+#!/usr/bin/env python3
+
+# tooltool is a lookaside cache implemented in Python
+# Copyright (C) 2011 John H. Ford <john@johnford.info>
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation version 2
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
+# 02110-1301, USA.
+
+# A manifest file specifies files in that directory that are stored
+# elsewhere. This file should only list files in the same directory
+# in which the manifest file resides and it should be called
+# 'manifest.tt'
+
+import base64
+import calendar
+import hashlib
+import hmac
+import json
+import logging
+import math
+import optparse
+import os
+import pprint
+import re
+import shutil
+import ssl
+import stat
+import sys
+import tarfile
+import tempfile
+import threading
+import time
+import zipfile
+from contextlib import closing, contextmanager
+from functools import wraps
+from io import BytesIO, open
+from random import random
+from subprocess import PIPE, Popen
+
+if os.name == "nt":
+ import certifi
+
+__version__ = "1.4.0"
+
+# Allowed request header characters:
+# !#$%&'()*+,-./:;<=>?@[]^_`{|}~ and space, a-z, A-Z, 0-9, \, "
+REQUEST_HEADER_ATTRIBUTE_CHARS = re.compile(
+ r"^[ a-zA-Z0-9_\!#\$%&'\(\)\*\+,\-\./\:;<\=>\?@\[\]\^`\{\|\}~]*$"
+)
+DEFAULT_MANIFEST_NAME = "manifest.tt"
+TOOLTOOL_PACKAGE_SUFFIX = ".TOOLTOOL-PACKAGE"
+HAWK_VER = 1
+PY3 = sys.version_info[0] == 3
+
+if PY3:
+ six_binary_type = bytes
+ unicode = (
+ str # Silence `pyflakes` from reporting `undefined name 'unicode'` in Python 3.
+ )
+ import urllib.request as urllib2
+ from http.client import HTTPConnection, HTTPSConnection
+ from urllib.error import HTTPError, URLError
+ from urllib.parse import urljoin, urlparse
+ from urllib.request import Request
+else:
+ six_binary_type = str
+ import urllib2
+ from httplib import HTTPConnection, HTTPSConnection
+ from urllib2 import HTTPError, Request, URLError
+ from urlparse import urljoin, urlparse
+
+
+log = logging.getLogger(__name__)
+
+
+# Vendored code from `redo` module
def retrier(attempts=5, sleeptime=10, max_sleeptime=300, sleepscale=1.5, jitter=1):
    """
    This function originates from redo 2.0.3 https://github.com/mozilla-releng/redo
    A generator function that sleeps between retries, handles exponential
    backoff and jitter. The action you are retrying is meant to run after
    retrier yields.

    Yields the planned sleep time once per attempt and sleeps after every
    yield except the last one.  Raises if *jitter* exceeds *sleeptime* (that
    could produce negative sleeps).
    """
    # BUG FIX: this module does `from random import random`, so the original
    # `random.uniform(...)` call raised AttributeError whenever jitter was
    # non-zero.  Import the real uniform() locally instead.
    from random import uniform

    jitter = jitter or 0  # py35 barfs on the next line if jitter is None
    if jitter > sleeptime:
        # To prevent negative sleep times
        raise Exception(
            "jitter ({}) must be less than sleep time ({})".format(jitter, sleeptime)
        )

    sleeptime_real = sleeptime
    for _ in range(attempts):
        log.debug("attempt %i/%i", _ + 1, attempts)

        yield sleeptime_real

        if jitter:
            sleeptime_real = sleeptime + uniform(-jitter, jitter)
            # our jitter should scale along with the sleeptime
            jitter = jitter * sleepscale
        else:
            sleeptime_real = sleeptime

        sleeptime *= sleepscale

        # Cap the actual sleep so backoff never exceeds max_sleeptime.
        if sleeptime_real > max_sleeptime:
            sleeptime_real = max_sleeptime

        # Don't need to sleep the last time
        if _ < attempts - 1:
            log.debug(
                "sleeping for %.2fs (attempt %i/%i)", sleeptime_real, _ + 1, attempts
            )
            time.sleep(sleeptime_real)
+
+
def retry(
    action,
    attempts=5,
    sleeptime=60,
    max_sleeptime=5 * 60,
    sleepscale=1.5,
    jitter=1,
    retry_exceptions=(Exception,),
    cleanup=None,
    args=(),
    kwargs={},
    log_args=True,
):
    """
    This function originates from redo 2.0.3 https://github.com/mozilla-releng/redo
    Calls an action function until it succeeds, or we give up.

    *action* is invoked as ``action(*args, **kwargs)``; its return value is
    returned on the first success.  Exceptions listed in *retry_exceptions*
    trigger a retry (after calling *cleanup*, if provided); the exception from
    the final attempt is re-raised.  Sleeping/backoff between attempts is
    delegated to ``retrier``.

    NOTE(review): ``kwargs={}`` is a mutable default argument; it is never
    mutated here, and is kept as-is to match upstream redo.
    """
    assert callable(action)
    assert not cleanup or callable(cleanup)

    # Use the function's name in log messages when it has one.
    action_name = getattr(action, "__name__", action)
    if log_args and (args or kwargs):
        log_attempt_args = (
            "retry: calling %s with args: %s," " kwargs: %s, attempt #%d",
            action_name,
            args,
            kwargs,
        )
    else:
        log_attempt_args = ("retry: calling %s, attempt #%d", action_name)

    if max_sleeptime < sleeptime:
        log.debug("max_sleeptime %d less than sleeptime %d", max_sleeptime, sleeptime)

    n = 1
    # Each iteration of retrier() is one attempt; retrier handles the sleeps.
    for _ in retrier(
        attempts=attempts,
        sleeptime=sleeptime,
        max_sleeptime=max_sleeptime,
        sleepscale=sleepscale,
        jitter=jitter,
    ):
        try:
            # First attempt logs at debug, subsequent (retry) attempts at info.
            logfn = log.info if n != 1 else log.debug
            logfn_args = log_attempt_args + (n,)
            logfn(*logfn_args)
            return action(*args, **kwargs)
        except retry_exceptions:
            log.debug("retry: Caught exception: ", exc_info=True)
            if cleanup:
                cleanup()
            if n == attempts:
                # Out of attempts: propagate the last exception to the caller.
                log.info("retry: Giving up on %s", action_name)
                raise
            continue
        finally:
            # Count the attempt whether it succeeded or failed.
            n += 1
+
+
def retriable(*retry_args, **retry_kwargs):
    """
    This function originates from redo 2.0.3 https://github.com/mozilla-releng/redo
    A decorator factory for retry(). Wrap your function in @retriable(...) to
    give it retry powers!
    """

    def decorate(func):
        @wraps(func)
        def wrapper(*call_args, **call_kwargs):
            # Forward the call through retry() with the decoration-time options.
            return retry(
                func, args=call_args, kwargs=call_kwargs, *retry_args, **retry_kwargs
            )

        return wrapper

    return decorate
+
+
+# end of vendored code from redo module
+
+
def request_has_data(req):
    """Return True if *req* carries a request body (py2/py3 urllib Request)."""
    return req.data is not None if PY3 else req.has_data()
+
+
def get_hexdigest(val):
    """Return the SHA-512 hex digest of *val* (bytes)."""
    digest = hashlib.sha512(val)
    return digest.hexdigest()
+
+
class FileRecordJSONEncoderException(Exception):
    """Error type for FileRecord JSON encoding/decoding failures."""

    pass
+
+
class InvalidManifest(Exception):
    """Error type for tooltool manifests that cannot be used."""

    pass
+
+
class ExceptionWithFilename(Exception):
    """Base class for exceptions that relate to a specific file."""

    def __init__(self, filename):
        super(ExceptionWithFilename, self).__init__()
        # Keep the offending filename around for error reporting.
        self.filename = filename
+
+
class BadFilenameException(ExceptionWithFilename):
    """File-specific error: the filename was not acceptable (see .filename)."""

    pass
+
+
class DigestMismatchException(ExceptionWithFilename):
    """File-specific error: the file's digest did not match (see .filename)."""

    pass
+
+
class MissingFileException(ExceptionWithFilename):
    """File-specific error: an expected file was absent (see .filename)."""

    pass
+
+
class InvalidCredentials(Exception):
    """Raised when Taskcluster credentials are malformed or incomplete."""

    pass
+
+
class BadHeaderValue(Exception):
    """Raised when a header value contains characters that may not be sent."""

    pass
+
+
def parse_url(url):
    """Split *url* into the pieces needed for Hawk request signing.

    Returns a dict with scheme/hostname/port/path/query plus ``resource``
    (the path, with ``?query`` appended when a query string is present).
    The port defaults to 80 for http and 443 for https when the URL omits
    an explicit port.
    """
    parts = urlparse(url)
    result = {
        "scheme": parts.scheme,
        "hostname": parts.hostname,
        "port": parts.port,
        "path": parts.path,
        "resource": parts.path,
        "query": parts.query,
    }
    if result["query"]:
        result["resource"] = "%s?%s" % (
            result["resource"],  # pragma: no cover
            result["query"],
        )

    if parts.port is None:
        default_ports = {"http": 80, "https": 443}
        if parts.scheme in default_ports:
            result["port"] = default_ports[parts.scheme]
    return result
+
+
def utc_now(offset_in_seconds=0.0):
    """Current UTC time as an integer epoch timestamp, shifted by an offset."""
    now = calendar.timegm(time.gmtime())
    return int(math.floor(now + float(offset_in_seconds)))
+
+
def random_string(length):
    """Return *length* URL-safe random bytes derived from os.urandom."""
    encoded = base64.urlsafe_b64encode(os.urandom(length))
    return encoded[:length]
+
+
def prepare_header_val(val):
    """Decode *val* to text and reject values containing characters that
    are not legal inside an HTTP header attribute."""
    if isinstance(val, six_binary_type):
        val = val.decode("utf-8")

    if REQUEST_HEADER_ATTRIBUTE_CHARS.match(val):
        return val

    raise BadHeaderValue(
        "header value value={val} contained an illegal character".format(val=repr(val))
    )
+
+
def parse_content_type(content_type):
    """Return the bare mime type, lowercased with parameters stripped, or
    "" for a falsy *content_type*."""
    if not content_type:
        return ""
    return content_type.split(";")[0].strip().lower()
+
+
def calculate_payload_hash(algorithm, payload, content_type):
    """Compute the base64 Hawk payload hash for *payload*."""
    pieces = [
        "hawk." + str(HAWK_VER) + ".payload\n",
        parse_content_type(content_type) + "\n",
        payload or "",
        "\n",
    ]
    # Everything must be bytes before it is fed to the hash.
    parts = [
        piece if isinstance(piece, six_binary_type) else piece.encode("utf8")
        for piece in pieces
    ]

    hasher = hashlib.new(algorithm)
    for part in parts:
        hasher.update(part)

    log.debug(
        "calculating payload hash from:\n{parts}".format(parts=pprint.pformat(parts))
    )

    return base64.b64encode(hasher.digest())
+
+
def validate_taskcluster_credentials(credentials):
    """Check that *credentials* is dict-like and holds the clientId and
    accessToken keys needed to build a taskcluster Hawk header.

    Raises InvalidCredentials otherwise; returns None on success.
    """
    if not hasattr(credentials, "__getitem__"):
        raise InvalidCredentials("credentials must be a dict-like object")
    try:
        credentials["clientId"]
        credentials["accessToken"]
    except KeyError:
        etype, val, tb = sys.exc_info()
        msg = "{etype}: {val}".format(etype=etype, val=val)
        raise InvalidCredentials(msg)
+
+
def normalize_header_attr(val):
    """Return *val* as text, decoding UTF-8 bytes when necessary."""
    if isinstance(val, six_binary_type):
        return val.decode("utf-8")
    return val
+
+
def normalize_string(
    mac_type,
    timestamp,
    nonce,
    method,
    name,
    host,
    port,
    content_hash,
):
    """Build the newline-separated string that the Hawk MAC signs.

    The two trailing empty entries mirror the Node Hawk library: an empty
    "ext" field plus a trailing newline.
    """
    fields = [
        "hawk." + str(HAWK_VER) + "." + mac_type,
        timestamp,
        nonce,
        method or "",
        name or "",
        host,
        port,
        content_hash or "",
        "",  # ext, unused here
        "",  # yields the trailing new line
    ]
    return "\n".join(normalize_header_attr(field) for field in fields)
+
+
def calculate_mac(
    mac_type,
    access_token,
    algorithm,
    timestamp,
    nonce,
    method,
    name,
    host,
    port,
    content_hash,
):
    """Compute the base64 HMAC over the normalized Hawk request string."""
    normalized = normalize_string(
        mac_type, timestamp, nonce, method, name, host, port, content_hash
    )
    log.debug("normalized resource for mac calc: {norm}".format(norm=normalized))

    # hmac needs bytes for both the key and the message.
    if not isinstance(normalized, six_binary_type):
        normalized = normalized.encode("utf8")
    if not isinstance(access_token, six_binary_type):
        access_token = access_token.encode("ascii")

    digestmod = getattr(hashlib, algorithm)
    mac = hmac.new(access_token, normalized, digestmod)
    return base64.b64encode(mac.digest())
+
+
def make_taskcluster_header(credentials, req):
    """Build the value of a Hawk ``Authorization`` header for *req*, signed
    with the taskcluster *credentials* dict (``clientId``/``accessToken``)."""
    validate_taskcluster_credentials(credentials)

    url = req.get_full_url()
    method = req.get_method()
    algorithm = "sha256"
    timestamp = str(utc_now())
    nonce = random_string(6)
    url_parts = parse_url(url)

    # Only requests that carry a body get a payload hash.
    content_hash = None
    if request_has_data(req):
        if PY3:
            data = req.data
        else:
            data = req.get_data()
        content_hash = calculate_payload_hash(  # pragma: no cover
            algorithm,
            data,
            # maybe we should detect this from req.headers but we anyway expect json
            content_type="application/json",
        )

    mac = calculate_mac(
        "header",
        credentials["accessToken"],
        algorithm,
        timestamp,
        nonce,
        method,
        url_parts["resource"],
        url_parts["hostname"],
        str(url_parts["port"]),
        content_hash,
    )

    # Assemble the header piece by piece; every interpolated value is
    # validated by prepare_header_val.
    header = 'Hawk mac="{}"'.format(prepare_header_val(mac))

    if content_hash:  # pragma: no cover
        header = '{}, hash="{}"'.format(header, prepare_header_val(content_hash))

    header = '{header}, id="{id}", ts="{ts}", nonce="{nonce}"'.format(
        header=header,
        id=prepare_header_val(credentials["clientId"]),
        ts=prepare_header_val(timestamp),
        nonce=prepare_header_val(nonce),
    )

    log.debug("Hawk header for URL={} method={}: {}".format(url, method, header))

    return header
+
+
class FileRecord(object):
    """One manifest entry: a bare filename plus the size, digest, and
    options (unpack/version/visibility) used to validate and handle it."""

    def __init__(
        self,
        filename,
        size,
        digest,
        algorithm,
        unpack=False,
        version=None,
        visibility=None,
    ):
        object.__init__(self)
        # Manifest entries must be bare filenames; any path component is
        # rejected up front.
        if "/" in filename or "\\" in filename:
            log.error(
                "The filename provided contains path information and is, therefore, invalid."
            )
            raise BadFilenameException(filename=filename)
        self.filename = filename
        self.size = size
        self.digest = digest
        self.algorithm = algorithm
        self.unpack = unpack
        self.version = version
        self.visibility = visibility

    def __eq__(self, other):
        if self is other:
            return True
        # Note: 'unpack' deliberately does not participate in equality.
        return (
            self.filename == other.filename
            and self.size == other.size
            and self.digest == other.digest
            and self.algorithm == other.algorithm
            and self.version == other.version
            and self.visibility == other.visibility
        )

    def __ne__(self, other):
        return not self.__eq__(other)

    def __str__(self):
        return repr(self)

    def __repr__(self):
        return (
            "%s.%s(filename='%s', size=%s, digest='%s', algorithm='%s', visibility=%r)"
            % (
                __name__,
                self.__class__.__name__,
                self.filename,
                self.size,
                self.digest,
                self.algorithm,
                self.visibility,
            )
        )

    def present(self):
        """Return True if the file exists locally. Doesn't check validity."""
        return os.path.exists(self.filename)

    def validate_size(self):
        """Return True if the local file's size matches this record.

        Raises MissingFileException if the file is absent.
        """
        if self.present():
            return self.size == os.path.getsize(self.filename)
        log.debug("trying to validate size on a missing file, %s", self.filename)
        raise MissingFileException(filename=self.filename)

    def validate_digest(self):
        """Return True if the local file's digest matches this record.

        Raises MissingFileException if the file is absent.
        """
        if self.present():
            with open(self.filename, "rb") as f:
                return self.digest == digest_file(f, self.algorithm)
        # Fixed: the filename used to be embedded inside the format string
        # ("... %s', self.filename") instead of passed as a logging argument.
        log.debug("trying to validate digest on a missing file, %s", self.filename)
        raise MissingFileException(filename=self.filename)

    def validate(self):
        """Return True when both size (if known) and digest check out."""
        if self.size is None or self.validate_size():
            if self.validate_digest():
                return True
        return False

    def describe(self):
        """One-line human-readable status of this record's local file."""
        if self.present() and self.validate():
            return "'%s' is present and valid" % self.filename
        elif self.present():
            return "'%s' is present and invalid" % self.filename
        else:
            return "'%s' is absent" % self.filename
+
+
def create_file_record(filename, algorithm):
    """Build a FileRecord (size + digest) describing the local *filename*.

    The record stores only the basename of *filename*.
    """
    stored_filename = os.path.split(filename)[1]
    # Use a context manager so the file handle is released even if
    # digesting raises (the original leaked the handle on error).
    with open(filename, "rb") as fo:
        fr = FileRecord(
            stored_filename,
            os.path.getsize(filename),
            digest_file(fo, algorithm),
            algorithm,
        )
    return fr
+
+
class FileRecordJSONEncoder(json.JSONEncoder):
    """JSON encoder that serializes FileRecord objects (and lists of them)
    as plain dicts."""

    def encode_file_record(self, obj):
        """Convert one FileRecord to a dict, raising
        FileRecordJSONEncoderException for any other type."""
        if not isinstance(obj, FileRecord):
            err = (
                "FileRecordJSONEncoder is only for FileRecord and lists of FileRecords, "
                "not %s" % obj.__class__.__name__
            )
            # log.warn is deprecated; warning() is the documented spelling.
            log.warning(err)
            raise FileRecordJSONEncoderException(err)
        rv = {
            "filename": obj.filename,
            "size": obj.size,
            "algorithm": obj.algorithm,
            "digest": obj.digest,
        }
        # Optional attributes are only emitted when set, keeping manifests
        # minimal and stable.
        if obj.unpack:
            rv["unpack"] = True
        if obj.version:
            rv["version"] = obj.version
        if obj.visibility is not None:
            rv["visibility"] = obj.visibility
        return rv

    def default(self, f):
        if isinstance(f, list):
            return [self.encode_file_record(i) for i in f]
        return self.encode_file_record(f)
+
+
class FileRecordJSONDecoder(json.JSONDecoder):

    """I help the json module materialize a FileRecord from
    a JSON file. I understand FileRecords and lists of
    FileRecords. I ignore things that I don't expect for now"""

    # TODO: make this more explicit in what it's looking for
    # and error out on unexpected things

    # Keys a dict must carry to be materialized as a FileRecord.
    REQUIRED_FIELDS = ("filename", "size", "algorithm", "digest")

    def process_file_records(self, obj):
        """Recursively convert decoded JSON into FileRecords.

        Lists yield lists containing only the materialized FileRecords
        (other items are dropped, as before); dicts with all required
        fields become FileRecords; anything else passes through unchanged.
        """
        if isinstance(obj, list):
            return [
                record
                for record in map(self.process_file_records, obj)
                if isinstance(record, FileRecord)
            ]
        if isinstance(obj, dict) and all(
            field in obj for field in self.REQUIRED_FIELDS
        ):
            rv = FileRecord(
                obj["filename"],
                obj["size"],
                obj["digest"],
                obj["algorithm"],
                obj.get("unpack", False),
                obj.get("version", None),
                obj.get("visibility", None),
            )
            log.debug("materialized %s" % rv)
            return rv
        return obj

    def decode(self, s):
        decoded = json.JSONDecoder.decode(self, s)
        return self.process_file_records(decoded)
+
+
class Manifest(object):
    """An ordered collection of FileRecords with JSON (de)serialization and
    whole-manifest validation helpers."""

    valid_formats = ("json",)

    def __init__(self, file_records=None):
        self.file_records = file_records or []

    def __eq__(self, other):
        if self is other:
            return True
        if len(self.file_records) != len(other.file_records):
            log.debug("Manifests differ in number of files")
            return False
        # Compare order-insensitively: sort both sides by filename first.
        ours = sorted((rec.filename, rec) for rec in self.file_records)
        theirs = sorted((rec.filename, rec) for rec in other.file_records)
        return ours == theirs

    def __ne__(self, other):
        return not self.__eq__(other)

    def __deepcopy__(self, memo):
        # This is required for a deep copy
        return Manifest(self.file_records[:])

    def __copy__(self):
        return Manifest(self.file_records)

    def copy(self):
        """Return a Manifest with its own (shallow) copy of the record list."""
        return Manifest(self.file_records[:])

    def present(self):
        """True when every record's file exists locally."""
        return all(rec.present() for rec in self.file_records)

    def validate_sizes(self):
        """True when every record's file has the recorded size."""
        return all(rec.validate_size() for rec in self.file_records)

    def validate_digests(self):
        """True when every record's file has the recorded digest."""
        return all(rec.validate_digest() for rec in self.file_records)

    def validate(self):
        """True when every record's file is fully valid."""
        return all(rec.validate() for rec in self.file_records)

    def load(self, data_file, fmt="json"):
        """Append the records serialized in the open *data_file*."""
        assert fmt in self.valid_formats
        if fmt == "json":
            try:
                records = json.load(data_file, cls=FileRecordJSONDecoder)
            except ValueError:
                raise InvalidManifest("trying to read invalid manifest file")
            self.file_records.extend(records)

    def loads(self, data_string, fmt="json"):
        """Append the records serialized in *data_string*."""
        assert fmt in self.valid_formats
        if fmt == "json":
            try:
                records = json.loads(data_string, cls=FileRecordJSONDecoder)
            except ValueError:
                raise InvalidManifest("trying to read invalid manifest file")
            self.file_records.extend(records)

    def dump(self, output_file, fmt="json"):
        """Serialize the records into the open *output_file*."""
        assert fmt in self.valid_formats
        if fmt == "json":
            return json.dump(
                self.file_records,
                output_file,
                indent=2,
                separators=(",", ": "),
                cls=FileRecordJSONEncoder,
            )

    def dumps(self, fmt="json"):
        """Return the records serialized as a string."""
        assert fmt in self.valid_formats
        if fmt == "json":
            return json.dumps(
                self.file_records,
                indent=2,
                separators=(",", ": "),
                cls=FileRecordJSONEncoder,
            )
+
+
def digest_file(f, a):
    """Return the hex digest of file-like object *f* computed with hashlib
    algorithm *a*, reading in 10 KiB chunks."""
    h = hashlib.new(a)
    chunk_size = 1024 * 10
    while True:
        data = f.read(chunk_size)
        if not data:
            break
        h.update(data)
    name = repr(f.name) if hasattr(f, "name") else "a file"
    log.debug("hashed %s with %s to be %s", name, a, h.hexdigest())
    return h.hexdigest()
+
+
def execute(cmd):
    """Execute CMD with the shell, logging its stdout at the info level.

    Returns True when the command exits with status 0.
    """
    # universal_newlines=True makes stdout a text stream on Python 3;
    # without it readline() returns bytes and the str-based
    # line.replace("\n", " ") below raises TypeError.
    process = Popen(cmd, shell=True, stdout=PIPE, universal_newlines=True)
    while True:
        line = process.stdout.readline()
        if not line:
            break
        log.info(line.replace("\n", " "))
    return process.wait() == 0
+
+
def open_manifest(manifest_file):
    """Load *manifest_file* into a Manifest object.

    Raises InvalidManifest when the file does not exist.
    """
    if not os.path.exists(manifest_file):
        log.debug("tried to load absent file '%s' as manifest" % manifest_file)
        raise InvalidManifest("manifest file '%s' does not exist" % manifest_file)
    manifest = Manifest()
    with open(manifest_file, "r" if PY3 else "rb") as f:
        manifest.load(f)
    log.debug("loaded manifest from file '%s'" % manifest_file)
    return manifest
+
+
def list_manifest(manifest_file):
    """Print one line per manifest entry: present flag, valid flag, name.

    Returns False when the manifest cannot be loaded, True otherwise.
    """
    try:
        manifest = open_manifest(manifest_file)
    except InvalidManifest as e:
        log.error(
            "failed to load manifest file at '%s': %s"
            % (
                manifest_file,
                str(e),
            )
        )
        return False
    for record in manifest.file_records:
        is_present = record.present()
        is_valid = is_present and record.validate()
        print(
            "{}\t{}\t{}".format(
                "P" if is_present else "-",
                "V" if is_valid else "-",
                record.filename,
            )
        )
    return True
+
+
def validate_manifest(manifest_file):
    """I validate that all files in a manifest are present and valid but
    don't fetch or delete them if they aren't"""
    try:
        manifest = open_manifest(manifest_file)
    except InvalidManifest as e:
        log.error(
            "failed to load manifest file at '%s': %s"
            % (
                manifest_file,
                str(e),
            )
        )
        return False
    # A record is bad when its file is absent, or present but invalid.
    # Every record is checked (no short-circuit), matching the original's
    # full pass over the manifest.
    bad_records = [
        f for f in manifest.file_records if not f.present() or not f.validate()
    ]
    return not bad_records
+
+
def add_files(manifest_file, algorithm, filenames, version, visibility, unpack):
    """Digest each of *filenames* and record it in *manifest_file*,
    preserving records already present there."""
    # returns True if all files successfully added, False if not
    # and doesn't catch library Exceptions. If any files are already
    # tracked in the manifest, return will be False because they weren't
    # added
    all_files_added = True
    # Create a old_manifest object to add to
    if os.path.exists(manifest_file):
        old_manifest = open_manifest(manifest_file)
    else:
        old_manifest = Manifest()
        log.debug("creating a new manifest file")
    new_manifest = Manifest()  # use a different manifest for the output
    for filename in filenames:
        log.debug("adding %s" % filename)
        # NOTE(review): path and name are assigned but never used below;
        # create_file_record derives the basename itself.
        path, name = os.path.split(filename)
        new_fr = create_file_record(filename, algorithm)
        new_fr.version = version
        new_fr.visibility = visibility
        new_fr.unpack = unpack
        log.debug("appending a new file record to manifest file")
        add = True
        # Reject exact duplicates and name clashes with different content.
        for fr in old_manifest.file_records:
            log.debug(
                "manifest file has '%s'"
                % "', ".join([x.filename for x in old_manifest.file_records])
            )
            if new_fr == fr:
                log.info("file already in old_manifest")
                add = False
            elif filename == fr.filename:
                log.error(
                    "manifest already contains a different file named %s" % filename
                )
                add = False
        if add:
            new_manifest.file_records.append(new_fr)
            log.debug("added '%s' to manifest" % filename)
        else:
            all_files_added = False
    # copy any files in the old manifest that aren't in the new one
    new_filenames = set(fr.filename for fr in new_manifest.file_records)
    for old_fr in old_manifest.file_records:
        if old_fr.filename not in new_filenames:
            new_manifest.file_records.append(old_fr)
    # Text mode on PY3, binary on PY2, to match what json.dump expects.
    if PY3:
        with open(manifest_file, mode="w") as output:
            new_manifest.dump(output, fmt="json")
    else:
        with open(manifest_file, mode="wb") as output:
            new_manifest.dump(output, fmt="json")
    return all_files_added
+
+
def touch(f):
    """Used to modify mtime in cached files;
    mtime is used by the purge command"""
    try:
        os.utime(f, None)
    except OSError:
        # logging's warn() is deprecated; warning() is the documented
        # spelling (and the one used elsewhere in this file).
        log.warning("impossible to update utime of file %s" % f)
+
+
def _urlopen(req):
    """Open *req* with urllib, supplying an explicit SSL context on
    Windows; elsewhere the default context (``None``) is used."""
    ssl_context = None
    if os.name == "nt":
        # On Windows build a context from certifi's CA bundle --
        # presumably because the default store is not usable there;
        # TODO(review): confirm the rationale.
        ssl_context = ssl.create_default_context(cafile=certifi.where())
    return urllib2.urlopen(req, context=ssl_context)
+
+
@contextmanager
@retriable(sleeptime=2)
def request(url, auth_file=None):
    """Context manager yielding an opened, authorized urllib response for
    *url*; the response is closed on exit.

    NOTE(review): @retriable wraps the generator function, and merely
    creating a generator cannot raise, so the retry logic likely never
    triggers for errors raised during iteration -- confirm.
    """
    req = Request(url)
    _authorize(req, auth_file)
    with closing(_urlopen(req)) as f:
        log.debug("opened %s for reading" % url)
        yield f
+
+
def fetch_file(base_urls, file_record, grabchunk=1024 * 4, auth_file=None, region=None):
    """Download *file_record* from the first of *base_urls* that works into
    a temp file in the current working directory.

    Returns the temp file's basename on success, None when every URL
    fails; callers rename the temp file into place after validating it."""
    # A file which is requested to be fetched that exists locally will be
    # overwritten by this function
    fd, temp_path = tempfile.mkstemp(dir=os.getcwd())
    os.close(fd)
    fetched_path = None
    for base_url in base_urls:
        # Generate the URL for the file on the server side
        url = urljoin(base_url, "%s/%s" % (file_record.algorithm, file_record.digest))
        if region is not None:
            url += "?region=" + region

        log.info("Attempting to fetch from '%s'..." % base_url)

        # Well, the file doesn't exist locally. Let's fetch it.
        try:
            with request(url, auth_file) as f, open(temp_path, mode="wb") as out:
                k = True
                size = 0
                while k:
                    # TODO: print statistics as file transfers happen both for info and to stop
                    # buildbot timeouts
                    indata = f.read(grabchunk)
                    out.write(indata)
                    size += len(indata)
                    if len(indata) == 0:
                        k = False
                log.info(
                    "File %s fetched from %s as %s"
                    % (file_record.filename, base_url, temp_path)
                )
                fetched_path = temp_path
                break
        except (URLError, HTTPError, ValueError):
            # Network/HTTP failure for this mirror; fall through to the
            # next base URL.
            log.info(
                "...failed to fetch '%s' from %s" % (file_record.filename, base_url),
                exc_info=True,
            )
        except IOError:  # pragma: no cover
            log.info(
                "failed to write to temporary file for '%s'" % file_record.filename,
                exc_info=True,
            )

    # cleanup temp file in case of issues
    if fetched_path:
        return os.path.split(fetched_path)[1]
    else:
        try:
            os.remove(temp_path)
        except OSError:  # pragma: no cover
            pass
        return None
+
+
def clean_path(dirname):
    """Delete the directory tree *dirname* if it exists.

    Helper for unpack_file().
    """
    if not os.path.exists(dirname):
        return
    log.info("rm tree: %s" % dirname)
    shutil.rmtree(dirname)
+
+
+CHECKSUM_SUFFIX = ".checksum"
+
+
def validate_tar_member(member, path):
    """Reject tar members that would escape *path* or carry risky bits.

    Guards against path traversal through the member name, symlink targets
    escaping the extraction root, and setuid/setgid file modes. Raises
    Exception on any violation; returns None for a safe member.
    """

    def _is_within_directory(directory, target):
        real_directory = os.path.realpath(directory)
        real_target = os.path.realpath(target)
        # os.path.commonprefix compares character by character, so
        # "/base" would wrongly match "/baseX/..." and let a crafted
        # member escape; commonpath compares whole path components.
        try:
            return os.path.commonpath([real_directory, real_target]) == real_directory
        except ValueError:
            # Mixed absolute/relative paths or different drives cannot nest.
            return False

    member_path = os.path.join(path, member.name)
    if not _is_within_directory(path, member_path):
        raise Exception("Attempted path traversal in tar file: " + member.name)
    if member.issym():
        link_path = os.path.join(os.path.dirname(member_path), member.linkname)
        if not _is_within_directory(path, link_path):
            raise Exception("Attempted link path traversal in tar file: " + member.name)
    if member.mode & (stat.S_ISUID | stat.S_ISGID):
        raise Exception("Attempted setuid or setgid in tar file: " + member.name)
+
+
def safe_extract(tar, path=".", *, numeric_owner=False):
    """Extract *tar* into *path*, validating every member first via
    validate_tar_member()."""

    def _checked_members(archive, dest):
        for entry in archive:
            validate_tar_member(entry, dest)
            yield entry

    tar.extractall(
        path, members=_checked_members(tar, path), numeric_owner=numeric_owner
    )
+
+
def unpack_file(filename):
    """Untar `filename`, assuming it is uncompressed or compressed with bzip2,
    xz, gzip, zst, or unzip a zip file. The file is assumed to contain a single
    directory with a name matching the base of the given filename.
    Xz support is handled by shelling out to 'tar'."""
    if os.path.isfile(filename) and tarfile.is_tarfile(filename):
        # Plain/bz2/gz tarball: the tarfile module handles it directly.
        tar_file, zip_ext = os.path.splitext(filename)
        base_file, tar_ext = os.path.splitext(tar_file)
        clean_path(base_file)
        log.info('untarring "%s"' % filename)
        with tarfile.open(filename) as tar:
            safe_extract(tar)
    elif os.path.isfile(filename) and filename.endswith(".tar.xz"):
        base_file = filename.replace(".tar.xz", "")
        clean_path(base_file)
        log.info('untarring "%s"' % filename)
        # Not using tar -Jxf because it fails on Windows for some reason.
        process = Popen(["xz", "-d", "-c", filename], stdout=PIPE)
        stdout, stderr = process.communicate()
        if process.returncode != 0:
            return False
        # Decompressed stream is buffered in memory, then untarred.
        fileobj = BytesIO()
        fileobj.write(stdout)
        fileobj.seek(0)
        with tarfile.open(fileobj=fileobj, mode="r|") as tar:
            safe_extract(tar)
    elif os.path.isfile(filename) and filename.endswith(".tar.zst"):
        # Imported lazily so zstandard is only required for .tar.zst files.
        import zstandard

        base_file = filename.replace(".tar.zst", "")
        clean_path(base_file)
        log.info('untarring "%s"' % filename)
        dctx = zstandard.ZstdDecompressor()
        with dctx.stream_reader(open(filename, "rb")) as fileobj:
            with tarfile.open(fileobj=fileobj, mode="r|") as tar:
                safe_extract(tar)
    elif os.path.isfile(filename) and zipfile.is_zipfile(filename):
        base_file = filename.replace(".zip", "")
        clean_path(base_file)
        log.info('unzipping "%s"' % filename)
        z = zipfile.ZipFile(filename)
        z.extractall()
        z.close()
    else:
        log.error("Unknown archive extension for filename '%s'" % filename)
        return False
    return True
+
+
def fetch_files(
    manifest_file,
    base_urls,
    filenames=[],
    cache_folder=None,
    auth_file=None,
    region=None,
):
    """Fetch the files listed in *manifest_file* (from the local cache or
    *base_urls*), validate them, and unpack those flagged for unpacking.

    Returns True only when every requested file ends up present and valid.
    NOTE(review): the mutable default ``filenames=[]`` is only read here,
    never mutated, so it is harmless in practice.
    """
    # Lets load the manifest file
    try:
        manifest = open_manifest(manifest_file)
    except InvalidManifest as e:
        log.error(
            "failed to load manifest file at '%s': %s"
            % (
                manifest_file,
                str(e),
            )
        )
        return False

    # we want to track files already in current working directory AND valid
    # we will not need to fetch these
    present_files = []

    # We want to track files that fail to be fetched as well as
    # files that are fetched
    failed_files = []
    fetched_files = []

    # Files that we want to unpack.
    unpack_files = []

    # Lets go through the manifest and fetch the files that we want
    for f in manifest.file_records:
        # case 1: files are already present
        if f.present():
            if f.validate():
                present_files.append(f.filename)
                if f.unpack:
                    unpack_files.append(f.filename)
            else:
                # we have an invalid file here, better to cleanup!
                # this invalid file needs to be replaced with a good one
                # from the local cash or fetched from a tooltool server
                log.info(
                    "File %s is present locally but it is invalid, so I will remove it "
                    "and try to fetch it" % f.filename
                )
                os.remove(os.path.join(os.getcwd(), f.filename))

        # check if file is already in cache
        if cache_folder and f.filename not in present_files:
            try:
                # Cache entries are stored under their digest, not filename.
                shutil.copy(
                    os.path.join(cache_folder, f.digest),
                    os.path.join(os.getcwd(), f.filename),
                )
                log.info(
                    "File %s retrieved from local cache %s" % (f.filename, cache_folder)
                )
                touch(os.path.join(cache_folder, f.digest))

                filerecord_for_validation = FileRecord(
                    f.filename, f.size, f.digest, f.algorithm
                )
                if filerecord_for_validation.validate():
                    present_files.append(f.filename)
                    if f.unpack:
                        unpack_files.append(f.filename)
                else:
                    # the file copied from the cache is invalid, better to
                    # clean up the cache version itself as well
                    log.warn(
                        "File %s retrieved from cache is invalid! I am deleting it from the "
                        "cache as well" % f.filename
                    )
                    os.remove(os.path.join(os.getcwd(), f.filename))
                    os.remove(os.path.join(cache_folder, f.digest))
            except IOError:
                log.info(
                    "File %s not present in local cache folder %s"
                    % (f.filename, cache_folder)
                )

        # now I will try to fetch all files which are not already present and
        # valid, appending a suffix to avoid race conditions
        temp_file_name = None
        # 'filenames' is the list of filenames to be managed, if this variable
        # is a non empty list it can be used to filter if filename is in
        # present_files, it means that I have it already because it was already
        # either in the working dir or in the cache
        if (
            f.filename in filenames or len(filenames) == 0
        ) and f.filename not in present_files:
            log.debug("fetching %s" % f.filename)
            temp_file_name = fetch_file(
                base_urls, f, auth_file=auth_file, region=region
            )
            if temp_file_name:
                fetched_files.append((f, temp_file_name))
            else:
                failed_files.append(f.filename)
        else:
            log.debug("skipping %s" % f.filename)

    # lets ensure that fetched files match what the manifest specified
    for localfile, temp_file_name in fetched_files:
        # since I downloaded to a temp file, I need to perform all validations on the temp file
        # this is why filerecord_for_validation is created

        filerecord_for_validation = FileRecord(
            temp_file_name, localfile.size, localfile.digest, localfile.algorithm
        )

        if filerecord_for_validation.validate():
            # great!
            # I can rename the temp file
            log.info(
                "File integrity verified, renaming %s to %s"
                % (temp_file_name, localfile.filename)
            )
            os.rename(
                os.path.join(os.getcwd(), temp_file_name),
                os.path.join(os.getcwd(), localfile.filename),
            )

            if localfile.unpack:
                unpack_files.append(localfile.filename)

            # if I am using a cache and a new file has just been retrieved from a
            # remote location, I need to update the cache as well
            if cache_folder:
                log.info("Updating local cache %s..." % cache_folder)
                try:
                    if not os.path.exists(cache_folder):
                        log.info("Creating cache in %s..." % cache_folder)
                        os.makedirs(cache_folder, 0o0700)
                    shutil.copy(
                        os.path.join(os.getcwd(), localfile.filename),
                        os.path.join(cache_folder, localfile.digest),
                    )
                    log.info(
                        "Local cache %s updated with %s"
                        % (cache_folder, localfile.filename)
                    )
                    touch(os.path.join(cache_folder, localfile.digest))
                except (OSError, IOError):
                    log.warning(
                        "Impossible to add file %s to cache folder %s"
                        % (localfile.filename, cache_folder),
                        exc_info=True,
                    )
        else:
            failed_files.append(localfile.filename)
            log.error("'%s'" % filerecord_for_validation.describe())
            os.remove(temp_file_name)

    # Unpack files that need to be unpacked.
    for filename in unpack_files:
        if not unpack_file(filename):
            failed_files.append(filename)

    # If we failed to fetch or validate a file, we need to fail
    if len(failed_files) > 0:
        log.error("The following files failed: '%s'" % "', ".join(failed_files))
        return False
    return True
+
+
def freespace(p):
    """Return the number of bytes free under directory *p*."""
    if sys.platform == "win32":  # pragma: no cover
        # os.statvfs doesn't work on Windows
        import win32file

        secsPerClus, bytesPerSec, nFreeClus, totClus = win32file.GetDiskFreeSpace(p)
        return secsPerClus * bytesPerSec * nFreeClus
    stats = os.statvfs(p)
    return stats.f_frsize * stats.f_bavail
+
+
def purge(folder, gigs):
    """If gigs is non 0, it deletes files in `folder` until `gigs` GB are free,
    starting from older files. If gigs is 0, a full purge will be performed.
    No recursive deletion of files in subfolder is performed."""

    full_purge = bool(gigs == 0)
    # Convert GB to bytes for comparison with freespace().
    gigs *= 1024 * 1024 * 1024

    if not full_purge and freespace(folder) >= gigs:
        log.info("No need to cleanup")
        return

    files = []
    for f in os.listdir(folder):
        p = os.path.join(folder, f)
        # it deletes files in folder without going into subfolders,
        # assuming the cache has a flat structure
        if not os.path.isfile(p):
            continue
        mtime = os.path.getmtime(p)
        files.append((mtime, p))

    # iterate files sorted by mtime
    for _, f in sorted(files):
        log.info("removing %s to free up space" % f)
        try:
            os.remove(f)
        except OSError:
            log.info("Impossible to remove %s" % f, exc_info=True)
        # Stop as soon as enough space has been freed (unless purging all).
        if not full_purge and freespace(folder) >= gigs:
            break
+
+
def _log_api_error(e):
    """Log a RelengAPI error: JSON error responses get their name and
    description logged; anything else gets a full traceback."""
    if hasattr(e, "hdrs") and e.hdrs["content-type"] == "application/json":
        json_resp = json.load(e.fp)
        log.error(
            "%s: %s" % (json_resp["error"]["name"], json_resp["error"]["description"])
        )
    else:
        log.exception("Error making RelengAPI request:")
+
+
def _authorize(req, auth_file):
    """Attach an Authorization header to *req*.

    Credentials come from *auth_file* (a JSON taskcluster
    clientId/accessToken document, or a plain bearer token) or, when no
    file is given, from the TASKCLUSTER_* environment variables. With
    neither available the request is left anonymous.
    """
    is_taskcluster_auth = False

    if not auth_file:
        try:
            taskcluster_env_keys = {
                "clientId": "TASKCLUSTER_CLIENT_ID",
                "accessToken": "TASKCLUSTER_ACCESS_TOKEN",
            }
            auth_content = {k: os.environ[v] for k, v in taskcluster_env_keys.items()}
            is_taskcluster_auth = True
        except KeyError:
            # Neither auth file nor env credentials: anonymous request.
            return
    else:
        with open(auth_file) as f:
            auth_content = f.read().strip()
        try:
            # A parseable JSON body means taskcluster credentials ...
            auth_content = json.loads(auth_content)
            is_taskcluster_auth = True
        except Exception:
            # ... anything else is treated as a raw bearer token.
            pass

    if is_taskcluster_auth:
        taskcluster_header = make_taskcluster_header(auth_content, req)
        log.debug("Using taskcluster credentials in %s" % auth_file)
        req.add_unredirected_header("Authorization", taskcluster_header)
    else:
        log.debug("Using Bearer token in %s" % auth_file)
        req.add_unredirected_header("Authorization", "Bearer %s" % auth_content)
+
+
def _send_batch(base_url, auth_file, batch, region):
    """POST *batch* as JSON to the upload endpoint.

    Returns the decoded 'result' payload, or None on URL/HTTP errors.
    """
    url = urljoin(base_url, "upload")
    if region is not None:
        url += "?region=" + region
    body = json.dumps(batch)
    if PY3:
        # urllib request bodies must be bytes on Python 3.
        body = body.encode("utf-8")
    req = Request(url, body, {"Content-Type": "application/json"})
    _authorize(req, auth_file)
    try:
        resp = _urlopen(req)
    except (URLError, HTTPError) as e:
        _log_api_error(e)
        return None
    return json.load(resp)["result"]
+
+
def _s3_upload(filename, file):
    """PUT *filename* to the pre-signed S3 'put_url' in the *file* dict,
    annotating *file* with 'upload_ok' (and 'upload_exception' on failure).

    urllib2 does not support streaming, so we fall back to good old httplib.
    """
    url = urlparse(file["put_url"])
    cls = HTTPSConnection if url.scheme == "https" else HTTPConnection
    host, port = url.netloc.split(":") if ":" in url.netloc else (url.netloc, 443)
    port = int(port)
    conn = cls(host, port)
    try:
        req_path = "%s?%s" % (url.path, url.query) if url.query else url.path
        with open(filename, "rb") as f:
            # Take the length from the filesystem instead of reading the
            # whole file into memory just to measure it; httplib streams
            # the open file object for us.
            content_length = os.fstat(f.fileno()).st_size
            conn.request(
                "PUT",
                req_path,
                f,
                {
                    "Content-Type": "application/octet-stream",
                    "Content-Length": str(content_length),
                },
            )
        resp = conn.getresponse()
        resp_body = resp.read()
        conn.close()
        if resp.status != 200:
            raise RuntimeError(
                "Non-200 return from AWS: %s %s\n%s"
                % (resp.status, resp.reason, resp_body)
            )
    except Exception:
        file["upload_exception"] = sys.exc_info()
        file["upload_ok"] = False
    else:
        file["upload_ok"] = True
+
+
def _notify_upload_complete(base_url, auth_file, file):
    """Tell the server that *file* (a dict with 'algorithm' and 'digest')
    has finished uploading; on HTTP 409 wait and retry recursively."""
    req = Request(urljoin(base_url, "upload/complete/%(algorithm)s/%(digest)s" % file))
    _authorize(req, auth_file)
    try:
        _urlopen(req)
    except HTTPError as e:
        if e.code != 409:
            _log_api_error(e)
            return
        # 409 indicates that the upload URL hasn't expired yet and we
        # should retry after a delay
        to_wait = int(e.headers.get("X-Retry-After", 60))
        log.warning("Waiting %d seconds for upload URLs to expire" % to_wait)
        time.sleep(to_wait)
        _notify_upload_complete(base_url, auth_file, file)
    except Exception:
        log.exception("While notifying server of upload completion:")
+
+
def upload(manifest, message, base_urls, auth_file, region):
    """Upload every file in *manifest* to the tooltool server.

    Returns True when all uploads succeed, False for a bad manifest,
    and None when the batch request itself fails.
    """
    try:
        manifest = open_manifest(manifest)
    except InvalidManifest:
        # NOTE(review): the '%s' here is never substituted -- the manifest
        # path is missing from the call; confirm and fix upstream.
        log.exception("failed to load manifest file at '%s'")
        return False

    # verify the manifest, since we'll need the files present to upload
    if not manifest.validate():
        log.error("manifest is invalid")
        return False

    if any(fr.visibility is None for fr in manifest.file_records):
        # NOTE(review): this logs an error but does not return, so the
        # upload proceeds anyway -- confirm whether that is intentional.
        log.error("All files in a manifest for upload must have a visibility set")

    # convert the manifest to an upload batch
    batch = {
        "message": message,
        "files": {},
    }
    for fr in manifest.file_records:
        batch["files"][fr.filename] = {
            "size": fr.size,
            "digest": fr.digest,
            "algorithm": fr.algorithm,
            "visibility": fr.visibility,
        }

    # make the upload request
    resp = _send_batch(base_urls[0], auth_file, batch, region)
    if not resp:
        return None
    files = resp["files"]

    # Upload the files, each in a thread. This allows us to start all of the
    # uploads before any of the URLs expire.
    threads = {}
    for filename, file in files.items():
        if "put_url" in file:
            log.info("%s: starting upload" % (filename,))
            thd = threading.Thread(target=_s3_upload, args=(filename, file))
            thd.daemon = 1
            thd.start()
            threads[filename] = thd
        else:
            log.info("%s: already exists on server" % (filename,))

    # re-join all of those threads as they exit
    success = True
    while threads:
        for filename, thread in list(threads.items()):
            if not thread.is_alive():
                # _s3_upload has annotated file with result information
                file = files[filename]
                thread.join()
                if file["upload_ok"]:
                    log.info("%s: uploaded" % filename)
                else:
                    log.error(
                        "%s: failed" % filename, exc_info=file["upload_exception"]
                    )
                    success = False
                del threads[filename]

    # notify the server that the uploads are completed. If the notification
    # fails, we don't consider that an error (the server will notice
    # eventually)
    for filename, file in files.items():
        if "put_url" in file and file["upload_ok"]:
            log.info("notifying server of upload completion for %s" % (filename,))
            _notify_upload_complete(base_urls[0], auth_file, file)

    return success
+
+
def send_operation_on_file(data, base_urls, digest, auth_file):
    """PATCH the server-side file record for *digest* with the JSON-encoded
    list of operations in *data*.

    Returns True on success, False when the request fails.
    """
    url = base_urls[0]
    url = urljoin(url, "file/sha512/" + digest)

    data = json.dumps(data)
    # urllib request bodies must be bytes on Python 3 (consistent with
    # _send_batch, which already encodes).
    if PY3:
        data = data.encode("utf-8")

    req = Request(url, data, {"Content-Type": "application/json"})
    # urllib has no native PATCH support; override the method.
    req.get_method = lambda: "PATCH"

    _authorize(req, auth_file)

    try:
        _urlopen(req)
    except (URLError, HTTPError) as e:
        _log_api_error(e)
        return False
    return True
+
+
def change_visibility(base_urls, digest, visibility, auth_file):
    """Set the visibility level of the file identified by *digest*."""
    operation = {"op": "set_visibility", "visibility": visibility}
    return send_operation_on_file([operation], base_urls, digest, auth_file)
+
+
def delete_instances(base_urls, digest, auth_file):
    """Ask the server to delete all stored instances of the file identified
    by *digest*."""
    operation = {"op": "delete_instances"}
    return send_operation_on_file([operation], base_urls, digest, auth_file)
+
+
def process_command(options, args):
    """Dispatch a parsed command line to the matching action.

    ``args[0]`` is the command name and the remaining elements are the
    command's positional arguments; *options* is the option dictionary built
    by main().  Returns the action's success value (truthy on success),
    which main() turns into the process exit code.
    """
    cmd = args[0]
    cmd_args = args[1:]
    log.debug("processing '%s' command with args '%s'" % (cmd, '", "'.join(cmd_args)))
    log.debug("using options: %s" % options)

    if cmd == "list":
        return list_manifest(options["manifest"])
    elif cmd == "validate":
        return validate_manifest(options["manifest"])
    elif cmd == "add":
        return add_files(
            options["manifest"],
            options["algorithm"],
            cmd_args,
            options["version"],
            options["visibility"],
            options["unpack"],
        )
    elif cmd == "purge":
        if options["cache_folder"]:
            purge(folder=options["cache_folder"], gigs=options["size"])
            # Fix: without an explicit return, a completed purge fell
            # through and returned None, making a *successful* purge exit
            # with a non-zero status.
            return True
        else:
            log.critical("please specify the cache folder to be purged")
            return False
    elif cmd == "fetch":
        return fetch_files(
            options["manifest"],
            options["base_url"],
            cmd_args,
            cache_folder=options["cache_folder"],
            auth_file=options.get("auth_file"),
            region=options.get("region"),
        )
    elif cmd == "upload":
        if not options.get("message"):
            log.critical("upload command requires a message")
            return False
        return upload(
            options.get("manifest"),
            options.get("message"),
            options.get("base_url"),
            options.get("auth_file"),
            options.get("region"),
        )
    elif cmd == "change-visibility":
        if not options.get("digest"):
            log.critical("change-visibility command requires a digest option")
            return False
        if not options.get("visibility"):
            log.critical("change-visibility command requires a visibility option")
            return False
        return change_visibility(
            options.get("base_url"),
            options.get("digest"),
            options.get("visibility"),
            options.get("auth_file"),
        )
    elif cmd == "delete":
        if not options.get("digest"):
            # Fix: this previously logged "change-visibility command
            # requires a digest option" (copy/paste error).
            log.critical("delete command requires a digest option")
            return False
        return delete_instances(
            options.get("base_url"),
            options.get("digest"),
            options.get("auth_file"),
        )
    else:
        log.critical('command "%s" is not implemented' % cmd)
        return False
+
+
def main(argv, _skip_logging=False):
    """Command-line entry point: parse options, configure logging, and
    dispatch to process_command().  Returns the process exit code
    (0 on success, 1 on failure)."""
    parser = optparse.OptionParser()
    parser.add_option("-q", "--quiet", default=logging.INFO, dest="loglevel",
                      action="store_const", const=logging.ERROR)
    parser.add_option("-v", "--verbose", dest="loglevel",
                      action="store_const", const=logging.DEBUG)
    parser.add_option("-m", "--manifest", default=DEFAULT_MANIFEST_NAME,
                      dest="manifest", action="store",
                      help="specify the manifest file to be operated on")
    parser.add_option("-d", "--algorithm", default="sha512", dest="algorithm",
                      action="store",
                      help="hashing algorithm to use (only sha512 is allowed)")
    parser.add_option("--digest", default=None, dest="digest", action="store",
                      help="digest hash to change visibility for")
    parser.add_option("--visibility", default=None, dest="visibility",
                      choices=["internal", "public"],
                      help='Visibility level of this file; "internal" is for '
                      "files that cannot be distributed out of the company "
                      'but not for secrets; "public" files are available to '
                      "anyone without restriction")
    parser.add_option("--unpack", default=False, dest="unpack",
                      action="store_true",
                      help="Request unpacking this file after fetch."
                      " This is helpful with tarballs.")
    parser.add_option("--version", default=None, dest="version", action="store",
                      help="Version string for this file. This annotates the "
                      "manifest entry with a version string to help "
                      "identify the contents.")
    parser.add_option("-o", "--overwrite", default=False, dest="overwrite",
                      action="store_true",
                      help="UNUSED; present for backward compatibility")
    parser.add_option("--url", dest="base_url", action="append",
                      help="RelengAPI URL ending with /tooltool/; default "
                      "is appropriate for Mozilla")
    parser.add_option("-c", "--cache-folder", dest="cache_folder",
                      help="Local cache folder")
    parser.add_option("-s", "--size", help="free space required (in GB)",
                      dest="size", type="float", default=0.0)
    parser.add_option("-r", "--region",
                      help="Preferred AWS region for upload or fetch; "
                      "example: --region=us-west-2")
    parser.add_option("--message",
                      help='The "commit message" for an upload; format with a bug number '
                      "and brief comment",
                      dest="message")
    parser.add_option("--authentication-file",
                      help="Use the RelengAPI token found in the given file to "
                      "authenticate to the RelengAPI server.",
                      dest="auth_file")

    opts, args = parser.parse_args(argv[1:])

    if not opts.base_url:
        # No --url given: derive the default from the environment,
        # preferring the taskcluster proxy when one is configured.
        tooltool_host = os.environ.get("TOOLTOOL_HOST", "tooltool.mozilla-releng.net")
        taskcluster_proxy_url = os.environ.get("TASKCLUSTER_PROXY_URL")
        if taskcluster_proxy_url:
            opts.base_url = ["{}/{}".format(taskcluster_proxy_url, tooltool_host)]
        else:
            opts.base_url = ["https://{}".format(tooltool_host)]

    # ensure all URLs have a trailing slash
    opts.base_url = [u if u.endswith("/") else u + "/" for u in opts.base_url]

    # expand ~ in --authentication-file
    if opts.auth_file:
        opts.auth_file = os.path.expanduser(opts.auth_file)

    # Dictionaries are easier to work with
    options = vars(opts)

    log.setLevel(options["loglevel"])

    # Set up logging, for now just to the console
    if not _skip_logging:  # pragma: no cover
        handler = logging.StreamHandler()
        handler.setFormatter(logging.Formatter("%(levelname)s - %(message)s"))
        log.addHandler(handler)

    if options["algorithm"] != "sha512":
        parser.error("only --algorithm sha512 is supported")

    if len(args) < 1:
        parser.error("You must specify a command")

    return 0 if process_command(options, args) else 1
+
+
+if __name__ == "__main__": # pragma: no cover
+ sys.exit(main(sys.argv))
diff --git a/python/mozbuild/mozbuild/action/unify_symbols.py b/python/mozbuild/mozbuild/action/unify_symbols.py
new file mode 100644
index 0000000000..4e96a010b2
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/unify_symbols.py
@@ -0,0 +1,49 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+
+from mozpack.copier import FileCopier
+from mozpack.errors import errors
+from mozpack.files import FileFinder
+from mozpack.unify import UnifiedFinder
+
+
class UnifiedSymbolsFinder(UnifiedFinder):
    """UnifiedFinder that expects the two symbol trees to be disjoint: any
    path present in both inputs is reported as an error."""

    def unify_file(self, path, file1, file2):
        # Exactly one side should provide each file; pass that one through.
        if not file2:
            return file1
        if not file1:
            return file2
        errors.error(
            "%s is in both %s and %s"
            % (path, self._finder1.base, self._finder2.base)
        )
+
+
def main():
    """Merge two crashreporter symbols directories into the first one."""
    parser = argparse.ArgumentParser(
        description="Merge two crashreporter symbols directories."
    )
    parser.add_argument("dir1", help="Directory")
    parser.add_argument("dir2", help="Directory to merge")
    args = parser.parse_args()

    finder = UnifiedSymbolsFinder(
        FileFinder(args.dir1), FileFinder(args.dir2)
    )

    copier = FileCopier()
    with errors.accumulate():
        for path, file in finder:
            copier.add(path, file)

    # Write the merged result over dir1, unconditionally.
    copier.copy(args.dir1, skip_if_older=False)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/action/unify_tests.py b/python/mozbuild/mozbuild/action/unify_tests.py
new file mode 100644
index 0000000000..d94ebade1b
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/unify_tests.py
@@ -0,0 +1,65 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import os
+
+import buildconfig
+import mozpack.path as mozpath
+from mozpack.copier import FileCopier
+from mozpack.errors import errors
+from mozpack.files import FileFinder
+from mozpack.unify import UnifiedFinder
+
+
class UnifiedTestFinder(UnifiedFinder):
    """UnifiedFinder with special-casing for two files that are known to
    (harmlessly) differ between the two test archives."""

    def unify_file(self, path, file1, file2):
        unified = super(UnifiedTestFinder, self).unify_file(path, file1, file2)
        name = mozpath.basename(path)
        if name == "mozinfo.json":
            # The mozinfo.json files contain processor info, which differs
            # between both ends.
            # Remove the block when this assert is hit.
            assert not unified
            errors.ignore_errors()
            self._report_difference(path, file1, file2)
            errors.ignore_errors(False)
            return file1
        if name == "dump_syms_mac":
            # At the moment, the dump_syms_mac executable is a x86_64 binary
            # on both ends. We can't create a universal executable from twice
            # the same executable.
            # When this assert hits, remove this block.
            assert file1.open().read() == file2.open().read()
            return file1
        return unified
+
+
def main():
    """Merge two test archive directories into the first, producing
    Universal binaries for the executables and libraries they contain."""
    parser = argparse.ArgumentParser(
        description="Merge two directories, creating Universal binaries for "
        "executables and libraries they contain."
    )
    parser.add_argument("dir1", help="Directory")
    parser.add_argument("dir2", help="Directory to merge")
    args = parser.parse_args()

    # The unification code paths are keyed off a Mac build configuration.
    buildconfig.substs["OS_ARCH"] = "Darwin"
    buildconfig.substs["LIPO"] = os.environ.get("LIPO")

    finder = UnifiedTestFinder(
        FileFinder(args.dir1, find_executables=True, find_dotfiles=True),
        FileFinder(args.dir2, find_executables=True, find_dotfiles=True),
    )

    copier = FileCopier()
    with errors.accumulate():
        for path, file in finder:
            copier.add(path, file)

    copier.copy(args.dir1, skip_if_older=False)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/action/unpack_dmg.py b/python/mozbuild/mozbuild/action/unpack_dmg.py
new file mode 100644
index 0000000000..74e4091549
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/unpack_dmg.py
@@ -0,0 +1,52 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import sys
+from pathlib import Path
+
+from mozpack import dmg
+
+from mozbuild.bootstrap import bootstrap_toolchain
+
+
+def _path_or_none(input: str):
+ if not input:
+ return None
+ return Path(input)
+
+
def main(args):
    """Unpack a DMG into its component files.

    Locates the dmg/hfsplus tools via the bootstrapped toolchain and
    delegates to mozpack.dmg.extract_dmg.  Returns 0 on success.
    """
    parser = argparse.ArgumentParser(
        description="Explode a DMG into its relevant files"
    )

    parser.add_argument("--dsstore", help="DSStore file from")
    parser.add_argument("--background", help="Background file from")
    parser.add_argument("--icon", help="Icon file from")

    parser.add_argument("dmgfile", metavar="DMG_IN", help="DMG File to Unpack")
    parser.add_argument(
        "outpath", metavar="PATH_OUT", help="Location to put unpacked files"
    )

    opts = parser.parse_args(args)

    dmg.extract_dmg(
        dmgfile=Path(opts.dmgfile),
        output=Path(opts.outpath),
        dmg_tool=Path(bootstrap_toolchain("dmg/dmg")),
        hfs_tool=Path(bootstrap_toolchain("dmg/hfsplus")),
        dsstore=_path_or_none(opts.dsstore),
        background=_path_or_none(opts.background),
        icon=_path_or_none(opts.icon),
    )
    return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/util.py b/python/mozbuild/mozbuild/action/util.py
new file mode 100644
index 0000000000..d4102629ff
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/util.py
@@ -0,0 +1,24 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+import sys
+import time
+
+
def log_build_task(f, *args, **kwargs):
    """Run the given function, representing an entire build task, and log the
    BUILDTASK metadata row to stdout.

    The function's return value is passed through; the metadata row is
    printed even when the task raises.
    """
    start = time.monotonic()
    try:
        return f(*args, **kwargs)
    finally:
        record = {
            "argv": sys.argv,
            "start": start,
            "end": time.monotonic(),
            "context": None,
        }
        print("BUILDTASK %s" % json.dumps(record))
diff --git a/python/mozbuild/mozbuild/action/webidl.py b/python/mozbuild/mozbuild/action/webidl.py
new file mode 100644
index 0000000000..81c2c2a507
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/webidl.py
@@ -0,0 +1,19 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import sys
+
+from mozwebidlcodegen import create_build_system_manager
+
+from mozbuild.action.util import log_build_task
+
+
def main(argv):
    """Perform WebIDL code generation required by the build system.

    *argv* is accepted for entry-point compatibility but is not used.
    """
    manager = create_build_system_manager()
    manager.generate_build_files()
+
+
+if __name__ == "__main__":
+ sys.exit(log_build_task(main, sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/wrap_rustc.py b/python/mozbuild/mozbuild/action/wrap_rustc.py
new file mode 100644
index 0000000000..d865438c47
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/wrap_rustc.py
@@ -0,0 +1,79 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import os
+import subprocess
+import sys
+
+
def parse_outputs(crate_output, dep_outputs, pass_l_flag):
    """Derive rustc environment variables and arguments from cargo
    build-script output files.

    *dep_outputs* is the list of output files to scan; link/cfg/env
    directives are only honored for *crate_output* itself, while
    rustc-link-search paths are honored from every file.

    NOTE(review): pass_l_flag is accepted but never consulted in this body —
    confirm whether it is still needed.
    """
    env = {}
    args = []

    def cargo_entries(path):
        # Yield the key/value split of each "cargo:..." line in the file.
        with open(path) as fh:
            for raw in fh:
                line = raw.rstrip()
                if line.startswith("cargo:"):
                    yield line[len("cargo:") :].split("=", 1)

    for f in dep_outputs:
        for key, value in cargo_entries(f):
            if key == "rustc-link-search":
                args += ["-L", value]
            elif key == "rustc-flags":
                flags = value.split()
                for flag, flag_value in zip(flags[0::2], flags[1::2]):
                    if flag == "-l" and f == crate_output:
                        args += ["-l", flag_value]
                    elif flag == "-L":
                        args += ["-L", flag_value]
                    else:
                        raise Exception(
                            "Unknown flag passed through "
                            '"cargo:rustc-flags": "%s"' % flag
                        )
            elif key == "rustc-link-lib" and f == crate_output:
                args += ["-l", value]
            elif key == "rustc-cfg" and f == crate_output:
                args += ["--cfg", value]
            elif key == "rustc-env" and f == crate_output:
                env_key, env_value = value.split("=", 1)
                env[env_key] = env_value
            elif key in ("rerun-if-changed", "rerun-if-env-changed", "warning"):
                # Build-system level directives; nothing to forward to rustc.
                pass
            elif key:
                # Todo: Distinguish between direct and transitive
                # dependencies so we can pass metadata environment
                # variables correctly.
                pass

    return env, args
+
+
def wrap_rustc(args):
    """Run a rustc command with env vars and extra arguments derived from
    cargo build-script outputs; return the child's exit status."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--crate-out", nargs="?")
    parser.add_argument("--deps-out", nargs="*")
    parser.add_argument("--cwd")
    parser.add_argument("--pass-l-flag", action="store_true")
    parser.add_argument("--cmd", nargs=argparse.REMAINDER)
    opts = parser.parse_args(args)

    extra_env, extra_args = parse_outputs(
        opts.crate_out, opts.deps_out, opts.pass_l_flag
    )
    os.environ.update(extra_env)
    child = subprocess.Popen(opts.cmd + extra_args, cwd=opts.cwd)
    return child.wait()
+
+
+if __name__ == "__main__":
+ sys.exit(wrap_rustc(sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/action/xpccheck.py b/python/mozbuild/mozbuild/action/xpccheck.py
new file mode 100644
index 0000000000..4b59577cce
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/xpccheck.py
@@ -0,0 +1,109 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""A generic script to verify all test files are in the
+corresponding .ini file.
+
+Usage: xpccheck.py <directory> [<directory> ...]
+"""
+
+import os
+import sys
+from glob import glob
+
+import manifestparser
+
+
def getIniTests(testdir):
    """Return the test entries declared in *testdir*'s xpcshell.ini."""
    parser = manifestparser.ManifestParser(strict=False)
    parser.read(os.path.join(testdir, "xpcshell.ini"))
    return parser.tests
+
+
def verifyDirectory(initests, directory):
    """Check that every test_*.js file in *directory* appears in the
    manifest entries *initests*; print a TEST-UNEXPECTED-FAIL line and
    exit(1) on the first file that is missing."""
    abs_dir = os.path.abspath(directory)
    for candidate in glob(os.path.join(abs_dir, "test_*")):
        if not os.path.isfile(candidate):
            continue

        name = os.path.basename(candidate)
        if name.endswith(".in"):
            # preprocessed sources: compare against the generated name
            name = name[:-3]

        if not name.endswith(".js"):
            continue

        expected = os.path.join(abs_dir, name)
        if not any(test["path"] == expected for test in initests):
            print(
                (
                    "TEST-UNEXPECTED-FAIL | xpccheck | test "
                    "%s is missing from test manifest %s!"
                )
                % (
                    name,
                    os.path.join(directory, "xpcshell.ini"),
                ),
                file=sys.stderr,
            )
            sys.exit(1)
+
+
def verifyIniFile(initests, directory):
    """Check that every manifest entry in *initests* has a matching file in
    *directory*; print a TEST-UNEXPECTED-FAIL line and exit(1) on the first
    orphaned entry."""
    abs_dir = os.path.abspath(directory)
    files = glob(os.path.join(abs_dir, "test_*"))
    for test in initests:
        name = test["path"].split("/")[-1]

        found = False
        for candidate in files:
            fname = candidate.split("/")[-1]
            if fname.endswith(".in"):
                # strip only the trailing ".in" suffix
                fname = ".in".join(fname.split(".in")[:-1])

            if os.path.join(abs_dir, fname) == test["path"]:
                found = True
                break

        if not found:
            print(
                (
                    "TEST-UNEXPECTED-FAIL | xpccheck | found "
                    "%s in xpcshell.ini and not in directory '%s'"
                )
                % (
                    name,
                    directory,
                ),
                file=sys.stderr,
            )
            sys.exit(1)
+
+
def main(argv):
    """Verify manifest/directory consistency for each directory in *argv*
    (argv[0] is the topsrcdir and is skipped)."""
    if len(argv) < 2:
        print(
            "Usage: xpccheck.py <topsrcdir> <directory> [<directory> ...]",
            file=sys.stderr,
        )
        sys.exit(1)

    for directory in argv[1:]:
        # xpcshell-unpack is a copy of the xpcshell sibling directory; the
        # Makefile copies all of its files (including xpcshell.ini), so it
        # would produce spurious mismatches.
        if directory.endswith("toolkit/mozapps/extensions/test/xpcshell-unpack"):
            continue

        tests = getIniTests(directory)
        verifyDirectory(tests, directory)
        verifyIniFile(tests, directory)
+
+
+if __name__ == "__main__":
+ main(sys.argv[1:])
diff --git a/python/mozbuild/mozbuild/action/xpidl-process.py b/python/mozbuild/mozbuild/action/xpidl-process.py
new file mode 100755
index 0000000000..99f2a83f5e
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/xpidl-process.py
@@ -0,0 +1,153 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This script is used to generate an output header and xpt file for
+# input IDL file(s). It's purpose is to directly support the build
+# system. The API will change to meet the needs of the build system.
+
+import argparse
+import os
+import sys
+
+import six
+from buildconfig import topsrcdir
+from mozpack import path as mozpath
+from xpidl import jsonxpt
+from xpidl.header import print_header
+from xpidl.rust import print_rust_bindings
+from xpidl.rust_macros import print_rust_macros_bindings
+from xpidl.xpidl import IDLParser
+
+from mozbuild.action.util import log_build_task
+from mozbuild.makeutil import Makefile
+from mozbuild.pythonutil import iter_modules_in_path
+from mozbuild.util import FileAvoidWrite
+
+
def process(
    input_dirs,
    inc_paths,
    bindings_conf,
    header_dir,
    xpcrs_dir,
    xpt_dir,
    deps_dir,
    module,
    idl_files,
):
    """Parse and resolve each IDL file, then emit the generated C++ header,
    Rust bindings (runtime and build-time macro variants), a single linked
    .xpt typelib for *module*, and optionally a make-format dependency file.

    NOTE(review): input_dirs is accepted but not referenced in this body —
    confirm whether callers still need to pass it.
    """
    p = IDLParser()

    xpts = []
    mk = Makefile()
    rule = mk.create_rule()

    # The bindings configuration is executable Python that defines a
    # DOMInterfaces dict.
    glbl = {}
    exec(open(bindings_conf, encoding="utf-8").read(), glbl)
    webidlconfig = glbl["DOMInterfaces"]

    # Write out dependencies for Python modules we import. If this list isn't
    # up to date, we will not re-process XPIDL files if the processor changes.
    rule.add_dependencies(six.ensure_text(s) for s in iter_modules_in_path(topsrcdir))

    for path in idl_files:
        basename = os.path.basename(path)
        stem, _ = os.path.splitext(basename)
        idl_data = open(path, encoding="utf-8").read()

        idl = p.parse(idl_data, filename=path)
        idl.resolve(inc_paths, p, webidlconfig)

        header_path = os.path.join(header_dir, "%s.h" % stem)
        rs_rt_path = os.path.join(xpcrs_dir, "rt", "%s.rs" % stem)
        rs_bt_path = os.path.join(xpcrs_dir, "bt", "%s.rs" % stem)

        xpts.append(jsonxpt.build_typelib(idl))

        # Every IDL this file depends on is a make-level dependency too.
        rule.add_dependencies(six.ensure_text(s) for s in idl.deps)

        # The print_* functions don't actually do anything with the
        # passed-in path other than writing it into the file to let people
        # know where the original source was. This script receives
        # absolute paths, which are not so great to embed in header files
        # (they mess with deterministic generation of files on different
        # machines, Searchfox logic, shared compilation caches, etc.), so
        # we pass in fake paths that are the same across compilations, but
        # should still enable people to figure out where to go.
        relpath = mozpath.relpath(path, topsrcdir)

        with FileAvoidWrite(header_path) as fh:
            print_header(idl, fh, path, relpath)

        with FileAvoidWrite(rs_rt_path) as fh:
            print_rust_bindings(idl, fh, relpath)

        with FileAvoidWrite(rs_bt_path) as fh:
            print_rust_macros_bindings(idl, fh, relpath)

    # NOTE: We don't use FileAvoidWrite here as we may re-run this code due to a
    # number of different changes in the code, which may not cause the .xpt
    # files to be changed in any way. This means that make will re-run us every
    # time a build is run whether or not anything changed. To fix this we
    # unconditionally write out the file.
    xpt_path = os.path.join(xpt_dir, "%s.xpt" % module)
    with open(xpt_path, "w", encoding="utf-8", newline="\n") as fh:
        jsonxpt.write(jsonxpt.link(xpts), fh)

    rule.add_targets([six.ensure_text(xpt_path)])
    if deps_dir:
        deps_path = os.path.join(deps_dir, "%s.pp" % module)
        with FileAvoidWrite(deps_path) as fh:
            mk.dump(fh)
+
+
def main(argv):
    """Command-line wrapper around process()."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--depsdir", help="Directory in which to write dependency files."
    )
    parser.add_argument(
        "--bindings-conf", help="Path to the WebIDL binding configuration file."
    )
    parser.add_argument(
        "--input-dir",
        dest="input_dirs",
        action="append",
        default=[],
        help="Directory(ies) in which to find source .idl files.",
    )
    parser.add_argument("headerdir", help="Directory in which to write header files.")
    parser.add_argument(
        "xpcrsdir", help="Directory in which to write rust xpcom binding files."
    )
    parser.add_argument("xptdir", help="Directory in which to write xpt file.")
    parser.add_argument(
        "module", help="Final module name to use for linked output xpt file."
    )
    parser.add_argument("idls", nargs="+", help="Source .idl file(s).")
    parser.add_argument(
        "-I",
        dest="incpath",
        action="append",
        default=[],
        help="Extra directories where to look for included .idl files.",
    )

    opts = parser.parse_args(argv)
    # -I paths are given relative to the source root.
    include_dirs = [os.path.join(topsrcdir, p) for p in opts.incpath]
    process(
        opts.input_dirs,
        include_dirs,
        opts.bindings_conf,
        opts.headerdir,
        opts.xpcrsdir,
        opts.xptdir,
        opts.depsdir,
        opts.module,
        opts.idls,
    )
+
+
+if __name__ == "__main__":
+ log_build_task(main, sys.argv[1:])
diff --git a/python/mozbuild/mozbuild/action/zip.py b/python/mozbuild/mozbuild/action/zip.py
new file mode 100644
index 0000000000..e0dcbe020f
--- /dev/null
+++ b/python/mozbuild/mozbuild/action/zip.py
@@ -0,0 +1,52 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This script creates a zip file, but will also strip any binaries
+# it finds before adding them to the zip.
+
+import argparse
+import sys
+
+import mozpack.path as mozpath
+from mozpack.copier import Jarrer
+from mozpack.errors import errors
+from mozpack.files import FileFinder
+from mozpack.path import match
+
+from mozbuild.action.util import log_build_task
+
+
def main(args):
    """Create a zip archive from the given inputs, optionally stripping
    binaries and excluding paths matching -x patterns."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-C",
        metavar="DIR",
        default=".",
        help="Change to given directory before considering " "other paths",
    )
    parser.add_argument("--strip", action="store_true", help="Strip executables")
    parser.add_argument(
        "-x",
        metavar="EXCLUDE",
        default=[],
        action="append",
        help="Exclude files that match the pattern",
    )
    parser.add_argument("zip", help="Path to zip file to write")
    parser.add_argument("input", nargs="+", help="Path to files to add to zip")
    opts = parser.parse_args(args)

    jarrer = Jarrer()

    with errors.accumulate():
        finder = FileFinder(opts.C, find_executables=opts.strip)
        for pattern in opts.input:
            for path, file in finder.find(pattern):
                # skip anything matching an -x exclusion pattern
                if any(match(path, exclude) for exclude in opts.x):
                    continue
                jarrer.add(path, file)
    jarrer.copy(mozpath.join(opts.C, opts.zip))
+
+
+if __name__ == "__main__":
+ log_build_task(main, sys.argv[1:])
diff --git a/python/mozbuild/mozbuild/analyze/__init__.py b/python/mozbuild/mozbuild/analyze/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/analyze/__init__.py
diff --git a/python/mozbuild/mozbuild/analyze/hg.py b/python/mozbuild/mozbuild/analyze/hg.py
new file mode 100644
index 0000000000..605ff6838e
--- /dev/null
+++ b/python/mozbuild/mozbuild/analyze/hg.py
@@ -0,0 +1,176 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import bisect
+import gzip
+import json
+import math
+from collections import Counter
+from datetime import datetime, timedelta
+
+import mozpack.path as mozpath
+import requests
+
+PUSHLOG_CHUNK_SIZE = 500
+
+URL = "https://hg.mozilla.org/mozilla-central/json-pushes?"
+
+
def unix_epoch(date):
    """Return *date* (a naive datetime) as seconds since the Unix epoch."""
    epoch = datetime(1970, 1, 1)
    return (date - epoch).total_seconds()
+
+
def unix_from_date(n, today):
    """Return the Unix timestamp of the instant *n* days before *today*."""
    return unix_epoch(today - timedelta(days=n))
+
+
def get_lastpid(session):
    """Return the id of the most recent push known to the pushlog server."""
    response = session.get(URL + "&version=2")
    return response.json()["lastpushid"]
+
+
def get_pushlog_chunk(session, start, end):
    """Fetch pushes with ids in (start, end) from the pushlog.

    Returns a list of (push_id, push_info) tuples sorted by push date.
    """
    # Fix: the previous string-literal backslash continuation embedded the
    # next line's literal indentation whitespace into the query string
    # ("...&        endID=..."), producing a malformed URL parameter.
    query = "version=1&startID={0}&endID={1}&full=1".format(start, end)
    res = session.get(URL + query).json()
    # returns pushes sorted by date
    return sorted(res.items(), key=lambda x: x[1]["date"])
+
+
def collect_data(session, date):
    """Walk the pushlog backwards in fixed-size chunks until reaching
    *date* (a Unix timestamp); return the list of chunks covering the
    interval from *date* to the latest push."""
    if date < 1206031764:  # first push
        raise Exception("No pushes exist before March 20, 2008.")
    last_pid = get_lastpid(session)
    data = []
    start_id = last_pid - PUSHLOG_CHUNK_SIZE
    end_id = last_pid + 1
    while True:
        chunk = get_pushlog_chunk(session, start_id, end_id)
        chunk_dates = [entry[1]["date"] for entry in chunk]
        if chunk_dates[0] < date:
            # This chunk crosses the cutoff; keep only pushes at/after it.
            cutoff = bisect.bisect_left(chunk_dates, date)
            data.append(chunk[cutoff:])
            return data
        data.append(chunk)
        end_id = start_id + 1
        start_id -= PUSHLOG_CHUNK_SIZE
+
+
def get_data(epoch):
    """Return a push-id -> push-info dict for every push since *epoch*."""
    session = requests.Session()
    chunks = collect_data(session, epoch)
    return {pid: info for chunk in chunks for (pid, info) in chunk}
+
+
class Pushlog(object):
    """Summary of all pushes within the last *days* days.

    Attributes:
        pushlog: raw push-id -> push-info mapping from the pushlog server
        pids: sorted push ids (string keys, lexicographic order)
        pushes: one Push object per push id
        files: every touched file, repeated once per push that touched it
        file_set: the unique set of touched files
        file_count: Counter mapping a file to the number of pushes touching it
    """

    def __init__(self, days):
        # get_data() performs network requests against hg.mozilla.org.
        info = get_data(unix_from_date(days, datetime.today()))
        self.pushlog = info
        self.pids = self.get_pids()
        self.pushes = self.make_pushes()
        self.files = [l for p in self.pushes for l in set(p.files)]
        self.file_set = set(self.files)
        self.file_count = Counter(self.files)

    def make_pushes(self):
        """Build a Push object for each known push id."""
        pids = self.pids
        all_pushes = self.pushlog
        return [Push(pid, all_pushes[str(pid)]) for pid in pids]

    def get_pids(self):
        """Return the push ids, sorted.

        Fix: dict.keys() returns a view on Python 3, which has no .sort()
        method; use sorted() instead (same lexicographic order as the old
        Python 2 list.sort() on string keys).
        """
        return sorted(self.pushlog.keys())
+
+
class Push(object):
    """One push: its id, date, and the flat list of files it touched."""

    def __init__(self, pid, p_dict):
        self.id = pid
        self.date = p_dict["date"]
        self.files = [
            path for changeset in p_dict["changesets"] for path in changeset["files"]
        ]
+
+
class Report(object):
    """Cost/churn report: cross-references per-file change counts from the
    pushlog with per-file costs (from a gzipped JSON file or a dict)."""

    def __init__(self, days, path=None, cost_dict=None):
        # Pushlog(days) performs network requests against hg.mozilla.org.
        obj = Pushlog(days)
        self.file_set = obj.file_set
        self.file_count = obj.file_count
        self.name = str(days) + "day_report"
        self.cost_dict = self.get_cost_dict(path, cost_dict)

    def get_cost_dict(self, path, cost_dict):
        """Load the file->cost mapping from the gzipped JSON file at *path*,
        falling back to *cost_dict* when no path is given."""
        if path is not None:
            with gzip.open(path) as file:
                return json.loads(file.read())
        else:
            if cost_dict is not None:
                return cost_dict
            else:
                # NOTE(review): bare Exception with no message; callers get no
                # hint that either `path` or `cost_dict` must be supplied.
                raise Exception

    def organize_data(self):
        """Return (file, cost, count, round(cost*count, 3)) rows for every
        file in the push data that has a known cost."""
        costs = self.cost_dict
        counts = self.file_count
        res = []
        for f in self.file_set:
            cost = costs.get(f)
            count = counts.get(f)
            if cost is not None:
                res.append((f, cost, count, round(cost * count, 3)))
        return res

    def get_sorted_report(self, format):
        """Return rows sorted by total cost, descending; for "html"/"pretty"
        output the time columns are rendered as M:SS strings."""
        res = self.organize_data()
        res.sort(key=(lambda x: x[3]), reverse=True)

        def ms_to_mins_secs(ms):
            # renders a millisecond count as "minutes:seconds"
            secs = ms / 1000.0
            mins = secs / 60
            secs = secs % 60
            return "%d:%02d" % (math.trunc(mins), int(round(secs)))

        if format in ("html", "pretty"):
            res = [
                (f, ms_to_mins_secs(cost), count, ms_to_mins_secs(total))
                for (f, cost, count, total) in res
            ]

        return res

    def cut(self, size, lst):
        # Truncate lst to at most `size` entries.
        if len(lst) <= size:
            return lst
        else:
            return lst[:size]

    def generate_output(self, format, limit, dst):
        """Print the report ("pretty") or write it to *dst* as csv, json, or
        html (the fallback for any other format string)."""
        import tablib

        data = tablib.Dataset(headers=["FILE", "TIME", "CHANGES", "TOTAL"])
        res = self.get_sorted_report(format)
        if limit is not None:
            res = self.cut(limit, res)
        for x in res:
            data.append(x)
        if format == "pretty":
            print(data)
        else:
            file_name = self.name + "." + format
            content = None
            # NOTE(review): the export() return value is discarded and the
            # content is re-read from the format properties below — confirm
            # this call is actually needed.
            data.export(format)
            if format == "csv":
                content = data.csv
            elif format == "json":
                content = data.json
            else:
                content = data.html
            file_path = mozpath.join(dst, file_name)
            # NOTE(review): the file is opened in binary mode; confirm the
            # tablib format properties return bytes here — if they return
            # str on Python 3 this write would need an encode().
            with open(file_path, "wb") as f:
                f.write(content)
            print("Created report: %s" % file_path)
diff --git a/python/mozbuild/mozbuild/android_version_code.py b/python/mozbuild/mozbuild/android_version_code.py
new file mode 100644
index 0000000000..aa13609a7a
--- /dev/null
+++ b/python/mozbuild/mozbuild/android_version_code.py
@@ -0,0 +1,197 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import math
+import sys
+import time
+
+# Builds before this build ID use the v0 version scheme. Builds after this
+# build ID use the v1 version scheme.
+V1_CUTOFF = 20150801000000 # YYYYmmddHHMMSS
+
+
+def android_version_code_v0(buildid, cpu_arch=None, min_sdk=0, max_sdk=0):
+    """Generate a v0 android:versionCode from the first ten digits of buildid.
+
+    Only arm (cpu_arch None or "armeabi-v7a") and "x86" are handled; any
+    other cpu_arch raises ValueError.  max_sdk is accepted but unused here.
+    """
+    base = int(str(buildid)[:10])
+    # None is interpreted as arm.
+    if not cpu_arch or cpu_arch == "armeabi-v7a":
+        # Increment by MIN_SDK_VERSION -- this adds 9 to every build ID as a
+        # minimum. Our split APK starts at 15.
+        return base + min_sdk + 0
+    elif cpu_arch in ["x86"]:
+        # Increment the version code by 3 for x86 builds so they are offered to
+        # x86 phones that have ARM emulators, beating the 2-point advantage that
+        # the v15+ ARMv7 APK has. If we change our splits in the future, we'll
+        # need to do this further still.
+        return base + min_sdk + 3
+    else:
+        raise ValueError(
+            "Don't know how to compute android:versionCode "
+            "for CPU arch %s" % cpu_arch
+        )
+
+
+def android_version_code_v1(buildid, cpu_arch=None, min_sdk=0, max_sdk=0):
+    """Generate a v1 android:versionCode.
+    The important consideration is that version codes be monotonically
+    increasing (per Android package name) for all published builds. The input
+    build IDs are based on timestamps and hence are always monotonically
+    increasing.
+
+    The generated v1 version codes look like (in binary):
+
+    0111 1000 0010 tttt tttt tttt tttt txpg
+
+    The 17 bits labelled 't' represent the number of hours since midnight on
+    September 1, 2015. (2015090100 in YYYYMMMDDHH format.) This yields a
+    little under 15 years worth of hourly build identifiers, since 2**17 / (366
+    * 24) =~ 14.92.
+
+    The bits labelled 'x', 'p', and 'g' are feature flags.
+
+    The bit labelled 'x' is 1 if the build is for an x86 or x86-64 architecture,
+    and 0 otherwise, which means the build is for an ARM or ARM64 architecture.
+    (Fennec no longer supports ARMv6, so ARM is equivalent to ARMv7.
+
+    ARM64 is also known as AArch64; it is logically ARMv8.)
+
+    For the same release, x86 and x86_64 builds have higher version codes and
+    take precedence over ARM builds, so that they are preferred over ARM on
+    devices that have ARM emulation.
+
+    The bit labelled 'p' is 1 if the build is for a 64-bit architecture (x86-64
+    or ARM64), and 0 otherwise, which means the build is for a 32-bit
+    architecture (x86 or ARM). 64-bit builds have higher version codes so
+    they take precedence over 32-bit builds on devices that support 64-bit.
+
+    The bit labelled 'g' is 1 if the build targets a recent API level, which
+    is currently always the case, because Firefox no longer ships releases that
+    are split by API levels. However, we may reintroduce a split in the future,
+    in which case the release that targets an older API level will
+
+    We throw an explanatory exception when we are within one calendar year of
+    running out of build events. This gives lots of time to update the version
+    scheme. The responsible individual should then bump the range (to allow
+    builds to continue) and use the time remaining to update the version scheme
+    via the reserved high order bits.
+
+    N.B.: the reserved 0 bit to the left of the highest order 't' bit can,
+    sometimes, be used to bump the version scheme. In addition, by reducing the
+    granularity of the build identifiers (for example, moving to identifying
+    builds every 2 or 4 hours), the version scheme may be adjusted further still
+    without losing a (valuable) high order bit.
+    """
+
+    # max_sdk is accepted for interface symmetry with v0 but is unused here.
+    def hours_since_cutoff(buildid):
+        # The ID is formatted like YYYYMMDDHHMMSS (using
+        # datetime.now().strftime('%Y%m%d%H%M%S'); see build/variables.py).
+        # The inverse function is time.strptime.
+        # N.B.: the time module expresses time as decimal seconds since the
+        # epoch.
+        fmt = "%Y%m%d%H%M%S"
+        build = time.strptime(str(buildid), fmt)
+        cutoff = time.strptime(str(V1_CUTOFF), fmt)
+        return int(
+            math.floor((time.mktime(build) - time.mktime(cutoff)) / (60.0 * 60.0))
+        )
+
+    # Of the 21 low order bits, we take 17 bits for builds.
+    base = hours_since_cutoff(buildid)
+    if base < 0:
+        raise ValueError(
+            "Something has gone horribly wrong: cannot calculate "
+            "android:versionCode from build ID %s: hours underflow "
+            "bits allotted!" % buildid
+        )
+    if base > 2 ** 17:
+        raise ValueError(
+            "Something has gone horribly wrong: cannot calculate "
+            "android:versionCode from build ID %s: hours overflow "
+            "bits allotted!" % buildid
+        )
+    if base > 2 ** 17 - 366 * 24:
+        raise ValueError(
+            "Running out of low order bits calculating "
+            "android:versionCode from build ID %s: "
+            "; YOU HAVE ONE YEAR TO UPDATE THE VERSION SCHEME." % buildid
+        )
+
+    version = 0b1111000001000000000000000000000
+    # We reserve 1 "middle" high order bit for the future, and 3 low order bits
+    # for architecture and APK splits.
+    version |= base << 3
+
+    # 'x' bit is 1 for x86/x86-64 architectures (`None` is interpreted as ARM).
+    if cpu_arch in ["x86", "x86_64"]:
+        version |= 1 << 2
+    elif not cpu_arch or cpu_arch in ["armeabi-v7a", "arm64-v8a"]:
+        pass
+    else:
+        raise ValueError(
+            "Don't know how to compute android:versionCode "
+            "for CPU arch %s" % cpu_arch
+        )
+
+    # 'p' bit is 1 for 64-bit architectures.
+    if cpu_arch in ["arm64-v8a", "x86_64"]:
+        version |= 1 << 1
+    elif cpu_arch in ["armeabi-v7a", "x86"]:
+        pass
+    else:
+        raise ValueError(
+            "Don't know how to compute android:versionCode "
+            "for CPU arch %s" % cpu_arch
+        )
+
+    # 'g' bit is currently always 1, but may depend on `min_sdk` in the future.
+    version |= 1 << 0
+
+    return version
+
+
+def android_version_code(buildid, *args, **kwargs):
+    """Dispatch to the v0 or v1 scheme based on whether buildid predates
+    V1_CUTOFF."""
+    base = int(str(buildid))
+    if base < V1_CUTOFF:
+        return android_version_code_v0(buildid, *args, **kwargs)
+    else:
+        return android_version_code_v1(buildid, *args, **kwargs)
+
+
+def main(argv):
+    """Parse command-line options and print the computed android:versionCode.
+
+    Returns 0 on success (argparse exits on invalid input).
+    """
+    parser = argparse.ArgumentParser("Generate an android:versionCode", add_help=False)
+    parser.add_argument(
+        "--verbose", action="store_true", default=False, help="Be verbose"
+    )
+    parser.add_argument(
+        "--with-android-cpu-arch",
+        dest="cpu_arch",
+        choices=["armeabi", "armeabi-v7a", "arm64-v8a", "x86", "x86_64"],
+        help="The target CPU architecture",
+    )
+    parser.add_argument(
+        "--with-android-min-sdk-version",
+        dest="min_sdk",
+        type=int,
+        default=0,
+        help="The minimum target SDK",
+    )
+    parser.add_argument(
+        "--with-android-max-sdk-version",
+        dest="max_sdk",
+        type=int,
+        default=0,
+        help="The maximum target SDK",
+    )
+    parser.add_argument("buildid", type=int, help="The input build ID")
+
+    args = parser.parse_args(argv)
+    code = android_version_code(
+        args.buildid, cpu_arch=args.cpu_arch, min_sdk=args.min_sdk, max_sdk=args.max_sdk
+    )
+    print(code)
+    return 0
+
+
+# Allow running this module directly as a script.
+if __name__ == "__main__":
+    sys.exit(main(sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/artifact_builds.py b/python/mozbuild/mozbuild/artifact_builds.py
new file mode 100644
index 0000000000..a4d2a0bdd2
--- /dev/null
+++ b/python/mozbuild/mozbuild/artifact_builds.py
@@ -0,0 +1,27 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# The values correspond to entries at
+# https://tools.taskcluster.net/index/artifacts/#gecko.v2.mozilla-central.latest/gecko.v2.mozilla-central.latest
+JOB_CHOICES = {
+    # Set of job names accepted by the artifact commands' --job argument.
+    "android-arm-opt",
+    "android-arm-debug",
+    "android-x86-opt",
+    "android-x86_64-opt",
+    "android-x86_64-debug",
+    "android-aarch64-opt",
+    "android-aarch64-debug",
+    "linux-opt",
+    "linux-debug",
+    "linux64-opt",
+    "linux64-debug",
+    "macosx64-opt",
+    "macosx64-debug",
+    "win32-opt",
+    "win32-debug",
+    "win64-opt",
+    "win64-debug",
+    "win64-aarch64-opt",
+    "win64-aarch64-debug",
+}
diff --git a/python/mozbuild/mozbuild/artifact_cache.py b/python/mozbuild/mozbuild/artifact_cache.py
new file mode 100644
index 0000000000..572953e1f7
--- /dev/null
+++ b/python/mozbuild/mozbuild/artifact_cache.py
@@ -0,0 +1,251 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Fetch and cache artifacts from URLs.
+
+This module manages fetching artifacts from URLS and purging old
+artifacts using a simple Least Recently Used cache.
+
+This module requires certain modules be importable from the ambient Python
+environment. Consumers will need to arrange this themselves.
+
+The bulk of the complexity is in managing and persisting several caches. If
+we found a Python LRU cache that pickled cleanly, we could remove a lot of
+this code! Sadly, I found no such candidate implementations, so we pickle
+pylru caches manually.
+
+None of the instances (or the underlying caches) are safe for concurrent use.
+A future need, perhaps.
+"""
+
+
+import binascii
+import hashlib
+import logging
+import os
+
+import dlmanager
+import mozpack.path as mozpath
+import six
+import six.moves.urllib.parse as urlparse
+
+from mozbuild.util import mkdir
+
+# Using 'DownloadManager' through the provided interface we
+# can't directly specify a 'chunk_size' for the 'Download' it manages.
+# One way to get it to use the 'chunk_size' we want is to monkeypatch
+# the defaults of the init function for the 'Download' class.
+CHUNK_SIZE = 16 * 1024 * 1024  # 16 MB in bytes.
+dl_init = dlmanager.Download.__init__
+# NOTE(review): this assumes 'chunk_size' is the second defaulted parameter
+# of dlmanager.Download.__init__ -- re-verify when upgrading dlmanager.
+dl_init.__defaults__ = (
+    dl_init.__defaults__[:1] + (CHUNK_SIZE,) + dl_init.__defaults__[2:]
+)
+
+
+# Minimum number of downloaded artifacts to keep. Each artifact can be very large,
+# so don't make this to large!
+MIN_CACHED_ARTIFACTS = 12
+
+# Maximum size of the downloaded artifacts to keep in cache, in bytes (2GiB).
+MAX_CACHED_ARTIFACTS_SIZE = 2 * 1024 * 1024 * 1024
+
+
+class ArtifactPersistLimit(dlmanager.PersistLimit):
+    """Handle persistence for a cache of artifacts.
+
+    When instantiating a DownloadManager, it starts by filling the
+    PersistLimit instance it's given with register_dir_content.
+    In practice, this registers all the files already in the cache directory.
+    After a download finishes, the newly downloaded file is registered, and the
+    oldest files registered to the PersistLimit instance are removed depending
+    on the size and file limits it's configured for.
+
+    This is all good, but there are a few tweaks we want here:
+
+    - We have pickle files in the cache directory that we don't want purged.
+    - Files that were just downloaded in the same session shouldn't be
+      purged. (if for some reason we end up downloading more than the default
+      max size, we don't want the files to be purged)
+
+    To achieve this, this subclass of PersistLimit inhibits the register_file
+    method for pickle files and tracks what files were downloaded in the same
+    session to avoid removing them.
+
+    The register_file method may be used to register cache matches too, so that
+    later sessions know they were freshly used.
+    """
+
+    def __init__(self, log=None):
+        super(ArtifactPersistLimit, self).__init__(
+            size_limit=MAX_CACHED_ARTIFACTS_SIZE, file_limit=MIN_CACHED_ARTIFACTS
+        )
+        self._log = log
+        # True only while register_dir_content scans pre-existing files.
+        self._registering_dir = False
+        # Paths registered during this session; never purged below.
+        self._downloaded_now = set()
+
+    def log(self, *args, **kwargs):
+        # Forward to the provided logger, if any.
+        if self._log:
+            self._log(*args, **kwargs)
+
+    def register_file(self, path):
+        # Pickle/checksum/marker files are cache metadata: never track them
+        # for purging.
+        if (
+            path.endswith(".pickle")
+            or path.endswith(".checksum")
+            or os.path.basename(path) == ".metadata_never_index"
+        ):
+            return
+        if not self._registering_dir:
+            # Touch the file so that subsequent calls to a mach artifact
+            # command know it was recently used. While remove_old_files
+            # is based on access time, in various cases, the access time is not
+            # updated when just reading the file, so we force an update.
+            try:
+                os.utime(path, None)
+            except OSError:
+                pass
+            self._downloaded_now.add(path)
+        super(ArtifactPersistLimit, self).register_file(path)
+
+    def register_dir_content(self, directory, pattern="*"):
+        # Flag directory scans so register_file skips the "recently used"
+        # bookkeeping for files that were already present.
+        self._registering_dir = True
+        super(ArtifactPersistLimit, self).register_dir_content(directory, pattern)
+        self._registering_dir = False
+
+    def remove_old_files(self):
+        from dlmanager import fs
+
+        # Purge least-recently-accessed files until both the file-count and
+        # total-size limits are satisfied, keeping this session's downloads.
+        files = sorted(self.files, key=lambda f: f.stat.st_atime)
+        kept = []
+        while len(files) > self.file_limit and self._files_size >= self.size_limit:
+            f = files.pop(0)
+            if f.path in self._downloaded_now:
+                kept.append(f)
+                continue
+            try:
+                fs.remove(f.path)
+            except WindowsError:
+                # For some reason, on automation, we can't remove those files.
+                # So for now, ignore the error.
+                # NOTE(review): WindowsError only exists on Windows; on other
+                # platforms a failure here would surface as NameError instead
+                # -- confirm whether that is intended.
+                kept.append(f)
+                continue
+            self.log(
+                logging.INFO,
+                "artifact",
+                {"filename": f.path},
+                "Purged artifact (unknown)",
+            )
+            self._files_size -= f.stat.st_size
+        self.files = files + kept
+
+    def remove_all(self):
+        from dlmanager import fs
+
+        # Delete every tracked file and reset size bookkeeping.
+        for f in self.files:
+            fs.remove(f.path)
+        self._files_size = 0
+        self.files = []
+
+
+class ArtifactCache(object):
+    """Fetch artifacts from URLS and purge least recently used artifacts from disk."""
+
+    def __init__(self, cache_dir, log=None, skip_cache=False):
+        # not_indexed asks mkdir to mark the directory so desktop search
+        # indexers skip it (see mozbuild.util.mkdir).
+        mkdir(cache_dir, not_indexed=True)
+        self._cache_dir = cache_dir
+        self._log = log
+        self._skip_cache = skip_cache
+        self._persist_limit = ArtifactPersistLimit(log)
+        self._download_manager = dlmanager.DownloadManager(
+            self._cache_dir, persist_limit=self._persist_limit
+        )
+        # Last progress bucket reported (percent / 5); -1 means none yet.
+        self._last_dl_update = -1
+
+    def log(self, *args, **kwargs):
+        # Forward to the provided logger, if any.
+        if self._log:
+            self._log(*args, **kwargs)
+
+    def fetch(self, url, force=False):
+        # NOTE(review): `force` is accepted but unused in this body --
+        # confirm callers before removing it.
+        fname = os.path.basename(url)
+        try:
+            # Use the file name from the url if it looks like a hash digest.
+            if len(fname) not in (32, 40, 56, 64, 96, 128):
+                raise TypeError()
+            binascii.unhexlify(fname)
+        except (TypeError, binascii.Error):
+            # We download to a temporary name like HASH[:16]-basename to
+            # differentiate among URLs with the same basenames. We used to then
+            # extract the build ID from the downloaded artifact and use it to make a
+            # human readable unique name, but extracting build IDs is time consuming
+            # (especially on Mac OS X, where we must mount a large DMG file).
+            hash = hashlib.sha256(six.ensure_binary(url)).hexdigest()[:16]
+            # Strip query string and fragments.
+            basename = os.path.basename(urlparse.urlparse(url).path)
+            fname = hash + "-" + basename
+
+        path = os.path.abspath(mozpath.join(self._cache_dir, fname))
+        if self._skip_cache and os.path.exists(path):
+            self.log(
+                logging.INFO,
+                "artifact",
+                {"path": path},
+                "Skipping cache: removing cached downloaded artifact {path}",
+            )
+            os.remove(path)
+
+        try:
+            dl = self._download_manager.download(url, fname)
+
+            def download_progress(dl, bytes_so_far, total_size):
+                # Log at most once per 5% step to keep output readable.
+                if not total_size:
+                    return
+                percent = (float(bytes_so_far) / total_size) * 100
+                now = int(percent / 5)
+                if now == self._last_dl_update:
+                    return
+                self._last_dl_update = now
+                self.log(
+                    logging.INFO,
+                    "artifact",
+                    {
+                        "bytes_so_far": bytes_so_far,
+                        "total_size": total_size,
+                        "percent": percent,
+                    },
+                    "Downloading... {percent:02.1f} %",
+                )
+
+            if dl:
+                self.log(
+                    logging.INFO,
+                    "artifact",
+                    {"path": path},
+                    "Downloading artifact to local cache: {path}",
+                )
+                dl.set_progress(download_progress)
+                dl.wait()
+            else:
+                self.log(
+                    logging.INFO,
+                    "artifact",
+                    {"path": path},
+                    "Using artifact from local cache: {path}",
+                )
+                # Avoid the file being removed if it was in the cache already.
+                path = os.path.join(self._cache_dir, fname)
+                self._persist_limit.register_file(path)
+
+            return os.path.abspath(mozpath.join(self._cache_dir, fname))
+        finally:
+            # Cancel any background downloads in progress.
+            self._download_manager.cancel()
+
+    def clear_cache(self):
+        # Honor --skip-cache by refusing to touch the cache at all.
+        if self._skip_cache:
+            self.log(
+                logging.INFO, "artifact", {}, "Skipping cache: ignoring clear_cache!"
+            )
+            return
+
+        self._persist_limit.remove_all()
diff --git a/python/mozbuild/mozbuild/artifact_commands.py b/python/mozbuild/mozbuild/artifact_commands.py
new file mode 100644
index 0000000000..12184ce0d9
--- /dev/null
+++ b/python/mozbuild/mozbuild/artifact_commands.py
@@ -0,0 +1,615 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+
+import argparse
+import hashlib
+import json
+import logging
+import os
+import shutil
+from collections import OrderedDict
+
+import mozversioncontrol
+import six
+from mach.decorators import Command, CommandArgument, SubCommand
+
+from mozbuild.artifact_builds import JOB_CHOICES
+from mozbuild.base import MachCommandConditions as conditions
+from mozbuild.util import ensureParentDir
+
+_COULD_NOT_FIND_ARTIFACTS_TEMPLATE = (
+ "ERROR!!!!!! Could not find artifacts for a toolchain build named "
+ "`{build}`. Local commits, dirty/stale files, and other changes in your "
+ "checkout may cause this error. Make sure you are on a fresh, current "
+ "checkout of mozilla-central. Beware that commands like `mach bootstrap` "
+ "and `mach artifact` are unlikely to work on any versions of the code "
+ "besides recent revisions of mozilla-central."
+)
+
+
+class SymbolsAction(argparse.Action):
+    """argparse action that stores True when --symbols is passed without a value."""
+
+    def __call__(self, parser, namespace, values, option_string=None):
+        # If this function is called, it means the --symbols option was given,
+        # so we want to store the value `True` if no explicit value was given
+        # to the option.
+        setattr(namespace, self.dest, values or True)
+
+
+class ArtifactSubCommand(SubCommand):
+    """SubCommand that appends the shared --tree/--job/--verbose arguments."""
+
+    def __call__(self, func):
+        after = SubCommand.__call__(self, func)
+        args = [
+            CommandArgument("--tree", metavar="TREE", type=str, help="Firefox tree."),
+            CommandArgument(
+                "--job", metavar="JOB", choices=JOB_CHOICES, help="Build job."
+            ),
+            CommandArgument(
+                "--verbose", "-v", action="store_true", help="Print verbose output."
+            ),
+        ]
+        # Apply each CommandArgument decorator on top of the SubCommand result.
+        for arg in args:
+            after = arg(after)
+        return after
+
+
+# Fetch and install binary artifacts from Mozilla automation.
+
+
+@Command(
+    "artifact",
+    category="post-build",
+    description="Use pre-built artifacts to build Firefox.",
+)
+def artifact(command_context):
+    """Download, cache, and install pre-built binary artifacts to build Firefox.
+
+    Use ``mach build`` as normal to freshen your installed binary libraries:
+    artifact builds automatically download, cache, and install binary
+    artifacts from Mozilla automation, replacing whatever may be in your
+    object directory. Use ``mach artifact last`` to see what binary artifacts
+    were last used.
+
+    Never build libxul again!
+
+    """
+    # Parent command only; the real work happens in the subcommands below.
+    pass
+
+
+def _make_artifacts(
+    command_context,
+    tree=None,
+    job=None,
+    skip_cache=False,
+    download_tests=True,
+    download_symbols=False,
+    download_maven_zip=False,
+    no_process=False,
+):
+    """Construct and return a configured mozbuild.artifacts.Artifacts helper.
+
+    Raises ValueError when --maven-zip is combined with tests, symbols, or
+    artifact processing.
+    """
+    state_dir = command_context._mach_context.state_dir
+    cache_dir = os.path.join(state_dir, "package-frontend")
+
+    hg = None
+    if conditions.is_hg(command_context):
+        hg = command_context.substs["HG"]
+
+    git = None
+    if conditions.is_git(command_context):
+        git = command_context.substs["GIT"]
+
+    # If we're building Thunderbird, we should be checking for comm-central artifacts.
+    topsrcdir = command_context.substs.get("commtopsrcdir", command_context.topsrcdir)
+
+    if download_maven_zip:
+        if download_tests:
+            raise ValueError("--maven-zip requires --no-tests")
+        if download_symbols:
+            raise ValueError("--maven-zip requires no --symbols")
+        if not no_process:
+            raise ValueError("--maven-zip requires --no-process")
+
+    from mozbuild.artifacts import Artifacts
+
+    artifacts = Artifacts(
+        tree,
+        command_context.substs,
+        command_context.defines,
+        job,
+        log=command_context.log,
+        cache_dir=cache_dir,
+        skip_cache=skip_cache,
+        hg=hg,
+        git=git,
+        topsrcdir=topsrcdir,
+        download_tests=download_tests,
+        download_symbols=download_symbols,
+        download_maven_zip=download_maven_zip,
+        no_process=no_process,
+        mozbuild=command_context,
+    )
+    return artifacts
+
+
+@ArtifactSubCommand("artifact", "install", "Install a good pre-built artifact.")
+@CommandArgument(
+    "source",
+    metavar="SRC",
+    nargs="?",
+    type=str,
+    help="Where to fetch and install artifacts from. Can be omitted, in "
+    "which case the current hg repository is inspected; an hg revision; "
+    "a remote URL; or a local file.",
+    default=None,
+)
+@CommandArgument(
+    "--skip-cache",
+    action="store_true",
+    help="Skip all local caches to force re-fetching remote artifacts.",
+    default=False,
+)
+@CommandArgument("--no-tests", action="store_true", help="Don't install tests.")
+@CommandArgument("--symbols", nargs="?", action=SymbolsAction, help="Download symbols.")
+@CommandArgument("--distdir", help="Where to install artifacts to.")
+@CommandArgument(
+    "--no-process",
+    action="store_true",
+    help="Don't process (unpack) artifact packages, just download them.",
+)
+@CommandArgument(
+    "--maven-zip", action="store_true", help="Download Maven zip (Android-only)."
+)
+def artifact_install(
+    command_context,
+    source=None,
+    skip_cache=False,
+    tree=None,
+    job=None,
+    verbose=False,
+    no_tests=False,
+    symbols=False,
+    distdir=None,
+    no_process=False,
+    maven_zip=False,
+):
+    """Fetch artifacts per the given options and install them into distdir."""
+    command_context._set_log_level(verbose)
+    artifacts = _make_artifacts(
+        command_context,
+        tree=tree,
+        job=job,
+        skip_cache=skip_cache,
+        download_tests=not no_tests,
+        download_symbols=symbols,
+        download_maven_zip=maven_zip,
+        no_process=no_process,
+    )
+
+    return artifacts.install_from(source, distdir or command_context.distdir)
+
+
+@ArtifactSubCommand(
+    "artifact",
+    "clear-cache",
+    "Delete local artifacts and reset local artifact cache.",
+)
+def artifact_clear_cache(command_context, tree=None, job=None, verbose=False):
+    """Remove all cached artifacts via the Artifacts helper; return 0."""
+    command_context._set_log_level(verbose)
+    artifacts = _make_artifacts(command_context, tree=tree, job=job)
+    artifacts.clear_cache()
+    return 0
+
+
+@SubCommand("artifact", "toolchain")
+@CommandArgument("--verbose", "-v", action="store_true", help="Print verbose output.")
+@CommandArgument(
+    "--cache-dir",
+    metavar="DIR",
+    help="Directory where to store the artifacts cache",
+)
+@CommandArgument(
+    "--skip-cache",
+    action="store_true",
+    help="Skip all local caches to force re-fetching remote artifacts.",
+    default=False,
+)
+@CommandArgument(
+    "--from-build",
+    metavar="BUILD",
+    nargs="+",
+    help="Download toolchains resulting from the given build(s); "
+    "BUILD is a name of a toolchain task, e.g. linux64-clang",
+)
+@CommandArgument(
+    "--from-task",
+    metavar="TASK_ID:ARTIFACT",
+    nargs="+",
+    help="Download toolchain artifact from a given task.",
+)
+@CommandArgument(
+    "--tooltool-manifest",
+    metavar="MANIFEST",
+    help="Explicit tooltool manifest to process",
+)
+@CommandArgument(
+    "--no-unpack", action="store_true", help="Do not unpack any downloaded file"
+)
+@CommandArgument(
+    "--retry", type=int, default=4, help="Number of times to retry failed downloads"
+)
+@CommandArgument(
+    "--bootstrap",
+    action="store_true",
+    help="Whether this is being called from bootstrap. "
+    "This verifies the toolchain is annotated as a toolchain used for local development.",
+)
+@CommandArgument(
+    "--artifact-manifest",
+    metavar="FILE",
+    help="Store a manifest about the downloaded taskcluster artifacts",
+)
+def artifact_toolchain(
+    command_context,
+    verbose=False,
+    cache_dir=None,
+    skip_cache=False,
+    from_build=(),
+    from_task=(),
+    tooltool_manifest=None,
+    no_unpack=False,
+    retry=0,
+    bootstrap=False,
+    artifact_manifest=None,
+):
+    """Download, cache and install pre-built toolchains."""
+    import time
+
+    import redo
+    import requests
+    from taskgraph.util.taskcluster import get_artifact_url
+
+    from mozbuild.action.tooltool import FileRecord, open_manifest, unpack_file
+    from mozbuild.artifacts import ArtifactCache
+
+    start = time.monotonic()
+    command_context._set_log_level(verbose)
+    # Normally, we'd use command_context.log_manager.enable_unstructured(),
+    # but that enables all logging, while we only really want tooltool's
+    # and it also makes structured log output twice.
+    # So we manually do what it does, and limit that to the tooltool
+    # logger.
+    if command_context.log_manager.terminal_handler:
+        logging.getLogger("mozbuild.action.tooltool").addHandler(
+            command_context.log_manager.terminal_handler
+        )
+        logging.getLogger("redo").addHandler(
+            command_context.log_manager.terminal_handler
+        )
+        command_context.log_manager.terminal_handler.addFilter(
+            command_context.log_manager.structured_filter
+        )
+    if not cache_dir:
+        cache_dir = os.path.join(command_context._mach_context.state_dir, "toolchains")
+
+    tooltool_host = os.environ.get("TOOLTOOL_HOST", "tooltool.mozilla-releng.net")
+    taskcluster_proxy_url = os.environ.get("TASKCLUSTER_PROXY_URL")
+    if taskcluster_proxy_url:
+        tooltool_url = "{}/{}".format(taskcluster_proxy_url, tooltool_host)
+    else:
+        tooltool_url = "https://{}".format(tooltool_host)
+
+    cache = ArtifactCache(
+        cache_dir=cache_dir, log=command_context.log, skip_cache=skip_cache
+    )
+
+    class DownloadRecord(FileRecord):
+        # FileRecord subclass that fetches from a URL through the artifact
+        # cache, remembering the original basename.
+        def __init__(self, url, *args, **kwargs):
+            super(DownloadRecord, self).__init__(*args, **kwargs)
+            self.url = url
+            self.basename = self.filename
+
+        def fetch_with(self, cache):
+            self.filename = cache.fetch(self.url)
+            return self.filename
+
+        def validate(self):
+            # Without a size or digest we have nothing to check against.
+            if self.size is None and self.digest is None:
+                return True
+            return super(DownloadRecord, self).validate()
+
+    class ArtifactRecord(DownloadRecord):
+        # DownloadRecord for a taskcluster task artifact; the expected digest
+        # is read from the task's chain-of-trust.json.
+        def __init__(self, task_id, artifact_name):
+            for _ in redo.retrier(attempts=retry + 1, sleeptime=60):
+                cot = cache._download_manager.session.get(
+                    get_artifact_url(task_id, "public/chain-of-trust.json")
+                )
+                if cot.status_code >= 500:
+                    continue
+                cot.raise_for_status()
+                break
+            else:
+                cot.raise_for_status()
+
+            digest = algorithm = None
+            data = json.loads(cot.text)
+            # Keep the last (algorithm, digest) pair listed for this artifact.
+            for algorithm, digest in (
+                data.get("artifacts", {}).get(artifact_name, {}).items()
+            ):
+                pass
+
+            name = os.path.basename(artifact_name)
+            artifact_url = get_artifact_url(
+                task_id,
+                artifact_name,
+                use_proxy=not artifact_name.startswith("public/"),
+            )
+            super(ArtifactRecord, self).__init__(
+                artifact_url, name, None, digest, algorithm, unpack=True
+            )
+
+    records = OrderedDict()
+    downloaded = []
+
+    if tooltool_manifest:
+        manifest = open_manifest(tooltool_manifest)
+        for record in manifest.file_records:
+            url = "{}/{}/{}".format(tooltool_url, record.algorithm, record.digest)
+            records[record.filename] = DownloadRecord(
+                url,
+                record.filename,
+                record.size,
+                record.digest,
+                record.algorithm,
+                unpack=record.unpack,
+                version=record.version,
+                visibility=record.visibility,
+            )
+
+    if from_build:
+        if "MOZ_AUTOMATION" in os.environ:
+            command_context.log(
+                logging.ERROR,
+                "artifact",
+                {},
+                "Do not use --from-build in automation; all dependencies "
+                "should be determined in the decision task.",
+            )
+            return 1
+        from gecko_taskgraph.optimize.strategies import IndexSearch
+
+        from mozbuild.toolchains import toolchain_task_definitions
+
+        tasks = toolchain_task_definitions()
+
+        for b in from_build:
+            user_value = b
+
+            if not b.startswith("toolchain-"):
+                b = "toolchain-{}".format(b)
+
+            task = tasks.get(b)
+            if not task:
+                command_context.log(
+                    logging.ERROR,
+                    "artifact",
+                    {"build": user_value},
+                    "Could not find a toolchain build named `{build}`",
+                )
+                return 1
+
+            # Ensure that toolchains installed by `mach bootstrap` have the
+            # `local-toolchain attribute set. Taskgraph ensures that these
+            # are built on trunk projects, so the task will be available to
+            # install here.
+            if bootstrap and not task.attributes.get("local-toolchain"):
+                command_context.log(
+                    logging.ERROR,
+                    "artifact",
+                    {"build": user_value},
+                    "Toolchain `{build}` is not annotated as used for local development.",
+                )
+                return 1
+
+            artifact_name = task.attributes.get("toolchain-artifact")
+            command_context.log(
+                logging.DEBUG,
+                "artifact",
+                {
+                    "name": artifact_name,
+                    "index": task.optimization.get("index-search"),
+                },
+                "Searching for {name} in {index}",
+            )
+            deadline = None
+            task_id = IndexSearch().should_replace_task(
+                task, {}, deadline, task.optimization.get("index-search", [])
+            )
+            if task_id in (True, False) or not artifact_name:
+                command_context.log(
+                    logging.ERROR,
+                    "artifact",
+                    {"build": user_value},
+                    _COULD_NOT_FIND_ARTIFACTS_TEMPLATE,
+                )
+                # Get and print some helpful info for diagnosis.
+                repo = mozversioncontrol.get_repository_object(
+                    command_context.topsrcdir
+                )
+                if not isinstance(repo, mozversioncontrol.SrcRepository):
+                    changed_files = set(repo.get_outgoing_files()) | set(
+                        repo.get_changed_files()
+                    )
+                    if changed_files:
+                        command_context.log(
+                            logging.ERROR,
+                            "artifact",
+                            {},
+                            "Hint: consider reverting your local changes "
+                            "to the following files: %s" % sorted(changed_files),
+                        )
+                if "TASKCLUSTER_ROOT_URL" in os.environ:
+                    command_context.log(
+                        logging.ERROR,
+                        "artifact",
+                        {"build": user_value},
+                        "Due to the environment variable TASKCLUSTER_ROOT_URL "
+                        "being set, the artifacts were expected to be found "
+                        "on {}. If this was unintended, unset "
+                        "TASKCLUSTER_ROOT_URL and try again.".format(
+                            os.environ["TASKCLUSTER_ROOT_URL"]
+                        ),
+                    )
+                return 1
+
+            command_context.log(
+                logging.DEBUG,
+                "artifact",
+                {"name": artifact_name, "task_id": task_id},
+                "Found {name} in {task_id}",
+            )
+
+            record = ArtifactRecord(task_id, artifact_name)
+            records[record.filename] = record
+
+    # Handle the list of files of the form task_id:path from --from-task.
+    for f in from_task or ():
+        task_id, colon, name = f.partition(":")
+        if not colon:
+            command_context.log(
+                logging.ERROR,
+                "artifact",
+                {},
+                "Expected an argument of the form task_id:path",
+            )
+            return 1
+        record = ArtifactRecord(task_id, name)
+        records[record.filename] = record
+
+    for record in six.itervalues(records):
+        command_context.log(
+            logging.INFO,
+            "artifact",
+            {"name": record.basename},
+            "Setting up artifact {name}",
+        )
+        valid = False
+        # sleeptime is 60 per retry.py, used by tooltool_wrapper.sh
+        for attempt, _ in enumerate(redo.retrier(attempts=retry + 1, sleeptime=60)):
+            try:
+                record.fetch_with(cache)
+            except (
+                requests.exceptions.HTTPError,
+                requests.exceptions.ChunkedEncodingError,
+                requests.exceptions.ConnectionError,
+            ) as e:
+
+                if isinstance(e, requests.exceptions.HTTPError):
+                    # The relengapi proxy likes to return error 400 bad request
+                    # which seems improbably to be due to our (simple) GET
+                    # being borked.
+                    status = e.response.status_code
+                    should_retry = status >= 500 or status == 400
+                else:
+                    should_retry = True
+
+                # Log retriable failures as warnings, terminal ones as errors.
+                if should_retry or attempt < retry:
+                    level = logging.WARN
+                else:
+                    level = logging.ERROR
+                command_context.log(level, "artifact", {}, str(e))
+                if not should_retry:
+                    break
+                if attempt < retry:
+                    command_context.log(
+                        logging.INFO, "artifact", {}, "Will retry in a moment..."
+                    )
+                continue
+            try:
+                valid = record.validate()
+            except Exception:
+                pass
+            if not valid:
+                os.unlink(record.filename)
+                if attempt < retry:
+                    command_context.log(
+                        logging.INFO,
+                        "artifact",
+                        {},
+                        "Corrupt download. Will retry in a moment...",
+                    )
+                continue
+
+            downloaded.append(record)
+            break
+
+        if not valid:
+            command_context.log(
+                logging.ERROR,
+                "artifact",
+                {"name": record.basename},
+                "Failed to download {name}",
+            )
+            return 1
+
+    # Map of url -> {"sha256": ...}; only built when --artifact-manifest is set.
+    artifacts = {} if artifact_manifest else None
+
+    for record in downloaded:
+        local = os.path.join(os.getcwd(), record.basename)
+        if os.path.exists(local):
+            os.unlink(local)
+        # unpack_file needs the file with its final name to work
+        # (https://github.com/mozilla/build-tooltool/issues/38), so we
+        # need to copy it, even though we remove it later. Use hard links
+        # when possible.
+        try:
+            os.link(record.filename, local)
+        except Exception:
+            shutil.copy(record.filename, local)
+        # Keep a sha256 of each downloaded file, for the chain-of-trust
+        # validation.
+        if artifact_manifest is not None:
+            with open(local, "rb") as fh:
+                h = hashlib.sha256()
+                while True:
+                    data = fh.read(1024 * 1024)
+                    if not data:
+                        break
+                    h.update(data)
+                artifacts[record.url] = {"sha256": h.hexdigest()}
+        if record.unpack and not no_unpack:
+            unpack_file(local)
+            os.unlink(local)
+
+    if not downloaded:
+        command_context.log(logging.ERROR, "artifact", {}, "Nothing to download")
+        if from_task:
+            return 1
+
+    if artifacts:
+        ensureParentDir(artifact_manifest)
+        with open(artifact_manifest, "w") as fh:
+            json.dump(artifacts, fh, indent=4, sort_keys=True)
+
+    if "MOZ_AUTOMATION" in os.environ:
+        end = time.monotonic()
+
+        perfherder_data = {
+            "framework": {"name": "build_metrics"},
+            "suites": [
+                {
+                    "name": "mach_artifact_toolchain",
+                    "value": end - start,
+                    "lowerIsBetter": True,
+                    "shouldAlert": False,
+                    "subtests": [],
+                }
+            ],
+        }
+        command_context.log(
+            logging.INFO,
+            "perfherder",
+            {"data": json.dumps(perfherder_data)},
+            "PERFHERDER_DATA: {data}",
+        )
+
+    return 0
diff --git a/python/mozbuild/mozbuild/artifacts.py b/python/mozbuild/mozbuild/artifacts.py
new file mode 100644
index 0000000000..1083c0c997
--- /dev/null
+++ b/python/mozbuild/mozbuild/artifacts.py
@@ -0,0 +1,1661 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Fetch build artifacts from a Firefox tree.
+
+This provides an (at-the-moment special purpose) interface to download Android
+artifacts from Mozilla's Task Cluster.
+
+This module performs the following steps:
+
+* find a candidate hg parent revision. At one time we used the local pushlog,
+ which required the mozext hg extension. This isn't feasible with git, and it
+ is only mildly less efficient to not use the pushlog, so we don't use it even
+ when querying hg.
+
+* map the candidate parent to candidate Task Cluster tasks and artifact
+ locations. Pushlog entries might not correspond to tasks (yet), and those
+ tasks might not produce the desired class of artifacts.
+
+* fetch fresh Task Cluster artifacts and purge old artifacts, using a simple
+ Least Recently Used cache.
+
+* post-process fresh artifacts, to speed future installation. In particular,
+ extract relevant files from Mac OS X DMG files into a friendly archive format
+ so we don't have to mount DMG files frequently.
+
+This module requires certain modules be importable from the ambient Python
+environment. ``mach artifact`` ensures these modules are available, but other
+consumers will need to arrange this themselves.
+"""
+
+
+import collections
+import functools
+import glob
+import logging
+import operator
+import os
+import pickle
+import re
+import shutil
+import stat
+import subprocess
+import tarfile
+import tempfile
+import zipfile
+from contextlib import contextmanager
+from io import BufferedReader
+from urllib.parse import urlparse
+
+import buildconfig
+import mozinstall
+import mozpack.path as mozpath
+import pylru
+import requests
+import six
+from mach.util import UserError
+from mozpack import executables
+from mozpack.files import JarFinder, TarFinder
+from mozpack.mozjar import JarReader, JarWriter
+from mozpack.packager.unpack import UnpackFinder
+from taskgraph.util.taskcluster import find_task_id, get_artifact_url, list_artifacts
+
+from mozbuild.artifact_builds import JOB_CHOICES
+from mozbuild.artifact_cache import ArtifactCache
+from mozbuild.util import FileAvoidWrite, ensureParentDir, mkdir
+
# Number of candidate pushheads to cache per parent changeset.
NUM_PUSHHEADS_TO_QUERY_PER_PARENT = 50

# Number of parent changesets to consider as possible pushheads.
# There isn't really such a thing as a reasonable default here, because we don't
# know how many pushheads we'll need to look at to find a build with our artifacts,
# and we don't know how many changesets will be in each push. For now we assume
# we'll find a build in the last 50 pushes, assuming each push contains 10 changesets.
NUM_REVISIONS_TO_QUERY = 500

# Number of pushheads to cache Task Cluster task data for.
MAX_CACHED_TASKS = 400

# Downloaded artifacts are cached, and a subset of their contents extracted for
# easy installation. This is most noticeable on Mac OS X: since mounting and
# copying from DMG files is very slow, we extract the desired binaries to a
# separate archive for fast re-installation.
PROCESSED_SUFFIX = ".processed.jar"
+
+
class ArtifactJob(object):
    """Locate, download, and post-process build artifacts for one job type.

    Subclasses specialize per platform by providing ``package_re``,
    ``product``, and ``process_package_artifact``; this base class handles
    test archives, crashreporter symbol archives, and "extra" archives
    generically.
    """

    trust_domain = "gecko"
    default_candidate_trees = [
        "releases/mozilla-release",
    ]
    nightly_candidate_trees = [
        "mozilla-central",
        "integration/autoland",
    ]
    beta_candidate_trees = [
        "releases/mozilla-beta",
    ]
    # The list below should be updated when we have new ESRs.
    esr_candidate_trees = [
        "releases/mozilla-esr102",
        "releases/mozilla-esr115",
    ]
    try_tree = "try"

    # These are a subset of TEST_HARNESS_BINS in testing/mochitest/Makefile.in.
    # Each item is a pair of (pattern, (src_prefix, dest_prefix)), where src_prefix
    # is the prefix of the pattern relevant to its location in the archive, and
    # dest_prefix is the prefix to be added that will yield the final path relative
    # to dist/.
    test_artifact_patterns = {
        ("bin/BadCertAndPinningServer", ("bin", "bin")),
        ("bin/DelegatedCredentialsServer", ("bin", "bin")),
        ("bin/EncryptedClientHelloServer", ("bin", "bin")),
        ("bin/FaultyServer", ("bin", "bin")),
        ("bin/GenerateOCSPResponse", ("bin", "bin")),
        ("bin/OCSPStaplingServer", ("bin", "bin")),
        ("bin/SanctionsTestServer", ("bin", "bin")),
        ("bin/certutil", ("bin", "bin")),
        ("bin/geckodriver", ("bin", "bin")),
        ("bin/pk12util", ("bin", "bin")),
        ("bin/screentopng", ("bin", "bin")),
        ("bin/ssltunnel", ("bin", "bin")),
        ("bin/xpcshell", ("bin", "bin")),
        ("bin/http3server", ("bin", "bin")),
        ("bin/plugins/gmp-*/*/*", ("bin/plugins", "bin")),
        ("bin/plugins/*", ("bin/plugins", "plugins")),
    }

    # We can tell our input is a test archive by this suffix, which happens to
    # be the same across platforms.
    _test_zip_archive_suffix = ".common.tests.zip"
    _test_tar_archive_suffix = ".common.tests.tar.gz"

    # A map of extra archives to fetch and unpack. An extra archive might
    # include optional build output to incorporate into the local artifact
    # build. Test archives and crashreporter symbols could be extra archives
    # but they require special handling; this mechanism is generic and intended
    # only for the simplest cases.
    #
    # Each suffix key matches a candidate archive (i.e., an artifact produced by
    # an upstream build). Each value is itself a dictionary that must contain
    # the following keys:
    #
    # - `description`: a purely informational string description.
    # - `src_prefix`: entry names in the archive with leading `src_prefix` will
    #   have the prefix stripped.
    # - `dest_prefix`: entry names in the archive will have `dest_prefix`
    #   prepended.
    #
    # The entries in the archive, suitably renamed, will be extracted into `dist`.
    _extra_archives = {
        ".xpt_artifacts.zip": {
            "description": "XPT Artifacts",
            "src_prefix": "",
            "dest_prefix": "xpt_artifacts",
        },
    }
    _extra_archive_suffixes = tuple(sorted(_extra_archives.keys()))

    def __init__(
        self,
        log=None,
        download_tests=True,
        download_symbols=False,
        download_maven_zip=False,
        substs=None,
        mozbuild=None,
    ):
        """Compile the artifact-name regexes controlled by the download flags.

        ``download_symbols`` may be True (zip symbols), "full" (zst full
        symbols), or falsy. ``substs``/``mozbuild`` come from the build
        environment; ``mozbuild`` is only needed for .tar.zst decompression.
        """
        # ``package_re`` is provided by platform subclasses.
        self._package_re = re.compile(self.package_re)
        self._tests_re = None
        if download_tests:
            self._tests_re = re.compile(
                r"public/build/(en-US/)?target\.common\.tests\.(zip|tar\.gz)$"
            )
        self._maven_zip_re = None
        if download_maven_zip:
            self._maven_zip_re = re.compile(r"public/build/target\.maven\.zip$")
        self._log = log
        self._substs = substs
        self._symbols_archive_suffix = None
        if download_symbols == "full":
            self._symbols_archive_suffix = "crashreporter-symbols-full.tar.zst"
        elif download_symbols:
            self._symbols_archive_suffix = "crashreporter-symbols.zip"
        self._mozbuild = mozbuild
        self._candidate_trees = None

    def log(self, *args, **kwargs):
        """Forward to the provided logger, if any."""
        if self._log:
            self._log(*args, **kwargs)

    def find_candidate_artifacts(self, artifacts):
        """Yield the names of artifacts we want to download from `artifacts`.

        Raises ValueError if an expected tests or Maven archive is missing.
        """
        # TODO: Handle multiple artifacts, taking the latest one.
        tests_artifact = None
        maven_zip_artifact = None
        for artifact in artifacts:
            name = artifact["name"]
            if self._maven_zip_re:
                if self._maven_zip_re.match(name):
                    maven_zip_artifact = name
                    yield name
                else:
                    continue
            elif self._package_re and self._package_re.match(name):
                yield name
            elif self._tests_re and self._tests_re.match(name):
                tests_artifact = name
                yield name
            elif self._symbols_archive_suffix and name.endswith(
                self._symbols_archive_suffix
            ):
                yield name
            elif name.endswith(ArtifactJob._extra_archive_suffixes):
                yield name
            else:
                self.log(
                    logging.DEBUG,
                    "artifact",
                    {"name": name},
                    "Not yielding artifact named {name} as a candidate artifact",
                )
        if self._tests_re and not tests_artifact:
            raise ValueError(
                'Expected tests archive matching "{re}", but '
                "found none!".format(re=self._tests_re)
            )
        if self._maven_zip_re and not maven_zip_artifact:
            raise ValueError(
                'Expected Maven zip archive matching "{re}", but '
                "found none!".format(re=self._maven_zip_re)
            )

    @contextmanager
    def get_writer(self, **kwargs):
        """Context manager yielding the JarWriter used for processed archives."""
        with JarWriter(**kwargs) as writer:
            yield writer

    def process_artifact(self, filename, processed_filename):
        """Dispatch `filename` to the appropriate processing routine."""
        if filename.endswith(ArtifactJob._test_zip_archive_suffix) and self._tests_re:
            return self.process_tests_zip_artifact(filename, processed_filename)
        if filename.endswith(ArtifactJob._test_tar_archive_suffix) and self._tests_re:
            return self.process_tests_tar_artifact(filename, processed_filename)
        if self._symbols_archive_suffix and filename.endswith(
            self._symbols_archive_suffix
        ):
            return self.process_symbols_archive(filename, processed_filename)
        if filename.endswith(ArtifactJob._extra_archive_suffixes):
            return self.process_extra_archive(filename, processed_filename)
        return self.process_package_artifact(filename, processed_filename)

    def process_package_artifact(self, filename, processed_filename):
        """Platform-specific package processing; implemented by subclasses."""
        raise NotImplementedError(
            "Subclasses must specialize process_package_artifact!"
        )

    def process_tests_zip_artifact(self, filename, processed_filename):
        """Extract the interesting subset of a zip test archive."""
        from mozbuild.action.test_archive import OBJDIR_TEST_FILES

        added_entry = False

        with self.get_writer(file=processed_filename, compress_level=5) as writer:
            reader = JarReader(filename)
            for filename, entry in six.iteritems(reader.entries):
                for pattern, (src_prefix, dest_prefix) in self.test_artifact_patterns:
                    if not mozpath.match(filename, pattern):
                        continue
                    destpath = mozpath.relpath(filename, src_prefix)
                    destpath = mozpath.join(dest_prefix, destpath)
                    self.log(
                        logging.DEBUG,
                        "artifact",
                        {"destpath": destpath},
                        "Adding {destpath} to processed archive",
                    )
                    # Zip stores the Unix mode in the high 16 bits.
                    mode = entry["external_attr"] >> 16
                    writer.add(destpath.encode("utf-8"), reader[filename], mode=mode)
                    added_entry = True
                    break

                if filename.endswith(".ini"):
                    # The artifact build writes test .ini files into the object
                    # directory; they don't come from the upstream test archive.
                    self.log(
                        logging.DEBUG,
                        "artifact",
                        {"filename": filename},
                        "Skipping test INI file {filename}",
                    )
                    continue

                for files_entry in OBJDIR_TEST_FILES.values():
                    origin_pattern = files_entry["pattern"]
                    leaf_filename = filename
                    if "dest" in files_entry:
                        dest = files_entry["dest"]
                        origin_pattern = mozpath.join(dest, origin_pattern)
                        leaf_filename = filename[len(dest) + 1 :]
                    if mozpath.match(filename, origin_pattern):
                        destpath = mozpath.join(
                            "..", files_entry["base"], leaf_filename
                        )
                        mode = entry["external_attr"] >> 16
                        writer.add(
                            destpath.encode("utf-8"), reader[filename], mode=mode
                        )

        if not added_entry:
            raise ValueError(
                'Archive format changed! No pattern from "{patterns}" '
                "matched an archive path.".format(
                    patterns=self.test_artifact_patterns
                )
            )

    def process_tests_tar_artifact(self, filename, processed_filename):
        """Extract the interesting subset of a tar.gz test archive."""
        from mozbuild.action.test_archive import OBJDIR_TEST_FILES

        added_entry = False

        with self.get_writer(file=processed_filename, compress_level=5) as writer:
            with tarfile.open(filename) as reader:
                for filename, entry in TarFinder(filename, reader):
                    for (
                        pattern,
                        (src_prefix, dest_prefix),
                    ) in self.test_artifact_patterns:
                        if not mozpath.match(filename, pattern):
                            continue

                        destpath = mozpath.relpath(filename, src_prefix)
                        destpath = mozpath.join(dest_prefix, destpath)
                        self.log(
                            logging.DEBUG,
                            "artifact",
                            {"destpath": destpath},
                            "Adding {destpath} to processed archive",
                        )
                        mode = entry.mode
                        writer.add(destpath.encode("utf-8"), entry.open(), mode=mode)
                        added_entry = True
                        break

                    if filename.endswith(".ini"):
                        # The artifact build writes test .ini files into the object
                        # directory; they don't come from the upstream test archive.
                        self.log(
                            logging.DEBUG,
                            "artifact",
                            {"filename": filename},
                            "Skipping test INI file {filename}",
                        )
                        continue

                    for files_entry in OBJDIR_TEST_FILES.values():
                        origin_pattern = files_entry["pattern"]
                        leaf_filename = filename
                        if "dest" in files_entry:
                            dest = files_entry["dest"]
                            origin_pattern = mozpath.join(dest, origin_pattern)
                            leaf_filename = filename[len(dest) + 1 :]
                        if mozpath.match(filename, origin_pattern):
                            destpath = mozpath.join(
                                "..", files_entry["base"], leaf_filename
                            )
                            mode = entry.mode
                            writer.add(
                                destpath.encode("utf-8"), entry.open(), mode=mode
                            )

        if not added_entry:
            raise ValueError(
                'Archive format changed! No pattern from "{patterns}" '
                "matched an archive path.".format(
                    patterns=self.test_artifact_patterns
                )
            )

    def process_symbols_archive(
        self, filename, processed_filename, skip_compressed=False
    ):
        """Copy symbol files into the processed archive under crashreporter-symbols/."""
        with self.get_writer(file=processed_filename, compress_level=5) as writer:
            for filename, entry in self.iter_artifact_archive(filename):
                if skip_compressed and filename.endswith(".gz"):
                    self.log(
                        logging.DEBUG,
                        "artifact",
                        {"filename": filename},
                        "Skipping compressed ELF debug symbol file {filename}",
                    )
                    continue
                destpath = mozpath.join("crashreporter-symbols", filename)
                self.log(
                    logging.INFO,
                    "artifact",
                    {"destpath": destpath},
                    "Adding {destpath} to processed archive",
                )
                writer.add(destpath.encode("utf-8"), entry)

    def process_extra_archive(self, filename, processed_filename):
        """Extract a recognized "extra" archive, applying its prefix renames."""
        for suffix, extra_archive in ArtifactJob._extra_archives.items():
            if filename.endswith(suffix):
                self.log(
                    logging.INFO,
                    "artifact",
                    {"filename": filename, "description": extra_archive["description"]},
                    '"{filename}" is a recognized extra archive ({description})',
                )
                break
        else:
            raise ValueError('"{}" is not a recognized extra archive!'.format(filename))

        src_prefix = extra_archive["src_prefix"]
        dest_prefix = extra_archive["dest_prefix"]

        with self.get_writer(file=processed_filename, compress_level=5) as writer:
            for filename, entry in self.iter_artifact_archive(filename):
                if not filename.startswith(src_prefix):
                    self.log(
                        logging.DEBUG,
                        "artifact",
                        {"filename": filename, "src_prefix": src_prefix},
                        "Skipping extra archive item {filename} "
                        "that does not start with {src_prefix}",
                    )
                    continue
                destpath = mozpath.relpath(filename, src_prefix)
                destpath = mozpath.join(dest_prefix, destpath)
                self.log(
                    logging.INFO,
                    "artifact",
                    {"destpath": destpath},
                    "Adding {destpath} to processed archive",
                )
                writer.add(destpath.encode("utf-8"), entry)

    def iter_artifact_archive(self, filename):
        """Yield (entry name, file-like object) pairs from a .zip or .tar.zst archive."""
        if filename.endswith(".zip"):
            reader = JarReader(filename)
            for filename in reader.entries:
                yield filename, reader[filename]
        elif filename.endswith(".tar.zst") and self._mozbuild is not None:
            self._mozbuild._ensure_zstd()
            import zstandard

            ctx = zstandard.ZstdDecompressor()
            uncompressed = ctx.stream_reader(open(filename, "rb"))
            # Stream mode ("r|") because the zstd reader is not seekable.
            with tarfile.open(
                mode="r|", fileobj=uncompressed, bufsize=1024 * 1024
            ) as reader:
                while True:
                    info = reader.next()
                    if info is None:
                        break
                    yield info.name, reader.extractfile(info)
        else:
            raise RuntimeError("Unsupported archive type for %s" % filename)

    @property
    def candidate_trees(self):
        """Lazily computed list of trees to search for candidate pushheads."""
        if not self._candidate_trees:
            self._candidate_trees = self.select_candidate_trees()
        return self._candidate_trees

    def select_candidate_trees(self):
        """Pick the candidate tree list matching this build's release channel."""
        source_repo = buildconfig.substs.get("MOZ_SOURCE_REPO", "")
        # The display version may be unset; treat that as an empty string
        # rather than raising TypeError on the `in` test below.
        version_display = buildconfig.substs.get("MOZ_APP_VERSION_DISPLAY") or ""

        if "esr" in version_display or "esr" in source_repo:
            return self.esr_candidate_trees
        elif re.search(r"a\d+$", version_display):
            # Nightly display versions look like "117.0a1".
            return self.nightly_candidate_trees
        elif re.search(r"b\d+$", version_display):
            # Beta display versions look like "117.0b3".
            return self.beta_candidate_trees

        return self.default_candidate_trees
+
+
class AndroidArtifactJob(ArtifactJob):
    """Artifact job for Android: pulls shared libraries out of geckoview_example.apk."""

    package_re = r"public/build/geckoview_example\.apk$"
    product = "mobile"

    package_artifact_patterns = {"**/*.so"}

    def process_package_artifact(self, filename, processed_filename):
        """Extract all .so files into the root, which will get copied into dist/bin."""
        with self.get_writer(file=processed_filename, compress_level=5) as writer:
            for p, f in UnpackFinder(JarFinder(filename, JarReader(filename))):
                if not any(
                    mozpath.match(p, pat) for pat in self.package_artifact_patterns
                ):
                    continue

                dirname, basename = os.path.split(p)
                self.log(
                    logging.DEBUG,
                    "artifact",
                    {"basename": basename},
                    "Adding {basename} to processed archive",
                )

                basedir = "bin"
                if not basename.endswith(".so"):
                    # NOTE: str.lstrip("assets/") strips a *character set*, not
                    # the literal prefix (it would also eat a leading "a", "s",
                    # etc. of the next path component), so remove the prefix
                    # explicitly instead.
                    if dirname.startswith("assets/"):
                        dirname = dirname[len("assets/") :]
                    basedir = mozpath.join("bin", dirname)
                basename = mozpath.join(basedir, basename)
                writer.add(basename.encode("utf-8"), f.open())

    def process_symbols_archive(self, filename, processed_filename):
        """Process symbols; for "full" archives, also decompress .gz ELF debug files."""
        ArtifactJob.process_symbols_archive(
            self, filename, processed_filename, skip_compressed=True
        )

        if not self._symbols_archive_suffix.startswith("crashreporter-symbols-full."):
            return

        import gzip

        with self.get_writer(file=processed_filename, compress_level=5) as writer:
            for filename, entry in self.iter_artifact_archive(filename):
                if not filename.endswith(".gz"):
                    continue

                # Uncompress "libxul.so/D3271457813E976AE7BF5DAFBABABBFD0/libxul.so.dbg.gz"
                # into "libxul.so.dbg".
                #
                # After running `settings append target.debug-file-search-paths $file`,
                # where file=/path/to/topobjdir/dist/crashreporter-symbols,
                # Android Studio's lldb (7.0.0, at least) will find the ELF debug symbol files.
                #
                # There are other paths that will work but none seem more desirable. See
                # https://github.com/llvm-mirror/lldb/blob/882670690ca69d9dd96b7236c620987b11894af9/source/Host/common/Symbols.cpp#L324.
                basename = os.path.basename(filename).replace(".gz", "")
                destpath = mozpath.join("crashreporter-symbols", basename)
                self.log(
                    logging.DEBUG,
                    "artifact",
                    {"destpath": destpath},
                    "Adding uncompressed ELF debug symbol file "
                    "{destpath} to processed archive",
                )
                writer.add(destpath.encode("utf-8"), gzip.GzipFile(fileobj=entry))
+
+
class LinuxArtifactJob(ArtifactJob):
    """Artifact job for Linux: pulls binaries out of target.tar.bz2."""

    package_re = r"public/build/target\.tar\.bz2$"
    product = "firefox"

    _package_artifact_patterns = {
        "{product}/crashreporter",
        "{product}/dependentlibs.list",
        "{product}/{product}",
        "{product}/{product}-bin",
        "{product}/minidump-analyzer",
        "{product}/pingsender",
        "{product}/plugin-container",
        "{product}/updater",
        "{product}/glxtest",
        "{product}/vaapitest",
        "{product}/**/*.so",
        # Preserve signatures when present.
        "{product}/**/*.sig",
    }

    @property
    def package_artifact_patterns(self):
        """The artifact patterns with this job's product name substituted in."""
        return {p.format(product=self.product) for p in self._package_artifact_patterns}

    def process_package_artifact(self, filename, processed_filename):
        """Copy matching package entries into the processed jar under bin/."""
        added_entry = False

        with self.get_writer(file=processed_filename, compress_level=5) as writer:
            with tarfile.open(filename) as reader:
                for p, f in UnpackFinder(TarFinder(filename, reader)):
                    if not any(
                        mozpath.match(p, pat) for pat in self.package_artifact_patterns
                    ):
                        continue

                    # We strip off the relative "firefox/" bit from the path,
                    # but otherwise preserve it.
                    destpath = mozpath.join("bin", mozpath.relpath(p, self.product))
                    self.log(
                        logging.DEBUG,
                        "artifact",
                        {"destpath": destpath},
                        "Adding {destpath} to processed archive",
                    )
                    writer.add(destpath.encode("utf-8"), f.open(), mode=f.mode)
                    added_entry = True

        if not added_entry:
            # NOTE: access the patterns via self: on the class,
            # package_artifact_patterns is a property descriptor and would
            # render as "<property object ...>" in the message.
            raise ValueError(
                'Archive format changed! No pattern from "{patterns}" '
                "matched an archive path.".format(
                    patterns=self.package_artifact_patterns
                )
            )
+
+
class ResignJarWriter(JarWriter):
    # JarWriter that locally re-codesigns Mach-O binaries when the host is macOS.
    def __init__(self, job, **kwargs):
        super().__init__(**kwargs)
        # The owning ArtifactJob; used for its substs (host OS check) and logging.
        self._job = job

    def add(self, name, data, mode=None):
        # Add `data` to the jar under `name`, re-signing Mach-O files on Darwin hosts.
        if self._job._substs["HOST_OS_ARCH"] == "Darwin":
            # Wrap in a BufferedReader so that executable.get_type can peek at the
            # data signature without subsequent read() being affected.
            data = BufferedReader(data)
            if executables.get_type(data) == executables.MACHO:
                # If the file is a Mach-O binary, we run `codesign -s - -f` against
                # it to force a local codesign against the original binary, which is
                # likely unsigned. As of writing, only arm64 macs require codesigned
                # binaries, but it doesn't hurt to do it on intel macs as well
                # preemptively, because they could end up with the same requirement
                # in future versions of macOS.
                # codesign works on files on disk, so copy the stream to a
                # temporary file first (delete=False: we manage its lifetime).
                tmp = tempfile.NamedTemporaryFile(delete=False)
                try:
                    shutil.copyfileobj(data, tmp)
                    tmp.close()
                    self._job.log(
                        logging.DEBUG,
                        "artifact",
                        {"path": name.decode("utf-8")},
                        "Re-signing {path}",
                    )
                    subprocess.check_call(
                        ["codesign", "-s", "-", "-f", tmp.name],
                        stdout=subprocess.DEVNULL,
                        stderr=subprocess.DEVNULL,
                    )
                    data = open(tmp.name, "rb")
                finally:
                    # Unlinking while `data` is still open is safe here: this
                    # branch only runs on Darwin, where the open handle keeps
                    # the bytes readable until it is closed.
                    os.unlink(tmp.name)
        super().add(name, data, mode=mode)
+
+
class MacArtifactJob(ArtifactJob):
    # Artifact job for macOS: extracts binaries from target.dmg, re-signing
    # Mach-O files via ResignJarWriter.
    package_re = r"public/build/target\.dmg$"
    product = "firefox"

    # These get copied into dist/bin without the path, so "root/a/b/c" -> "dist/bin/c".
    _paths_no_keep_path = (
        "Contents/MacOS",
        [
            "crashreporter.app/Contents/MacOS/crashreporter",
            "{product}",
            "{product}-bin",
            "*.dylib",
            "minidump-analyzer",
            "pingsender",
            "plugin-container.app/Contents/MacOS/plugin-container",
            "updater.app/Contents/MacOS/org.mozilla.updater",
            # 'xpcshell',
            "XUL",
        ],
    )

    @property
    def paths_no_keep_path(self):
        # The flattened-copy patterns with this job's product name substituted in.
        root, paths = self._paths_no_keep_path
        return (root, [p.format(product=self.product) for p in paths])

    @contextmanager
    def get_writer(self, **kwargs):
        # Use the re-signing writer so Mach-O entries stay runnable on arm64 macs.
        with ResignJarWriter(self, **kwargs) as writer:
            yield writer

    def process_package_artifact(self, filename, processed_filename):
        # Unpack the DMG into a temporary directory, then copy the interesting
        # binaries into the processed jar under bin/.
        tempdir = tempfile.mkdtemp()
        oldcwd = os.getcwd()
        try:
            self.log(
                logging.DEBUG,
                "artifact",
                {"tempdir": tempdir},
                "Unpacking DMG into {tempdir}",
            )
            if self._substs["HOST_OS_ARCH"] == "Linux":
                # This is a cross build, use hfsplus and dmg tools to extract the dmg.
                os.chdir(tempdir)
                with open(os.devnull, "wb") as devnull:
                    subprocess.check_call(
                        [
                            self._substs["DMG_TOOL"],
                            "extract",
                            filename,
                            "extracted_img",
                        ],
                        stdout=devnull,
                    )
                    subprocess.check_call(
                        [self._substs["HFS_TOOL"], "extracted_img", "extractall"],
                        stdout=devnull,
                    )
            else:
                mozinstall.install(filename, tempdir)

            # Exactly one .app bundle is expected at the top of the DMG.
            bundle_dirs = glob.glob(mozpath.join(tempdir, "*.app"))
            if len(bundle_dirs) != 1:
                raise ValueError(
                    "Expected one source bundle, found: {}".format(bundle_dirs)
                )
            [source] = bundle_dirs

            # These get copied into dist/bin with the path, so "root/a/b/c" -> "dist/bin/a/b/c".
            paths_keep_path = [
                (
                    "Contents/Resources",
                    [
                        "browser/components/libbrowsercomps.dylib",
                        "dependentlibs.list",
                        # 'firefox',
                        "gmp-clearkey/0.1/libclearkey.dylib",
                        # 'gmp-fake/1.0/libfake.dylib',
                        # 'gmp-fakeopenh264/1.0/libfakeopenh264.dylib',
                    ],
                )
            ]

            with self.get_writer(file=processed_filename, compress_level=5) as writer:
                # Flattened copies: only the basename is kept under bin/.
                root, paths = self.paths_no_keep_path
                finder = UnpackFinder(mozpath.join(source, root))
                for path in paths:
                    for p, f in finder.find(path):
                        self.log(
                            logging.DEBUG,
                            "artifact",
                            {"path": p},
                            "Adding {path} to processed archive",
                        )
                        destpath = mozpath.join("bin", os.path.basename(p))
                        writer.add(destpath.encode("utf-8"), f.open(), mode=f.mode)

                # Path-preserving copies: the relative path survives under bin/.
                for root, paths in paths_keep_path:
                    finder = UnpackFinder(mozpath.join(source, root))
                    for path in paths:
                        for p, f in finder.find(path):
                            self.log(
                                logging.DEBUG,
                                "artifact",
                                {"path": p},
                                "Adding {path} to processed archive",
                            )
                            destpath = mozpath.join("bin", p)
                            writer.add(destpath.encode("utf-8"), f.open(), mode=f.mode)

        finally:
            # Restore the working directory (we may have chdir'd into tempdir)
            # before attempting to remove it; removal failure is non-fatal.
            os.chdir(oldcwd)
            try:
                shutil.rmtree(tempdir)
            except (OSError, IOError):
                self.log(
                    logging.WARN,
                    "artifact",
                    {"tempdir": tempdir},
                    "Unable to delete {tempdir}",
                )
                pass
+
+
class WinArtifactJob(ArtifactJob):
    """Artifact job for Windows: pulls binaries out of target.zip / target.tar.gz."""

    package_re = r"public/build/target\.(zip|tar\.gz)$"
    product = "firefox"

    _package_artifact_patterns = {
        "{product}/dependentlibs.list",
        "{product}/**/*.dll",
        "{product}/*.exe",
        "{product}/*.tlb",
    }

    @property
    def package_artifact_patterns(self):
        """The artifact patterns with this job's product name substituted in."""
        return {p.format(product=self.product) for p in self._package_artifact_patterns}

    # These are a subset of TEST_HARNESS_BINS in testing/mochitest/Makefile.in.
    test_artifact_patterns = {
        ("bin/BadCertAndPinningServer.exe", ("bin", "bin")),
        ("bin/DelegatedCredentialsServer.exe", ("bin", "bin")),
        ("bin/EncryptedClientHelloServer.exe", ("bin", "bin")),
        ("bin/FaultyServer.exe", ("bin", "bin")),
        ("bin/GenerateOCSPResponse.exe", ("bin", "bin")),
        ("bin/OCSPStaplingServer.exe", ("bin", "bin")),
        ("bin/SanctionsTestServer.exe", ("bin", "bin")),
        ("bin/certutil.exe", ("bin", "bin")),
        ("bin/geckodriver.exe", ("bin", "bin")),
        ("bin/minidumpwriter.exe", ("bin", "bin")),
        ("bin/pk12util.exe", ("bin", "bin")),
        ("bin/screenshot.exe", ("bin", "bin")),
        ("bin/ssltunnel.exe", ("bin", "bin")),
        ("bin/xpcshell.exe", ("bin", "bin")),
        ("bin/http3server.exe", ("bin", "bin")),
        ("bin/plugins/gmp-*/*/*", ("bin/plugins", "bin")),
        ("bin/plugins/*", ("bin/plugins", "plugins")),
        ("bin/components/*", ("bin/components", "bin/components")),
    }

    def process_package_artifact(self, filename, processed_filename):
        """Copy matching package entries into the processed jar under bin/."""
        added_entry = False
        with self.get_writer(file=processed_filename, compress_level=5) as writer:
            for p, f in UnpackFinder(JarFinder(filename, JarReader(filename))):
                if not any(
                    mozpath.match(p, pat) for pat in self.package_artifact_patterns
                ):
                    continue

                # strip off the relative "firefox/" bit from the path:
                basename = mozpath.relpath(p, self.product)
                basename = mozpath.join("bin", basename)
                self.log(
                    logging.DEBUG,
                    "artifact",
                    {"basename": basename},
                    "Adding {basename} to processed archive",
                )
                writer.add(basename.encode("utf-8"), f.open(), mode=f.mode)
                added_entry = True

        if not added_entry:
            # NOTE: was `self.artifact_patterns`, which does not exist and
            # raised AttributeError instead of the intended ValueError.
            raise ValueError(
                'Archive format changed! No pattern from "{patterns}" '
                "matched an archive path.".format(
                    patterns=self.package_artifact_patterns
                )
            )
+
+
class ThunderbirdMixin(object):
    # Overrides the trust domain, product, and candidate trees for Thunderbird.
    # Mix in *before* a platform ArtifactJob subclass so these attributes win
    # in the MRO.
    trust_domain = "comm"
    product = "thunderbird"
    try_tree = "try-comm-central"

    nightly_candidate_trees = [
        "comm-central",
    ]
    beta_candidate_trees = [
        "releases/comm-beta",
    ]
    # The list below should be updated when we have new ESRs.
    esr_candidate_trees = [
        "releases/comm-esr102",
        "releases/comm-esr115",
    ]
+
+
class LinuxThunderbirdArtifactJob(ThunderbirdMixin, LinuxArtifactJob):
    """Linux Thunderbird artifact job; all behavior comes from the bases."""

    pass
+
+
class MacThunderbirdArtifactJob(ThunderbirdMixin, MacArtifactJob):
    """macOS Thunderbird artifact job; all behavior comes from the bases."""

    pass
+
+
class WinThunderbirdArtifactJob(ThunderbirdMixin, WinArtifactJob):
    """Windows Thunderbird artifact job; all behavior comes from the bases."""

    pass
+
+
def startswithwhich(s, prefixes):
    """Return the first prefix in *prefixes* that *s* starts with, or None."""
    return next((prefix for prefix in prefixes if s.startswith(prefix)), None)
+
+
# Map each job name to the ArtifactJob subclass that processes its artifacts,
# keyed on the job name's platform prefix.
MOZ_JOB_DETAILS = {
    j: {
        "android": AndroidArtifactJob,
        "linux": LinuxArtifactJob,
        "macosx": MacArtifactJob,
        "win": WinArtifactJob,
    }[startswithwhich(j, ("android", "linux", "macosx", "win"))]
    for j in JOB_CHOICES
}
# Same mapping for comm (Thunderbird) builds; android-prefixed jobs map to
# None because there is no Android Thunderbird.
COMM_JOB_DETAILS = {
    j: {
        "android": None,
        "linux": LinuxThunderbirdArtifactJob,
        "macosx": MacThunderbirdArtifactJob,
        "win": WinThunderbirdArtifactJob,
    }[startswithwhich(j, ("android", "linux", "macosx", "win"))]
    for j in JOB_CHOICES
}
+
+
def cachedmethod(cachefunc):
    """Decorator to wrap a class or instance method with a memoizing callable that
    saves results in a (possibly shared) cache.
    """

    def decorator(method):
        def wrapper(self, *args, **kwargs):
            cache = cachefunc(self)
            # No cache available for this instance: call through every time.
            if cache is None:
                return method(self, *args, **kwargs)
            # Sort kwargs so equivalent calls share one cache key.
            key = (method.__name__, args, tuple(sorted(kwargs.items())))
            if key not in cache:
                cache[key] = method(self, *args, **kwargs)
            return cache[key]

        return functools.update_wrapper(wrapper, method)

    return decorator
+
+
class CacheManager(object):
    """Maintain an LRU cache. Provide simple persistence, including support for
    loading and saving the state using a "with" block. Allow clearing the cache
    and printing the cache for debugging.

    Provide simple logging.
    """

    def __init__(
        self,
        cache_dir,
        cache_name,
        cache_size,
        cache_callback=None,
        log=None,
        skip_cache=False,
    ):
        """Create an LRU cache persisted to `cache_dir`/`cache_name`-cache.pickle.

        `cache_callback` is invoked by the LRU cache on eviction; `skip_cache`
        turns load/dump/clear into logged no-ops.
        """
        self._skip_cache = skip_cache
        self._cache = pylru.lrucache(cache_size, callback=cache_callback)
        self._cache_filename = mozpath.join(cache_dir, cache_name + "-cache.pickle")
        self._log = log
        mkdir(cache_dir, not_indexed=True)

    def log(self, *args, **kwargs):
        """Forward to the provided logger, if any."""
        if self._log:
            self._log(*args, **kwargs)

    def load_cache(self):
        """Populate the in-memory cache from the pickle file, ignoring errors."""
        if self._skip_cache:
            self.log(
                logging.INFO, "artifact", {}, "Skipping cache: ignoring load_cache!"
            )
            return

        try:
            # Use a with-block so the file handle is closed promptly rather
            # than leaked until garbage collection.
            with open(self._cache_filename, "rb") as fh:
                items = pickle.load(fh)
            for key, value in items:
                self._cache[key] = value
        except Exception as e:
            # Corrupt cache, perhaps? Sadly, pickle raises many different
            # exceptions, so it's not worth trying to be fine grained here.
            # We ignore any exception, so the cache is effectively dropped.
            self.log(
                logging.INFO,
                "artifact",
                {"filename": self._cache_filename, "exception": repr(e)},
                "Ignoring exception unpickling cache file {filename}: {exception}",
            )

    def dump_cache(self):
        """Persist the in-memory cache to the pickle file."""
        if self._skip_cache:
            self.log(
                logging.INFO, "artifact", {}, "Skipping cache: ignoring dump_cache!"
            )
            return

        ensureParentDir(self._cache_filename)
        # Dump least-recently-used first so that reloading (which inserts in
        # order) restores the same recency ordering.
        with open(self._cache_filename, "wb") as fh:
            pickle.dump(list(reversed(list(self._cache.items()))), fh, -1)

    def clear_cache(self):
        """Empty the cache (loading and re-dumping it so the file is emptied too)."""
        if self._skip_cache:
            self.log(
                logging.INFO, "artifact", {}, "Skipping cache: ignoring clear_cache!"
            )
            return

        with self:
            self._cache.clear()

    def __enter__(self):
        self.load_cache()
        return self

    def __exit__(self, type, value, traceback):
        self.dump_cache()
+
+
class PushheadCache(CacheManager):
    """Helps map tree/revision pairs to parent pushheads according to the pushlog."""

    def __init__(self, cache_dir, log=None, skip_cache=False):
        CacheManager.__init__(
            self,
            cache_dir,
            "pushhead_cache",
            MAX_CACHED_TASKS,
            log=log,
            skip_cache=skip_cache,
        )

    @cachedmethod(operator.attrgetter("_cache"))
    def parent_pushhead_id(self, tree, revision):
        # Query hg.mozilla.org's pushlog for the push containing `revision`
        # and return its (integer) push id. Raises ValueError on a non-2xx
        # HTTP response.
        cset_url_tmpl = (
            "https://hg.mozilla.org/{tree}/json-pushes?"
            "changeset={changeset}&version=2&tipsonly=1"
        )
        req = requests.get(
            cset_url_tmpl.format(tree=tree, changeset=revision),
            headers={"Accept": "application/json"},
        )
        if req.status_code not in range(200, 300):
            raise ValueError
        result = req.json()
        # A changeset query with tipsonly=1 is expected to land in exactly
        # one push; destructuring enforces that.
        [found_pushid] = result["pushes"].keys()
        return int(found_pushid)

    @cachedmethod(operator.attrgetter("_cache"))
    def pushid_range(self, tree, start, end):
        # Return the tip changeset of every push with id in [start, end].
        # NOTE(review): unlike parent_pushhead_id, this does not check
        # req.status_code before decoding JSON — confirm that is intentional.
        pushid_url_tmpl = (
            "https://hg.mozilla.org/{tree}/json-pushes?"
            "startID={start}&endID={end}&version=2&tipsonly=1"
        )

        req = requests.get(
            pushid_url_tmpl.format(tree=tree, start=start, end=end),
            headers={"Accept": "application/json"},
        )
        result = req.json()
        # With tipsonly=1 each push lists only its tip, i.e. changesets[-1].
        return [p["changesets"][-1] for p in result["pushes"].values()]
+
+
class TaskCache(CacheManager):
    """Map candidate pushheads to Task Cluster task IDs and artifact URLs."""

    def __init__(self, cache_dir, log=None, skip_cache=False):
        CacheManager.__init__(
            self,
            cache_dir,
            "artifact_url",
            MAX_CACHED_TASKS,
            log=log,
            skip_cache=skip_cache,
        )

    @cachedmethod(operator.attrgetter("_cache"))
    def artifacts(self, tree, job, artifact_job_class, rev):
        # Resolve (tree, job, revision) to a Taskcluster task id plus that
        # task's artifact listing. Raises ValueError if the index has no
        # matching task (yet).
        #
        # Grab the second part of the repo name, which is generally how things
        # are indexed. Eg: 'integration/autoland' is indexed as
        # 'autoland'
        tree = tree.split("/")[1] if "/" in tree else tree

        # Shippable builds are indexed under a ".shippable" tree suffix.
        if job.endswith("-opt"):
            tree += ".shippable"

        namespace = "{trust_domain}.v2.{tree}.revision.{rev}.{product}.{job}".format(
            trust_domain=artifact_job_class.trust_domain,
            rev=rev,
            tree=tree,
            product=artifact_job_class.product,
            job=job,
        )
        self.log(
            logging.DEBUG,
            "artifact",
            {"namespace": namespace},
            "Searching Taskcluster index with namespace: {namespace}",
        )
        try:
            taskId = find_task_id(namespace)
        except KeyError:
            # Not all revisions correspond to pushes that produce the job we
            # care about; and even those that do may not have completed yet.
            raise ValueError(
                "Task for {namespace} does not exist (yet)!".format(namespace=namespace)
            )

        return taskId, list_artifacts(taskId)
+
+
class Artifacts(object):
    """Maintain state to efficiently fetch build artifacts from a Firefox tree."""

    def __init__(
        self,
        tree,
        substs,
        defines,
        job=None,
        log=None,
        cache_dir=".",
        hg=None,
        git=None,
        skip_cache=False,
        topsrcdir=None,
        download_tests=True,
        download_symbols=False,
        download_maven_zip=False,
        no_process=False,
        mozbuild=None,
    ):
        """Create an artifact fetcher.

        Exactly one of ``hg`` or ``git`` (a path to the VCS binary) must be
        provided.  ``tree`` names the repository to pull artifacts for, and
        ``substs``/``defines`` come from the build configuration; they are
        used to guess the artifact job when ``job`` is not given.

        Raises:
            ValueError: if neither or both of ``hg`` and ``git`` are given.
            KeyError: if ``job`` does not name a known artifact job.
        """
        if (hg and git) or (not hg and not git):
            raise ValueError("Must provide path to exactly one of hg and git")

        self._substs = substs
        self._defines = defines
        self._tree = tree
        self._job = job or self._guess_artifact_job()
        self._log = log
        self._hg = hg
        self._git = git
        self._cache_dir = cache_dir
        self._skip_cache = skip_cache
        self._topsrcdir = topsrcdir
        self._no_process = no_process

        app = self._substs.get("MOZ_BUILD_APP")
        job_details = COMM_JOB_DETAILS if app == "comm/mail" else MOZ_JOB_DETAILS

        try:
            cls = job_details[self._job]
            self._artifact_job = cls(
                log=self._log,
                download_tests=download_tests,
                download_symbols=download_symbols,
                download_maven_zip=download_maven_zip,
                substs=self._substs,
                mozbuild=mozbuild,
            )
        except KeyError:
            self.log(logging.INFO, "artifact", {"job": self._job}, "Unknown job {job}")
            raise KeyError("Unknown job")

        self._task_cache = TaskCache(
            self._cache_dir, log=self._log, skip_cache=self._skip_cache
        )
        self._artifact_cache = ArtifactCache(
            self._cache_dir, log=self._log, skip_cache=self._skip_cache
        )
        self._pushhead_cache = PushheadCache(
            self._cache_dir, log=self._log, skip_cache=self._skip_cache
        )

    def log(self, *args, **kwargs):
        """Forward to the logging callable provided at construction, if any."""
        if self._log:
            self._log(*args, **kwargs)

    def run_hg(self, *args, **kwargs):
        """Run hg with the given arguments and return its stdout as text.

        HGPLAIN=1 is set so output is stable regardless of the user's hg
        configuration.
        """
        # Bug fix: the environment dict was previously built with
        # kwargs.get("env", {}) and then mutated, but never stored back into
        # kwargs, so HGPLAIN was silently dropped whenever the caller did not
        # pass env= (no caller in this file does).  Copy the (caller-supplied
        # or process) environment, set HGPLAIN, and actually pass it along.
        env = dict(kwargs.pop("env", os.environ))
        env["HGPLAIN"] = "1"
        kwargs["env"] = env
        kwargs["universal_newlines"] = True
        return subprocess.check_output([self._hg] + list(args), **kwargs)

    def _guess_artifact_job(self):
        """Guess the artifact job name from the build configuration.

        Combines the target OS/CPU (from substs/defines) with a "-debug" or
        "-opt" suffix depending on MOZ_DEBUG.
        """
        # Add the "-debug" suffix to the guessed artifact job name
        # if MOZ_DEBUG is enabled.
        if self._substs.get("MOZ_DEBUG"):
            target_suffix = "-debug"
        else:
            target_suffix = "-opt"

        if self._substs.get("MOZ_BUILD_APP", "") == "mobile/android":
            if self._substs["ANDROID_CPU_ARCH"] == "x86_64":
                return "android-x86_64" + target_suffix
            if self._substs["ANDROID_CPU_ARCH"] == "x86":
                return "android-x86" + target_suffix
            if self._substs["ANDROID_CPU_ARCH"] == "arm64-v8a":
                return "android-aarch64" + target_suffix
            return "android-arm" + target_suffix

        target_64bit = False
        if self._substs["target_cpu"] == "x86_64":
            target_64bit = True

        if self._defines.get("XP_LINUX", False):
            return ("linux64" if target_64bit else "linux") + target_suffix
        if self._defines.get("XP_WIN", False):
            if self._substs["target_cpu"] == "aarch64":
                return "win64-aarch64" + target_suffix
            return ("win64" if target_64bit else "win32") + target_suffix
        if self._defines.get("XP_MACOSX", False):
            # We only produce unified builds in automation, so the target_cpu
            # check is not relevant.
            return "macosx64" + target_suffix
        raise Exception("Cannot determine default job for |mach artifact|!")

    def _pushheads_from_rev(self, rev, count):
        """Queries hg.mozilla.org's json-pushlog for pushheads that are nearby
        ancestors of `rev`. Multiple trees are queried, as the `rev` may
        already have been pushed to multiple repositories. For each repository
        containing `rev`, the pushhead introducing `rev` and the previous
        `count` pushheads from that point are included in the output.
        """

        with self._pushhead_cache as pushhead_cache:
            found_pushids = {}

            search_trees = self._artifact_job.candidate_trees
            for tree in search_trees:
                self.log(
                    logging.DEBUG,
                    "artifact",
                    {"tree": tree, "rev": rev},
                    "Attempting to find a pushhead containing {rev} on {tree}.",
                )
                try:
                    pushid = pushhead_cache.parent_pushhead_id(tree, rev)
                    found_pushids[tree] = pushid
                except ValueError:
                    # ValueError means "rev not known on this tree"; just try
                    # the next candidate tree.
                    continue

            candidate_pushheads = collections.defaultdict(list)

            for tree, pushid in six.iteritems(found_pushids):
                end = pushid
                start = pushid - NUM_PUSHHEADS_TO_QUERY_PER_PARENT

                self.log(
                    logging.DEBUG,
                    "artifact",
                    {
                        "tree": tree,
                        "pushid": pushid,
                        "num": NUM_PUSHHEADS_TO_QUERY_PER_PARENT,
                    },
                    "Retrieving the last {num} pushheads starting with id {pushid} on {tree}",
                )
                for pushhead in pushhead_cache.pushid_range(tree, start, end):
                    candidate_pushheads[pushhead].append(tree)

        return candidate_pushheads

    def _get_hg_revisions_from_git(self):
        """Return recent hg revision hashes for a git-cinnabar clone.

        Maps the most recent git commits to their mercurial equivalents via
        `git cinnabar git2hg`, skipping commits with no cinnabar metadata.

        Raises:
            UserError: if no commit could be mapped to an hg revision.
        """
        rev_list = subprocess.check_output(
            [
                self._git,
                "rev-list",
                "--topo-order",
                "--max-count={num}".format(num=NUM_REVISIONS_TO_QUERY),
                "HEAD",
            ],
            universal_newlines=True,
            cwd=self._topsrcdir,
        )

        hg_hash_list = subprocess.check_output(
            [self._git, "cinnabar", "git2hg"] + rev_list.splitlines(),
            universal_newlines=True,
            cwd=self._topsrcdir,
        )

        # git2hg prints the all-zeroes hash for commits it cannot map.
        zeroes = "0" * 40

        hashes = []
        for hg_hash in hg_hash_list.splitlines():
            hg_hash = hg_hash.strip()
            if not hg_hash or hg_hash == zeroes:
                continue
            hashes.append(hg_hash)
        if not hashes:
            msg = (
                "Could not list any recent revisions in your clone. Does "
                "your clone have git-cinnabar metadata? If not, consider "
                "re-cloning using the directions at "
                "https://github.com/glandium/git-cinnabar/wiki/Mozilla:-A-"
                "git-workflow-for-Gecko-development"
            )
            try:
                subprocess.check_output(
                    [
                        self._git,
                        "cat-file",
                        "-e",
                        "05e5d33a570d48aed58b2d38f5dfc0a7870ff8d3^{commit}",
                    ],
                    stderr=subprocess.STDOUT,
                )
                # If the above commit exists, we're probably in a clone of
                # `gecko-dev`, and this documentation applies.
                msg += (
                    "\n\nNOTE: Consider following the directions "
                    "at https://github.com/glandium/git-cinnabar/wiki/"
                    "Mozilla:-Using-a-git-clone-of-gecko%E2%80%90dev-"
                    "to-push-to-mercurial to resolve this issue."
                )
            except subprocess.CalledProcessError:
                pass
            raise UserError(msg)
        return hashes

    def _get_recent_public_revisions(self):
        """Returns recent ancestors of the working parent that are likely
        to be known to Mozilla automation.

        If we're using git, retrieves hg revisions from git-cinnabar.
        """
        if self._git:
            return self._get_hg_revisions_from_git()

        # Mercurial updated the ordering of "last" in 4.3. We use revision
        # numbers to order here to accommodate multiple versions of hg.
        last_revs = self.run_hg(
            "log",
            "--template",
            "{rev}:{node}\n",
            "-r",
            "last(public() and ::., {num})".format(num=NUM_REVISIONS_TO_QUERY),
            cwd=self._topsrcdir,
        ).splitlines()

        if len(last_revs) == 0:
            raise UserError(
                """\
There are no public revisions.
This can happen if the repository is created from bundle file and never pulled
from remote. Please run `hg pull` and build again.
https://firefox-source-docs.mozilla.org/contributing/vcs/mercurial_bundles.html
"""
            )

        self.log(
            logging.DEBUG,
            "artifact",
            {"len": len(last_revs)},
            "hg suggested {len} candidate revisions",
        )

        def to_pair(line):
            rev, node = line.split(":", 1)
            return (int(rev), node)

        pairs = [to_pair(r) for r in last_revs]

        # Python's tuple sort orders by first component: here, the (local)
        # revision number.
        nodes = [pair[1] for pair in sorted(pairs, reverse=True)]

        for node in nodes[:20]:
            self.log(
                logging.DEBUG,
                "artifact",
                {"node": node},
                "hg suggested candidate revision: {node}",
            )
        self.log(
            logging.DEBUG,
            "artifact",
            {"remaining": max(0, len(nodes) - 20)},
            "hg suggested candidate revision: and {remaining} more",
        )

        return nodes

    def _find_pushheads(self):
        """Returns an iterator of recent pushhead revisions, starting with the
        working parent.

        Yields (tree-list, revision) pairs; raises if no candidate pushhead
        is found among the recent public revisions.
        """

        last_revs = self._get_recent_public_revisions()
        candidate_pushheads = self._pushheads_from_rev(
            last_revs[0].rstrip(), NUM_PUSHHEADS_TO_QUERY_PER_PARENT
        )
        count = 0
        for rev in last_revs:
            rev = rev.rstrip()
            if not rev:
                continue
            if rev not in candidate_pushheads:
                continue
            count += 1
            yield candidate_pushheads[rev], rev

        if not count:
            raise Exception(
                "Could not find any candidate pushheads in the last {num} revisions.\n"
                "Search started with {rev}, which must be known to Mozilla automation.\n\n"
                "see https://firefox-source-docs.mozilla.org/contributing/build/artifact_builds.html".format(  # noqa E501
                    rev=last_revs[0], num=NUM_PUSHHEADS_TO_QUERY_PER_PARENT
                )
            )

    def find_pushhead_artifacts(self, task_cache, job, tree, pushhead):
        """Return candidate artifact URLs for ``pushhead`` on ``tree``.

        Returns None when the pushhead has no matching task or no candidate
        artifacts.
        """
        try:
            taskId, artifacts = task_cache.artifacts(
                tree, job, self._artifact_job.__class__, pushhead
            )
        except ValueError:
            return None

        urls = []
        for artifact_name in self._artifact_job.find_candidate_artifacts(artifacts):
            url = get_artifact_url(taskId, artifact_name)
            urls.append(url)
        if urls:
            self.log(
                logging.DEBUG,
                "artifact",
                {"pushhead": pushhead, "tree": tree},
                "Installing from remote pushhead {pushhead} on {tree}",
            )
            return urls
        return None

    def install_from_file(self, filename, distdir):
        """Install (and, unless disabled, post-process) one artifact file
        into ``distdir``.  Returns 0 on success."""
        self.log(
            logging.DEBUG,
            "artifact",
            {"filename": filename},
            # Bug fix: the format string previously read "(unknown)" and
            # ignored the filename parameter supplied just above.
            "Installing from {filename}",
        )

        # Copy all .so files, avoiding modification where possible.
        ensureParentDir(mozpath.join(distdir, ".dummy"))

        if self._no_process:
            orig_basename = os.path.basename(filename)
            # Turn 'HASH-target...' into 'target...' if possible. It might not
            # be possible if the file is given directly on the command line.
            before, _sep, after = orig_basename.rpartition("-")
            if re.match(r"[0-9a-fA-F]{16}$", before):
                orig_basename = after
            path = mozpath.join(distdir, orig_basename)
            with FileAvoidWrite(path, readmode="rb") as fh:
                shutil.copyfileobj(open(filename, mode="rb"), fh)
            self.log(
                logging.DEBUG,
                "artifact",
                {"path": path},
                "Copied unprocessed artifact: to {path}",
            )
            return

        # Do we need to post-process?
        processed_filename = filename + PROCESSED_SUFFIX

        if self._skip_cache and os.path.exists(processed_filename):
            self.log(
                logging.INFO,
                "artifact",
                {"path": processed_filename},
                "Skipping cache: removing cached processed artifact {path}",
            )
            os.remove(processed_filename)

        if not os.path.exists(processed_filename):
            self.log(
                logging.DEBUG,
                "artifact",
                {"filename": filename},
                # Bug fix: previously "(unknown)" — see above.
                "Processing contents of {filename}",
            )
            self.log(
                logging.DEBUG,
                "artifact",
                {"processed_filename": processed_filename},
                "Writing processed {processed_filename}",
            )
            try:
                self._artifact_job.process_artifact(filename, processed_filename)
            except Exception as e:
                # Delete the partial output of failed processing.
                try:
                    os.remove(processed_filename)
                except FileNotFoundError:
                    pass
                raise e

        self._artifact_cache._persist_limit.register_file(processed_filename)

        self.log(
            logging.DEBUG,
            "artifact",
            {"processed_filename": processed_filename},
            "Installing from processed {processed_filename}",
        )

        with zipfile.ZipFile(processed_filename) as zf:
            for info in zf.infolist():
                n = mozpath.join(distdir, info.filename)
                fh = FileAvoidWrite(n, readmode="rb")
                shutil.copyfileobj(zf.open(info), fh)
                file_existed, file_updated = fh.close()
                self.log(
                    logging.DEBUG,
                    "artifact",
                    {
                        "updating": "Updating" if file_updated else "Not updating",
                        "filename": n,
                    },
                    # Bug fix: previously "(unknown)" — see above.
                    "{updating} {filename}",
                )
                if not file_existed or file_updated:
                    # Libraries and binaries may need to be marked executable,
                    # depending on platform.
                    perms = (
                        info.external_attr >> 16
                    )  # See http://stackoverflow.com/a/434689.
                    perms |= (
                        stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH
                    )  # u+w, a+r.
                    os.chmod(n, perms)
        return 0

    def install_from_url(self, url, distdir):
        """Fetch ``url`` through the artifact cache, then install it."""
        self.log(logging.DEBUG, "artifact", {"url": url}, "Installing from {url}")
        filename = self._artifact_cache.fetch(url)
        return self.install_from_file(filename, distdir)

    def _install_from_hg_pushheads(self, hg_pushheads, distdir):
        """Iterate pairs (hg_hash, {tree-set}) associating hg revision hashes
        and tree-sets they are known to be in, trying to download and
        install from each.
        """

        urls = None
        count = 0
        # `with` blocks handle cache persistence.
        with self._task_cache as task_cache:
            for trees, hg_hash in hg_pushheads:
                for tree in trees:
                    count += 1
                    self.log(
                        logging.DEBUG,
                        "artifact",
                        {"hg_hash": hg_hash, "tree": tree},
                        "Trying to find artifacts for hg revision {hg_hash} on tree {tree}.",
                    )
                    urls = self.find_pushhead_artifacts(
                        task_cache, self._job, tree, hg_hash
                    )
                    if urls:
                        for url in urls:
                            if self.install_from_url(url, distdir):
                                return 1
                        return 0

        self.log(
            logging.ERROR,
            "artifact",
            {"count": count},
            "Tried {count} pushheads, no built artifacts found.",
        )
        return 1

    def install_from_recent(self, distdir):
        """Install artifacts from the most recent suitable pushhead."""
        hg_pushheads = self._find_pushheads()
        return self._install_from_hg_pushheads(hg_pushheads, distdir)

    def install_from_revset(self, revset, distdir):
        """Install artifacts for the single revision matching ``revset``.

        Raises:
            ValueError: if the revset does not resolve to exactly one
                commit known to hg.
        """
        revision = None
        try:
            if self._hg:
                revision = self.run_hg(
                    "log", "--template", "{node}\n", "-r", revset, cwd=self._topsrcdir
                ).strip()
            elif self._git:
                revset = subprocess.check_output(
                    [self._git, "rev-parse", "%s^{commit}" % revset],
                    stderr=open(os.devnull, "w"),
                    universal_newlines=True,
                    cwd=self._topsrcdir,
                ).strip()
            else:
                # Fallback to the exception handling case from both hg and git.
                # Bug fix: CalledProcessError requires (returncode, cmd);
                # instantiating it with no arguments raised TypeError instead
                # of reaching the handler below.
                raise subprocess.CalledProcessError(1, "")
        except subprocess.CalledProcessError:
            # If the mercurial or git commands above failed, it means the given
            # revset is not known locally to the VCS. But if the revset looks
            # like a complete sha1, assume it is a mercurial sha1 that hasn't
            # been pulled, and use that.
            if re.match(r"^[A-Fa-f0-9]{40}$", revset):
                revision = revset

        if revision is None and self._git:
            revision = subprocess.check_output(
                [self._git, "cinnabar", "git2hg", revset],
                universal_newlines=True,
                cwd=self._topsrcdir,
            ).strip()

        if revision == "0" * 40 or revision is None:
            raise ValueError(
                "revision specification must resolve to a commit known to hg"
            )
        if len(revision.split("\n")) != 1:
            raise ValueError(
                "revision specification must resolve to exactly one commit"
            )

        self.log(
            logging.INFO,
            "artifact",
            {"revset": revset, "revision": revision},
            "Will only accept artifacts from a pushhead at {revision} "
            '(matched revset "{revset}").',
        )
        # Include try in our search to allow pulling from a specific push.
        pushheads = [
            (
                self._artifact_job.candidate_trees + [self._artifact_job.try_tree],
                revision,
            )
        ]
        return self._install_from_hg_pushheads(pushheads, distdir)

    def install_from_task(self, taskId, distdir):
        """Install the candidate artifacts of one Taskcluster task."""
        artifacts = list_artifacts(taskId)

        urls = []
        for artifact_name in self._artifact_job.find_candidate_artifacts(artifacts):
            url = get_artifact_url(taskId, artifact_name)
            urls.append(url)
        if not urls:
            raise ValueError(
                "Task {taskId} existed, but no artifacts found!".format(taskId=taskId)
            )
        for url in urls:
            if self.install_from_url(url, distdir):
                return 1
        return 0

    def install_from(self, source, distdir):
        """Install artifacts from a ``source`` into the given ``distdir``."""
        # Dispatch on the form of `source` (file path, URL, revset) with
        # MOZ_ARTIFACT_* environment variables as overrides, falling back to
        # the most recent suitable pushhead.
        if (source and os.path.isfile(source)) or "MOZ_ARTIFACT_FILE" in os.environ:
            source = source or os.environ["MOZ_ARTIFACT_FILE"]
            for source in source.split(os.pathsep):
                ret = self.install_from_file(source, distdir)
                if ret:
                    return ret
            return 0

        if (source and urlparse(source).scheme) or "MOZ_ARTIFACT_URL" in os.environ:
            source = source or os.environ["MOZ_ARTIFACT_URL"]
            for source in source.split():
                ret = self.install_from_url(source, distdir)
                if ret:
                    return ret
            return 0

        if source or "MOZ_ARTIFACT_REVISION" in os.environ:
            source = source or os.environ["MOZ_ARTIFACT_REVISION"]
            return self.install_from_revset(source, distdir)

        for var in (
            "MOZ_ARTIFACT_TASK_%s" % self._job.upper().replace("-", "_"),
            "MOZ_ARTIFACT_TASK",
        ):
            if var in os.environ:
                return self.install_from_task(os.environ[var], distdir)

        return self.install_from_recent(distdir)

    def clear_cache(self):
        """Delete all cached artifacts and cache metadata."""
        self.log(logging.INFO, "artifact", {}, "Deleting cached artifacts and caches.")
        self._task_cache.clear_cache()
        self._artifact_cache.clear_cache()
        self._pushhead_cache.clear_cache()
diff --git a/python/mozbuild/mozbuild/backend/__init__.py b/python/mozbuild/mozbuild/backend/__init__.py
new file mode 100644
index 0000000000..e7097eb614
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/__init__.py
@@ -0,0 +1,27 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
# Maps build backend names to the dotted path of the module implementing the
# "<name>Backend" class.  A value of None marks a name (the hybrid
# "FasterMake+RecursiveMake") that is composed from its "+"-separated parts
# rather than loaded from a single module; see get_backend_class below.
backends = {
    "Clangd": "mozbuild.backend.clangd",
    "ChromeMap": "mozbuild.codecoverage.chrome_map",
    "CompileDB": "mozbuild.compilation.database",
    "CppEclipse": "mozbuild.backend.cpp_eclipse",
    "FasterMake": "mozbuild.backend.fastermake",
    "FasterMake+RecursiveMake": None,
    "RecursiveMake": "mozbuild.backend.recursivemake",
    "StaticAnalysis": "mozbuild.backend.static_analysis",
    "TestManifest": "mozbuild.backend.test_manifest",
    "VisualStudio": "mozbuild.backend.visualstudio",
}
+
+
def get_backend_class(name):
    """Return the backend class registered under ``name``.

    A "+"-separated name produces a HybridBackend composed of each part's
    backend class; otherwise the "<name>Backend" class is imported from the
    module recorded in ``backends`` (raising KeyError for unknown names).
    """
    if "+" in name:
        from mozbuild.backend.base import HybridBackend

        parts = [get_backend_class(part) for part in name.split("+")]
        return HybridBackend(*parts)

    class_name = "%sBackend" % name
    module = __import__(backends[name], globals(), locals(), [class_name])
    return getattr(module, class_name)
diff --git a/python/mozbuild/mozbuild/backend/base.py b/python/mozbuild/mozbuild/backend/base.py
new file mode 100644
index 0000000000..0f95942f51
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/base.py
@@ -0,0 +1,389 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import errno
+import io
+import itertools
+import os
+import time
+from abc import ABCMeta, abstractmethod
+from contextlib import contextmanager
+
+import mozpack.path as mozpath
+import six
+from mach.mixin.logging import LoggingMixin
+
+from mozbuild.base import ExecutionSummary
+
+from ..frontend.data import ContextDerived
+from ..frontend.reader import EmptyConfig
+from ..preprocessor import Preprocessor
+from ..pythonutil import iter_modules_in_path
+from ..util import FileAvoidWrite, simple_diff
+from .configenvironment import ConfigEnvironment
+
+
class BuildBackend(LoggingMixin):
    """Abstract base class for build backends.

    A build backend is merely a consumer of the build configuration (the output
    of the frontend processing). It does something with said data. What exactly
    is the discretion of the specific implementation.
    """

    # NOTE(review): this is the Python 2 way of declaring a metaclass and has
    # no effect under Python 3, so @abstractmethod is not actually enforced
    # here — confirm before relying on ABC behavior.
    __metaclass__ = ABCMeta

    def __init__(self, environment):
        assert isinstance(environment, (ConfigEnvironment, EmptyConfig))
        self.populate_logger()

        self.environment = environment

        # Files whose modification should cause a new read and backend
        # generation.
        self.backend_input_files = set()

        # Files generated by the backend.
        self._backend_output_files = set()

        self._environments = {}
        self._environments[environment.topobjdir] = environment

        # The number of backend files created.
        self._created_count = 0

        # The number of backend files updated.
        self._updated_count = 0

        # The number of unchanged backend files.
        self._unchanged_count = 0

        # The number of deleted backend files.
        self._deleted_count = 0

        # The total wall time spent in the backend. This counts the time the
        # backend writes out files, etc.
        self._execution_time = 0.0

        # Mapping of changed file paths to diffs of the changes.
        self.file_diffs = {}

        # When True, _write_file and consume avoid touching the filesystem.
        self.dry_run = False

        self._init()

    def summary(self):
        """Return an ExecutionSummary describing this backend run."""
        return ExecutionSummary(
            self.__class__.__name__.replace("Backend", "")
            + " backend executed in {execution_time:.2f}s\n  "
            "{total:d} total backend files; "
            "{created:d} created; "
            "{updated:d} updated; "
            "{unchanged:d} unchanged; "
            "{deleted:d} deleted",
            execution_time=self._execution_time,
            total=self._created_count + self._updated_count + self._unchanged_count,
            created=self._created_count,
            updated=self._updated_count,
            unchanged=self._unchanged_count,
            deleted=self._deleted_count,
        )

    def _init(self):
        """Hook point for child classes to perform actions during __init__.

        This exists so child classes don't need to implement __init__.
        """

    def consume(self, objs):
        """Consume a stream of TreeMetadata instances.

        This is the main method of the interface. This is what takes the
        frontend output and does something with it.

        Child classes are not expected to implement this method. Instead, the
        base class consumes objects and calls methods (possibly) implemented by
        child classes.
        """

        # Previously generated files.
        list_file = mozpath.join(
            self.environment.topobjdir, "backend.%s" % self.__class__.__name__
        )
        backend_output_list = set()
        if os.path.exists(list_file):
            with open(list_file) as fh:
                backend_output_list.update(
                    mozpath.normsep(p) for p in fh.read().splitlines()
                )

        for obj in objs:
            obj_start = time.monotonic()
            # A PartialBackend is allowed to leave objects unhandled; any
            # other backend must handle every object it is given.
            if not self.consume_object(obj) and not isinstance(self, PartialBackend):
                raise Exception("Unhandled object of type %s" % type(obj))
            self._execution_time += time.monotonic() - obj_start

            if isinstance(obj, ContextDerived) and not isinstance(self, PartialBackend):
                self.backend_input_files |= obj.context_all_paths

        # Pull in all loaded Python as dependencies so any Python changes that
        # could influence our output result in a rescan.
        self.backend_input_files |= set(
            iter_modules_in_path(self.environment.topsrcdir, self.environment.topobjdir)
        )

        finished_start = time.monotonic()
        self.consume_finished()
        self._execution_time += time.monotonic() - finished_start

        # Purge backend files created in previous run, but not created anymore
        delete_files = backend_output_list - self._backend_output_files
        for path in delete_files:
            full_path = mozpath.join(self.environment.topobjdir, path)
            try:
                # Record a diff of the deleted content before removing it.
                with io.open(full_path, mode="r", encoding="utf-8") as existing:
                    old_content = existing.read()
                    if old_content:
                        self.file_diffs[full_path] = simple_diff(
                            full_path, old_content.splitlines(), None
                        )
            except IOError:
                pass
            try:
                if not self.dry_run:
                    os.unlink(full_path)
                self._deleted_count += 1
            except OSError:
                pass
        # Remove now empty directories
        for dir in set(mozpath.dirname(d) for d in delete_files):
            try:
                os.removedirs(dir)
            except OSError:
                pass

        # Write out the list of backend files generated, if it changed.
        if backend_output_list != self._backend_output_files:
            with self._write_file(list_file) as fh:
                fh.write("\n".join(sorted(self._backend_output_files)))
        else:
            # Always update its mtime if we're not in dry-run mode.
            if not self.dry_run:
                with open(list_file, "a"):
                    os.utime(list_file, None)

        # Write out the list of input files for the backend
        with self._write_file("%s.in" % list_file) as fh:
            fh.write(
                "\n".join(sorted(mozpath.normsep(f) for f in self.backend_input_files))
            )

    @abstractmethod
    def consume_object(self, obj):
        """Consumes an individual TreeMetadata instance.

        This is the main method used by child classes to react to build
        metadata.
        """

    def consume_finished(self):
        """Called when consume() has completed handling all objects."""

    def build(self, config, output, jobs, verbose, what=None):
        """Called when 'mach build' is executed.

        This should return the status value of a subprocess, where 0 denotes
        success and any other value is an error code. A return value of None
        indicates that the default 'make -f client.mk' should run.
        """
        return None

    def _write_purgecaches(self, config):
        """Write .purgecaches sentinels.

        The purgecaches mechanism exists to allow the platform to
        invalidate the XUL cache (which includes some JS) at application
        startup-time. The application checks for .purgecaches in the
        application directory, which varies according to
        --enable-application/--enable-project. There's a further wrinkle on
        macOS, where the real application directory is part of a Cocoa bundle
        produced from the regular application directory by the build
        system. In this case, we write to both locations, since the
        build system recreates the Cocoa bundle from the contents of the
        regular application directory and might remove a sentinel
        created here.
        """

        app = config.substs["MOZ_BUILD_APP"]
        if app == "mobile/android":
            # In order to take effect, .purgecaches sentinels would need to be
            # written to the Android device file system.
            return

        root = mozpath.join(config.topobjdir, "dist", "bin")

        if app == "browser":
            root = mozpath.join(config.topobjdir, "dist", "bin", "browser")

        purgecaches_dirs = [root]
        if app == "browser" and "cocoa" == config.substs["MOZ_WIDGET_TOOLKIT"]:
            bundledir = mozpath.join(
                config.topobjdir,
                "dist",
                config.substs["MOZ_MACBUNDLE_NAME"],
                "Contents",
                "Resources",
                "browser",
            )
            purgecaches_dirs.append(bundledir)

        for dir in purgecaches_dirs:
            with open(mozpath.join(dir, ".purgecaches"), "wt") as f:
                f.write("\n")

    def post_build(self, config, output, jobs, verbose, status):
        """Called late during 'mach build' execution, after `build(...)` has finished.

        `status` is the status value returned from `build(...)`.

        In the case where `build` returns `None`, this is called after
        the default `make` command has completed, with the status of
        that command.

        This should return the status value from `build(...)`, or the
        status value of a subprocess, where 0 denotes success and any
        other value is an error code.

        If an exception is raised, ``mach build`` will fail with a
        non-zero exit code.
        """
        self._write_purgecaches(config)

        return status

    @contextmanager
    def _write_file(self, path=None, fh=None, readmode="r"):
        """Context manager to write a file.

        This is a glorified wrapper around FileAvoidWrite with integration to
        update the summary data on this instance.

        Example usage:

            with self._write_file('foo.txt') as fh:
                fh.write('hello world')
        """

        if path is not None:
            assert fh is None
            fh = FileAvoidWrite(
                path, capture_diff=True, dry_run=self.dry_run, readmode=readmode
            )
        else:
            assert fh is not None

        dirname = mozpath.dirname(fh.name)
        try:
            os.makedirs(dirname)
        except OSError as error:
            # Tolerate the directory already existing; re-raise anything else.
            if error.errno != errno.EEXIST:
                raise

        yield fh

        self._backend_output_files.add(
            mozpath.relpath(fh.name, self.environment.topobjdir)
        )
        existed, updated = fh.close()
        if fh.diff:
            self.file_diffs[fh.name] = fh.diff
        if not existed:
            self._created_count += 1
        elif updated:
            self._updated_count += 1
        else:
            self._unchanged_count += 1

    @contextmanager
    def _get_preprocessor(self, obj):
        """Returns a preprocessor with a few predefined values depending on
        the given BaseConfigSubstitution(-like) object, and all the substs
        in the current environment."""
        pp = Preprocessor()
        srcdir = mozpath.dirname(obj.input_path)
        pp.context.update(
            {
                k: " ".join(v) if isinstance(v, list) else v
                for k, v in six.iteritems(obj.config.substs)
            }
        )
        pp.context.update(
            top_srcdir=obj.topsrcdir,
            topobjdir=obj.topobjdir,
            srcdir=srcdir,
            srcdir_rel=mozpath.relpath(srcdir, mozpath.dirname(obj.output_path)),
            relativesrcdir=mozpath.relpath(srcdir, obj.topsrcdir) or ".",
            DEPTH=mozpath.relpath(obj.topobjdir, mozpath.dirname(obj.output_path))
            or ".",
        )
        pp.do_filter("attemptSubstitution")
        pp.setMarker(None)
        with self._write_file(obj.output_path) as fh:
            pp.out = fh
            yield pp
+
+
class PartialBackend(BuildBackend):
    """A PartialBackend is a BuildBackend declaring that its consume_object
    method may not handle all build configuration objects it's passed, and
    that it's fine.  Objects it leaves unhandled are expected to be consumed
    by the final, non-partial backend of a HybridBackend chain."""
+
+
def HybridBackend(*backends):
    """A HybridBackend is the combination of one or more PartialBackends
    with a non-partial BuildBackend.

    Build configuration objects are passed to each backend, stopping at the
    first of them that declares having handled them.
    """
    # All but the last backend must be partial; the last one must be a full
    # backend so every object is guaranteed a handler.
    assert len(backends) >= 2
    assert all(issubclass(b, PartialBackend) for b in backends[:-1])
    assert not (issubclass(backends[-1], PartialBackend))
    assert all(issubclass(b, BuildBackend) for b in backends)

    class TheHybridBackend(BuildBackend):
        def __init__(self, environment):
            self._backends = [b(environment) for b in backends]
            super(TheHybridBackend, self).__init__(environment)

        def consume_object(self, obj):
            # any() short-circuits at the first backend that reports having
            # handled the object, implementing the "stop at first handler"
            # contract documented above.
            return any(b.consume_object(obj) for b in self._backends)

        def consume_finished(self):
            for backend in self._backends:
                backend.consume_finished()

            # Aggregate the per-backend counters and file records onto this
            # instance so summary() reports the combined totals.
            for attr in (
                "_execution_time",
                "_created_count",
                "_updated_count",
                "_unchanged_count",
                "_deleted_count",
            ):
                setattr(self, attr, sum(getattr(b, attr) for b in self._backends))

            for b in self._backends:
                self.file_diffs.update(b.file_diffs)
                for attr in ("backend_input_files", "_backend_output_files"):
                    files = getattr(self, attr)
                    files |= getattr(b, attr)

    # E.g. (FasterMake, RecursiveMake) -> "Faster+RecursiveMakeBackend".
    name = "+".join(
        itertools.chain(
            (b.__name__.replace("Backend", "") for b in backends[:-1]),
            (b.__name__ for b in backends[-1:]),
        )
    )

    # str() around the name is a Python 2 (six-era) remnant; type() requires
    # a native str for the class name.
    return type(str(name), (TheHybridBackend,), {})
diff --git a/python/mozbuild/mozbuild/backend/cargo_build_defs.py b/python/mozbuild/mozbuild/backend/cargo_build_defs.py
new file mode 100644
index 0000000000..c60fd2abf6
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/cargo_build_defs.py
@@ -0,0 +1,87 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
# NOTE(review): maps crate names to extra files (generated .rs sources,
# object files, static libraries) that appear when these crates are built —
# presumably emitted by the crates' build scripts and consumed by the build
# system's cargo integration; confirm against users of cargo_extra_outputs.
cargo_extra_outputs = {
    "bindgen": ["tests.rs", "host-target.txt"],
    "cssparser": ["tokenizer.rs"],
    "gleam": ["gl_and_gles_bindings.rs", "gl_bindings.rs", "gles_bindings.rs"],
    "khronos_api": ["webgl_exts.rs"],
    "libloading": ["libglobal_static.a", "src/os/unix/global_static.o"],
    "lmdb-sys": ["liblmdb.a", "midl.o", "mdb.o"],
    "num-integer": ["rust_out.o"],
    "num-traits": ["rust_out.o"],
    "selectors": ["ascii_case_insensitive_html_attributes.rs"],
    "style": [
        "gecko/atom_macro.rs",
        "gecko/bindings.rs",
        "gecko/pseudo_element_definition.rs",
        "gecko/structs.rs",
        "gecko_properties.rs",
        "longhands/background.rs",
        "longhands/border.rs",
        "longhands/box.rs",
        "longhands/color.rs",
        "longhands/column.rs",
        "longhands/counters.rs",
        "longhands/effects.rs",
        "longhands/font.rs",
        "longhands/inherited_box.rs",
        "longhands/inherited_svg.rs",
        "longhands/inherited_table.rs",
        "longhands/inherited_text.rs",
        "longhands/inherited_ui.rs",
        "longhands/list.rs",
        "longhands/margin.rs",
        "longhands/outline.rs",
        "longhands/padding.rs",
        "longhands/position.rs",
        "longhands/svg.rs",
        "longhands/table.rs",
        "longhands/text.rs",
        "longhands/ui.rs",
        "longhands/xul.rs",
        "properties.rs",
        "shorthands/background.rs",
        "shorthands/border.rs",
        "shorthands/box.rs",
        "shorthands/color.rs",
        "shorthands/column.rs",
        "shorthands/counters.rs",
        "shorthands/effects.rs",
        "shorthands/font.rs",
        "shorthands/inherited_box.rs",
        "shorthands/inherited_svg.rs",
        "shorthands/inherited_table.rs",
        "shorthands/inherited_text.rs",
        "shorthands/inherited_ui.rs",
        "shorthands/list.rs",
        "shorthands/margin.rs",
        "shorthands/outline.rs",
        "shorthands/padding.rs",
        "shorthands/position.rs",
        "shorthands/svg.rs",
        "shorthands/table.rs",
        "shorthands/text.rs",
        "shorthands/ui.rs",
        "shorthands/xul.rs",
    ],
    "webrender": ["shaders.rs"],
    "geckodriver": ["build-info.rs"],
    "gecko-profiler": ["gecko/bindings.rs"],
    "crc": ["crc64_constants.rs", "crc32_constants.rs"],
    "bzip2-sys": [
        "bzip2-1.0.6/blocksort.o",
        "bzip2-1.0.6/bzlib.o",
        "bzip2-1.0.6/compress.o",
        "bzip2-1.0.6/crctable.o",
        "bzip2-1.0.6/decompress.o",
        "bzip2-1.0.6/huffman.o",
        "bzip2-1.0.6/randtable.o",
        "libbz2.a",
    ],
    "clang-sys": ["common.rs", "dynamic.rs"],
    "target-lexicon": ["host.rs"],
    "baldrdash": ["bindings.rs"],
    "typenum": ["op.rs", "consts.rs"],
}
diff --git a/python/mozbuild/mozbuild/backend/clangd.py b/python/mozbuild/mozbuild/backend/clangd.py
new file mode 100644
index 0000000000..5db5610ae6
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/clangd.py
@@ -0,0 +1,126 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This module provides a backend for `clangd` in order to have support for
+# code completion, compile errors, go-to-definition and more.
+# It is based on `database.py` with the difference that we don't generate
+# an unified `compile_commands.json` but we generate a per file basis `command` in
+# `objdir/clangd/compile_commands.json`
+
+import os
+
+import mozpack.path as mozpath
+
+from mozbuild.compilation.database import CompileDBBackend
+
+
def find_vscode_cmd():
    """Locate a command that launches Visual Studio Code.

    Returns a command list suitable for subprocess invocation, or None if
    no VS Code installation could be found.
    """
    import shutil
    import sys

    # Prefer whatever `code` binary is on $PATH. This should catch cases
    # like being run from within a vscode-remote shell, even if vscode
    # itself is also installed on the remote host.
    which_code = shutil.which("code")
    if which_code is not None:
        return [which_code]

    # If the binary wasn't on $PATH, probe well-known install locations
    # for the current platform. Each candidate is (path-to-probe, command).
    candidates = []
    if sys.platform.startswith("darwin"):
        candidates = [
            ("/usr/local/bin/code", ["/usr/local/bin/code"]),
            (
                "/Applications/Visual Studio Code.app",
                ["open", "/Applications/Visual Studio Code.app", "--args"],
            ),
            (
                "/Applications/Visual Studio Code - Insiders.app",
                [
                    "open",
                    "/Applications/Visual Studio Code - Insiders.app",
                    "--args",
                ],
            ),
        ]
    elif sys.platform.startswith("win"):
        from pathlib import Path

        programs_dir = mozpath.join(
            str(Path.home()), "AppData", "Local", "Programs"
        )
        vscode_exe = mozpath.join(programs_dir, "Microsoft VS Code", "Code.exe")
        insiders_exe = mozpath.join(
            programs_dir, "Microsoft VS Code Insiders", "Code - Insiders.exe"
        )
        candidates = [
            (vscode_exe, [vscode_exe]),
            (insiders_exe, [insiders_exe]),
        ]
    elif sys.platform.startswith("linux"):
        candidates = [
            ("/usr/local/bin/code", ["/usr/local/bin/code"]),
            ("/snap/bin/code", ["/snap/bin/code"]),
            ("/usr/bin/code", ["/usr/bin/code"]),
            ("/usr/bin/code-insiders", ["/usr/bin/code-insiders"]),
        ]

    # Return the launch command of the first existing installation.
    for probe_path, launch_cmd in candidates:
        if os.path.exists(probe_path):
            return launch_cmd

    # Path cannot be found.
    return None
+
+
class ClangdBackend(CompileDBBackend):
    """
    Configuration that generates the backend for clangd, it is used with `clangd`
    extension for vscode
    """

    def _init(self):
        super()._init()

    def _get_compiler_args(self, cenv, canonical_suffix):
        """Return the compile arguments for a source, with any leading ccache
        wrapper stripped, since clangd needs the real compiler invocation.

        Returns None when the parent backend has no arguments for this suffix.
        """
        compiler_args = super()._get_compiler_args(cenv, canonical_suffix)
        if compiler_args is None:
            return None

        # Drop a leading ccache wrapper; only the underlying compiler command
        # is meaningful to clangd.
        if compiler_args and compiler_args[0].endswith("ccache"):
            compiler_args.pop(0)
        return compiler_args

    def _build_cmd(self, cmd, filename, unified):
        """Build the per-file command: the compiler invocation plus the file.

        `unified` is accepted for interface compatibility but ignored — clangd
        wants a command per individual source file.
        """
        return list(cmd) + [filename]

    def _outputfile_path(self):
        """Return the path of the generated compilation database, creating its
        directory if needed."""
        clangd_cc_path = os.path.join(self.environment.topobjdir, "clangd")

        # exist_ok avoids the TOCTOU race of a separate exists()/mkdir() pair
        # when multiple backend invocations run concurrently.
        os.makedirs(clangd_cc_path, exist_ok=True)

        # Output the database (a JSON file) to objdir/clangd/compile_commands.json
        return mozpath.join(clangd_cc_path, "compile_commands.json")

    def _process_unified_sources(self, obj):
        # clangd needs commands for the original sources, not the unified
        # stubs, so skip the unified mapping.
        self._process_unified_sources_without_mapping(obj)
diff --git a/python/mozbuild/mozbuild/backend/common.py b/python/mozbuild/mozbuild/backend/common.py
new file mode 100644
index 0000000000..f0dc7d4e46
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/common.py
@@ -0,0 +1,603 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import itertools
+import json
+import os
+from collections import defaultdict
+from operator import itemgetter
+
+import mozpack.path as mozpath
+import six
+from mozpack.chrome.manifest import parse_manifest_line
+
+from mozbuild.backend.base import BuildBackend
+from mozbuild.frontend.context import (
+ VARIABLES,
+ Context,
+ ObjDirPath,
+ Path,
+ RenamedSourcePath,
+)
+from mozbuild.frontend.data import (
+ BaseProgram,
+ ChromeManifestEntry,
+ ConfigFileSubstitution,
+ Exports,
+ FinalTargetFiles,
+ FinalTargetPreprocessedFiles,
+ GeneratedFile,
+ HostLibrary,
+ HostSources,
+ IPDLCollection,
+ LocalizedFiles,
+ LocalizedPreprocessedFiles,
+ SandboxedWasmLibrary,
+ SharedLibrary,
+ Sources,
+ StaticLibrary,
+ UnifiedSources,
+ WebIDLCollection,
+ XPCOMComponentManifests,
+ XPIDLModule,
+)
+from mozbuild.jar import DeprecatedJarManifest, JarManifestParser
+from mozbuild.preprocessor import Preprocessor
+from mozbuild.util import mkdir
+
+
class XPIDLManager(object):
    """Helps manage XPCOM IDLs in the context of the build system."""

    class Module(object):
        """Aggregates the IDL files belonging to a single XPIDL module."""

        def __init__(self):
            self.idl_files = set()  # absolute paths of the .idl inputs
            self.directories = set()  # directories containing those inputs
            self._stems = set()  # basenames without the .idl extension

        def add_idls(self, idls):
            """Record `idls` (path-like objects with a .full_path attribute)."""
            self.idl_files.update(idl.full_path for idl in idls)
            self.directories.update(mozpath.dirname(idl.full_path) for idl in idls)
            self._stems.update(
                mozpath.splitext(mozpath.basename(idl))[0] for idl in idls
            )

        def stems(self):
            return iter(self._stems)

    def __init__(self, config):
        self.config = config
        self.topsrcdir = config.topsrcdir
        self.topobjdir = config.topobjdir

        # Basenames of every registered IDL, used to reject duplicates.
        self._idls = set()
        self.modules = defaultdict(self.Module)

    def link_module(self, module):
        """Links an XPIDL module with this instance.

        Raises if an IDL basename was already registered by another module.
        """
        for idl in module.idl_files:
            basename = mozpath.basename(idl.full_path)

            if basename in self._idls:
                raise Exception("IDL already registered: %s" % basename)
            self._idls.add(basename)

        self.modules[module.name].add_idls(module.idl_files)

    def idl_stems(self):
        """Return an iterator of stems of the managed IDL files.

        The stem of an IDL file is the basename of the file with no .idl extension.
        """
        # Lazily chain per-module stem iterators instead of materializing a
        # list; .values() replaces the legacy six.itervalues() shim.
        return itertools.chain.from_iterable(
            m.stems() for m in self.modules.values()
        )
+
+
class BinariesCollection(object):
    """Tracks state of binaries produced by the build."""

    def __init__(self):
        # Each instance starts with its own empty lists; they are filled as
        # shared libraries and programs are consumed by the backend.
        self.shared_libraries, self.programs = [], []
+
+
class CommonBackend(BuildBackend):
    """Holds logic common to all build backends."""

    def _init(self):
        # Shared state populated while consuming frontend objects.
        self._idl_manager = XPIDLManager(self.environment)
        self._binaries = BinariesCollection()
        self._configs = set()
        self._generated_sources = set()

    def consume_object(self, obj):
        """Dispatch a frontend tree-metadata object.

        Returns True when the object is fully handled here; False when the
        concrete backend should also process it.
        """
        self._configs.add(obj.config)

        if isinstance(obj, XPIDLModule):
            # TODO bug 1240134 tracks not processing XPIDL files during
            # artifact builds.
            self._idl_manager.link_module(obj)

        elif isinstance(obj, ConfigFileSubstitution):
            # Do not handle ConfigFileSubstitution for Makefiles. Leave that
            # to other backends.
            if mozpath.basename(obj.output_path) == "Makefile":
                return False
            with self._get_preprocessor(obj) as pp:
                pp.do_include(obj.input_path)
            self.backend_input_files.add(obj.input_path)

        elif isinstance(obj, WebIDLCollection):
            self._handle_webidl_collection(obj)

        elif isinstance(obj, IPDLCollection):
            # Sorted lists give the concrete backend deterministic input.
            self._handle_ipdl_sources(
                obj.objdir,
                list(sorted(obj.all_sources())),
                list(sorted(obj.all_preprocessed_sources())),
                list(sorted(obj.all_regular_sources())),
            )

        elif isinstance(obj, XPCOMComponentManifests):
            self._handle_xpcom_collection(obj)

        elif isinstance(obj, UnifiedSources):
            if obj.generated_files:
                self._handle_generated_sources(obj.generated_files)

            # Unified sources aren't relevant to artifact builds.
            if self.environment.is_artifact_build:
                return True

            if obj.have_unified_mapping:
                self._write_unified_files(obj.unified_source_mapping, obj.objdir)
            # Concrete backends may opt in to per-file handling of unified
            # sources by defining _process_unified_sources.
            if hasattr(self, "_process_unified_sources"):
                self._process_unified_sources(obj)

        elif isinstance(obj, BaseProgram):
            self._binaries.programs.append(obj)
            return False

        elif isinstance(obj, SharedLibrary):
            self._binaries.shared_libraries.append(obj)
            return False

        elif isinstance(obj, SandboxedWasmLibrary):
            # The wasm library generates a header named after its basename.
            self._handle_generated_sources(
                [mozpath.join(obj.relobjdir, f"{obj.basename}.h")]
            )
            return False

        elif isinstance(obj, (Sources, HostSources)):
            if obj.generated_files:
                self._handle_generated_sources(obj.generated_files)
            return False

        elif isinstance(obj, GeneratedFile):
            # Only outputs needed before/during compilation count as
            # generated sources.
            if obj.required_during_compile or obj.required_before_compile:
                for f in itertools.chain(
                    obj.required_before_compile, obj.required_during_compile
                ):
                    fullpath = ObjDirPath(obj._context, "!" + f).full_path
                    self._handle_generated_sources([fullpath])
            return False

        elif isinstance(obj, Exports):
            # Exported files living in the objdir are, by definition,
            # generated.
            objdir_files = [
                f.full_path
                for path, files in obj.files.walk()
                for f in files
                if isinstance(f, ObjDirPath)
            ]
            if objdir_files:
                self._handle_generated_sources(objdir_files)
            return False

        else:
            return False

        return True

    def consume_finished(self):
        """Emit aggregate outputs once all objects have been consumed."""
        if len(self._idl_manager.modules):
            self._write_rust_xpidl_summary(self._idl_manager)
            self._handle_idl_manager(self._idl_manager)
            self._handle_xpidl_sources()

        # Every config.status that contributed objects is a backend input.
        for config in self._configs:
            self.backend_input_files.add(config.source)

        # Write out a machine-readable file describing binaries.
        topobjdir = self.environment.topobjdir
        with self._write_file(mozpath.join(topobjdir, "binaries.json")) as fh:
            d = {
                "shared_libraries": sorted(
                    (s.to_dict() for s in self._binaries.shared_libraries),
                    key=itemgetter("basename"),
                ),
                "programs": sorted(
                    (p.to_dict() for p in self._binaries.programs),
                    key=itemgetter("program"),
                ),
            }
            json.dump(d, fh, sort_keys=True, indent=4)

        # Write out a file listing generated sources.
        with self._write_file(mozpath.join(topobjdir, "generated-sources.json")) as fh:
            d = {"sources": sorted(self._generated_sources)}
            json.dump(d, fh, sort_keys=True, indent=4)

    def _expand_libs(self, input_bin):
        """Expand input_bin's link graph.

        Returns (objs, shared_libs, os_libs, static_libs); the seen_* sets
        guarantee each object file and library appears at most once.
        """
        os_libs = []
        shared_libs = []
        static_libs = []
        objs = []

        seen_objs = set()
        seen_libs = set()

        def add_objs(lib):
            # Collect lib's object files, skipping duplicates.
            for o in lib.objs:
                if o in seen_objs:
                    continue

                seen_objs.add(o)
                objs.append(o)

        def expand(lib, recurse_objs, system_libs):
            # Static-like libraries marked no_expand_lib are linked as
            # libraries and stop object recursion below them.
            if isinstance(lib, (HostLibrary, StaticLibrary, SandboxedWasmLibrary)):
                if lib.no_expand_lib:
                    static_libs.append(lib)
                    recurse_objs = False
                elif recurse_objs:
                    add_objs(lib)

                for l in lib.linked_libraries:
                    expand(l, recurse_objs, system_libs)

                if system_libs:
                    for l in lib.linked_system_libs:
                        if l not in seen_libs:
                            seen_libs.add(l)
                            os_libs.append(l)

            elif isinstance(lib, SharedLibrary):
                if lib not in seen_libs:
                    seen_libs.add(lib)
                    shared_libs.append(lib)

        add_objs(input_bin)

        # System libs of dependencies are only collected when input_bin is
        # itself a final (non static-like) binary.
        system_libs = not isinstance(
            input_bin, (HostLibrary, StaticLibrary, SandboxedWasmLibrary)
        )
        for lib in input_bin.linked_libraries:
            if isinstance(lib, (HostLibrary, StaticLibrary, SandboxedWasmLibrary)):
                expand(lib, True, system_libs)
            elif isinstance(lib, SharedLibrary):
                if lib not in seen_libs:
                    seen_libs.add(lib)
                    shared_libs.append(lib)

        for lib in input_bin.linked_system_libs:
            if lib not in seen_libs:
                seen_libs.add(lib)
                os_libs.append(lib)

        return (objs, shared_libs, os_libs, static_libs)

    def _make_list_file(self, kind, objdir, objs, name):
        """Write a linker list file for `objs` at objdir/name.

        Returns the string used to reference the file on the link command
        line, or None when there are no objects or the style is unknown.
        """
        if not objs:
            return None
        if kind == "target":
            list_style = self.environment.substs.get("EXPAND_LIBS_LIST_STYLE")
        else:
            # The host compiler is not necessarily the same kind as the target
            # compiler, so we can't be sure EXPAND_LIBS_LIST_STYLE is the right
            # style to use ; however, all compilers support the `list` type, so
            # use that. That doesn't cause any practical problem because where
            # it really matters to use something else than `list` is when
            # linking tons of objects (because of command line argument limits),
            # which only really happens for libxul.
            list_style = "list"
        list_file_path = mozpath.join(objdir, name)
        objs = [os.path.relpath(o, objdir) for o in objs]
        if list_style == "linkerscript":
            ref = list_file_path
            content = "\n".join('INPUT("%s")' % o for o in objs)
        elif list_style == "filelist":
            ref = "-Wl,-filelist," + list_file_path
            content = "\n".join(objs)
        elif list_style == "list":
            ref = "@" + list_file_path
            content = "\n".join(objs)
        else:
            return None

        mkdir(objdir)
        with self._write_file(list_file_path) as fh:
            fh.write(content)

        return ref

    def _handle_generated_sources(self, files):
        """Record `files` (absolute paths) as generated, stored relative to
        the topobjdir."""
        self._generated_sources.update(
            mozpath.relpath(f, self.environment.topobjdir) for f in files
        )

    def _handle_xpidl_sources(self):
        """Register the C++ headers and Rust bindings produced per IDL stem."""
        bindings_rt_dir = mozpath.join(
            self.environment.topobjdir, "dist", "xpcrs", "rt"
        )
        bindings_bt_dir = mozpath.join(
            self.environment.topobjdir, "dist", "xpcrs", "bt"
        )
        include_dir = mozpath.join(self.environment.topobjdir, "dist", "include")

        # Each stem yields a header plus runtime and build-time Rust bindings.
        self._handle_generated_sources(
            itertools.chain.from_iterable(
                (
                    mozpath.join(include_dir, "%s.h" % stem),
                    mozpath.join(bindings_rt_dir, "%s.rs" % stem),
                    mozpath.join(bindings_bt_dir, "%s.rs" % stem),
                )
                for stem in self._idl_manager.idl_stems()
            )
        )

    def _handle_webidl_collection(self, webidls):
        """Write WebIDL codegen configuration and unified sources, then hand
        off to the concrete backend via _handle_webidl_build."""

        bindings_dir = mozpath.join(self.environment.topobjdir, "dom", "bindings")

        all_inputs = set(webidls.all_static_sources())
        for s in webidls.all_non_static_basenames():
            all_inputs.add(mozpath.join(bindings_dir, s))

        generated_events_stems = webidls.generated_events_stems()
        exported_stems = webidls.all_regular_stems()

        # The WebIDL manager reads configuration from a JSON file. So, we
        # need to write this file early.
        o = dict(
            webidls=sorted(all_inputs),
            generated_events_stems=sorted(generated_events_stems),
            exported_stems=sorted(exported_stems),
            example_interfaces=sorted(webidls.example_interfaces),
        )

        file_lists = mozpath.join(bindings_dir, "file-lists.json")
        with self._write_file(file_lists) as fh:
            json.dump(o, fh, sort_keys=True, indent=2)

        import mozwebidlcodegen

        manager = mozwebidlcodegen.create_build_system_manager(
            self.environment.topsrcdir,
            self.environment.topobjdir,
            mozpath.join(self.environment.topobjdir, "dist"),
        )
        self._handle_generated_sources(manager.expected_build_output_files())
        self._write_unified_files(
            webidls.unified_source_mapping, bindings_dir, poison_windows_h=True
        )
        self._handle_webidl_build(
            bindings_dir,
            webidls.unified_source_mapping,
            webidls,
            manager.expected_build_output_files(),
            manager.GLOBAL_DEFINE_FILES,
        )

    def _handle_xpcom_collection(self, manifests):
        """Write the XPCOM component manifest list consumed by codegen."""
        components_dir = mozpath.join(manifests.topobjdir, "xpcom", "components")

        # The code generators read their configuration from this file, so it
        # needs to be written early.
        o = dict(manifests=sorted(manifests.all_sources()))

        conf_file = mozpath.join(components_dir, "manifest-lists.json")
        with self._write_file(conf_file) as fh:
            json.dump(o, fh, sort_keys=True, indent=2)

    def _write_unified_file(
        self, unified_file, source_filenames, output_directory, poison_windows_h=False
    ):
        """Write one unified C++ stub that #includes source_filenames, with
        guards against constructs that break unified compilation."""
        with self._write_file(mozpath.join(output_directory, unified_file)) as f:
            f.write("#define MOZ_UNIFIED_BUILD\n")
            includeTemplate = '#include "%(cppfile)s"'
            if poison_windows_h:
                # Fail the build if a source pulled in raw windows.h.
                includeTemplate += (
                    "\n"
                    "#if defined(_WINDOWS_) && !defined(MOZ_WRAPPED_WINDOWS_H)\n"
                    '#pragma message("wrapper failure reason: " MOZ_WINDOWS_WRAPPER_DISABLED_REASON)\n' # noqa
                    '#error "%(cppfile)s included unwrapped windows.h"\n'
                    "#endif"
                )
            # These macros leak between concatenated translation units, so
            # reject sources that define them.
            includeTemplate += (
                "\n"
                "#ifdef PL_ARENA_CONST_ALIGN_MASK\n"
                '#error "%(cppfile)s uses PL_ARENA_CONST_ALIGN_MASK, '
                'so it cannot be built in unified mode."\n'
                "#undef PL_ARENA_CONST_ALIGN_MASK\n"
                "#endif\n"
                "#ifdef INITGUID\n"
                '#error "%(cppfile)s defines INITGUID, '
                'so it cannot be built in unified mode."\n'
                "#undef INITGUID\n"
                "#endif"
            )
            f.write(
                "\n".join(includeTemplate % {"cppfile": s} for s in source_filenames)
            )

    def _write_unified_files(
        self, unified_source_mapping, output_directory, poison_windows_h=False
    ):
        """Write every unified stub described by unified_source_mapping."""
        for unified_file, source_filenames in unified_source_mapping:
            self._write_unified_file(
                unified_file, source_filenames, output_directory, poison_windows_h
            )

    def localized_path(self, relativesrcdir, filename):
        """Return the localized path for a file.

        Given ``relativesrcdir``, a path relative to the topsrcdir, return a path to ``filename``
        from the current locale as specified by ``MOZ_UI_LOCALE``, using ``L10NBASEDIR`` as the
        parent directory for non-en-US locales.
        """
        ab_cd = self.environment.substs["MOZ_UI_LOCALE"][0]
        l10nbase = mozpath.join(self.environment.substs["L10NBASEDIR"], ab_cd)
        # Filenames from LOCALIZED_FILES will start with en-US/.
        if filename.startswith("en-US/"):
            e, filename = filename.split("en-US/")
            assert not e
        if ab_cd == "en-US":
            return mozpath.join(
                self.environment.topsrcdir, relativesrcdir, "en-US", filename
            )
        # A trailing "locales" component is dropped for non-en-US lookups.
        if mozpath.basename(relativesrcdir) == "locales":
            l10nrelsrcdir = mozpath.dirname(relativesrcdir)
        else:
            l10nrelsrcdir = relativesrcdir
        return mozpath.join(l10nbase, l10nrelsrcdir, filename)

    def _consume_jar_manifest(self, obj):
        """Preprocess a jar.mn manifest and re-emit its entries as
        FinalTargetFiles/LocalizedFiles (and chrome manifest) objects."""
        # Ideally, this would all be handled somehow in the emitter, but
        # this would require all the magic surrounding l10n and addons in
        # the recursive make backend to die, which is not going to happen
        # any time soon enough.
        # Notably missing:
        # - DEFINES from config/config.mk
        # - The equivalent of -e when USE_EXTENSION_MANIFEST is set in
        #   moz.build, but it doesn't matter in dist/bin.
        pp = Preprocessor()
        if obj.defines:
            pp.context.update(obj.defines.defines)
        pp.context.update(self.environment.defines)
        ab_cd = obj.config.substs["MOZ_UI_LOCALE"][0]
        pp.context.update(AB_CD=ab_cd)
        pp.out = JarManifestParser()
        try:
            pp.do_include(obj.path.full_path)
        except DeprecatedJarManifest as e:
            raise DeprecatedJarManifest(
                "Parsing error while processing %s: %s" % (obj.path.full_path, e)
            )
        self.backend_input_files |= pp.includes

        for jarinfo in pp.out:
            # Each jar entry gets a synthetic moz.build-like context so the
            # emitted objects carry the right config and source attribution.
            jar_context = Context(
                allowed_variables=VARIABLES, config=obj._context.config
            )
            jar_context.push_source(obj._context.main_path)
            jar_context.push_source(obj.path.full_path)

            install_target = obj.install_target
            if jarinfo.base:
                install_target = mozpath.normpath(
                    mozpath.join(install_target, jarinfo.base)
                )
            jar_context["FINAL_TARGET"] = install_target
            if obj.defines:
                jar_context["DEFINES"] = obj.defines.defines
            files = jar_context["FINAL_TARGET_FILES"]
            files_pp = jar_context["FINAL_TARGET_PP_FILES"]
            localized_files = jar_context["LOCALIZED_FILES"]
            localized_files_pp = jar_context["LOCALIZED_PP_FILES"]

            for e in jarinfo.entries:
                if e.is_locale:
                    # Locale entries resolve against en-US in the source tree.
                    if jarinfo.relativesrcdir:
                        src = "/%s" % jarinfo.relativesrcdir
                    else:
                        src = ""
                    src = mozpath.join(src, "en-US", e.source)
                else:
                    src = e.source

                src = Path(jar_context, src)

                if "*" not in e.source and not os.path.exists(src.full_path):
                    if e.is_locale:
                        raise Exception(
                            "%s: Cannot find %s (tried %s)"
                            % (obj.path, e.source, src.full_path)
                        )
                    # A non-existing non-locale source is assumed to be
                    # generated in the objdir ("!" prefix).
                    if e.source.startswith("/"):
                        src = Path(jar_context, "!" + e.source)
                    else:
                        # This actually gets awkward if the jar.mn is not
                        # in the same directory as the moz.build declaring
                        # it, but it's how it works in the recursive make,
                        # not that anything relies on that, but it's simpler.
                        src = Path(obj._context, "!" + e.source)

                output_basename = mozpath.basename(e.output)
                if output_basename != src.target_basename:
                    src = RenamedSourcePath(jar_context, (src, output_basename))
                path = mozpath.dirname(mozpath.join(jarinfo.name, e.output))

                if e.preprocess:
                    if "*" in e.source:
                        raise Exception(
                            "%s: Wildcards are not supported with "
                            "preprocessing" % obj.path
                        )
                    if e.is_locale:
                        localized_files_pp[path] += [src]
                    else:
                        files_pp[path] += [src]
                else:
                    if e.is_locale:
                        localized_files[path] += [src]
                    else:
                        files[path] += [src]

            # Re-emit the collected entries through the normal object
            # consumption pipeline.
            if files:
                self.consume_object(FinalTargetFiles(jar_context, files))
            if files_pp:
                self.consume_object(FinalTargetPreprocessedFiles(jar_context, files_pp))
            if localized_files:
                self.consume_object(LocalizedFiles(jar_context, localized_files))
            if localized_files_pp:
                self.consume_object(
                    LocalizedPreprocessedFiles(jar_context, localized_files_pp)
                )

            for m in jarinfo.chrome_manifests:
                entry = parse_manifest_line(
                    mozpath.dirname(jarinfo.name),
                    m.replace("%", mozpath.basename(jarinfo.name) + "/"),
                )
                self.consume_object(
                    ChromeManifestEntry(
                        jar_context, "%s.manifest" % jarinfo.name, entry
                    )
                )

    def _write_rust_xpidl_summary(self, manager):
        """Write out a rust file which includes the generated xpcom rust modules"""
        topobjdir = self.environment.topobjdir

        include_tmpl = 'include!(mozbuild::objdir_path!("dist/xpcrs/%s/%s.rs"))'

        # Ensure deterministic output files.
        stems = sorted(manager.idl_stems())

        # Runtime bindings: one include statement per stem.
        with self._write_file(
            mozpath.join(topobjdir, "dist", "xpcrs", "rt", "all.rs")
        ) as fh:
            fh.write("// THIS FILE IS GENERATED - DO NOT EDIT\n\n")
            for stem in stems:
                fh.write(include_tmpl % ("rt", stem))
                fh.write(";\n")

        # Build-time bindings: emitted as elements of a slice expression.
        with self._write_file(
            mozpath.join(topobjdir, "dist", "xpcrs", "bt", "all.rs")
        ) as fh:
            fh.write("// THIS FILE IS GENERATED - DO NOT EDIT\n\n")
            fh.write("&[\n")
            for stem in stems:
                fh.write(include_tmpl % ("bt", stem))
                fh.write(",\n")
            fh.write("]\n")
diff --git a/python/mozbuild/mozbuild/backend/configenvironment.py b/python/mozbuild/mozbuild/backend/configenvironment.py
new file mode 100644
index 0000000000..eef1b62ee6
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/configenvironment.py
@@ -0,0 +1,357 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+import os
+import sys
+from collections import OrderedDict
+from collections.abc import Iterable
+from pathlib import Path
+from types import ModuleType
+
+import mozpack.path as mozpath
+import six
+
+from mozbuild.shellutil import quote as shell_quote
+from mozbuild.util import (
+ FileAvoidWrite,
+ ReadOnlyDict,
+ memoized_property,
+ system_encoding,
+)
+
+
class ConfigStatusFailure(Exception):
    """Raised when executing a config.status file fails."""
+
+
class BuildConfig(object):
    """Represents the output of configure."""

    # Maps config.status path -> (mtime, compiled code object) so repeated
    # loads of an unchanged file skip recompilation.
    _CODE_CACHE = {}

    def __init__(self):
        self.topsrcdir = None
        self.topobjdir = None
        self.defines = {}
        self.substs = {}
        self.files = []
        self.mozconfig = None

    @classmethod
    def from_config_status(cls, path):
        """Create an instance from a config.status file."""
        cache = cls._CODE_CACHE
        mtime = os.path.getmtime(path)

        # Compile on first sight of the path, or when the file changed on
        # disk since the cached entry was made.
        cached = cache.get(path)
        if cached is None or cached[0] != mtime:
            # Add config.status manually to sys.modules so it gets picked up
            # by iter_modules_in_path() for automatic dependencies.
            mod = ModuleType("config.status")
            mod.__file__ = path
            sys.modules["config.status"] = mod

            with open(path, "rt") as fh:
                source = fh.read()
            cache[path] = (mtime, compile(source, path, "exec", dont_inherit=1))

        global_ns = {"__builtins__": __builtins__, "__file__": path}
        local_ns = {}
        try:
            exec(cache[path][1], global_ns, local_ns)
        except Exception:
            raise ConfigStatusFailure()

        # Surface every name the script declared in __all__ as an attribute.
        config = BuildConfig()
        for name in local_ns["__all__"]:
            setattr(config, name, local_ns[name])

        return config
+
+
class ConfigEnvironment(object):
    """Perform actions associated with a configured but bare objdir.

    The purpose of this class is to preprocess files from the source directory
    and output results in the object directory.

    There are two types of files: config files and config headers,
    each treated through a different member function.

    Creating a ConfigEnvironment requires a few arguments:
      - topsrcdir and topobjdir are, respectively, the top source and
        the top object directory.
      - defines is a dict filled from AC_DEFINE and AC_DEFINE_UNQUOTED in
        autoconf.
      - substs is a dict filled from AC_SUBST in autoconf.

    ConfigEnvironment automatically defines one additional substs variable
    from all the defines:
      - ACDEFINES contains the defines in the form -DNAME=VALUE, for use on
        preprocessor command lines. The order in which defines were given
        when creating the ConfigEnvironment is preserved.

    and two other additional subst variables from all the other substs:
      - ALLSUBSTS contains the substs in the form NAME = VALUE, in sorted
        order, for use in autoconf.mk. It includes ACDEFINES.
        Only substs with a VALUE are included, such that the resulting file
        doesn't change when new empty substs are added.
        This results in less invalidation of build dependencies in the case
        of autoconf.mk.
      - ALLEMPTYSUBSTS contains the substs with an empty value, in the form
        NAME =.

    ConfigEnvironment expects a "top_srcdir" subst to be set with the top
    source directory, in msys format on windows. It is used to derive a
    "srcdir" subst when treating config files. It can either be an absolute
    path or a path relative to the topobjdir.
    """

    def __init__(
        self,
        topsrcdir,
        topobjdir,
        defines=None,
        substs=None,
        source=None,
        mozconfig=None,
    ):

        # Default the source to the objdir's config.status.
        if not source:
            source = mozpath.join(topobjdir, "config.status")
        self.source = source
        # defines are frozen immediately; substs stay mutable until the
        # derived ACDEFINES/ALLSUBSTS/ALLEMPTYSUBSTS values are added below.
        self.defines = ReadOnlyDict(defines or {})
        self.substs = dict(substs or {})
        self.topsrcdir = mozpath.abspath(topsrcdir)
        self.topobjdir = mozpath.abspath(topobjdir)
        self.mozconfig = mozpath.abspath(mozconfig) if mozconfig else None
        # Library/binary naming conventions taken from configure output.
        self.lib_prefix = self.substs.get("LIB_PREFIX", "")
        # NOTE(review): lib_suffix is only assigned when LIB_SUFFIX is present
        # in substs, so the attribute may be missing otherwise — presumably
        # every real configure run defines it; confirm before relying on it.
        if "LIB_SUFFIX" in self.substs:
            self.lib_suffix = ".%s" % self.substs["LIB_SUFFIX"]
        self.dll_prefix = self.substs.get("DLL_PREFIX", "")
        self.dll_suffix = self.substs.get("DLL_SUFFIX", "")
        self.host_dll_prefix = self.substs.get("HOST_DLL_PREFIX", "")
        self.host_dll_suffix = self.substs.get("HOST_DLL_SUFFIX", "")
        # Import libraries fall back to DLL naming when no dedicated
        # import-library suffix is configured.
        if self.substs.get("IMPORT_LIB_SUFFIX"):
            self.import_prefix = self.lib_prefix
            self.import_suffix = ".%s" % self.substs["IMPORT_LIB_SUFFIX"]
        else:
            self.import_prefix = self.dll_prefix
            self.import_suffix = self.dll_suffix
        if self.substs.get("HOST_IMPORT_LIB_SUFFIX"):
            self.host_import_prefix = self.substs.get("HOST_LIB_PREFIX", "")
            self.host_import_suffix = ".%s" % self.substs["HOST_IMPORT_LIB_SUFFIX"]
        else:
            self.host_import_prefix = self.host_dll_prefix
            self.host_import_suffix = self.host_dll_suffix
        self.bin_suffix = self.substs.get("BIN_SUFFIX", "")

        global_defines = [name for name in self.defines]
        # $ is escaped as $$ so values survive make's own expansion.
        self.substs["ACDEFINES"] = " ".join(
            [
                "-D%s=%s" % (name, shell_quote(self.defines[name]).replace("$", "$$"))
                for name in sorted(global_defines)
            ]
        )

        def serialize(name, obj):
            # Render a subst value as a single string for autoconf.mk.
            if isinstance(obj, six.string_types):
                return obj
            if isinstance(obj, Iterable):
                return " ".join(obj)
            raise Exception("Unhandled type %s for %s", type(obj), str(name))

        # Non-empty substs go to ALLSUBSTS; empty ones to ALLEMPTYSUBSTS,
        # so that adding a new empty subst doesn't invalidate autoconf.mk.
        self.substs["ALLSUBSTS"] = "\n".join(
            sorted(
                [
                    "%s = %s" % (name, serialize(name, self.substs[name]))
                    for name in self.substs
                    if self.substs[name]
                ]
            )
        )
        self.substs["ALLEMPTYSUBSTS"] = "\n".join(
            sorted(["%s =" % name for name in self.substs if not self.substs[name]])
        )

        # Freeze substs now that the derived values are in place.
        self.substs = ReadOnlyDict(self.substs)

    @property
    def is_artifact_build(self):
        # Whether this objdir was configured for artifact (downloaded
        # binaries) builds.
        return self.substs.get("MOZ_ARTIFACT_BUILDS", False)

    @memoized_property
    def acdefines(self):
        # The raw defines as a read-only mapping, computed once per instance.
        acdefines = dict((name, self.defines[name]) for name in self.defines)
        return ReadOnlyDict(acdefines)

    @staticmethod
    def from_config_status(path):
        """Build a ConfigEnvironment from an existing config.status file."""
        config = BuildConfig.from_config_status(path)

        return ConfigEnvironment(
            config.topsrcdir, config.topobjdir, config.defines, config.substs, path
        )
+
+
class PartialConfigDict(object):
    """Facilitates mapping the config.statusd defines & substs with dict-like access.

    This allows a buildconfig client to use buildconfig.defines['FOO'] (and
    similar for substs), where the value of FOO is delay-loaded until it is
    needed.
    """

    def __init__(self, config_statusd, typ, environ_override=False):
        # In-memory cache of values already read from disk or set directly.
        self._dict = {}
        # Directory holding one JSON file per variable, e.g.
        # config.statusd/substs.
        self._datadir = mozpath.join(config_statusd, typ)
        # config.track lists every variable file written by the last fill.
        self._config_track = mozpath.join(self._datadir, "config.track")
        # Variable files read so far; exposed for make dependency tracking.
        self._files = set()
        # When True, os.environ may override values (with a few exceptions,
        # see __getitem__).
        self._environ_override = environ_override

    def _load_config_track(self):
        """Return the set of variable filenames recorded in config.track."""
        existing_files = set()
        try:
            with open(self._config_track) as fh:
                existing_files.update(fh.read().splitlines())
        except IOError:
            # A missing track file simply means nothing was written yet.
            pass
        return existing_files

    def _write_file(self, key, value):
        """Write `value` as JSON to the file named after `key`.

        Returns the filename that was written.
        """
        filename = mozpath.join(self._datadir, key)
        with FileAvoidWrite(filename) as fh:
            to_write = json.dumps(value, indent=4)
            fh.write(to_write.encode(system_encoding))
        return filename

    def _fill_group(self, values):
        """Write one file per entry in `values` and refresh config.track.

        Files from a previous fill that are no longer present are overwritten
        with JSON null rather than removed (see comment below).
        """
        # Clear out any cached values. This is mostly for tests that will check
        # the environment, write out a new set of variables, and then check the
        # environment again. Normally only configure ends up calling this
        # function, and other consumers create their own
        # PartialConfigEnvironments in new python processes.
        self._dict = {}

        existing_files = self._load_config_track()
        existing_files = {Path(f) for f in existing_files}

        new_files = set()
        for k, v in six.iteritems(values):
            new_files.add(Path(self._write_file(k, v)))

        for filename in existing_files - new_files:
            # We can't actually os.remove() here, since make would not see that the
            # file has been removed and that the target needs to be updated. Instead
            # we just overwrite the file with a value of None, which is equivalent
            # to a non-existing file.
            with FileAvoidWrite(filename) as fh:
                json.dump(None, fh)

        with FileAvoidWrite(self._config_track) as fh:
            for f in sorted(new_files):
                fh.write("%s\n" % f)

    def __getitem__(self, key):
        """Return the value for `key`.

        Environment variables take precedence when environ_override is set;
        missing or tombstoned (None) values raise KeyError.
        """
        if self._environ_override:
            # NOTE(review): CPP/CXXCPP/SHELL are deliberately never taken
            # from the environment — presumably because their environment
            # forms are not usable as configure-style values; confirm.
            if (key not in ("CPP", "CXXCPP", "SHELL")) and (key in os.environ):
                return os.environ[key]

        if key not in self._dict:
            data = None
            try:
                filename = mozpath.join(self._datadir, key)
                # Record the file as a dependency even when the read fails.
                self._files.add(filename)
                with open(filename) as f:
                    data = json.load(f)
            except IOError:
                pass
            self._dict[key] = data

        # None marks a deleted/never-written variable (see _fill_group).
        if self._dict[key] is None:
            raise KeyError("'%s'" % key)
        return self._dict[key]

    def __setitem__(self, key, value):
        self._dict[key] = value

    def get(self, key, default=None):
        """Dict-style get() honoring the same lookup rules as __getitem__."""
        return self[key] if key in self else default

    def __contains__(self, key):
        try:
            return self[key] is not None
        except KeyError:
            return False

    def iteritems(self):
        """Iterate (variable, value) pairs for everything in config.track."""
        existing_files = self._load_config_track()
        for f in existing_files:
            # The track file contains filenames, and the basename is the
            # variable name.
            var = mozpath.basename(f)
            yield var, self[var]
+
+
class PartialConfigEnvironment(object):
    """Allows access to individual config.status items via config.statusd/* files.

    Unlike the full ConfigEnvironment (backed by config.status), this class
    exposes — and tracks dependencies on — individual configure values. It is
    intended for use during the build to handle things like GENERATED_FILES,
    CONFIGURE_DEFINE_FILES, and anything else that may need to access specific
    substs or defines.

    Only the topobjdir is required, which distinguishes the top-level
    environment from the js/src one.

    Two derived values are stored alongside the raw configure output:

    - substs['ACDEFINES']: the defines rendered as -DNAME=VALUE preprocessor
      command line arguments. The order in which defines were given when
      creating the ConfigEnvironment is preserved.

    - defines['ALLDEFINES']: all of the global defines as a dictionary,
      intended to be used instead of the defines structure from config.status
      so that scripts can depend directly on its value.
    """

    def __init__(self, topobjdir):
        statusd_dir = mozpath.join(topobjdir, "config.statusd")
        # substs may be overridden from the environment; defines may not.
        self.substs = PartialConfigDict(statusd_dir, "substs", environ_override=True)
        self.defines = PartialConfigDict(statusd_dir, "defines")
        self.topobjdir = topobjdir

    def write_vars(self, config):
        """Split `config` (a dict with 'substs' and 'defines' mappings) into
        per-variable files under config.statusd."""
        substs = config["substs"].copy()
        defines = config["defines"].copy()

        define_names = [name for name in config["defines"]]
        # $ is escaped as $$ so values survive make's own expansion.
        substs["ACDEFINES"] = " ".join(
            "-D%s=%s"
            % (name, shell_quote(config["defines"][name]).replace("$", "$$"))
            for name in sorted(define_names)
        )

        # ALLDEFINES preserves the original define ordering.
        all_defines = OrderedDict()
        for name in define_names:
            all_defines[name] = config["defines"][name]
        defines["ALLDEFINES"] = all_defines

        self.substs._fill_group(substs)
        self.defines._fill_group(defines)

    def get_dependencies(self):
        """Return $(wildcard ...) make dependencies for every variable file
        accessed so far."""
        tracked = self.substs._files | self.defines._files
        return ["$(wildcard %s)" % f for f in tracked]
diff --git a/python/mozbuild/mozbuild/backend/cpp_eclipse.py b/python/mozbuild/mozbuild/backend/cpp_eclipse.py
new file mode 100644
index 0000000000..413cca3f75
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/cpp_eclipse.py
@@ -0,0 +1,876 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import errno
+import glob
+import os
+import shutil
+import subprocess
+from xml.sax.saxutils import quoteattr
+
+from mozbuild.base import ExecutionSummary
+
+from ..frontend.data import ComputedFlags
+from .common import CommonBackend
+
+# TODO Have ./mach eclipse generate the workspace and index it:
+# /Users/bgirard/mozilla/eclipse/eclipse/eclipse/eclipse -application org.eclipse.cdt.managedbuilder.core.headlessbuild -data $PWD/workspace -importAll $PWD/eclipse
+# Open eclipse:
+# /Users/bgirard/mozilla/eclipse/eclipse/eclipse/eclipse -data $PWD/workspace
+
+
class CppEclipseBackend(CommonBackend):
    """Backend that generates Cpp Eclipse project files."""

    def __init__(self, environment):
        # The generated workspace and the headless import below assume an
        # `eclipse` binary on PATH and POSIX-style paths; refuse early on
        # Windows rather than produce a broken workspace.
        if os.name == "nt":
            raise Exception(
                "Eclipse is not supported on Windows. "
                "Consider using Visual Studio instead."
            )
        super(CppEclipseBackend, self).__init__(environment)

    def _init(self):
        """Cache the substs and filesystem paths the backend needs."""
        CommonBackend._init(self)

        # Maps a project-relative directory name ("tree/<relsrcdir>" or a
        # generated-code link name) to {"includes": [...], "defines": [...]},
        # populated by consume_object().
        self._args_for_dirs = {}
        self._project_name = "Gecko"
        self._workspace_dir = self._get_workspace_path()
        self._workspace_lang_dir = os.path.join(
            self._workspace_dir, ".metadata/.plugins/org.eclipse.cdt.core"
        )
        self._project_dir = os.path.join(self._workspace_dir, self._project_name)
        # Remembered so _import_project() can skip re-importing into an
        # already-existing workspace.
        self._overwriting_workspace = os.path.isdir(self._workspace_dir)

        self._macbundle = self.environment.substs["MOZ_MACBUNDLE_NAME"]
        self._appname = self.environment.substs["MOZ_APP_NAME"]
        self._bin_suffix = self.environment.substs["BIN_SUFFIX"]
        self._cxx = self.environment.substs["CXX"]
        # Note: We need the C Pre Processor (CPP) flags, not the CXX flags
        self._cppflags = self.environment.substs.get("CPPFLAGS", "")

    def summary(self):
        """Return the ExecutionSummary printed after the backend runs."""
        return ExecutionSummary(
            "CppEclipse backend executed in {execution_time:.2f}s\n"
            'Generated Cpp Eclipse workspace in "{workspace:s}".\n'
            "If missing, import the project using File > Import > General > Existing Project into workspace\n"
            "\n"
            "Run with: eclipse -data {workspace:s}\n",
            execution_time=self._execution_time,
            workspace=self._workspace_dir,
        )

    def _get_workspace_path(self):
        """Compute the workspace path for this srcdir/objdir pair."""
        return CppEclipseBackend.get_workspace_path(
            self.environment.topsrcdir, self.environment.topobjdir
        )

    @staticmethod
    def get_workspace_path(topsrcdir, topobjdir):
        # Eclipse doesn't support having the workspace inside the srcdir.
        # Since most people have their objdir inside their srcdir it's easier
        # and more consistent to just put the workspace along side the srcdir
        srcdir_parent = os.path.dirname(topsrcdir)
        workspace_dirname = "eclipse_" + os.path.basename(topobjdir)
        return os.path.join(srcdir_parent, workspace_dirname)

    def consume_object(self, obj):
        """Accumulate per-directory include paths and defines from
        ComputedFlags objects; all other object types are ignored."""
        reldir = getattr(obj, "relsrcdir", None)

        # Note that unlike VS, Eclipse' indexer seem to crawl the headers and
        # isn't picky about the local includes.
        if isinstance(obj, ComputedFlags):
            args = self._args_for_dirs.setdefault(
                "tree/" + reldir, {"includes": [], "defines": []}
            )
            # use the same args for any objdirs we include:
            if reldir == "dom/bindings":
                self._args_for_dirs.setdefault("generated-webidl", args)
            if reldir == "ipc/ipdl":
                self._args_for_dirs.setdefault("generated-ipdl", args)

            includes = args["includes"]
            if "BASE_INCLUDES" in obj.flags and obj.flags["BASE_INCLUDES"]:
                includes += obj.flags["BASE_INCLUDES"]
            if "LOCAL_INCLUDES" in obj.flags and obj.flags["LOCAL_INCLUDES"]:
                includes += obj.flags["LOCAL_INCLUDES"]

            defs = args["defines"]
            if "DEFINES" in obj.flags and obj.flags["DEFINES"]:
                defs += obj.flags["DEFINES"]
            if "LIBRARY_DEFINES" in obj.flags and obj.flags["LIBRARY_DEFINES"]:
                defs += obj.flags["LIBRARY_DEFINES"]

        return True

    def consume_finished(self):
        """Write every workspace/project/prefs file, then import the
        project into the workspace via a headless eclipse run."""
        settings_dir = os.path.join(self._project_dir, ".settings")
        launch_dir = os.path.join(self._project_dir, "RunConfigurations")
        workspace_settings_dir = os.path.join(
            self._workspace_dir, ".metadata/.plugins/org.eclipse.core.runtime/.settings"
        )

        for dir_name in [
            self._project_dir,
            settings_dir,
            launch_dir,
            workspace_settings_dir,
            self._workspace_lang_dir,
        ]:
            try:
                os.makedirs(dir_name)
            except OSError as e:
                # Directories may survive from a previous run; only a real
                # failure is fatal.
                if e.errno != errno.EEXIST:
                    raise

        project_path = os.path.join(self._project_dir, ".project")
        with open(project_path, "w") as fh:
            self._write_project(fh)

        cproject_path = os.path.join(self._project_dir, ".cproject")
        with open(cproject_path, "w") as fh:
            self._write_cproject(fh)

        language_path = os.path.join(settings_dir, "language.settings.xml")
        with open(language_path, "w") as fh:
            self._write_language_settings(fh)

        workspace_language_path = os.path.join(
            self._workspace_lang_dir, "language.settings.xml"
        )
        with open(workspace_language_path, "w") as fh:
            workspace_lang_settings = WORKSPACE_LANGUAGE_SETTINGS_TEMPLATE
            workspace_lang_settings = workspace_lang_settings.replace(
                "@COMPILER_FLAGS@", self._cxx + " " + self._cppflags
            )
            fh.write(workspace_lang_settings)

        self._write_launch_files(launch_dir)

        core_resources_prefs_path = os.path.join(
            workspace_settings_dir, "org.eclipse.core.resources.prefs"
        )
        with open(core_resources_prefs_path, "w") as fh:
            fh.write(STATIC_CORE_RESOURCES_PREFS)

        core_runtime_prefs_path = os.path.join(
            workspace_settings_dir, "org.eclipse.core.runtime.prefs"
        )
        with open(core_runtime_prefs_path, "w") as fh:
            fh.write(STATIC_CORE_RUNTIME_PREFS)

        ui_prefs_path = os.path.join(workspace_settings_dir, "org.eclipse.ui.prefs")
        with open(ui_prefs_path, "w") as fh:
            fh.write(STATIC_UI_PREFS)

        cdt_ui_prefs_path = os.path.join(
            workspace_settings_dir, "org.eclipse.cdt.ui.prefs"
        )
        cdt_ui_prefs = STATIC_CDT_UI_PREFS
        # Here we generate the code formatter that will show up in the UI with
        # the name "Mozilla". The formatter is stored as a single line of XML
        # in the org.eclipse.cdt.ui.formatterprofiles pref.
        cdt_ui_prefs += """org.eclipse.cdt.ui.formatterprofiles=<?xml version\="1.0" encoding\="UTF-8" standalone\="no"?>\\n<profiles version\="1">\\n<profile kind\="CodeFormatterProfile" name\="Mozilla" version\="1">\\n"""
        XML_PREF_TEMPLATE = """<setting id\="@PREF_NAME@" value\="@PREF_VAL@"/>\\n"""
        for line in FORMATTER_SETTINGS.splitlines():
            # Each FORMATTER_SETTINGS line is "pref=value"; re-encode it as a
            # <setting/> element inside the single-line profile XML.
            [pref, val] = line.split("=")
            cdt_ui_prefs += XML_PREF_TEMPLATE.replace("@PREF_NAME@", pref).replace(
                "@PREF_VAL@", val
            )
        cdt_ui_prefs += "</profile>\\n</profiles>\\n"
        with open(cdt_ui_prefs_path, "w") as fh:
            fh.write(cdt_ui_prefs)

        cdt_core_prefs_path = os.path.join(
            workspace_settings_dir, "org.eclipse.cdt.core.prefs"
        )
        with open(cdt_core_prefs_path, "w") as fh:
            cdt_core_prefs = STATIC_CDT_CORE_PREFS
            # When we generated the code formatter called "Mozilla" above, we
            # also set it to be the active formatter. When a formatter is set
            # as the active formatter all its prefs are set in this prefs file,
            # so we need add those now:
            cdt_core_prefs += FORMATTER_SETTINGS
            fh.write(cdt_core_prefs)

        editor_prefs_path = os.path.join(
            workspace_settings_dir, "org.eclipse.ui.editors.prefs"
        )
        with open(editor_prefs_path, "w") as fh:
            fh.write(EDITOR_SETTINGS)

        # Now import the project into the workspace
        self._import_project()

    def _import_project(self):
        # If the workspace already exists then don't import the project again because
        # eclipse doesn't handle this properly
        if self._overwriting_workspace:
            return

        # We disable the indexer otherwise we're forced to index
        # the whole codebase when importing the project. Indexing the project can take 20 minutes.
        self._write_noindex()

        # NOTE(review): "-nosplash" sits between "-application" and the
        # application id here; the conventional order is
        # "-nosplash -application <id>". Confirm eclipse tolerates this
        # before reordering.
        try:
            subprocess.check_call(
                [
                    "eclipse",
                    "-application",
                    "-nosplash",
                    "org.eclipse.cdt.managedbuilder.core.headlessbuild",
                    "-data",
                    self._workspace_dir,
                    "-importAll",
                    self._project_dir,
                ]
            )
        except OSError as e:
            # Remove the workspace directory so we re-generate it and
            # try to import again when the backend is invoked again.
            shutil.rmtree(self._workspace_dir)

            if e.errno == errno.ENOENT:
                raise Exception(
                    "Failed to launch eclipse to import project. "
                    "Ensure 'eclipse' is in your PATH and try again"
                )
            else:
                raise
        finally:
            # Always re-enable the indexer, even if the import failed.
            self._remove_noindex()

    def _write_noindex(self):
        """Drop a prefs file that disables the CDT indexer during import."""
        noindex_path = os.path.join(
            self._project_dir, ".settings/org.eclipse.cdt.core.prefs"
        )
        with open(noindex_path, "w") as fh:
            fh.write(NOINDEX_TEMPLATE)

    def _remove_noindex(self):
        # Below we remove the config file that temporarily disabled the indexer
        # while we were importing the project. Unfortunately, CDT doesn't
        # notice indexer settings changes in config files when it restarts. To
        # work around that we remove the index database here to force it to:
        for f in glob.glob(os.path.join(self._workspace_lang_dir, "Gecko.*.pdom")):
            os.remove(f)

        noindex_path = os.path.join(
            self._project_dir, ".settings/org.eclipse.cdt.core.prefs"
        )
        # This may fail if the entire tree has been removed; that's fine.
        try:
            os.remove(noindex_path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise

    def _write_language_settings(self, fh):
        """Write the per-directory include paths and defines collected in
        consume_object() as CDT "User Setting Entries"."""

        def add_abs_include_path(absinclude):
            # Only absolute -I flags are expected here.
            assert absinclude[:3] == "-I/"
            return LANGUAGE_SETTINGS_TEMPLATE_DIR_INCLUDE.replace(
                "@INCLUDE_PATH@", absinclude[2:]
            )

        def add_objdir_include_path(relpath):
            p = os.path.join(self.environment.topobjdir, relpath)
            return LANGUAGE_SETTINGS_TEMPLATE_DIR_INCLUDE.replace("@INCLUDE_PATH@", p)

        def add_define(name, value):
            define = LANGUAGE_SETTINGS_TEMPLATE_DIR_DEFINE
            define = define.replace("@NAME@", name)
            # We use quoteattr here because some defines contain characters
            # such as "<" and '"' which need proper XML escaping.
            define = define.replace("@VALUE@", quoteattr(value))
            return define

        fh.write(LANGUAGE_SETTINGS_TEMPLATE_HEADER)

        # Unfortunately, whenever we set a user defined include path or define
        # on a directory, Eclipse ignores user defined include paths and defines
        # on ancestor directories. That means that we need to add all the
        # common include paths and defines to every single directory entry that
        # we add settings for. (Fortunately that doesn't appear to have a
        # noticeable impact on the time it takes to open the generated Eclipse
        # project.) We do that by generating a template here that we can then
        # use for each individual directory in the loop below.
        #
        dirsettings_template = LANGUAGE_SETTINGS_TEMPLATE_DIR_HEADER

        # Add OS_COMPILE_CXXFLAGS args (same as OS_COMPILE_CFLAGS):
        dirsettings_template = dirsettings_template.replace(
            "@PREINCLUDE_FILE_PATH@",
            os.path.join(self.environment.topobjdir, "dist/include/mozilla-config.h"),
        )
        dirsettings_template += add_define("MOZILLA_CLIENT", "1")

        # Add EXTRA_INCLUDES args:
        dirsettings_template += add_objdir_include_path("dist/include")

        # Add OS_INCLUDES args:
        # XXX media/webrtc/trunk/webrtc's moz.builds reset this.
        dirsettings_template += add_objdir_include_path("dist/include/nspr")
        dirsettings_template += add_objdir_include_path("dist/include/nss")

        # Finally, add anything else that makes things work better.
        #
        # Because of https://developer.mozilla.org/en-US/docs/Eclipse_CDT#Headers_are_only_parsed_once
        # we set MOZILLA_INTERNAL_API for all directories to make sure
        # headers are indexed with MOZILLA_INTERNAL_API set. Unfortunately
        # this means that MOZILLA_EXTERNAL_API code will suffer.
        #
        # TODO: If we're doing this for MOZILLA_EXTERNAL_API then we may want
        # to do it for other LIBRARY_DEFINES's defines too. Well, at least for
        # STATIC_EXPORTABLE_JS_API which may be important to JS people.
        # (The other two LIBRARY_DEFINES defines -- MOZ_HAS_MOZGLUE and
        # IMPL_LIBXUL -- don't affect much and probably don't matter to anyone).
        #
        # TODO: Should we also always set DEBUG so that DEBUG code is always
        # indexed? Or is there significant amounts of non-DEBUG code that
        # would be adversely affected?
        #
        # TODO: Investigate whether the ordering of directories in the project
        # file can be used to our advantage so that the first indexing of
        # important headers has the defines we want.
        #
        dirsettings_template += add_objdir_include_path("ipc/ipdl/_ipdlheaders")
        dirsettings_template += add_define("MOZILLA_INTERNAL_API", "1")

        for path, args in self._args_for_dirs.items():
            dirsettings = dirsettings_template
            dirsettings = dirsettings.replace("@RELATIVE_PATH@", path)
            for i in args["includes"]:
                dirsettings += add_abs_include_path(i)
            for d in args["defines"]:
                assert d[:2] == u"-D" or d[:2] == u"-U"
                if d[:2] == u"-U":
                    # gfx/harfbuzz/src uses -UDEBUG, at least on Mac
                    # netwerk/sctp/src uses -U__APPLE__ on Mac
                    # XXX We should make this code smart enough to remove existing defines.
                    continue
                d = d[2:]  # get rid of leading "-D"
                # Split "NAME=VALUE" at the first "="; a bare "NAME" gets an
                # empty value.
                name_value = d.split("=", 1)
                name = name_value[0]
                value = ""
                if len(name_value) == 2:
                    value = name_value[1]
                dirsettings += add_define(name, str(value))
            dirsettings += LANGUAGE_SETTINGS_TEMPLATE_DIR_FOOTER
            fh.write(dirsettings)

        fh.write(
            LANGUAGE_SETTINGS_TEMPLATE_FOOTER.replace(
                "@COMPILER_FLAGS@", self._cxx + " " + self._cppflags
            )
        )

    def _write_launch_files(self, launch_dir):
        """Write a gecko.launch run configuration pointing at the built app."""
        bin_dir = os.path.join(self.environment.topobjdir, "dist")

        # TODO Improve binary detection
        if self._macbundle:
            exe_path = os.path.join(bin_dir, self._macbundle, "Contents/MacOS")
        else:
            exe_path = os.path.join(bin_dir, "bin")

        exe_path = os.path.join(exe_path, self._appname + self._bin_suffix)

        main_gecko_launch = os.path.join(launch_dir, "gecko.launch")
        with open(main_gecko_launch, "w") as fh:
            launch = GECKO_LAUNCH_CONFIG_TEMPLATE
            launch = launch.replace("@LAUNCH_PROGRAM@", exe_path)
            launch = launch.replace("@LAUNCH_ARGS@", "-P -no-remote")
            fh.write(launch)

        # TODO Add more launch configs (and delegate calls to mach)

    def _write_project(self, fh):
        """Write the .project file from PROJECT_TEMPLATE."""
        project = PROJECT_TEMPLATE

        project = project.replace("@PROJECT_NAME@", self._project_name)
        project = project.replace("@PROJECT_TOPSRCDIR@", self.environment.topsrcdir)
        project = project.replace(
            "@GENERATED_IPDL_FILES@",
            os.path.join(self.environment.topobjdir, "ipc", "ipdl"),
        )
        project = project.replace(
            "@GENERATED_WEBIDL_FILES@",
            os.path.join(self.environment.topobjdir, "dom", "bindings"),
        )
        fh.write(project)

    def _write_cproject(self, fh):
        """Write the .cproject file from the CPROJECT templates."""
        cproject_header = CPROJECT_TEMPLATE_HEADER
        # NOTE(review): @PROJECT_TOPSRCDIR@ is filled with topobjdir here
        # (it becomes the builder's buildPath) — confirm the placeholder
        # name is just historical before renaming it.
        cproject_header = cproject_header.replace(
            "@PROJECT_TOPSRCDIR@", self.environment.topobjdir
        )
        cproject_header = cproject_header.replace(
            "@MACH_COMMAND@", os.path.join(self.environment.topsrcdir, "mach")
        )
        fh.write(cproject_header)
        fh.write(CPROJECT_TEMPLATE_FOOTER)
+
+
# Template for the Eclipse .project file: project name, the two CDT
# builders, linked resources (the source tree plus generated IPDL/WebIDL
# objdirs) and resource filters hiding obj-*, *.rej, *.orig, .hg and *.pyc.
PROJECT_TEMPLATE = """<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
 <name>@PROJECT_NAME@</name>
 <comment></comment>
 <projects>
 </projects>
 <buildSpec>
 <buildCommand>
 <name>org.eclipse.cdt.managedbuilder.core.genmakebuilder</name>
 <triggers>clean,full,incremental,</triggers>
 <arguments>
 </arguments>
 </buildCommand>
 <buildCommand>
 <name>org.eclipse.cdt.managedbuilder.core.ScannerConfigBuilder</name>
 <triggers></triggers>
 <arguments>
 </arguments>
 </buildCommand>
 </buildSpec>
 <natures>
 <nature>org.eclipse.cdt.core.cnature</nature>
 <nature>org.eclipse.cdt.core.ccnature</nature>
 <nature>org.eclipse.cdt.managedbuilder.core.managedBuildNature</nature>
 <nature>org.eclipse.cdt.managedbuilder.core.ScannerConfigNature</nature>
 </natures>
 <linkedResources>
 <link>
 <name>tree</name>
 <type>2</type>
 <location>@PROJECT_TOPSRCDIR@</location>
 </link>
 <link>
 <name>generated-ipdl</name>
 <type>2</type>
 <location>@GENERATED_IPDL_FILES@</location>
 </link>
 <link>
 <name>generated-webidl</name>
 <type>2</type>
 <location>@GENERATED_WEBIDL_FILES@</location>
 </link>
 </linkedResources>
 <filteredResources>
 <filter>
 <id>17111971</id>
 <name>tree</name>
 <type>30</type>
 <matcher>
 <id>org.eclipse.ui.ide.multiFilter</id>
 <arguments>1.0-name-matches-false-false-obj-*</arguments>
 </matcher>
 </filter>
 <filter>
 <id>14081994</id>
 <name>tree</name>
 <type>22</type>
 <matcher>
 <id>org.eclipse.ui.ide.multiFilter</id>
 <arguments>1.0-name-matches-false-false-*.rej</arguments>
 </matcher>
 </filter>
 <filter>
 <id>25121970</id>
 <name>tree</name>
 <type>22</type>
 <matcher>
 <id>org.eclipse.ui.ide.multiFilter</id>
 <arguments>1.0-name-matches-false-false-*.orig</arguments>
 </matcher>
 </filter>
 <filter>
 <id>10102004</id>
 <name>tree</name>
 <type>10</type>
 <matcher>
 <id>org.eclipse.ui.ide.multiFilter</id>
 <arguments>1.0-name-matches-false-false-.hg</arguments>
 </matcher>
 </filter>
 <filter>
 <id>23122002</id>
 <name>tree</name>
 <type>22</type>
 <matcher>
 <id>org.eclipse.ui.ide.multiFilter</id>
 <arguments>1.0-name-matches-false-false-*.pyc</arguments>
 </matcher>
 </filter>
 </filteredResources>
</projectDescription>
"""
+
# Template for the start of the .cproject file. @PROJECT_TOPSRCDIR@ and
# @MACH_COMMAND@ are substituted in _write_cproject(); the builder invokes
# "@MACH_COMMAND@ --log-no-times build".
CPROJECT_TEMPLATE_HEADER = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?fileVersion 4.0.0?>

<cproject storage_type_id="org.eclipse.cdt.core.XmlProjectDescriptionStorage">
 <storageModule moduleId="org.eclipse.cdt.core.settings">
 <cconfiguration id="0.1674256904">
 <storageModule buildSystemId="org.eclipse.cdt.managedbuilder.core.configurationDataProvider" id="0.1674256904" moduleId="org.eclipse.cdt.core.settings" name="Default">
 <externalSettings/>
 <extensions>
 <extension id="org.eclipse.cdt.core.VCErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
 <extension id="org.eclipse.cdt.core.GmakeErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
 <extension id="org.eclipse.cdt.core.CWDLocator" point="org.eclipse.cdt.core.ErrorParser"/>
 <extension id="org.eclipse.cdt.core.GCCErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
 <extension id="org.eclipse.cdt.core.GASErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
 <extension id="org.eclipse.cdt.core.GLDErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
 </extensions>
 </storageModule>
 <storageModule moduleId="cdtBuildSystem" version="4.0.0">
 <configuration artifactName="${ProjName}" buildProperties="" description="" id="0.1674256904" name="Default" parent="org.eclipse.cdt.build.core.prefbase.cfg">
 <folderInfo id="0.1674256904." name="/" resourcePath="">
 <toolChain id="cdt.managedbuild.toolchain.gnu.cross.exe.debug.1276586933" name="Cross GCC" superClass="cdt.managedbuild.toolchain.gnu.cross.exe.debug">
 <targetPlatform archList="all" binaryParser="" id="cdt.managedbuild.targetPlatform.gnu.cross.710759961" isAbstract="false" osList="all" superClass="cdt.managedbuild.targetPlatform.gnu.cross"/>
 <builder arguments="--log-no-times build" buildPath="@PROJECT_TOPSRCDIR@" command="@MACH_COMMAND@" enableCleanBuild="false" incrementalBuildTarget="binaries" id="org.eclipse.cdt.build.core.settings.default.builder.1437267827" keepEnvironmentInBuildfile="false" name="Gnu Make Builder" superClass="org.eclipse.cdt.build.core.settings.default.builder"/>
 </toolChain>
 </folderInfo>
"""
# Example per-file settings override; not referenced by the visible code
# in this backend (kept for reference).
CPROJECT_TEMPLATE_FILEINFO = """ <fileInfo id="0.1674256904.474736658" name="Layers.cpp" rcbsApplicability="disable" resourcePath="tree/gfx/layers/Layers.cpp" toolsToInvoke="org.eclipse.cdt.build.core.settings.holder.582514939.463639939">
 <tool id="org.eclipse.cdt.build.core.settings.holder.582514939.463639939" name="GNU C++" superClass="org.eclipse.cdt.build.core.settings.holder.582514939">
 <option id="org.eclipse.cdt.build.core.settings.holder.symbols.232300236" superClass="org.eclipse.cdt.build.core.settings.holder.symbols" valueType="definedSymbols">
 <listOptionValue builtIn="false" value="BENWA=BENWAVAL"/>
 </option>
 <inputType id="org.eclipse.cdt.build.core.settings.holder.inType.1942876228" languageId="org.eclipse.cdt.core.g++" languageName="GNU C++" sourceContentType="org.eclipse.cdt.core.cxxSource,org.eclipse.cdt.core.cxxHeader" superClass="org.eclipse.cdt.build.core.settings.holder.inType"/>
 </tool>
 </fileInfo>
"""
# Closes the .cproject document; the <sourceEntries> excludes third-party
# and generated directories from the indexer's source paths.
CPROJECT_TEMPLATE_FOOTER = """
 <sourceEntries>
 <entry excluding="**/lib*|**/third_party/|tree/*.xcodeproj/|tree/.cargo/|tree/.vscode/|tree/build/|tree/extensions/|tree/gfx/angle/|tree/gfx/cairo/|tree/gfx/skia/skia/|tree/intl/icu/|tree/js/|tree/media/|tree/modules/freetype2|tree/modules/pdfium/|tree/netwerk/|tree/netwerk/sctp|tree/netwerk/srtp|tree/nsprpub/lib|tree/nsprpub/pr/src|tree/other-licenses/|tree/parser/|tree/python/|tree/security/nss/|tree/tools/" flags="VALUE_WORKSPACE_PATH" kind="sourcePath" name=""/>
 </sourceEntries>
 </configuration>
 </storageModule>
 <storageModule moduleId="org.eclipse.cdt.core.externalSettings"/>
 </cconfiguration>
 </storageModule>
 <storageModule moduleId="cdtBuildSystem" version="4.0.0">
 <project id="Empty.null.1281234804" name="Empty"/>
 </storageModule>
 <storageModule moduleId="scannerConfiguration">
 <autodiscovery enabled="true" problemReportingEnabled="true" selectedProfileId=""/>
 <scannerConfigBuildInfo instanceId="0.1674256904">
 <autodiscovery enabled="true" problemReportingEnabled="true" selectedProfileId=""/>
 </scannerConfigBuildInfo>
 </storageModule>
 <storageModule moduleId="refreshScope" versionNumber="2">
 <configuration configurationName="Default"/>
 </storageModule>
 <storageModule moduleId="org.eclipse.cdt.core.LanguageSettingsProviders"/>
</cproject>
"""
+
# Workspace-level language settings: registers the GCC built-in specs
# detector, which runs the compiler given via @COMPILER_FLAGS@ (substituted
# in consume_finished()) to discover built-in include paths and macros.
WORKSPACE_LANGUAGE_SETTINGS_TEMPLATE = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<plugin>
 <extension point="org.eclipse.cdt.core.LanguageSettingsProvider">
 <provider class="org.eclipse.cdt.managedbuilder.language.settings.providers.GCCBuiltinSpecsDetector" console="true" id="org.eclipse.cdt.managedbuilder.core.GCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT GCC Built-in Compiler Settings" parameter="@COMPILER_FLAGS@ -E -P -v -dD &quot;${INPUTS}&quot;">
 <language-scope id="org.eclipse.cdt.core.gcc"/>
 <language-scope id="org.eclipse.cdt.core.g++"/>
 </provider>
 </extension>
</plugin>
"""
+
+
# The settings set via this template can be found in the UI by opening
# the Properties for a directory in the Project Explorer tab, then going to
# C/C++ General > Preprocessor Include Paths, Macros, etc., selecting the
# C++ item from the Languages column, and then expanding the
# CDT User Settings Entries item to the right.

# Opens the project-level language.settings.xml document.
LANGUAGE_SETTINGS_TEMPLATE_HEADER = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<project>
 <configuration id="0.1674256904" name="Default">
 <extension point="org.eclipse.cdt.core.LanguageSettingsProvider">
 <provider class="org.eclipse.cdt.core.language.settings.providers.LanguageSettingsGenericProvider" id="org.eclipse.cdt.ui.UserLanguageSettingsProvider" name="CDT User Setting Entries" prefer-non-shared="true" store-entries-with-project="true">
 <language id="org.eclipse.cdt.core.g++">
"""

# Starts one per-directory <resource> entry; @PREINCLUDE_FILE_PATH@ is the
# forced pre-include (mozilla-config.h, substituted in
# _write_language_settings()).
LANGUAGE_SETTINGS_TEMPLATE_DIR_HEADER = """ <resource project-relative-path="@RELATIVE_PATH@">
 <entry kind="includeFile" name="@PREINCLUDE_FILE_PATH@">
 <flag value="LOCAL"/>
 </entry>
"""

# One include-path entry within a <resource>.
LANGUAGE_SETTINGS_TEMPLATE_DIR_INCLUDE = """ <entry kind="includePath" name="@INCLUDE_PATH@">
 <flag value="LOCAL"/>
 </entry>
"""

# One macro entry. @VALUE@ is deliberately unquoted in the template: the
# substituted value comes from quoteattr() and already carries its quotes.
LANGUAGE_SETTINGS_TEMPLATE_DIR_DEFINE = """ <entry kind="macro" name="@NAME@" value=@VALUE@/>
"""

# Closes one per-directory <resource> entry.
LANGUAGE_SETTINGS_TEMPLATE_DIR_FOOTER = """ </resource>
"""

# Closes the document; also registers the Cross GCC specs detector with
# @COMPILER_FLAGS@ (substituted in _write_language_settings()).
LANGUAGE_SETTINGS_TEMPLATE_FOOTER = """ </language>
 </provider>
 <provider class="org.eclipse.cdt.internal.build.crossgcc.CrossGCCBuiltinSpecsDetector" console="false" env-hash="-859273372804152468" id="org.eclipse.cdt.build.crossgcc.CrossGCCBuiltinSpecsDetector" keep-relative-paths="false" name="CDT Cross GCC Built-in Compiler Settings" parameter="@COMPILER_FLAGS@ -E -P -v -dD &quot;${INPUTS}&quot; -std=c++11" prefer-non-shared="true" store-entries-with-project="true">
 <language-scope id="org.eclipse.cdt.core.gcc"/>
 <language-scope id="org.eclipse.cdt.core.g++"/>
 </provider>
 <provider-reference id="org.eclipse.cdt.managedbuilder.core.MBSLanguageSettingsProvider" ref="shared-provider"/>
 </extension>
 </configuration>
</project>
"""
+
+
# gecko.launch run configuration. @LAUNCH_PROGRAM@ and @LAUNCH_ARGS@ are
# substituted in _write_launch_files(). NOTE(review): DEBUG_NAME is "lldb"
# while DEBUGGER_ID is "gdb" — looks intentional for Mac, but confirm.
GECKO_LAUNCH_CONFIG_TEMPLATE = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<launchConfiguration type="org.eclipse.cdt.launch.applicationLaunchType">
<booleanAttribute key="org.eclipse.cdt.dsf.gdb.AUTO_SOLIB" value="true"/>
<listAttribute key="org.eclipse.cdt.dsf.gdb.AUTO_SOLIB_LIST"/>
<stringAttribute key="org.eclipse.cdt.dsf.gdb.DEBUG_NAME" value="lldb"/>
<booleanAttribute key="org.eclipse.cdt.dsf.gdb.DEBUG_ON_FORK" value="false"/>
<stringAttribute key="org.eclipse.cdt.dsf.gdb.GDB_INIT" value=""/>
<booleanAttribute key="org.eclipse.cdt.dsf.gdb.NON_STOP" value="false"/>
<booleanAttribute key="org.eclipse.cdt.dsf.gdb.REVERSE" value="false"/>
<listAttribute key="org.eclipse.cdt.dsf.gdb.SOLIB_PATH"/>
<stringAttribute key="org.eclipse.cdt.dsf.gdb.TRACEPOINT_MODE" value="TP_NORMAL_ONLY"/>
<booleanAttribute key="org.eclipse.cdt.dsf.gdb.UPDATE_THREADLIST_ON_SUSPEND" value="false"/>
<booleanAttribute key="org.eclipse.cdt.dsf.gdb.internal.ui.launching.LocalApplicationCDebuggerTab.DEFAULTS_SET" value="true"/>
<intAttribute key="org.eclipse.cdt.launch.ATTR_BUILD_BEFORE_LAUNCH_ATTR" value="2"/>
<stringAttribute key="org.eclipse.cdt.launch.COREFILE_PATH" value=""/>
<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_ID" value="gdb"/>
<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_START_MODE" value="run"/>
<booleanAttribute key="org.eclipse.cdt.launch.DEBUGGER_STOP_AT_MAIN" value="false"/>
<stringAttribute key="org.eclipse.cdt.launch.DEBUGGER_STOP_AT_MAIN_SYMBOL" value="main"/>
<stringAttribute key="org.eclipse.cdt.launch.PROGRAM_ARGUMENTS" value="@LAUNCH_ARGS@"/>
<stringAttribute key="org.eclipse.cdt.launch.PROGRAM_NAME" value="@LAUNCH_PROGRAM@"/>
<stringAttribute key="org.eclipse.cdt.launch.PROJECT_ATTR" value="Gecko"/>
<booleanAttribute key="org.eclipse.cdt.launch.PROJECT_BUILD_CONFIG_AUTO_ATTR" value="true"/>
<stringAttribute key="org.eclipse.cdt.launch.PROJECT_BUILD_CONFIG_ID_ATTR" value=""/>
<booleanAttribute key="org.eclipse.cdt.launch.use_terminal" value="true"/>
<listAttribute key="org.eclipse.debug.core.MAPPED_RESOURCE_PATHS">
<listEntry value="/gecko"/>
</listAttribute>
<listAttribute key="org.eclipse.debug.core.MAPPED_RESOURCE_TYPES">
<listEntry value="4"/>
</listAttribute>
<booleanAttribute key="org.eclipse.debug.ui.ATTR_LAUNCH_IN_BACKGROUND" value="false"/>
<stringAttribute key="process_factory_id" value="org.eclipse.cdt.dsf.gdb.GdbProcessFactory"/>
</launchConfiguration>
"""
+
+
# Editor preferences matching the tree's style: spaces for tabs, width 2,
# 80-column print margin, visible whitespace, line numbers.
EDITOR_SETTINGS = """eclipse.preferences.version=1
lineNumberRuler=true
overviewRuler_migration=migrated_3.1
printMargin=true
printMarginColumn=80
showCarriageReturn=false
showEnclosedSpaces=false
showLeadingSpaces=false
showLineFeed=false
showWhitespaceCharacters=true
spacesForTabs=true
tabWidth=2
undoHistorySize=200
"""
+
+
# Auto-refresh resources so external (mach) builds are picked up.
STATIC_CORE_RESOURCES_PREFS = """eclipse.preferences.version=1
refresh.enabled=true
"""

# Map Mozilla-specific file extensions onto sensible content types.
STATIC_CORE_RUNTIME_PREFS = """eclipse.preferences.version=1
content-types/org.eclipse.cdt.core.cxxSource/file-extensions=mm
content-types/org.eclipse.core.runtime.xml/file-extensions=xul
content-types/org.eclipse.wst.jsdt.core.jsSource/file-extensions=jsm
"""

# Skip the welcome/intro screen.
STATIC_UI_PREFS = """eclipse.preferences.version=1
showIntro=false
"""

# Base cdt.core prefs; consume_finished() appends FORMATTER_SETTINGS when
# writing org.eclipse.cdt.core.prefs.
STATIC_CDT_CORE_PREFS = """eclipse.preferences.version=1
indexer.updatePolicy=0
"""
+
+FORMATTER_SETTINGS = """org.eclipse.cdt.core.formatter.alignment_for_arguments_in_method_invocation=16
+org.eclipse.cdt.core.formatter.alignment_for_assignment=16
+org.eclipse.cdt.core.formatter.alignment_for_base_clause_in_type_declaration=80
+org.eclipse.cdt.core.formatter.alignment_for_binary_expression=16
+org.eclipse.cdt.core.formatter.alignment_for_compact_if=16
+org.eclipse.cdt.core.formatter.alignment_for_conditional_expression=34
+org.eclipse.cdt.core.formatter.alignment_for_conditional_expression_chain=18
+org.eclipse.cdt.core.formatter.alignment_for_constructor_initializer_list=48
+org.eclipse.cdt.core.formatter.alignment_for_declarator_list=16
+org.eclipse.cdt.core.formatter.alignment_for_enumerator_list=48
+org.eclipse.cdt.core.formatter.alignment_for_expression_list=0
+org.eclipse.cdt.core.formatter.alignment_for_expressions_in_array_initializer=16
+org.eclipse.cdt.core.formatter.alignment_for_member_access=0
+org.eclipse.cdt.core.formatter.alignment_for_overloaded_left_shift_chain=16
+org.eclipse.cdt.core.formatter.alignment_for_parameters_in_method_declaration=16
+org.eclipse.cdt.core.formatter.alignment_for_throws_clause_in_method_declaration=16
+org.eclipse.cdt.core.formatter.brace_position_for_array_initializer=end_of_line
+org.eclipse.cdt.core.formatter.brace_position_for_block=end_of_line
+org.eclipse.cdt.core.formatter.brace_position_for_block_in_case=next_line_shifted
+org.eclipse.cdt.core.formatter.brace_position_for_method_declaration=next_line
+org.eclipse.cdt.core.formatter.brace_position_for_namespace_declaration=end_of_line
+org.eclipse.cdt.core.formatter.brace_position_for_switch=end_of_line
+org.eclipse.cdt.core.formatter.brace_position_for_type_declaration=next_line
+org.eclipse.cdt.core.formatter.comment.min_distance_between_code_and_line_comment=1
+org.eclipse.cdt.core.formatter.comment.never_indent_line_comments_on_first_column=true
+org.eclipse.cdt.core.formatter.comment.preserve_white_space_between_code_and_line_comments=true
+org.eclipse.cdt.core.formatter.compact_else_if=true
+org.eclipse.cdt.core.formatter.continuation_indentation=2
+org.eclipse.cdt.core.formatter.continuation_indentation_for_array_initializer=2
+org.eclipse.cdt.core.formatter.format_guardian_clause_on_one_line=false
+org.eclipse.cdt.core.formatter.indent_access_specifier_compare_to_type_header=false
+org.eclipse.cdt.core.formatter.indent_access_specifier_extra_spaces=0
+org.eclipse.cdt.core.formatter.indent_body_declarations_compare_to_access_specifier=true
+org.eclipse.cdt.core.formatter.indent_body_declarations_compare_to_namespace_header=false
+org.eclipse.cdt.core.formatter.indent_breaks_compare_to_cases=true
+org.eclipse.cdt.core.formatter.indent_declaration_compare_to_template_header=true
+org.eclipse.cdt.core.formatter.indent_empty_lines=false
+org.eclipse.cdt.core.formatter.indent_statements_compare_to_block=true
+org.eclipse.cdt.core.formatter.indent_statements_compare_to_body=true
+org.eclipse.cdt.core.formatter.indent_switchstatements_compare_to_cases=true
+org.eclipse.cdt.core.formatter.indent_switchstatements_compare_to_switch=false
+org.eclipse.cdt.core.formatter.indentation.size=2
+org.eclipse.cdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer=do not insert
+org.eclipse.cdt.core.formatter.insert_new_line_after_template_declaration=insert
+org.eclipse.cdt.core.formatter.insert_new_line_at_end_of_file_if_missing=do not insert
+org.eclipse.cdt.core.formatter.insert_new_line_before_catch_in_try_statement=do not insert
+org.eclipse.cdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer=do not insert
+org.eclipse.cdt.core.formatter.insert_new_line_before_colon_in_constructor_initializer_list=do not insert
+org.eclipse.cdt.core.formatter.insert_new_line_before_else_in_if_statement=do not insert
+org.eclipse.cdt.core.formatter.insert_new_line_before_identifier_in_function_declaration=insert
+org.eclipse.cdt.core.formatter.insert_new_line_before_while_in_do_statement=do not insert
+org.eclipse.cdt.core.formatter.insert_new_line_in_empty_block=insert
+org.eclipse.cdt.core.formatter.insert_space_after_assignment_operator=insert
+org.eclipse.cdt.core.formatter.insert_space_after_binary_operator=insert
+org.eclipse.cdt.core.formatter.insert_space_after_closing_angle_bracket_in_template_arguments=insert
+org.eclipse.cdt.core.formatter.insert_space_after_closing_angle_bracket_in_template_parameters=insert
+org.eclipse.cdt.core.formatter.insert_space_after_closing_brace_in_block=insert
+org.eclipse.cdt.core.formatter.insert_space_after_closing_paren_in_cast=insert
+org.eclipse.cdt.core.formatter.insert_space_after_colon_in_base_clause=insert
+org.eclipse.cdt.core.formatter.insert_space_after_colon_in_case=insert
+org.eclipse.cdt.core.formatter.insert_space_after_colon_in_conditional=insert
+org.eclipse.cdt.core.formatter.insert_space_after_colon_in_labeled_statement=insert
+org.eclipse.cdt.core.formatter.insert_space_after_comma_in_array_initializer=insert
+org.eclipse.cdt.core.formatter.insert_space_after_comma_in_base_types=insert
+org.eclipse.cdt.core.formatter.insert_space_after_comma_in_declarator_list=insert
+org.eclipse.cdt.core.formatter.insert_space_after_comma_in_enum_declarations=insert
+org.eclipse.cdt.core.formatter.insert_space_after_comma_in_expression_list=insert
+org.eclipse.cdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters=insert
+org.eclipse.cdt.core.formatter.insert_space_after_comma_in_method_declaration_throws=insert
+org.eclipse.cdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments=insert
+org.eclipse.cdt.core.formatter.insert_space_after_comma_in_template_arguments=insert
+org.eclipse.cdt.core.formatter.insert_space_after_comma_in_template_parameters=insert
+org.eclipse.cdt.core.formatter.insert_space_after_opening_angle_bracket_in_template_arguments=do not insert
+org.eclipse.cdt.core.formatter.insert_space_after_opening_angle_bracket_in_template_parameters=do not insert
+org.eclipse.cdt.core.formatter.insert_space_after_opening_brace_in_array_initializer=insert
+org.eclipse.cdt.core.formatter.insert_space_after_opening_bracket=do not insert
+org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_cast=do not insert
+org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_catch=do not insert
+org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_exception_specification=do not insert
+org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_for=do not insert
+org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_if=do not insert
+org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_method_declaration=do not insert
+org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_method_invocation=do not insert
+org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression=do not insert
+org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_switch=do not insert
+org.eclipse.cdt.core.formatter.insert_space_after_opening_paren_in_while=do not insert
+org.eclipse.cdt.core.formatter.insert_space_after_postfix_operator=do not insert
+org.eclipse.cdt.core.formatter.insert_space_after_prefix_operator=do not insert
+org.eclipse.cdt.core.formatter.insert_space_after_question_in_conditional=insert
+org.eclipse.cdt.core.formatter.insert_space_after_semicolon_in_for=insert
+org.eclipse.cdt.core.formatter.insert_space_after_unary_operator=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_assignment_operator=insert
+org.eclipse.cdt.core.formatter.insert_space_before_binary_operator=insert
+org.eclipse.cdt.core.formatter.insert_space_before_closing_angle_bracket_in_template_arguments=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_closing_angle_bracket_in_template_parameters=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_closing_brace_in_array_initializer=insert
+org.eclipse.cdt.core.formatter.insert_space_before_closing_bracket=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_cast=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_catch=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_exception_specification=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_for=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_if=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_method_declaration=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_method_invocation=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_switch=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_closing_paren_in_while=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_colon_in_base_clause=insert
+org.eclipse.cdt.core.formatter.insert_space_before_colon_in_case=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_colon_in_conditional=insert
+org.eclipse.cdt.core.formatter.insert_space_before_colon_in_default=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_colon_in_labeled_statement=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_comma_in_array_initializer=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_comma_in_base_types=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_comma_in_declarator_list=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_comma_in_enum_declarations=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_comma_in_expression_list=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_comma_in_method_declaration_throws=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_comma_in_template_arguments=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_comma_in_template_parameters=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_angle_bracket_in_template_arguments=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_angle_bracket_in_template_parameters=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_array_initializer=insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_block=insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_method_declaration=insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_namespace_declaration=insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_switch=insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_brace_in_type_declaration=insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_bracket=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_catch=insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_exception_specification=insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_for=insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_if=insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_method_declaration=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_method_invocation=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_switch=insert
+org.eclipse.cdt.core.formatter.insert_space_before_opening_paren_in_while=insert
+org.eclipse.cdt.core.formatter.insert_space_before_postfix_operator=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_prefix_operator=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_question_in_conditional=insert
+org.eclipse.cdt.core.formatter.insert_space_before_semicolon=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_semicolon_in_for=do not insert
+org.eclipse.cdt.core.formatter.insert_space_before_unary_operator=do not insert
+org.eclipse.cdt.core.formatter.insert_space_between_empty_braces_in_array_initializer=do not insert
+org.eclipse.cdt.core.formatter.insert_space_between_empty_brackets=do not insert
+org.eclipse.cdt.core.formatter.insert_space_between_empty_parens_in_exception_specification=do not insert
+org.eclipse.cdt.core.formatter.insert_space_between_empty_parens_in_method_declaration=do not insert
+org.eclipse.cdt.core.formatter.insert_space_between_empty_parens_in_method_invocation=do not insert
+org.eclipse.cdt.core.formatter.join_wrapped_lines=false
+org.eclipse.cdt.core.formatter.keep_else_statement_on_same_line=false
+org.eclipse.cdt.core.formatter.keep_empty_array_initializer_on_one_line=false
+org.eclipse.cdt.core.formatter.keep_imple_if_on_one_line=false
+org.eclipse.cdt.core.formatter.keep_then_statement_on_same_line=false
+org.eclipse.cdt.core.formatter.lineSplit=80
+org.eclipse.cdt.core.formatter.number_of_empty_lines_to_preserve=1
+org.eclipse.cdt.core.formatter.put_empty_statement_on_new_line=true
+org.eclipse.cdt.core.formatter.tabulation.char=space
+org.eclipse.cdt.core.formatter.tabulation.size=2
+org.eclipse.cdt.core.formatter.use_tabs_only_for_leading_indentations=false
+"""
+
+STATIC_CDT_UI_PREFS = """eclipse.preferences.version=1
+buildConsoleLines=10000
+Console.limitConsoleOutput=false
+ensureNewlineAtEOF=false
+formatter_profile=_Mozilla
+formatter_settings_version=1
+org.eclipse.cdt.ui.formatterprofiles.version=1
+removeTrailingWhitespace=true
+removeTrailingWhitespaceEditedLines=true
+scalability.numberOfLines=15000
+markOccurrences=true
+markOverloadedOperatorsOccurrences=true
+stickyOccurrences=false
+"""
+
+NOINDEX_TEMPLATE = """eclipse.preferences.version=1
+indexer/indexerId=org.eclipse.cdt.core.nullIndexer
+"""
diff --git a/python/mozbuild/mozbuild/backend/fastermake.py b/python/mozbuild/mozbuild/backend/fastermake.py
new file mode 100644
index 0000000000..324db29866
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/fastermake.py
@@ -0,0 +1,300 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from operator import itemgetter
+
+import mozpack.path as mozpath
+import six
+from mozpack.manifests import InstallManifest
+
+from mozbuild.backend.base import PartialBackend
+from mozbuild.backend.make import MakeBackend
+from mozbuild.frontend.context import ObjDirPath, Path
+from mozbuild.frontend.data import (
+ ChromeManifestEntry,
+ FinalTargetFiles,
+ FinalTargetPreprocessedFiles,
+ GeneratedFile,
+ JARManifest,
+ LocalizedFiles,
+ LocalizedPreprocessedFiles,
+ XPIDLModule,
+)
+from mozbuild.makeutil import Makefile
+from mozbuild.util import OrderedDefaultDict
+
+
+class FasterMakeBackend(MakeBackend, PartialBackend):
+ def _init(self):
+ super(FasterMakeBackend, self)._init()
+
+ self._manifest_entries = OrderedDefaultDict(set)
+
+ self._install_manifests = OrderedDefaultDict(InstallManifest)
+
+ self._dependencies = OrderedDefaultDict(list)
+ self._l10n_dependencies = OrderedDefaultDict(list)
+
+ self._has_xpidl = False
+
+ self._generated_files_map = {}
+ self._generated_files = []
+
+ def _add_preprocess(self, obj, path, dest, target=None, **kwargs):
+ if target is None:
+ target = mozpath.basename(path)
+ # This matches what PP_TARGETS do in config/rules.
+ if target.endswith(".in"):
+ target = target[:-3]
+ if target.endswith(".css"):
+ kwargs["marker"] = "%"
+ depfile = mozpath.join(
+ self.environment.topobjdir,
+ "faster",
+ ".deps",
+ mozpath.join(obj.install_target, dest, target).replace("/", "_"),
+ )
+ self._install_manifests[obj.install_target].add_preprocess(
+ mozpath.join(obj.srcdir, path),
+ mozpath.join(dest, target),
+ depfile,
+ **kwargs
+ )
+
+ def consume_object(self, obj):
+ if isinstance(obj, JARManifest) and obj.install_target.startswith("dist/bin"):
+ self._consume_jar_manifest(obj)
+
+ elif isinstance(
+ obj, (FinalTargetFiles, FinalTargetPreprocessedFiles)
+ ) and obj.install_target.startswith("dist/bin"):
+ ab_cd = self.environment.substs["MOZ_UI_LOCALE"][0]
+ localized = isinstance(obj, (LocalizedFiles, LocalizedPreprocessedFiles))
+ defines = obj.defines or {}
+ if defines:
+ defines = defines.defines
+ for path, files in obj.files.walk():
+ for f in files:
+ # For localized files we need to find the file from the locale directory.
+ if localized and not isinstance(f, ObjDirPath) and ab_cd != "en-US":
+ src = self.localized_path(obj.relsrcdir, f)
+
+ dep_target = "install-%s" % obj.install_target
+
+ if "*" not in src:
+ merge = mozpath.abspath(
+ mozpath.join(
+ self.environment.topobjdir,
+ "l10n_merge",
+ obj.relsrcdir,
+ f,
+ )
+ )
+ self._l10n_dependencies[dep_target].append(
+ (merge, f.full_path, src)
+ )
+ src = merge
+ else:
+ src = f.full_path
+
+ if isinstance(obj, FinalTargetPreprocessedFiles):
+ self._add_preprocess(
+ obj, src, path, target=f.target_basename, defines=defines
+ )
+ elif "*" in f:
+
+ def _prefix(s):
+ for p in mozpath.split(s):
+ if "*" not in p:
+ yield p + "/"
+
+ prefix = "".join(_prefix(src))
+
+ if "*" in f.target_basename:
+ target = path
+ else:
+ target = mozpath.join(path, f.target_basename)
+ self._install_manifests[obj.install_target].add_pattern_link(
+ prefix, src[len(prefix) :], target
+ )
+ else:
+ self._install_manifests[obj.install_target].add_link(
+ src, mozpath.join(path, f.target_basename)
+ )
+ if isinstance(f, ObjDirPath):
+ dep_target = "install-%s" % obj.install_target
+ dep = mozpath.relpath(f.full_path, self.environment.topobjdir)
+ if dep in self._generated_files_map:
+ # Only the first output file is specified as a
+ # dependency. If there are multiple output files
+ # from a single GENERATED_FILES invocation that are
+ # installed, we only want to run the command once.
+ dep = self._generated_files_map[dep]
+ self._dependencies[dep_target].append(dep)
+
+ elif isinstance(obj, ChromeManifestEntry) and obj.install_target.startswith(
+ "dist/bin"
+ ):
+ top_level = mozpath.join(obj.install_target, "chrome.manifest")
+ if obj.path != top_level:
+ entry = "manifest %s" % mozpath.relpath(obj.path, obj.install_target)
+ self._manifest_entries[top_level].add(entry)
+ self._manifest_entries[obj.path].add(str(obj.entry))
+
+ elif isinstance(obj, GeneratedFile):
+ if obj.outputs:
+ first_output = mozpath.relpath(
+ mozpath.join(obj.objdir, obj.outputs[0]), self.environment.topobjdir
+ )
+ for o in obj.outputs[1:]:
+ fullpath = mozpath.join(obj.objdir, o)
+ self._generated_files_map[
+ mozpath.relpath(fullpath, self.environment.topobjdir)
+ ] = first_output
+ self._generated_files.append(obj)
+ return False
+
+ elif isinstance(obj, XPIDLModule):
+ self._has_xpidl = True
+ # We're not actually handling XPIDL files.
+ return False
+
+ else:
+ return False
+
+ return True
+
+ def consume_finished(self):
+ mk = Makefile()
+ # Add the default rule at the very beginning.
+ mk.create_rule(["default"])
+ mk.add_statement("TOPSRCDIR = %s" % self.environment.topsrcdir)
+ mk.add_statement("TOPOBJDIR = %s" % self.environment.topobjdir)
+ mk.add_statement("MDDEPDIR = .deps")
+ mk.add_statement("TOUCH ?= touch")
+ mk.add_statement("include $(TOPSRCDIR)/config/makefiles/functions.mk")
+ mk.add_statement("include $(TOPSRCDIR)/config/AB_rCD.mk")
+ mk.add_statement("AB_CD = en-US")
+ if not self._has_xpidl:
+ mk.add_statement("NO_XPIDL = 1")
+
+ # Add a few necessary variables inherited from configure
+ for var in (
+ "PYTHON3",
+ "ACDEFINES",
+ "MOZ_BUILD_APP",
+ "MOZ_WIDGET_TOOLKIT",
+ ):
+ value = self.environment.substs.get(var)
+ if value is not None:
+ mk.add_statement("%s = %s" % (var, value))
+
+ install_manifests_bases = self._install_manifests.keys()
+
+ # Add information for chrome manifest generation
+ manifest_targets = []
+
+ for target, entries in six.iteritems(self._manifest_entries):
+ manifest_targets.append(target)
+ install_target = mozpath.basedir(target, install_manifests_bases)
+ self._install_manifests[install_target].add_content(
+ "".join("%s\n" % e for e in sorted(entries)),
+ mozpath.relpath(target, install_target),
+ )
+
+ # Add information for install manifests.
+ mk.add_statement(
+ "INSTALL_MANIFESTS = %s" % " ".join(sorted(self._install_manifests.keys()))
+ )
+
+ # Add dependencies we inferred:
+ for target, deps in sorted(six.iteritems(self._dependencies)):
+ mk.create_rule([target]).add_dependencies(
+ "$(TOPOBJDIR)/%s" % d for d in sorted(deps)
+ )
+
+ # This is not great, but it's better to have some dependencies on these Python files.
+ python_deps = [
+ "$(TOPSRCDIR)/python/mozbuild/mozbuild/action/l10n_merge.py",
+ "$(TOPSRCDIR)/third_party/python/compare-locales/compare_locales/compare.py",
+ "$(TOPSRCDIR)/third_party/python/compare-locales/compare_locales/paths.py",
+ ]
+ # Add l10n dependencies we inferred:
+ for target, deps in sorted(six.iteritems(self._l10n_dependencies)):
+ mk.create_rule([target]).add_dependencies(
+ "%s" % d[0] for d in sorted(deps, key=itemgetter(0))
+ )
+ for (merge, ref_file, l10n_file) in deps:
+ rule = mk.create_rule([merge]).add_dependencies(
+ [ref_file, l10n_file] + python_deps
+ )
+ rule.add_commands(
+ [
+ "$(PYTHON3) -m mozbuild.action.l10n_merge "
+ "--output {} --ref-file {} --l10n-file {}".format(
+ merge, ref_file, l10n_file
+ )
+ ]
+ )
+ # Add a dummy rule for the l10n file since it might not exist.
+ mk.create_rule([l10n_file])
+
+ mk.add_statement("include $(TOPSRCDIR)/config/faster/rules.mk")
+
+ for base, install_manifest in six.iteritems(self._install_manifests):
+ with self._write_file(
+ mozpath.join(
+ self.environment.topobjdir,
+ "faster",
+ "install_%s" % base.replace("/", "_"),
+ )
+ ) as fh:
+ install_manifest.write(fileobj=fh)
+
+ # Write a single unified manifest for consumption by |mach watch|.
+ # Since this doesn't start 'install_', it's not processed by the build.
+ unified_manifest = InstallManifest()
+ for base, install_manifest in six.iteritems(self._install_manifests):
+ # Expect 'dist/bin/**', which includes 'dist/bin' with no trailing slash.
+ assert base.startswith("dist/bin")
+ base = base[len("dist/bin") :]
+ if base and base[0] == "/":
+ base = base[1:]
+ unified_manifest.add_entries_from(install_manifest, base=base)
+
+ with self._write_file(
+ mozpath.join(
+ self.environment.topobjdir, "faster", "unified_install_dist_bin"
+ )
+ ) as fh:
+ unified_manifest.write(fileobj=fh)
+
+ for obj in self._generated_files:
+ for stmt in self._format_statements_for_generated_file(obj, "default"):
+ mk.add_statement(stmt)
+
+ with self._write_file(
+ mozpath.join(self.environment.topobjdir, "faster", "Makefile")
+ ) as fh:
+ mk.dump(fh, removal_guard=False)
+
+ def _pretty_path(self, path, obj):
+ if path.startswith(self.environment.topobjdir):
+ return mozpath.join(
+ "$(TOPOBJDIR)", mozpath.relpath(path, self.environment.topobjdir)
+ )
+ elif path.startswith(self.environment.topsrcdir):
+ return mozpath.join(
+ "$(TOPSRCDIR)", mozpath.relpath(path, self.environment.topsrcdir)
+ )
+ else:
+ return path
+
+ def _format_generated_file_input_name(self, path, obj):
+ return self._pretty_path(path.full_path, obj)
+
+ def _format_generated_file_output_name(self, path, obj):
+ if not isinstance(path, Path):
+ path = ObjDirPath(obj._context, "!" + path)
+ return self._pretty_path(path.full_path, obj)
diff --git a/python/mozbuild/mozbuild/backend/mach_commands.py b/python/mozbuild/mozbuild/backend/mach_commands.py
new file mode 100644
index 0000000000..1b83ebc826
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/mach_commands.py
@@ -0,0 +1,420 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import logging
+import os
+import subprocess
+import sys
+
+import mozpack.path as mozpath
+from mach.decorators import Command, CommandArgument
+from mozfile import which
+
+from mozbuild import build_commands
+
+
+@Command(
+ "ide",
+ category="devenv",
+ description="Generate a project and launch an IDE.",
+ virtualenv_name="build",
+)
+@CommandArgument("ide", choices=["eclipse", "visualstudio", "vscode"])
+@CommandArgument(
+ "--no-interactive",
+ default=False,
+ action="store_true",
+ help="Just generate the configuration",
+)
+@CommandArgument("args", nargs=argparse.REMAINDER)
+def run(command_context, ide, no_interactive, args):
+ interactive = not no_interactive
+
+ if ide == "eclipse":
+ backend = "CppEclipse"
+ elif ide == "visualstudio":
+ backend = "VisualStudio"
+ elif ide == "vscode":
+ backend = "Clangd"
+
+ if ide == "eclipse" and not which("eclipse"):
+ command_context.log(
+ logging.ERROR,
+ "ide",
+ {},
+ "Eclipse CDT 8.4 or later must be installed in your PATH.",
+ )
+ command_context.log(
+ logging.ERROR,
+ "ide",
+ {},
+ "Download: http://www.eclipse.org/cdt/downloads.php",
+ )
+ return 1
+
+ if ide == "vscode":
+ rc = build_commands.configure(command_context)
+
+ if rc != 0:
+ return rc
+
+ # First install what we can through install manifests.
+ rc = command_context._run_make(
+ directory=command_context.topobjdir,
+ target="pre-export",
+ line_handler=None,
+ )
+ if rc != 0:
+ return rc
+
+ # Then build the rest of the build dependencies by running the full
+ # export target, because we can't do anything better.
+ for target in ("export", "pre-compile"):
+ rc = command_context._run_make(
+ directory=command_context.topobjdir,
+ target=target,
+ line_handler=None,
+ )
+ if rc != 0:
+ return rc
+ else:
+ # Here we refresh the whole build. 'build export' is sufficient here and is
+ # probably more correct but it's also nice having a single target to get a fully
+ # built and indexed project (gives a easy target to use before go out to lunch).
+ res = command_context._mach_context.commands.dispatch(
+ "build", command_context._mach_context
+ )
+ if res != 0:
+ return 1
+
+ # Generate or refresh the IDE backend.
+ python = command_context.virtualenv_manager.python_path
+ config_status = os.path.join(command_context.topobjdir, "config.status")
+ args = [python, config_status, "--backend=%s" % backend]
+ res = command_context._run_command_in_objdir(
+ args=args, pass_thru=True, ensure_exit_code=False
+ )
+ if res != 0:
+ return 1
+
+ if ide == "eclipse":
+ eclipse_workspace_dir = get_eclipse_workspace_path(command_context)
+ subprocess.check_call(["eclipse", "-data", eclipse_workspace_dir])
+ elif ide == "visualstudio":
+ visual_studio_workspace_dir = get_visualstudio_workspace_path(command_context)
+ subprocess.call(["explorer.exe", visual_studio_workspace_dir])
+ elif ide == "vscode":
+ return setup_vscode(command_context, interactive)
+
+
+def get_eclipse_workspace_path(command_context):
+ from mozbuild.backend.cpp_eclipse import CppEclipseBackend
+
+ return CppEclipseBackend.get_workspace_path(
+ command_context.topsrcdir, command_context.topobjdir
+ )
+
+
+def get_visualstudio_workspace_path(command_context):
+ return os.path.normpath(
+ os.path.join(command_context.topobjdir, "msvc", "mozilla.sln")
+ )
+
+
+def setup_vscode(command_context, interactive):
+ from mozbuild.backend.clangd import find_vscode_cmd
+
+ # Check if platform has VSCode installed
+ if interactive:
+ vscode_cmd = find_vscode_cmd()
+ if vscode_cmd is None:
+ choice = prompt_bool(
+ "VSCode cannot be found, and may not be installed. Proceed?"
+ )
+ if not choice:
+ return 1
+
+ vscode_settings = mozpath.join(
+ command_context.topsrcdir, ".vscode", "settings.json"
+ )
+
+ new_settings = {}
+ artifact_prefix = ""
+ if command_context.config_environment.is_artifact_build:
+ artifact_prefix = (
+ "\nArtifact build configured: Skipping clang and rust setup. "
+ "If you later switch to a full build, please re-run this command."
+ )
+ else:
+ new_settings = setup_clangd_rust_in_vscode(command_context)
+
+ # Add file associations.
+ new_settings = {
+ **new_settings,
+ "files.associations": {
+ "*.jsm": "javascript",
+ "*.sjs": "javascript",
+ },
+ # Note, the top-level editor settings are left as default to allow the
+ # user's defaults (if any) to take effect.
+ "[javascript][javascriptreact][typescript][typescriptreact][json][html]": {
+ "editor.defaultFormatter": "esbenp.prettier-vscode",
+ "editor.formatOnSave": True,
+ },
+ }
+
+ import difflib
+ import json
+
+ # Load the existing .vscode/settings.json file, to check if if needs to
+ # be created or updated.
+ try:
+ with open(vscode_settings) as fh:
+ old_settings_str = fh.read()
+ except FileNotFoundError:
+ print(
+ "Configuration for {} will be created.{}".format(
+ vscode_settings, artifact_prefix
+ )
+ )
+ old_settings_str = None
+
+ if old_settings_str is None:
+ # No old settings exist
+ with open(vscode_settings, "w") as fh:
+ json.dump(new_settings, fh, indent=4)
+ else:
+ # Merge our new settings with the existing settings, and check if we
+ # need to make changes. Only prompt & write out the updated config
+ # file if settings actually changed.
+ try:
+ old_settings = json.loads(old_settings_str)
+ prompt_prefix = ""
+ except ValueError:
+ old_settings = {}
+ prompt_prefix = (
+ "\n**WARNING**: Parsing of existing settings file failed. "
+ "Existing settings will be lost!"
+ )
+
+ # If we've got an old section with the formatting configuration, remove it
+ # so that we effectively "upgrade" the user to include json from the new
+ # settings. The user is presented with the diffs so should spot any issues.
+ if "[javascript][javascriptreact][typescript][typescriptreact]" in old_settings:
+ old_settings.pop(
+ "[javascript][javascriptreact][typescript][typescriptreact]"
+ )
+ if (
+ "[javascript][javascriptreact][typescript][typescriptreact][json]"
+ in old_settings
+ ):
+ old_settings.pop(
+ "[javascript][javascriptreact][typescript][typescriptreact][json]"
+ )
+
+ settings = {**old_settings, **new_settings}
+
+ if old_settings != settings:
+ # Prompt the user with a diff of the changes we're going to make
+ new_settings_str = json.dumps(settings, indent=4)
+ if interactive:
+ print(
+ "\nThe following modifications to {settings} will occur:\n{diff}".format(
+ settings=vscode_settings,
+ diff="".join(
+ difflib.unified_diff(
+ old_settings_str.splitlines(keepends=True),
+ new_settings_str.splitlines(keepends=True),
+ "a/.vscode/settings.json",
+ "b/.vscode/settings.json",
+ n=30,
+ )
+ ),
+ )
+ )
+ choice = prompt_bool(
+ "{}{}\nProceed with modifications to {}?".format(
+ artifact_prefix, prompt_prefix, vscode_settings
+ )
+ )
+ if not choice:
+ return 1
+
+ with open(vscode_settings, "w") as fh:
+ fh.write(new_settings_str)
+
+ if not interactive:
+ return 0
+
+ # Open vscode with new configuration, or ask the user to do so if the
+ # binary was not found.
+ if vscode_cmd is None:
+ print(
+ "Please open VS Code manually and load directory: {}".format(
+ command_context.topsrcdir
+ )
+ )
+ return 0
+
+ rc = subprocess.call(vscode_cmd + [command_context.topsrcdir])
+
+ if rc != 0:
+ command_context.log(
+ logging.ERROR,
+ "ide",
+ {},
+ "Unable to open VS Code. Please open VS Code manually and load "
+ "directory: {}".format(command_context.topsrcdir),
+ )
+ return rc
+
+ return 0
+
+
+def setup_clangd_rust_in_vscode(command_context):
+ clangd_cc_path = mozpath.join(command_context.topobjdir, "clangd")
+
+ # Verify if the required files are present
+ clang_tools_path = mozpath.join(
+ command_context._mach_context.state_dir, "clang-tools"
+ )
+ clang_tidy_bin = mozpath.join(clang_tools_path, "clang-tidy", "bin")
+
+ clangd_path = mozpath.join(
+ clang_tidy_bin,
+ "clangd" + command_context.config_environment.substs.get("BIN_SUFFIX", ""),
+ )
+
+ if not os.path.exists(clangd_path):
+ command_context.log(
+ logging.ERROR,
+ "ide",
+ {},
+ "Unable to locate clangd in {}.".format(clang_tidy_bin),
+ )
+ rc = get_clang_tools(command_context, clang_tools_path)
+
+ if rc != 0:
+ return rc
+
+ import multiprocessing
+
+ from mozbuild.code_analysis.utils import ClangTidyConfig
+
+ clang_tidy_cfg = ClangTidyConfig(command_context.topsrcdir)
+
+ if sys.platform == "win32":
+ cargo_check_command = [sys.executable, "mach"]
+ else:
+ cargo_check_command = ["./mach"]
+
+ cargo_check_command += [
+ "--log-no-times",
+ "cargo",
+ "check",
+ "-j",
+ str(multiprocessing.cpu_count() // 2),
+ "--all-crates",
+ "--message-format-json",
+ ]
+
+ clang_tidy = {}
+ clang_tidy["Checks"] = ",".join(clang_tidy_cfg.checks)
+ clang_tidy.update(clang_tidy_cfg.checks_config)
+
+ # Write .clang-tidy yml
+ import yaml
+
+ with open(".clang-tidy", "w") as file:
+ yaml.dump(clang_tidy, file)
+
+ clangd_cfg = {
+ "CompileFlags": {
+ "CompilationDatabase": clangd_cc_path,
+ }
+ }
+
+ with open(".clangd", "w") as file:
+ yaml.dump(clangd_cfg, file)
+
+ return {
+ "clangd.path": clangd_path,
+ "clangd.arguments": [
+ "-j",
+ str(multiprocessing.cpu_count() // 2),
+ "--limit-results",
+ "0",
+ "--completion-style",
+ "detailed",
+ "--background-index",
+ "--all-scopes-completion",
+ "--log",
+ "info",
+ "--pch-storage",
+ "disk",
+ "--clang-tidy",
+ ],
+ "rust-analyzer.server.extraEnv": {
+ # Point rust-analyzer at the real target directory used by our
+ # build, so it can discover the files created when we run `./mach
+ # cargo check`.
+ "CARGO_TARGET_DIR": command_context.topobjdir,
+ },
+ "rust-analyzer.cargo.buildScripts.overrideCommand": cargo_check_command,
+ "rust-analyzer.check.overrideCommand": cargo_check_command,
+ }
+
+
+def get_clang_tools(command_context, clang_tools_path):
+ import shutil
+
+ if os.path.isdir(clang_tools_path):
+ shutil.rmtree(clang_tools_path)
+
+ # Create base directory where we store clang binary
+ os.mkdir(clang_tools_path)
+
+ from mozbuild.artifact_commands import artifact_toolchain
+
+ job, _ = command_context.platform
+
+ if job is None:
+ command_context.log(
+ logging.ERROR,
+ "ide",
+ {},
+ "The current platform isn't supported. "
+ "Currently only the following platforms are "
+ "supported: win32/win64, linux64 and macosx64.",
+ )
+ return 1
+
+ job += "-clang-tidy"
+
+ # We want to unpack data in the clang-tidy mozbuild folder
+ currentWorkingDir = os.getcwd()
+ os.chdir(clang_tools_path)
+ rc = artifact_toolchain(
+ command_context, verbose=False, from_build=[job], no_unpack=False, retry=0
+ )
+ # Change back the cwd
+ os.chdir(currentWorkingDir)
+
+ return rc
+
+
+def prompt_bool(prompt, limit=5):
+ """Prompts the user with prompt and requires a boolean value."""
+ from distutils.util import strtobool
+
+ for _ in range(limit):
+ try:
+ return strtobool(input(prompt + " [Y/N]\n"))
+ except ValueError:
+ print(
+ "ERROR! Please enter a valid option! Please use any of the following:"
+ " Y, N, True, False, 1, 0"
+ )
+ return False
diff --git a/python/mozbuild/mozbuild/backend/make.py b/python/mozbuild/mozbuild/backend/make.py
new file mode 100644
index 0000000000..90b37e6758
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/make.py
@@ -0,0 +1,139 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mozpack.path as mozpath
+
+from mozbuild.frontend.data import GeneratedFile
+from mozbuild.shellutil import quote as shell_quote
+
+from .common import CommonBackend
+
+
class MakeBackend(CommonBackend):
    """Class encapsulating logic for backends that use Make."""

    def _init(self):
        CommonBackend._init(self)

    def _format_statements_for_generated_file(self, obj, tier, extra_dependencies=""):
        """Return the list of statements to write to the Makefile for this
        GeneratedFile.

        This function will invoke _format_generated_file_input_name and
        _format_generated_file_output_name to munge the input/output filenames
        before sending them to the output.
        """
        assert isinstance(obj, GeneratedFile)

        # Localized generated files can use {AB_CD} and {AB_rCD} in their
        # output paths.
        if obj.localized:
            substs = {"AB_CD": "$(AB_CD)", "AB_rCD": "$(AB_rCD)"}
        else:
            substs = {}

        outputs = []
        needs_AB_rCD = False
        for o in obj.outputs:
            needs_AB_rCD = needs_AB_rCD or ("AB_rCD" in o)
            try:
                outputs.append(
                    self._format_generated_file_output_name(o.format(**substs), obj)
                )
            except KeyError as e:
                # A {placeholder} other than the substitutions allowed above
                # was used in an output path.
                raise ValueError(
                    "%s not in %s is not a valid substitution in %s"
                    % (e.args[0], ", ".join(sorted(substs.keys())), o)
                )

        # The first output anchors the rule: the dependency (.pp) and stub
        # files are named after it, and every other output merely depends on
        # the stub (see the "%s: %s ;" statements below).
        first_output = outputs[0]
        dep_file = mozpath.join(
            mozpath.dirname(first_output),
            "$(MDDEPDIR)",
            "%s.pp" % mozpath.basename(first_output),
        )
        # The stub target file needs to go in MDDEPDIR so that it doesn't
        # get written into generated Android resource directories, breaking
        # Gradle tooling and/or polluting the Android packages.
        stub_file = mozpath.join(
            mozpath.dirname(first_output),
            "$(MDDEPDIR)",
            "%s.stub" % mozpath.basename(first_output),
        )

        if obj.inputs:
            inputs = [
                self._format_generated_file_input_name(f, obj) for f in obj.inputs
            ]
        else:
            inputs = []

        # " FORCE" makes the rule unconditional; localized files are forced
        # only during language repacks (see the comment on `force=` below).
        force = ""
        if obj.force:
            force = " FORCE"
        elif obj.localized:
            force = " $(if $(IS_LANGUAGE_REPACK),FORCE)"

        ret = []

        if obj.script:
            # If we are doing an artifact build, we don't run compiler, so
            # we can skip generated files that are needed during compile,
            # or let the rule run as the result of something depending on
            # it.
            if (
                not (obj.required_before_compile or obj.required_during_compile)
                or not self.environment.is_artifact_build
            ):
                if tier and not needs_AB_rCD:
                    # Android localized resources have special Makefile
                    # handling.

                    # Double-colon tiers via a variable that the backend adds as a dependency
                    # later. See https://bugzilla.mozilla.org/show_bug.cgi?id=1645986#c0 as
                    # to why.
                    if tier in ("export", "pre-compile", "libs", "misc"):
                        dep = "%s_TARGETS" % tier.replace("-", "_").upper()
                        ret.append("%s += %s" % (dep, stub_file))
                    else:
                        ret.append("%s: %s" % (tier, stub_file))
            for output in outputs:
                ret.append("%s: %s ;" % (output, stub_file))
            ret.append("EXTRA_MDDEPEND_FILES += %s" % dep_file)

            ret.append(
                (
                    """{stub}: {script}{inputs}{backend}{force}
\t$(REPORT_BUILD)
\t$(call py_action,file_generate,{locale}{script} """  # wrap for E501
                    """{method} {output} {dep_file} {stub}{inputs}{flags})
\t@$(TOUCH) $@
"""
                ).format(
                    stub=stub_file,
                    output=first_output,
                    dep_file=dep_file,
                    inputs=" " + " ".join(inputs) if inputs else "",
                    flags=" " + " ".join(shell_quote(f) for f in obj.flags)
                    if obj.flags
                    else "",
                    backend=" " + extra_dependencies if extra_dependencies else "",
                    # Locale repacks repack multiple locales from a single configured objdir,
                    # so standard mtime dependencies won't work properly when the build is re-run
                    # with a different locale as input. IS_LANGUAGE_REPACK will reliably be set
                    # in this situation, so simply force the generation to run in that case.
                    force=force,
                    locale="--locale=$(AB_CD) " if obj.localized else "",
                    script=obj.script,
                    method=obj.method,
                )
            )

        return ret

    def _format_generated_file_input_name(self, path, obj):
        """Hook for subclasses: map an input path to the name written into
        the Makefile."""
        raise NotImplementedError("Subclass must implement")

    def _format_generated_file_output_name(self, path, obj):
        """Hook for subclasses: map an output path to the name written into
        the Makefile."""
        raise NotImplementedError("Subclass must implement")
diff --git a/python/mozbuild/mozbuild/backend/recursivemake.py b/python/mozbuild/mozbuild/backend/recursivemake.py
new file mode 100644
index 0000000000..d92864d081
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/recursivemake.py
@@ -0,0 +1,1904 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import io
+import logging
+import os
+import re
+from collections import defaultdict, namedtuple
+from itertools import chain
+from operator import itemgetter
+
+import mozpack.path as mozpath
+import six
+from mozpack.manifests import InstallManifest
+from six import StringIO
+
+from mozbuild import frontend
+from mozbuild.frontend.context import (
+ AbsolutePath,
+ ObjDirPath,
+ Path,
+ RenamedSourcePath,
+ SourcePath,
+)
+from mozbuild.shellutil import quote as shell_quote
+
+from ..frontend.data import (
+ BaseLibrary,
+ BaseProgram,
+ BaseRustLibrary,
+ ChromeManifestEntry,
+ ComputedFlags,
+ ConfigFileSubstitution,
+ ContextDerived,
+ Defines,
+ DirectoryTraversal,
+ ExternalLibrary,
+ FinalTargetFiles,
+ FinalTargetPreprocessedFiles,
+ GeneratedFile,
+ HostDefines,
+ HostLibrary,
+ HostProgram,
+ HostRustProgram,
+ HostSharedLibrary,
+ HostSimpleProgram,
+ HostSources,
+ InstallationTarget,
+ JARManifest,
+ Linkable,
+ LocalInclude,
+ LocalizedFiles,
+ LocalizedPreprocessedFiles,
+ ObjdirFiles,
+ ObjdirPreprocessedFiles,
+ PerSourceFlag,
+ Program,
+ RustProgram,
+ RustTests,
+ SandboxedWasmLibrary,
+ SharedLibrary,
+ SimpleProgram,
+ Sources,
+ StaticLibrary,
+ TestManifest,
+ VariablePassthru,
+ WasmSources,
+ XPIDLModule,
+)
+from ..makeutil import Makefile
+from ..util import FileAvoidWrite, OrderedDefaultDict, ensureParentDir, pairwise
+from .common import CommonBackend
+from .make import MakeBackend
+
# To protect against accidentally adding logic to Makefiles that belong in moz.build,
# we check if moz.build-like variables are defined in Makefiles. If they are, we throw
# an error to encourage the usage of moz.build instead.
_MOZBUILD_ONLY_VARIABLES = set(frontend.context.VARIABLES.keys()) - {
    # The migration to moz.build from Makefiles still isn't complete, and there's still
    # some straggling Makefile logic that uses variables that only moz.build should
    # use.
    # These remaining variables are excluded from our blacklist. As the variables here
    # are migrated from Makefiles in the future, they should be removed from this
    # "override" list.
    "XPI_NAME",
    "USE_EXTENSION_MANIFEST",
    "CFLAGS",
    "CXXFLAGS",
}

# Variables that no longer have any effect; per DEPRECATED_VARIABLES_MESSAGE
# below, they must be removed from Makefiles in order to build.
DEPRECATED_VARIABLES = [
    "ALLOW_COMPILER_WARNINGS",
    "EXPORT_LIBRARY",
    "EXTRA_LIBS",
    "FAIL_ON_WARNINGS",
    "HOST_LIBS",
    "LIBXUL_LIBRARY",
    "MOCHITEST_A11Y_FILES",
    "MOCHITEST_BROWSER_FILES",
    "MOCHITEST_BROWSER_FILES_PARTS",
    "MOCHITEST_CHROME_FILES",
    "MOCHITEST_FILES",
    "MOCHITEST_FILES_PARTS",
    "MOCHITEST_METRO_FILES",
    "MOCHITEST_ROBOCOP_FILES",
    "MODULE_OPTIMIZE_FLAGS",
    "MOZ_CHROME_FILE_FORMAT",
    "SHORT_LIBNAME",
    "TESTING_JS_MODULES",
    "TESTING_JS_MODULE_DIR",
]

# Message suffix reported when one of _MOZBUILD_ONLY_VARIABLES is found in a
# Makefile.
MOZBUILD_VARIABLES_MESSAGE = "It should only be defined in moz.build files."

# Message suffix reported when one of DEPRECATED_VARIABLES is found in a
# Makefile.
DEPRECATED_VARIABLES_MESSAGE = (
    "This variable has been deprecated. It does nothing. It must be removed "
    "in order to build."
)
+
+
def make_quote(s):
    """Quote *s* for literal inclusion in a Makefile value.

    ``#`` starts a Make comment, so it is escaped with a backslash, and a
    literal ``$`` must be doubled so Make doesn't expand it as a variable
    reference.
    """
    # r"\#" replaces the original "\#", which is an invalid escape sequence
    # (SyntaxWarning on modern Python).  The resulting string is identical:
    # a backslash followed by "#".
    return s.replace("#", r"\#").replace("$", "$$")
+
+
class BackendMakeFile(object):
    """Represents a generated backend.mk file.

    This is both a wrapper around a file handle as well as a container that
    holds accumulated state.

    It's worth taking a moment to explain the make dependencies. The
    generated backend.mk as well as the Makefile.in (if it exists) are in the
    GLOBAL_DEPS list. This means that if one of them changes, all targets
    in that Makefile are invalidated. backend.mk also depends on all of its
    input files.

    It's worth considering the effect of file mtimes on build behavior.

    Since we perform an "all or none" traversal of moz.build files (the whole
    tree is scanned as opposed to individual files), if we were to blindly
    write backend.mk files, the net effect of updating a single mozbuild file
    in the tree is all backend.mk files have new mtimes. This would in turn
    invalidate all make targets across the whole tree! This would effectively
    undermine incremental builds as any mozbuild change would cause the entire
    tree to rebuild!

    The solution is to not update the mtimes of backend.mk files unless they
    actually change. We use FileAvoidWrite to accomplish this.
    """

    def __init__(self, srcdir, objdir, environment, topsrcdir, topobjdir, dry_run):
        self.topsrcdir = topsrcdir
        self.srcdir = srcdir
        self.objdir = objdir
        # objdir expressed relative to the top object directory.
        self.relobjdir = mozpath.relpath(objdir, topobjdir)
        self.environment = environment
        self.name = mozpath.join(objdir, "backend.mk")

        # Set by consumers to "<module>.xpt" when this directory has an XPIDL
        # module; close() then emits extra non-recursive export targets.
        self.xpt_name = None

        # FileAvoidWrite only touches the file on disk when the content
        # actually changed, preserving mtimes (see class docstring).
        self.fh = FileAvoidWrite(self.name, capture_diff=True, dry_run=dry_run)
        self.fh.write("# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT EDIT.\n")
        self.fh.write("\n")

    def write(self, buf):
        """Append buf to the in-memory file content."""
        self.fh.write(buf)

    def write_once(self, buf):
        """Append buf only if it has not already been written on a line of
        its own."""
        buf = six.ensure_text(buf)
        # Prefixing with "\n" anchors the substring check to a line start,
        # so a statement that is a suffix of another isn't falsely skipped.
        if "\n" + buf not in six.ensure_text(self.fh.getvalue()):
            self.write(buf)

    # For compatibility with makeutil.Makefile
    def add_statement(self, stmt):
        self.write("%s\n" % stmt)

    def close(self):
        if self.xpt_name:
            # We just recompile all xpidls because it's easier and less error
            # prone.
            self.fh.write("NONRECURSIVE_TARGETS += export\n")
            self.fh.write("NONRECURSIVE_TARGETS_export += xpidl\n")
            self.fh.write(
                "NONRECURSIVE_TARGETS_export_xpidl_DIRECTORY = "
                "$(DEPTH)/xpcom/xpidl\n"
            )
            self.fh.write("NONRECURSIVE_TARGETS_export_xpidl_TARGETS += " "export\n")

        return self.fh.close()

    @property
    def diff(self):
        # The on-disk change captured by FileAvoidWrite (capture_diff=True).
        return self.fh.diff
+
+
class RecursiveMakeTraversal(object):
    """
    Helper class to keep track of how the "traditional" recursive make backend
    recurses subdirectories. This is useful until all adhoc rules are removed
    from Makefiles.

    Each directory may have one or more types of subdirectories:
    - (normal) dirs
    - tests
    """

    SubDirectoryCategories = ["dirs", "tests"]
    SubDirectoriesTuple = namedtuple("SubDirectories", SubDirectoryCategories)

    class SubDirectories(SubDirectoriesTuple):
        # A named tuple whose fields are freshly-created mutable lists, so
        # the categories can be extended after construction.
        def __new__(cls):
            return RecursiveMakeTraversal.SubDirectoriesTuple.__new__(cls, [], [])

    def __init__(self):
        # Maps a directory to its SubDirectories, for every directory
        # registered through add().
        self._traversal = {}
        # Every directory that appeared as a subdirectory of another one;
        # get_subdirs() uses this to attach orphans to the root.
        self._attached = set()

    def add(self, dir, dirs=(), tests=()):
        """
        Adds a directory to traversal, registering its subdirectories,
        sorted by categories. If the directory was already added to
        traversal, adds the new subdirectories to the already known lists.

        The defaults are immutable tuples rather than the mutable-default
        pitfall ``dirs=[]``; both arguments are only iterated, never mutated,
        so this is behavior-preserving.
        """
        subdirs = self._traversal.setdefault(dir, self.SubDirectories())
        for key, value in (("dirs", dirs), ("tests", tests)):
            assert key in self.SubDirectoryCategories
            # Callers give us generators
            value = list(value)
            getattr(subdirs, key).extend(value)
            self._attached |= set(value)

    @staticmethod
    def default_filter(current, subdirs):
        """
        Default filter for use with compute_dependencies and traverse.
        """
        return current, [], subdirs.dirs + subdirs.tests

    def call_filter(self, current, filter):
        """
        Helper function to call a filter from compute_dependencies and
        traverse.
        """
        return filter(current, self.get_subdirs(current))

    def compute_dependencies(self, filter=None):
        """
        Compute make dependencies corresponding to the registered directory
        traversal.

        filter is a function with the following signature:
            def filter(current, subdirs)

        where current is the directory being traversed, and subdirs the
        SubDirectories instance corresponding to it.
        The filter function returns a tuple (filtered_current, filtered_parallel,
        filtered_dirs) where filtered_current is either current or None if
        the current directory is to be skipped, and filtered_parallel and
        filtered_dirs are lists of parallel directories and sequential
        directories, which can be rearranged from whatever is given in the
        SubDirectories members.

        The default filter corresponds to a default recursive traversal.

        Returns a tuple (main, deps): main is the tuple of terminal nodes of
        the traversal, and deps maps each visited non-root node to the tuple
        of nodes that must be traversed before it.
        """
        filter = filter or self.default_filter

        deps = {}

        def recurse(start_node, prev_nodes=None):
            # prev_nodes carries the nodes the current node must run after.
            current, parallel, sequential = self.call_filter(start_node, filter)
            if current is not None:
                if start_node != "":
                    deps[start_node] = prev_nodes
                prev_nodes = (start_node,)
            if start_node not in self._traversal:
                return prev_nodes
            # Parallel subdirectories all depend on the same predecessors...
            parallel_nodes = []
            for node in parallel:
                nodes = recurse(node, prev_nodes)
                if nodes and nodes != ("",):
                    parallel_nodes.extend(nodes)
            if parallel_nodes:
                prev_nodes = tuple(parallel_nodes)
            # ...while sequential ones are chained one after another.
            for dir in sequential:
                prev_nodes = recurse(dir, prev_nodes)
            return prev_nodes

        return recurse(""), deps

    def traverse(self, start, filter=None):
        """
        Iterate over the filtered subdirectories, following the traditional
        make traversal order.
        """
        if filter is None:
            filter = self.default_filter

        current, parallel, sequential = self.call_filter(start, filter)
        if current is not None:
            yield start
        if start not in self._traversal:
            return
        for node in parallel:
            for n in self.traverse(node, filter):
                yield n
        for dir in sequential:
            for d in self.traverse(dir, filter):
                yield d

    def get_subdirs(self, dir):
        """
        Returns all direct subdirectories under the given directory.

        For the root (""), directories that were registered but never
        attached to any parent are appended (sorted) to its dirs so nothing
        silently drops out of the traversal.
        """
        result = self._traversal.get(dir, self.SubDirectories())
        if dir == "":
            unattached = set(self._traversal) - self._attached - set([""])
            if unattached:
                new_result = self.SubDirectories()
                new_result.dirs.extend(result.dirs)
                new_result.dirs.extend(sorted(unattached))
                new_result.tests.extend(result.tests)
                result = new_result
        return result
+
+
+class RecursiveMakeBackend(MakeBackend):
+ """Backend that integrates with the existing recursive make build system.
+
+ This backend facilitates the transition from Makefile.in to moz.build
+ files.
+
+ This backend performs Makefile.in -> Makefile conversion. It also writes
+ out .mk files containing content derived from moz.build files. Both are
+ consumed by the recursive make builder.
+
+ This backend may eventually evolve to write out non-recursive make files.
+ However, as long as there are Makefile.in files in the tree, we are tied to
+ recursive make and thus will need this backend.
+ """
+
    def _init(self):
        MakeBackend._init(self)

        # Maps an objdir to its BackendMakeFile (lazily created in
        # _get_backend_file_for).
        self._backend_files = {}
        # Relative objdirs that contain XPIDL modules.
        self._idl_dirs = set()

        # Counters reported by summary().
        self._makefile_in_count = 0
        self._makefile_out_count = 0

        self._test_manifests = {}

        self.backend_input_files.add(
            mozpath.join(self.environment.topobjdir, "config", "autoconf.mk")
        )

        self._install_manifests = defaultdict(InstallManifest)
        # The build system relies on some install manifests always existing
        # even if they are empty, because the directories are still filled
        # by the build system itself, and the install manifests are only
        # used for a "magic" rm -rf.
        self._install_manifests["dist_public"]
        self._install_manifests["dist_private"]

        self._traversal = RecursiveMakeTraversal()
        # target -> set of targets it depends on; merely indexing the
        # defaultdict registers a target with no edges.
        self._compile_graph = OrderedDefaultDict(set)
        self._rust_targets = set()
        self._gkrust_target = None
        # Relative objdirs with pre-compile generated files.
        self._pre_compile = set()

        # Per-tier sets of directories that must not be skipped during
        # traversal (see _fill_root_mk).
        self._no_skip = {
            "pre-export": set(),
            "export": set(),
            "libs": set(),
            "misc": set(),
            "tools": set(),
            "check": set(),
            "syms": set(),
        }
+
+ def summary(self):
+ summary = super(RecursiveMakeBackend, self).summary()
+ summary.extend(
+ "; {makefile_in:d} -> {makefile_out:d} Makefile",
+ makefile_in=self._makefile_in_count,
+ makefile_out=self._makefile_out_count,
+ )
+ return summary
+
+ def _get_backend_file_for(self, obj):
+ # For generated files that we put in the export or misc tiers, we use the
+ # top-level backend file, except for localized files, which we need to keep
+ # in each directory for dependencies from jar manifests for l10n repacks.
+ if (
+ isinstance(obj, GeneratedFile)
+ and not obj.required_during_compile
+ and not obj.localized
+ ):
+ objdir = self.environment.topobjdir
+ else:
+ objdir = obj.objdir
+
+ if objdir not in self._backend_files:
+ self._backend_files[objdir] = BackendMakeFile(
+ obj.srcdir,
+ objdir,
+ obj.config,
+ obj.topsrcdir,
+ self.environment.topobjdir,
+ self.dry_run,
+ )
+ return self._backend_files[objdir]
+
    def consume_object(self, obj):
        """Write out build files necessary to build with recursive make."""

        if not isinstance(obj, ContextDerived):
            return False

        backend_file = self._get_backend_file_for(obj)

        consumed = CommonBackend.consume_object(self, obj)

        # CommonBackend handles XPIDLModule, but we want to do
        # some extra things for them.
        if isinstance(obj, XPIDLModule):
            backend_file.xpt_name = "%s.xpt" % obj.name
            self._idl_dirs.add(obj.relobjdir)

        # If CommonBackend acknowledged the object, we're done with it.
        if consumed:
            return True

        # Re-enter this method with the object's associated .defines; the
        # guard keeps a Defines object from recursing on itself.
        if not isinstance(obj, Defines):
            self.consume_object(obj.defines)

        if isinstance(obj, Linkable):
            self._process_test_support_file(obj)

        # Dispatch on the concrete frontend data type.  Ordering matters:
        # subclasses must be tested before their bases (e.g. HostDefines
        # before Defines below).
        if isinstance(obj, DirectoryTraversal):
            self._process_directory_traversal(obj, backend_file)
        elif isinstance(obj, ConfigFileSubstitution):
            # Other ConfigFileSubstitution should have been acked by
            # CommonBackend.
            assert os.path.basename(obj.output_path) == "Makefile"
            self._create_makefile(obj)
        elif isinstance(obj, Sources):
            suffix_map = {
                ".s": "ASFILES",
                ".c": "CSRCS",
                ".m": "CMSRCS",
                ".mm": "CMMSRCS",
                ".cpp": "CPPSRCS",
                ".S": "SSRCS",
            }
            variables = [suffix_map[obj.canonical_suffix]]
            for files, base, cls, prefix in (
                (obj.static_files, backend_file.srcdir, SourcePath, ""),
                (obj.generated_files, backend_file.objdir, ObjDirPath, "!"),
            ):
                for f in sorted(files):
                    p = self._pretty_path(
                        cls(obj._context, prefix + mozpath.relpath(f, base)),
                        backend_file,
                    )
                    for var in variables:
                        backend_file.write("%s += %s\n" % (var, p))
            # Indexing the defaultdict registers the target in the compile
            # graph (with no dependency edges yet).
            self._compile_graph[mozpath.join(backend_file.relobjdir, "target-objects")]
        elif isinstance(obj, HostSources):
            suffix_map = {
                ".c": "HOST_CSRCS",
                ".mm": "HOST_CMMSRCS",
                ".cpp": "HOST_CPPSRCS",
            }
            variables = [suffix_map[obj.canonical_suffix]]
            for files, base, cls, prefix in (
                (obj.static_files, backend_file.srcdir, SourcePath, ""),
                (obj.generated_files, backend_file.objdir, ObjDirPath, "!"),
            ):
                for f in sorted(files):
                    p = self._pretty_path(
                        cls(obj._context, prefix + mozpath.relpath(f, base)),
                        backend_file,
                    )
                    for var in variables:
                        backend_file.write("%s += %s\n" % (var, p))
            # Register the host-objects target in the compile graph.
            self._compile_graph[mozpath.join(backend_file.relobjdir, "host-objects")]
        elif isinstance(obj, WasmSources):
            suffix_map = {".c": "WASM_CSRCS", ".cpp": "WASM_CPPSRCS"}
            variables = [suffix_map[obj.canonical_suffix]]
            for files, base, cls, prefix in (
                (obj.static_files, backend_file.srcdir, SourcePath, ""),
                (obj.generated_files, backend_file.objdir, ObjDirPath, "!"),
            ):
                for f in sorted(files):
                    p = self._pretty_path(
                        cls(obj._context, prefix + mozpath.relpath(f, base)),
                        backend_file,
                    )
                    for var in variables:
                        backend_file.write("%s += %s\n" % (var, p))
            self._compile_graph[mozpath.join(backend_file.relobjdir, "target-objects")]
        elif isinstance(obj, VariablePassthru):
            # Sorted so output is consistent and we don't bump mtimes.
            for k, v in sorted(obj.variables.items()):
                if isinstance(v, list):
                    for item in v:
                        backend_file.write(
                            "%s += %s\n" % (k, make_quote(shell_quote(item)))
                        )
                elif isinstance(v, bool):
                    # False booleans are simply omitted.
                    if v:
                        backend_file.write("%s := 1\n" % k)
                elif isinstance(v, Path):
                    path = self._pretty_path(Path(obj._context, v), backend_file)
                    backend_file.write("%s := %s\n" % (k, path))
                else:
                    backend_file.write("%s := %s\n" % (k, v))
        elif isinstance(obj, HostDefines):
            self._process_defines(obj, backend_file, which="HOST_DEFINES")
        elif isinstance(obj, Defines):
            self._process_defines(obj, backend_file)

        elif isinstance(obj, GeneratedFile):
            # Pick the earliest tier the file is needed in.
            if obj.required_before_export:
                tier = "pre-export"
            elif obj.required_before_compile:
                tier = "export"
            elif obj.required_during_compile:
                tier = "pre-compile"
            else:
                tier = "misc"
            relobjdir = mozpath.relpath(obj.objdir, self.environment.topobjdir)
            if tier == "pre-compile":
                self._pre_compile.add(relobjdir)
            else:
                self._no_skip[tier].add(relobjdir)
                backend_file.write_once("include $(topsrcdir)/config/AB_rCD.mk\n")
            relobjdir = mozpath.relpath(obj.objdir, backend_file.objdir)
            # For generated files that we handle in the top-level backend file,
            # we want to have a `directory/tier` target depending on the file.
            # For the others, we want a `tier` target.
            if tier != "pre-compile" and relobjdir:
                tier = "%s/%s" % (relobjdir, tier)
            for stmt in self._format_statements_for_generated_file(
                obj, tier, extra_dependencies="backend.mk" if obj.flags else ""
            ):
                backend_file.write(stmt + "\n")

        elif isinstance(obj, JARManifest):
            self._no_skip["misc"].add(backend_file.relobjdir)
            backend_file.write("JAR_MANIFEST := %s\n" % obj.path.full_path)

        elif isinstance(obj, RustProgram):
            self._process_rust_program(obj, backend_file)
            # Hook the program into the compile graph.
            build_target = self._build_target_for_obj(obj)
            self._compile_graph[build_target]
            self._rust_targets.add(build_target)

        elif isinstance(obj, HostRustProgram):
            self._process_host_rust_program(obj, backend_file)
            # Hook the program into the compile graph.
            build_target = self._build_target_for_obj(obj)
            self._compile_graph[build_target]
            self._rust_targets.add(build_target)

        elif isinstance(obj, RustTests):
            self._process_rust_tests(obj, backend_file)

        elif isinstance(obj, Program):
            self._process_program(obj, backend_file)
            self._process_linked_libraries(obj, backend_file)
            self._no_skip["syms"].add(backend_file.relobjdir)

        elif isinstance(obj, HostProgram):
            self._process_host_program(obj, backend_file)
            self._process_linked_libraries(obj, backend_file)

        elif isinstance(obj, SimpleProgram):
            self._process_simple_program(obj, backend_file)
            self._process_linked_libraries(obj, backend_file)
            self._no_skip["syms"].add(backend_file.relobjdir)

        elif isinstance(obj, HostSimpleProgram):
            self._process_host_simple_program(obj.program, backend_file)
            self._process_linked_libraries(obj, backend_file)

        elif isinstance(obj, LocalInclude):
            self._process_local_include(obj.path, backend_file)

        elif isinstance(obj, PerSourceFlag):
            self._process_per_source_flag(obj, backend_file)

        elif isinstance(obj, ComputedFlags):
            self._process_computed_flags(obj, backend_file)

        elif isinstance(obj, InstallationTarget):
            self._process_installation_target(obj, backend_file)

        elif isinstance(obj, BaseRustLibrary):
            self.backend_input_files.add(obj.cargo_file)
            self._process_rust_library(obj, backend_file)
            # No need to call _process_linked_libraries, because Rust
            # libraries are self-contained objects at this point.

            # Hook the library into the compile graph.
            build_target = self._build_target_for_obj(obj)
            self._compile_graph[build_target]
            self._rust_targets.add(build_target)
            if obj.is_gkrust:
                self._gkrust_target = build_target

        elif isinstance(obj, SharedLibrary):
            self._process_shared_library(obj, backend_file)
            self._process_linked_libraries(obj, backend_file)
            self._no_skip["syms"].add(backend_file.relobjdir)

        elif isinstance(obj, StaticLibrary):
            self._process_static_library(obj, backend_file)
            self._process_linked_libraries(obj, backend_file)

        elif isinstance(obj, SandboxedWasmLibrary):
            self._process_sandboxed_wasm_library(obj, backend_file)

        elif isinstance(obj, HostLibrary):
            self._process_linked_libraries(obj, backend_file)

        elif isinstance(obj, HostSharedLibrary):
            self._process_host_shared_library(obj, backend_file)
            self._process_linked_libraries(obj, backend_file)

        elif isinstance(obj, ObjdirFiles):
            self._process_objdir_files(obj, obj.files, backend_file)

        elif isinstance(obj, ObjdirPreprocessedFiles):
            self._process_final_target_pp_files(
                obj, obj.files, backend_file, "OBJDIR_PP_FILES"
            )

        elif isinstance(obj, LocalizedFiles):
            self._process_localized_files(obj, obj.files, backend_file)

        elif isinstance(obj, LocalizedPreprocessedFiles):
            self._process_localized_pp_files(obj, obj.files, backend_file)

        elif isinstance(obj, FinalTargetFiles):
            self._process_final_target_files(obj, obj.files, backend_file)

        elif isinstance(obj, FinalTargetPreprocessedFiles):
            self._process_final_target_pp_files(
                obj, obj.files, backend_file, "DIST_FILES"
            )

        elif isinstance(obj, ChromeManifestEntry):
            self._process_chrome_manifest_entry(obj, backend_file)

        elif isinstance(obj, TestManifest):
            self._process_test_manifest(obj, backend_file)

        else:
            return False

        return True
+
    def _fill_root_mk(self):
        """
        Create two files, root.mk and root-deps.mk, the first containing
        convenience variables, and the other dependency definitions for a
        hopefully proper directory traversal.
        """
        for tier, no_skip in self._no_skip.items():
            self.log(
                logging.DEBUG,
                "fill_root_mk",
                {"number": len(no_skip), "tier": tier},
                "Using {number} directories during {tier}",
            )

        def should_skip(tier, dir):
            if tier in self._no_skip:
                return dir not in self._no_skip[tier]
            return False

        # Traverse directories in parallel, and skip static dirs
        # NOTE: parallel_filter closes over the name `tier`; it reads
        # whichever tier the `for tier, filter in ...` loops below are
        # currently processing (relies on late binding).
        def parallel_filter(current, subdirs):
            all_subdirs = subdirs.dirs + subdirs.tests
            if should_skip(tier, current) or current.startswith("subtiers/"):
                current = None
            return current, all_subdirs, []

        # build everything in parallel, including static dirs
        # Because of bug 925236 and possible other unknown race conditions,
        # don't parallelize the libs tier.
        def libs_filter(current, subdirs):
            if should_skip("libs", current) or current.startswith("subtiers/"):
                current = None
            return current, [], subdirs.dirs + subdirs.tests

        # Because of bug 925236 and possible other unknown race conditions,
        # don't parallelize the tools tier. There aren't many directories for
        # this tier anyways.
        def tools_filter(current, subdirs):
            if should_skip("tools", current) or current.startswith("subtiers/"):
                current = None
            return current, [], subdirs.dirs + subdirs.tests

        filters = [
            ("export", parallel_filter),
            ("libs", libs_filter),
            ("misc", parallel_filter),
            ("tools", tools_filter),
            ("check", parallel_filter),
        ]

        root_deps_mk = Makefile()

        # Fill the dependencies for traversal of each tier.
        for tier, filter in sorted(filters, key=itemgetter(0)):
            main, all_deps = self._traversal.compute_dependencies(filter)
            for dir, deps in sorted(all_deps.items()):
                if deps is not None or (dir in self._idl_dirs and tier == "export"):
                    rule = root_deps_mk.create_rule(["%s/%s" % (dir, tier)])
                    if deps:
                        rule.add_dependencies(
                            "%s/%s" % (d, tier) for d in sorted(deps) if d
                        )
            rule = root_deps_mk.create_rule(["recurse_%s" % tier])
            if main:
                rule.add_dependencies("%s/%s" % (d, tier) for d in sorted(main))

        rule = root_deps_mk.create_rule(["recurse_pre-compile"])
        rule.add_dependencies("%s/pre-compile" % d for d in sorted(self._pre_compile))

        # Compile targets whose directory also generates pre-compile files
        # must wait for those files first.
        targets_with_pre_compile = sorted(
            t for t in self._compile_graph if mozpath.dirname(t) in self._pre_compile
        )
        for t in targets_with_pre_compile:
            relobjdir = mozpath.dirname(t)
            rule = root_deps_mk.create_rule([t])
            rule.add_dependencies(["%s/pre-compile" % relobjdir])

        # Union of all dependency sets: every target something depends on.
        all_compile_deps = (
            six.moves.reduce(lambda x, y: x | y, self._compile_graph.values())
            if self._compile_graph
            else set()
        )
        # Include the following as dependencies of the top recursion target for
        # compilation:
        # - nodes that are not dependended upon by anything. Typically, this
        #   would include programs, that need to be recursed, but that nothing
        #   depends on.
        # - nodes that have no dependencies of their own. Technically, this is
        #   not necessary, because other things have dependencies on them, and
        #   they all end up rooting to nodes from the above category. But the
        #   way make works[1] is such that there can be benefits listing them
        #   as direct dependencies of the top recursion target, to somehow
        #   prioritize them.
        #   1. See bug 1262241 comment 5.
        compile_roots = [
            t
            for t, deps in six.iteritems(self._compile_graph)
            if not deps or t not in all_compile_deps
        ]

        def add_category_rules(category, roots, graph):
            rule = root_deps_mk.create_rule(["recurse_%s" % category])
            # Directories containing rust compilations don't generally depend
            # on other directories in the tree, so putting them first here will
            # start them earlier in the build.
            rust_roots = sorted(r for r in roots if r in self._rust_targets)
            if category == "compile" and rust_roots:
                rust_rule = root_deps_mk.create_rule(["recurse_rust"])
                rust_rule.add_dependencies(rust_roots)
                # Ensure our cargo invocations are serialized, and gecko comes
                # first. Cargo will lock on the build output directory anyway,
                # so trying to run things in parallel is not useful. Dependencies
                # for gecko are especially expensive to build and parallelize
                # poorly, so prioritizing these will save some idle time in full
                # builds.
                for prior_target, target in pairwise(
                    sorted(
                        [t for t in rust_roots], key=lambda t: t != self._gkrust_target
                    )
                ):
                    r = root_deps_mk.create_rule([target])
                    r.add_dependencies([prior_target])

            rule.add_dependencies(chain(rust_roots, sorted(roots)))
            for target, deps in sorted(graph.items()):
                if deps:
                    rule = root_deps_mk.create_rule([target])
                    rule.add_dependencies(sorted(deps))

        non_default_roots = defaultdict(list)
        non_default_graphs = defaultdict(lambda: OrderedDefaultDict(set))

        for root in compile_roots:
            # If this is a non-default target, separate the root from the
            # rest of the compile graph.
            target_name = mozpath.basename(root)

            if target_name not in ("target", "target-objects", "host", "host-objects"):
                non_default_roots[target_name].append(root)
                non_default_graphs[target_name][root] = self._compile_graph[root]
                del self._compile_graph[root]

        for root in chain(*non_default_roots.values()):
            compile_roots.remove(root)
            dirname = mozpath.dirname(root)
            # If a directory only contains non-default compile targets, we don't
            # attempt to dump symbols there.
            if (
                dirname in self._no_skip["syms"]
                and "%s/target" % dirname not in self._compile_graph
            ):
                self._no_skip["syms"].remove(dirname)

        add_category_rules("compile", compile_roots, self._compile_graph)
        for category, graph in sorted(six.iteritems(non_default_graphs)):
            add_category_rules(category, non_default_roots[category], graph)

        root_mk = Makefile()

        # Fill root.mk with the convenience variables.
        for tier, filter in filters:
            all_dirs = self._traversal.traverse("", filter)
            root_mk.add_statement("%s_dirs := %s" % (tier, " ".join(all_dirs)))

        # Need a list of compile targets because we can't use pattern rules:
        # https://savannah.gnu.org/bugs/index.php?42833
        root_mk.add_statement(
            "pre_compile_targets := %s"
            % " ".join(sorted("%s/pre-compile" % p for p in self._pre_compile))
        )
        root_mk.add_statement(
            "compile_targets := %s"
            % " ".join(sorted(set(self._compile_graph.keys()) | all_compile_deps))
        )
        root_mk.add_statement(
            "syms_targets := %s"
            % " ".join(sorted(set("%s/syms" % d for d in self._no_skip["syms"])))
        )
        root_mk.add_statement(
            "rust_targets := %s" % " ".join(sorted(self._rust_targets))
        )

        root_mk.add_statement(
            "non_default_tiers := %s" % " ".join(sorted(non_default_roots.keys()))
        )

        for category, graphs in sorted(six.iteritems(non_default_graphs)):
            category_dirs = [mozpath.dirname(target) for target in graphs.keys()]
            root_mk.add_statement("%s_dirs := %s" % (category, " ".join(category_dirs)))

        root_mk.add_statement("include root-deps.mk")

        with self._write_file(
            mozpath.join(self.environment.topobjdir, "root.mk")
        ) as root:
            root_mk.dump(root, removal_guard=False)

        with self._write_file(
            mozpath.join(self.environment.topobjdir, "root-deps.mk")
        ) as root_deps:
            root_deps_mk.dump(root_deps, removal_guard=False)
+
+ def _add_unified_build_rules(
+ self,
+ makefile,
+ unified_source_mapping,
+ unified_files_makefile_variable="unified_files",
+ include_curdir_build_rules=True,
+ ):
+
+ # In case it's a generator.
+ unified_source_mapping = sorted(unified_source_mapping)
+
+ explanation = (
+ "\n"
+ "# We build files in 'unified' mode by including several files\n"
+ "# together into a single source file. This cuts down on\n"
+ "# compilation times and debug information size."
+ )
+ makefile.add_statement(explanation)
+
+ all_sources = " ".join(source for source, _ in unified_source_mapping)
+ makefile.add_statement(
+ "%s := %s" % (unified_files_makefile_variable, all_sources)
+ )
+
+ if include_curdir_build_rules:
+ makefile.add_statement(
+ "\n"
+ '# Make sometimes gets confused between "foo" and "$(CURDIR)/foo".\n'
+ "# Help it out by explicitly specifiying dependencies."
+ )
+ makefile.add_statement(
+ "all_absolute_unified_files := \\\n"
+ " $(addprefix $(CURDIR)/,$(%s))" % unified_files_makefile_variable
+ )
+ rule = makefile.create_rule(["$(all_absolute_unified_files)"])
+ rule.add_dependencies(["$(CURDIR)/%: %"])
+
+ def _check_blacklisted_variables(self, makefile_in, makefile_content):
+ if "EXTERNALLY_MANAGED_MAKE_FILE" in makefile_content:
+ # Bypass the variable restrictions for externally managed makefiles.
+ return
+
+ for l in makefile_content.splitlines():
+ l = l.strip()
+ # Don't check comments
+ if l.startswith("#"):
+ continue
+ for x in chain(_MOZBUILD_ONLY_VARIABLES, DEPRECATED_VARIABLES):
+ if x not in l:
+ continue
+
+ # Finding the variable name in the Makefile is not enough: it
+ # may just appear as part of something else, like DIRS appears
+ # in GENERATED_DIRS.
+ if re.search(r"\b%s\s*[:?+]?=" % x, l):
+ if x in _MOZBUILD_ONLY_VARIABLES:
+ message = MOZBUILD_VARIABLES_MESSAGE
+ else:
+ message = DEPRECATED_VARIABLES_MESSAGE
+ raise Exception(
+ "Variable %s is defined in %s. %s" % (x, makefile_in, message)
+ )
+
+ def consume_finished(self):
+ CommonBackend.consume_finished(self)
+
+ for objdir, backend_file in sorted(self._backend_files.items()):
+ srcdir = backend_file.srcdir
+ with self._write_file(fh=backend_file) as bf:
+ makefile_in = mozpath.join(srcdir, "Makefile.in")
+ makefile = mozpath.join(objdir, "Makefile")
+
+ # If Makefile.in exists, use it as a template. Otherwise,
+ # create a stub.
+ stub = not os.path.exists(makefile_in)
+ if not stub:
+ self.log(
+ logging.DEBUG,
+ "substitute_makefile",
+ {"path": makefile},
+ "Substituting makefile: {path}",
+ )
+ self._makefile_in_count += 1
+
+ # In the export and libs tiers, we don't skip directories
+ # containing a Makefile.in.
+ # topobjdir is handled separatedly, don't do anything for
+ # it.
+ if bf.relobjdir:
+ for tier in ("export", "libs"):
+ self._no_skip[tier].add(bf.relobjdir)
+ else:
+ self.log(
+ logging.DEBUG,
+ "stub_makefile",
+ {"path": makefile},
+ "Creating stub Makefile: {path}",
+ )
+
+ obj = self.Substitution()
+ obj.output_path = makefile
+ obj.input_path = makefile_in
+ obj.topsrcdir = backend_file.topsrcdir
+ obj.topobjdir = bf.environment.topobjdir
+ obj.config = bf.environment
+ self._create_makefile(obj, stub=stub)
+ with io.open(obj.output_path, encoding="utf-8") as fh:
+ content = fh.read()
+ # Directories with a Makefile containing a tools target, or
+ # XPI_PKGNAME can't be skipped and must run during the
+ # 'tools' tier.
+ for t in ("XPI_PKGNAME", "tools"):
+ if t not in content:
+ continue
+ if t == "tools" and not re.search(
+ "(?:^|\s)tools.*::", content, re.M
+ ):
+ continue
+ if objdir == self.environment.topobjdir:
+ continue
+ self._no_skip["tools"].add(
+ mozpath.relpath(objdir, self.environment.topobjdir)
+ )
+
+ # Directories with a Makefile containing a check target
+ # can't be skipped and must run during the 'check' tier.
+ if re.search("(?:^|\s)check.*::", content, re.M):
+ self._no_skip["check"].add(
+ mozpath.relpath(objdir, self.environment.topobjdir)
+ )
+
+ # Detect any Makefile.ins that contain variables on the
+ # moz.build-only list
+ self._check_blacklisted_variables(makefile_in, content)
+
+ self._fill_root_mk()
+
+ # Make the master test manifest files.
+ for flavor, t in self._test_manifests.items():
+ install_prefix, manifests = t
+ manifest_stem = mozpath.join(install_prefix, "%s.ini" % flavor)
+ self._write_master_test_manifest(
+ mozpath.join(self.environment.topobjdir, "_tests", manifest_stem),
+ manifests,
+ )
+
+ # Catch duplicate inserts.
+ try:
+ self._install_manifests["_tests"].add_optional_exists(manifest_stem)
+ except ValueError:
+ pass
+
+ self._write_manifests("install", self._install_manifests)
+
+ ensureParentDir(mozpath.join(self.environment.topobjdir, "dist", "foo"))
+
    def _pretty_path_parts(self, path, backend_file):
        """Return a ``(make-variable prefix, relative path)`` pair for *path*.

        The prefix is a make variable reference such as ``$(srcdir)`` or
        ``$(DEPTH)`` when the path falls under a known directory of
        *backend_file* or the top-level environment; otherwise the prefix is
        empty and the full path is returned as-is.
        """
        assert isinstance(path, Path)
        if isinstance(path, SourcePath):
            # Most specific prefix first: srcdir before topsrcdir.
            if path.full_path.startswith(backend_file.srcdir):
                return "$(srcdir)", path.full_path[len(backend_file.srcdir) :]
            if path.full_path.startswith(backend_file.topsrcdir):
                return "$(topsrcdir)", path.full_path[len(backend_file.topsrcdir) :]
        elif isinstance(path, ObjDirPath):
            if path.full_path.startswith(backend_file.objdir):
                # The +1 skips the path separator, so (unlike the other
                # branches) this relative part has no leading slash.
                return "", path.full_path[len(backend_file.objdir) + 1 :]
            if path.full_path.startswith(self.environment.topobjdir):
                return "$(DEPTH)", path.full_path[len(self.environment.topobjdir) :]

        return "", path.full_path
+
+ def _pretty_path(self, path, backend_file):
+ return "".join(self._pretty_path_parts(path, backend_file))
+
    def _process_unified_sources(self, obj):
        """Write the (unified) source lists for *obj* into its backend file."""
        backend_file = self._get_backend_file_for(obj)

        # Canonical source suffix -> makefile variable for unified sources.
        suffix_map = {
            ".c": "UNIFIED_CSRCS",
            ".m": "UNIFIED_CMSRCS",
            ".mm": "UNIFIED_CMMSRCS",
            ".cpp": "UNIFIED_CPPSRCS",
        }

        var = suffix_map[obj.canonical_suffix]
        # e.g. UNIFIED_CPPSRCS -> CPPSRCS.
        non_unified_var = var[len("UNIFIED_") :]

        if obj.have_unified_mapping:
            self._add_unified_build_rules(
                backend_file,
                obj.unified_source_mapping,
                unified_files_makefile_variable=var,
                include_curdir_build_rules=False,
            )
            backend_file.write("%s += $(%s)\n" % (non_unified_var, var))
        else:
            # Sorted so output is consistent and we don't bump mtimes.
            source_files = list(sorted(obj.files))

            backend_file.write("%s += %s\n" % (non_unified_var, " ".join(source_files)))

        # Intentional bare lookup: _compile_graph is a defaultdict-like
        # mapping, so this creates the target-objects node if missing
        # (same idiom as in _process_linked_libraries).
        self._compile_graph[mozpath.join(backend_file.relobjdir, "target-objects")]
+
    def _process_directory_traversal(self, obj, backend_file):
        """Process a data.DirectoryTraversal instance."""
        fh = backend_file.fh

        def relativize(base, dirs):
            # Lazy generator; each call site consumes it exactly once.
            return (mozpath.relpath(d.translated, base) for d in dirs)

        if obj.dirs:
            # DIRS is relative to this directory's objdir; the traversal
            # graph wants paths relative to the topobjdir.
            fh.write(
                "DIRS := %s\n" % " ".join(relativize(backend_file.objdir, obj.dirs))
            )
            self._traversal.add(
                backend_file.relobjdir,
                dirs=relativize(self.environment.topobjdir, obj.dirs),
            )

        # The directory needs to be registered whether subdirectories have been
        # registered or not.
        self._traversal.add(backend_file.relobjdir)
+
+ def _process_defines(self, obj, backend_file, which="DEFINES"):
+ """Output the DEFINES rules to the given backend file."""
+ defines = list(obj.get_defines())
+ if defines:
+ defines = " ".join(shell_quote(d) for d in defines)
+ backend_file.write_once("%s += %s\n" % (which, defines))
+
    def _process_installation_target(self, obj, backend_file):
        """Write XPI_NAME/DIST_SUBDIR/FINAL_TARGET for an installation target."""
        # A few makefiles need to be able to override the following rules via
        # make XPI_NAME=blah commands, so we default to the lazy evaluation as
        # much as possible here to avoid breaking things.
        if obj.xpiname:
            backend_file.write("XPI_NAME = %s\n" % (obj.xpiname))
        if obj.subdir:
            backend_file.write("DIST_SUBDIR = %s\n" % (obj.subdir))
        if obj.target and not obj.is_custom():
            backend_file.write("FINAL_TARGET = $(DEPTH)/%s\n" % (obj.target))
        else:
            # Lazily computed at make time so XPI_NAME/DIST_SUBDIR overrides
            # on the command line take effect.
            backend_file.write(
                "FINAL_TARGET = $(if $(XPI_NAME),$(DIST)/xpi-stage/$(XPI_NAME),"
                "$(DIST)/bin)$(DIST_SUBDIR:%=/%)\n"
            )

        if not obj.enabled:
            backend_file.write("NO_DIST_INSTALL := 1\n")
+
    def _handle_idl_manager(self, manager):
        """Write the makefile driving XPIDL code generation.

        Records install manifest entries for the generated headers and .xpt
        files, then creates config/makefiles/xpidl/Makefile from its
        Makefile.in with per-module dependency rules substituted in.
        """
        build_files = self._install_manifests["xpidl"]

        for p in ("Makefile", "backend.mk", ".deps/.mkdir.done"):
            build_files.add_optional_exists(p)

        for stem in manager.idl_stems():
            self._install_manifests["dist_include"].add_optional_exists("%s.h" % stem)

        for module in manager.modules:
            build_files.add_optional_exists(mozpath.join(".deps", "%s.pp" % module))

        modules = manager.modules
        xpt_modules = sorted(modules.keys())

        mk = Makefile()
        all_directories = set()

        for module_name in xpt_modules:
            module = manager.modules[module_name]
            all_directories |= module.directories
            deps = sorted(module.idl_files)

            # It may seem strange to have the .idl files listed as
            # prerequisites both here and in the auto-generated .pp files.
            # It is necessary to list them here to handle the case where a
            # new .idl is added to an xpt. If we add a new .idl and nothing
            # else has changed, the new .idl won't be referenced anywhere
            # except in the command invocation. Therefore, the .xpt won't
            # be rebuilt because the dependencies say it is up to date. By
            # listing the .idls here, we ensure the make file has a
            # reference to the new .idl. Since the new .idl presumably has
            # an mtime newer than the .xpt, it will trigger xpt generation.

            mk.add_statement("%s_deps := %s" % (module_name, " ".join(deps)))

            build_files.add_optional_exists("%s.xpt" % module_name)

        mk.add_statement("all_idl_dirs := %s" % " ".join(sorted(all_directories)))

        # Render the generated rules to a string for substitution below.
        rules = StringIO()
        mk.dump(rules, removal_guard=False)

        # Create dependency for output header so we force regeneration if the
        # header was deleted. This ideally should not be necessary. However,
        # some processes (such as PGO at the time this was implemented) wipe
        # out dist/include without regard to our install manifests.

        obj = self.Substitution()
        obj.output_path = mozpath.join(
            self.environment.topobjdir, "config", "makefiles", "xpidl", "Makefile"
        )
        obj.input_path = mozpath.join(
            self.environment.topsrcdir, "config", "makefiles", "xpidl", "Makefile.in"
        )
        obj.topsrcdir = self.environment.topsrcdir
        obj.topobjdir = self.environment.topobjdir
        obj.config = self.environment
        self._create_makefile(
            obj,
            extra=dict(
                xpidl_rules=rules.getvalue(), xpidl_modules=" ".join(xpt_modules)
            ),
        )
+
    def _process_program(self, obj, backend_file):
        """Record the PROGRAM built in this directory."""
        backend_file.write(
            "PROGRAM = %s\n" % self._pretty_path(obj.output_path, backend_file)
        )
        # Mark C-only programs (same marker as in _process_simple_program).
        if not obj.cxx_link and not self.environment.bin_suffix:
            backend_file.write("PROG_IS_C_ONLY_%s := 1\n" % obj.program)
+
    def _process_host_program(self, program, backend_file):
        """Record the HOST_PROGRAM built in this directory."""
        backend_file.write(
            "HOST_PROGRAM = %s\n" % self._pretty_path(program.output_path, backend_file)
        )
+
    def _process_rust_program_base(
        self, obj, backend_file, target_variable, target_cargo_variable
    ):
        """Shared emission for target/host Rust programs: the Cargo file,
        the Cargo target dir, and the program/cargo list variables."""
        backend_file.write_once("CARGO_FILE := %s\n" % obj.cargo_file)
        # Normalized so every Cargo invocation agrees on CARGO_TARGET_DIR
        # (see the rationale comment in _process_rust_library).
        target_dir = mozpath.normpath(backend_file.environment.topobjdir)
        backend_file.write_once("CARGO_TARGET_DIR := %s\n" % target_dir)
        backend_file.write("%s += $(DEPTH)/%s\n" % (target_variable, obj.location))
        backend_file.write("%s += %s\n" % (target_cargo_variable, obj.name))
+
    def _process_rust_program(self, obj, backend_file):
        """Record a target Rust program."""
        self._process_rust_program_base(
            obj, backend_file, "RUST_PROGRAMS", "RUST_CARGO_PROGRAMS"
        )
+
    def _process_host_rust_program(self, obj, backend_file):
        """Record a host Rust program."""
        self._process_rust_program_base(
            obj, backend_file, "HOST_RUST_PROGRAMS", "HOST_RUST_CARGO_PROGRAMS"
        )
+
    def _process_rust_tests(self, obj, backend_file):
        """Wire Rust tests into `make check` and record their metadata."""
        if obj.config.substs.get("MOZ_RUST_TESTS"):
            # If --enable-rust-tests has been set, run these as a part of
            # make check.
            self._no_skip["check"].add(backend_file.relobjdir)
            backend_file.write("check:: force-cargo-test-run\n")
        build_target = self._build_target_for_obj(obj)
        # Intentional bare lookup: creates the node in the compile graph.
        self._compile_graph[build_target]
        self._process_non_default_target(obj, "force-cargo-test-run", backend_file)
        backend_file.write_once("CARGO_FILE := $(srcdir)/Cargo.toml\n")
        backend_file.write_once("RUST_TESTS := %s\n" % " ".join(obj.names))
        backend_file.write_once("RUST_TEST_FEATURES := %s\n" % " ".join(obj.features))
+
+ def _process_simple_program(self, obj, backend_file):
+ if obj.is_unit_test:
+ backend_file.write("CPP_UNIT_TESTS += %s\n" % obj.program)
+ assert obj.cxx_link
+ else:
+ backend_file.write("SIMPLE_PROGRAMS += %s\n" % obj.program)
+ if not obj.cxx_link and not self.environment.bin_suffix:
+ backend_file.write("PROG_IS_C_ONLY_%s := 1\n" % obj.program)
+
+ def _process_host_simple_program(self, program, backend_file):
+ backend_file.write("HOST_SIMPLE_PROGRAMS += %s\n" % program)
+
    def _process_test_support_file(self, obj):
        """Track a test support binary in the ``_tests`` install manifest."""
        # Ensure test support programs and libraries are tracked by an
        # install manifest for the benefit of the test packager.
        if not obj.install_target.startswith("_tests"):
            return

        dest_basename = None
        if isinstance(obj, BaseLibrary):
            dest_basename = obj.lib_name
        elif isinstance(obj, BaseProgram):
            dest_basename = obj.program
        # Other object kinds are not installed as test support files.
        if dest_basename is None:
            return

        self._install_manifests["_tests"].add_optional_exists(
            # Strip the leading "_tests/" from the install target.
            mozpath.join(obj.install_target[len("_tests") + 1 :], dest_basename)
        )
+
    def _process_test_manifest(self, obj, backend_file):
        """Register a test manifest: its installs, pattern installs, and its
        entry in the per-flavor master manifest."""
        # Much of the logic in this function could be moved to CommonBackend.
        for source in obj.source_relpaths:
            self.backend_input_files.add(mozpath.join(obj.topsrcdir, source))

        # Don't allow files to be defined multiple times unless it is allowed.
        # We currently allow duplicates for non-test files or test files if
        # the manifest is listed as a duplicate.
        for source, (dest, is_test) in obj.installs.items():
            try:
                self._install_manifests["_test_files"].add_link(source, dest)
            except ValueError:
                if not obj.dupe_manifest and is_test:
                    raise

        for base, pattern, dest in obj.pattern_installs:
            try:
                self._install_manifests["_test_files"].add_pattern_link(
                    base, pattern, dest
                )
            except ValueError:
                if not obj.dupe_manifest:
                    raise

        for dest in obj.external_installs:
            try:
                self._install_manifests["_test_files"].add_optional_exists(dest)
            except ValueError:
                if not obj.dupe_manifest:
                    raise

        # Record this manifest in the per-flavor (install_prefix, manifests)
        # pair used later by _write_master_test_manifest.
        m = self._test_manifests.setdefault(obj.flavor, (obj.install_prefix, set()))
        m[1].add(obj.manifest_obj_relpath)

        try:
            from reftest import ReftestManifest

            if isinstance(obj.manifest, ReftestManifest):
                # Mark included files as part of the build backend so changes
                # result in re-config.
                self.backend_input_files |= obj.manifest.manifests
        except ImportError:
            # Ignore errors caused by the reftest module not being present.
            # This can happen when building SpiderMonkey standalone, for example.
            pass
+
    def _process_local_include(self, local_include, backend_file):
        """Append an -I entry for *local_include* to LOCAL_INCLUDES."""
        d, path = self._pretty_path_parts(local_include, backend_file)
        if isinstance(local_include, ObjDirPath) and not d:
            # path doesn't start with a slash in this case
            d = "$(CURDIR)/"
        elif d == "$(DEPTH)":
            d = "$(topobjdir)"
        quoted_path = shell_quote(path) if path else path
        if quoted_path != path:
            # The path needed quoting: splice the make variable prefix just
            # inside the opening quote character.
            path = quoted_path[0] + d + quoted_path[1:]
        else:
            path = d + path
        backend_file.write("LOCAL_INCLUDES += -I%s\n" % path)
+
+ def _process_per_source_flag(self, per_source_flag, backend_file):
+ for flag in per_source_flag.flags:
+ backend_file.write(
+ "%s_FLAGS += %s\n" % (mozpath.basename(per_source_flag.file_name), flag)
+ )
+
    def _process_computed_flags(self, computed_flags, backend_file):
        """Write each computed flags variable as COMPUTED_<VAR>, with values
        quoted for both the shell and make."""
        for var, flags in computed_flags.get_flags():
            backend_file.write(
                "COMPUTED_%s += %s\n"
                % (var, " ".join(make_quote(shell_quote(f)) for f in flags))
            )
+
+ def _process_non_default_target(self, libdef, target_name, backend_file):
+ backend_file.write("%s:: %s\n" % (libdef.output_category, target_name))
+ backend_file.write("MOZBUILD_NON_DEFAULT_TARGETS += %s\n" % target_name)
+
+ def _process_shared_library(self, libdef, backend_file):
+ backend_file.write_once("LIBRARY_NAME := %s\n" % libdef.basename)
+ backend_file.write("FORCE_SHARED_LIB := 1\n")
+ backend_file.write("IMPORT_LIBRARY := %s\n" % libdef.import_name)
+ backend_file.write("SHARED_LIBRARY := %s\n" % libdef.lib_name)
+ if libdef.soname:
+ backend_file.write("DSO_SONAME := %s\n" % libdef.soname)
+ if libdef.symbols_file:
+ if libdef.symbols_link_arg:
+ backend_file.write("EXTRA_DSO_LDOPTS += %s\n" % libdef.symbols_link_arg)
+ if not libdef.cxx_link:
+ backend_file.write("LIB_IS_C_ONLY := 1\n")
+ if libdef.output_category:
+ self._process_non_default_target(libdef, libdef.lib_name, backend_file)
+ # Override the install rule target for this library. This is hacky,
+ # but can go away as soon as we start building libraries in their
+ # final location (bug 1459764).
+ backend_file.write("SHARED_LIBRARY_TARGET := %s\n" % libdef.output_category)
+
+ def _process_static_library(self, libdef, backend_file):
+ backend_file.write_once("LIBRARY_NAME := %s\n" % libdef.basename)
+ backend_file.write("FORCE_STATIC_LIB := 1\n")
+ backend_file.write("REAL_LIBRARY := %s\n" % libdef.lib_name)
+ if libdef.no_expand_lib:
+ backend_file.write("NO_EXPAND_LIBS := 1\n")
+
+ def _process_sandboxed_wasm_library(self, libdef, backend_file):
+ backend_file.write("WASM_ARCHIVE := %s\n" % libdef.basename)
+
    def _process_rust_library(self, libdef, backend_file):
        """Emit the makefile variables describing a Rust library."""
        backend_file.write_once(
            "%s := %s\n" % (libdef.LIB_FILE_VAR, libdef.import_name)
        )
        backend_file.write_once("CARGO_FILE := $(srcdir)/Cargo.toml\n")
        # Need to normalize the path so Cargo sees the same paths from all
        # possible invocations of Cargo with this CARGO_TARGET_DIR. Otherwise,
        # Cargo's dependency calculations don't work as we expect and we wind
        # up recompiling lots of things.
        target_dir = mozpath.normpath(backend_file.environment.topobjdir)
        backend_file.write("CARGO_TARGET_DIR := %s\n" % target_dir)
        if libdef.features:
            backend_file.write(
                "%s := %s\n" % (libdef.FEATURES_VAR, " ".join(libdef.features))
            )
        if libdef.output_category:
            self._process_non_default_target(libdef, libdef.import_name, backend_file)
+
+ def _process_host_shared_library(self, libdef, backend_file):
+ backend_file.write("HOST_SHARED_LIBRARY = %s\n" % libdef.lib_name)
+
    def _build_target_for_obj(self, obj):
        """Return the compile-graph node name ("relative-dir/target") for obj."""
        if hasattr(obj, "output_category") and obj.output_category:
            target_name = obj.output_category
        else:
            target_name = obj.KIND
            # wasm objects share the "target" node name.
            if target_name == "wasm":
                target_name = "target"
        return "%s/%s" % (
            mozpath.relpath(obj.objdir, self.environment.topobjdir),
            target_name,
        )
+
    def _process_linked_libraries(self, obj, backend_file):
        """Emit link inputs (objects, static/shared/OS libraries) for *obj*
        and record its dependencies in the compile graph."""

        def pretty_relpath(lib, name):
            # Path of lib's file `name` relative to obj's directory.
            return os.path.normpath(
                mozpath.join(mozpath.relpath(lib.objdir, obj.objdir), name)
            )

        objs, shared_libs, os_libs, static_libs = self._expand_libs(obj)

        obj_target = obj.name
        if isinstance(obj, Program):
            obj_target = self._pretty_path(obj.output_path, backend_file)

        objs_ref = " \\\n    ".join(os.path.relpath(o, obj.objdir) for o in objs)
        # Don't bother with a list file if we're only linking objects built
        # in this directory or building a real static library. This
        # accommodates clang-plugin, where we would otherwise pass an
        # incorrect list file format to the host compiler as well as when
        # creating an archive with AR, which doesn't understand list files.
        if (
            objs == obj.objs
            and not isinstance(obj, (HostLibrary, StaticLibrary, SandboxedWasmLibrary))
            or isinstance(obj, (StaticLibrary, SandboxedWasmLibrary))
            and obj.no_expand_lib
        ):
            backend_file.write_once("%s_OBJS := %s\n" % (obj.name, objs_ref))
            backend_file.write("%s: %s\n" % (obj_target, objs_ref))
        elif not isinstance(obj, (HostLibrary, StaticLibrary, SandboxedWasmLibrary)):
            # Link via a list file to keep command lines short.
            list_file_path = "%s.list" % obj.name.replace(".", "_")
            list_file_ref = self._make_list_file(
                obj.KIND, obj.objdir, objs, list_file_path
            )
            backend_file.write_once("%s_OBJS := %s\n" % (obj.name, list_file_ref))
            backend_file.write_once("%s: %s\n" % (obj_target, list_file_path))
            backend_file.write("%s: %s\n" % (obj_target, objs_ref))

        if getattr(obj, "symbols_file", None):
            backend_file.write_once("%s: %s\n" % (obj_target, obj.symbols_file))

        for lib in shared_libs:
            assert obj.KIND != "host" and obj.KIND != "wasm"
            backend_file.write_once(
                "SHARED_LIBS += %s\n" % pretty_relpath(lib, lib.import_name)
            )

        # We have to link any Rust libraries after all intermediate static
        # libraries have been listed to ensure that the Rust libraries are
        # searched after the C/C++ objects that might reference Rust symbols.
        var = "HOST_LIBS" if obj.KIND == "host" else "STATIC_LIBS"
        for lib in chain(
            (l for l in static_libs if not isinstance(l, BaseRustLibrary)),
            (l for l in static_libs if isinstance(l, BaseRustLibrary)),
        ):
            backend_file.write_once(
                "%s += %s\n" % (var, pretty_relpath(lib, lib.import_name))
            )

        for lib in os_libs:
            if obj.KIND == "target":
                backend_file.write_once("OS_LIBS += %s\n" % lib)
            elif obj.KIND == "host":
                backend_file.write_once("HOST_EXTRA_LIBS += %s\n" % lib)

        if not isinstance(obj, (StaticLibrary, HostLibrary)) or obj.no_expand_lib:
            # This will create the node even if there aren't any linked libraries.
            build_target = self._build_target_for_obj(obj)
            self._compile_graph[build_target]

            # Make the build target depend on all the target/host-objects that
            # recursively are linked into it.
            def recurse_libraries(obj):
                for lib in obj.linked_libraries:
                    if (
                        isinstance(lib, (StaticLibrary, HostLibrary))
                        and not lib.no_expand_lib
                    ):
                        recurse_libraries(lib)
                    elif not isinstance(lib, ExternalLibrary):
                        self._compile_graph[build_target].add(
                            self._build_target_for_obj(lib)
                        )
                relobjdir = mozpath.relpath(obj.objdir, self.environment.topobjdir)
                objects_target = mozpath.join(relobjdir, "%s-objects" % obj.KIND)
                if objects_target in self._compile_graph:
                    self._compile_graph[build_target].add(objects_target)

            recurse_libraries(obj)

        # Process library-based defines
        self._process_defines(obj.lib_defines, backend_file)
+
+ def _add_install_target(self, backend_file, install_target, tier, dest, files):
+ self._no_skip[tier].add(backend_file.relobjdir)
+ for f in files:
+ backend_file.write("%s_FILES += %s\n" % (install_target, f))
+ backend_file.write("%s_DEST := %s\n" % (install_target, dest))
+ backend_file.write("%s_TARGET := %s\n" % (install_target, tier))
+ backend_file.write("INSTALL_TARGETS += %s\n" % install_target)
+
    def _process_final_target_files(self, obj, files, backend_file):
        """Add install manifest entries (and install targets) for the final
        target files of *obj*."""
        target = obj.install_target
        path = mozpath.basedir(
            target, ("dist/bin", "dist/xpi-stage", "_tests", "dist/include")
        )
        if not path:
            raise Exception("Cannot install to " + target)

        # Exports are not interesting to artifact builds.
        if path == "dist/include" and self.environment.is_artifact_build:
            return

        manifest = path.replace("/", "_")
        install_manifest = self._install_manifests[manifest]
        reltarget = mozpath.relpath(target, path)

        # NOTE: `path` and `files` are rebound by this loop.
        for path, files in files.walk():
            target_var = (mozpath.join(target, path) if path else target).replace(
                "/", "_"
            )
            # We don't necessarily want to combine these, because non-wildcard
            # absolute files tend to be libraries, and we don't want to mix
            # those in with objdir headers that will be installed during export.
            # (See bug 1642882 for details.)
            objdir_files = []
            absolute_files = []

            for f in files:
                assert not isinstance(f, RenamedSourcePath)
                dest_dir = mozpath.join(reltarget, path)
                dest_file = mozpath.join(dest_dir, f.target_basename)
                if not isinstance(f, ObjDirPath):
                    if "*" in f:
                        if f.startswith("/") or isinstance(f, AbsolutePath):
                            basepath, wild = os.path.split(f.full_path)
                            if "*" in basepath:
                                raise Exception(
                                    "Wildcards are only supported in the filename part"
                                    " of srcdir-relative or absolute paths."
                                )

                            install_manifest.add_pattern_link(basepath, wild, dest_dir)
                        else:
                            install_manifest.add_pattern_link(f.srcdir, f, dest_dir)
                    elif isinstance(f, AbsolutePath):
                        if not f.full_path.lower().endswith((".dll", ".pdb", ".so")):
                            raise Exception(
                                "Absolute paths installed to FINAL_TARGET_FILES must"
                                " only be shared libraries or associated debug"
                                " information."
                            )
                        install_manifest.add_optional_exists(dest_file)
                        absolute_files.append(f.full_path)
                    else:
                        install_manifest.add_link(f.full_path, dest_file)
                else:
                    install_manifest.add_optional_exists(dest_file)
                    objdir_files.append(self._pretty_path(f, backend_file))
            install_location = "$(DEPTH)/%s" % mozpath.join(target, path)
            if objdir_files:
                tier = "export" if obj.install_target == "dist/include" else "misc"
                # We cannot generate multilocale.txt during misc at the moment.
                if objdir_files[0] == "multilocale.txt":
                    tier = "libs"
                self._add_install_target(
                    backend_file, target_var, tier, install_location, objdir_files
                )
            if absolute_files:
                # Unfortunately, we can't use _add_install_target because on
                # Windows, the absolute file paths that we want to install
                # from often have spaces. So we write our own rule.
                self._no_skip["misc"].add(backend_file.relobjdir)
                backend_file.write(
                    "misc::\n%s\n"
                    % "\n".join(
                        "\t$(INSTALL) %s %s"
                        % (make_quote(shell_quote(f)), install_location)
                        for f in absolute_files
                    )
                )
+
    def _process_final_target_pp_files(self, obj, files, backend_file, name):
        """Register preprocessed final target files as PP_TARGETS entries."""
        # Bug 1177710 - We'd like to install these via manifests as
        # preprocessed files. But they currently depend on non-standard flags
        # being added via some Makefiles, so for now we just pass them through
        # to the underlying Makefile.in.
        #
        # Note that if this becomes a manifest, OBJDIR_PP_FILES will likely
        # still need to use PP_TARGETS internally because we can't have an
        # install manifest for the root of the objdir.
        for i, (path, files) in enumerate(files.walk()):
            self._no_skip["misc"].add(backend_file.relobjdir)
            # One uniquely named PP target per walked subdirectory.
            var = "%s_%d" % (name, i)
            for f in files:
                backend_file.write(
                    "%s += %s\n" % (var, self._pretty_path(f, backend_file))
                )
            backend_file.write(
                "%s_PATH := $(DEPTH)/%s\n"
                % (var, mozpath.join(obj.install_target, path))
            )
            backend_file.write("%s_TARGET := misc\n" % var)
            backend_file.write("PP_TARGETS += %s\n" % var)
+
    def _write_localized_files_files(self, files, name, backend_file):
        """Append each localized file to the makefile variable *name*,
        resolving srcdir files through MERGE_FILE / LOCALE_SRCDIR."""
        for f in files:
            if not isinstance(f, ObjDirPath):
                # The emitter asserts that all srcdir files start with `en-US/`
                e, f = f.split("en-US/")
                assert not e
                if "*" in f:
                    # We can't use MERGE_FILE for wildcards because it takes
                    # only the first match internally. This is only used
                    # in one place in the tree currently so we'll hardcode
                    # that specific behavior for now.
                    backend_file.write(
                        "%s += $(wildcard $(LOCALE_SRCDIR)/%s)\n" % (name, f)
                    )
                else:
                    backend_file.write("%s += $(call MERGE_FILE,%s)\n" % (name, f))
            else:
                # Objdir files are allowed from LOCALIZED_GENERATED_FILES
                backend_file.write(
                    "%s += %s\n" % (name, self._pretty_path(f, backend_file))
                )
+
    def _process_localized_files(self, obj, files, backend_file):
        """Register LOCALIZED_FILES install targets under dist/bin."""
        target = obj.install_target
        path = mozpath.basedir(target, ("dist/bin",))
        if not path:
            raise Exception("Cannot install localized files to " + target)
        # NOTE: `path` and `files` are rebound by this loop.
        for i, (path, files) in enumerate(files.walk()):
            name = "LOCALIZED_FILES_%d" % i
            self._no_skip["misc"].add(backend_file.relobjdir)
            self._write_localized_files_files(files, name + "_FILES", backend_file)
            # Use FINAL_TARGET here because some l10n repack rules set
            # XPI_NAME to generate langpacks.
            backend_file.write("%s_DEST = $(FINAL_TARGET)/%s\n" % (name, path))
            backend_file.write("%s_TARGET := misc\n" % name)
            backend_file.write("INSTALL_TARGETS += %s\n" % name)
+
    def _process_localized_pp_files(self, obj, files, backend_file):
        """Register preprocessed LOCALIZED_PP_FILES as PP_TARGETS entries."""
        target = obj.install_target
        path = mozpath.basedir(target, ("dist/bin",))
        if not path:
            raise Exception("Cannot install localized files to " + target)
        # NOTE: `path` and `files` are rebound by this loop.
        for i, (path, files) in enumerate(files.walk()):
            name = "LOCALIZED_PP_FILES_%d" % i
            self._no_skip["misc"].add(backend_file.relobjdir)
            self._write_localized_files_files(files, name, backend_file)
            # Use FINAL_TARGET here because some l10n repack rules set
            # XPI_NAME to generate langpacks.
            backend_file.write("%s_PATH = $(FINAL_TARGET)/%s\n" % (name, path))
            backend_file.write("%s_TARGET := misc\n" % name)
            # Localized files will have different content in different
            # localizations, and some preprocessed files may not have
            # any preprocessor directives.
            backend_file.write(
                "%s_FLAGS := --silence-missing-directive-warnings\n" % name
            )
            backend_file.write("PP_TARGETS += %s\n" % name)
+
    def _process_objdir_files(self, obj, files, backend_file):
        """Register install targets for files placed at the objdir root."""
        # We can't use an install manifest for the root of the objdir, since it
        # would delete all the other files that get put there by the build
        # system.
        for i, (path, files) in enumerate(files.walk()):
            self._no_skip["misc"].add(backend_file.relobjdir)
            for f in files:
                backend_file.write(
                    "OBJDIR_%d_FILES += %s\n" % (i, self._pretty_path(f, backend_file))
                )
            backend_file.write("OBJDIR_%d_DEST := $(topobjdir)/%s\n" % (i, path))
            backend_file.write("OBJDIR_%d_TARGET := misc\n" % i)
            backend_file.write("INSTALL_TARGETS += OBJDIR_%d\n" % i)
+
    def _process_chrome_manifest_entry(self, obj, backend_file):
        """Emit buildlist commands that append obj.entry to its chrome
        manifest, registering nested manifests in the top-level one."""
        fragment = Makefile()
        # NOTE(review): the trailing colon in "misc:" presumably makes the
        # dumped rule a double-colon "misc::" rule — confirm against the
        # Makefile dumper.
        rule = fragment.create_rule(targets=["misc:"])

        top_level = mozpath.join(obj.install_target, "chrome.manifest")
        if obj.path != top_level:
            # The entry lives in a nested manifest: reference it from the
            # top-level chrome.manifest first.
            args = [
                mozpath.join("$(DEPTH)", top_level),
                make_quote(
                    shell_quote(
                        "manifest %s" % mozpath.relpath(obj.path, obj.install_target)
                    )
                ),
            ]
            rule.add_commands(["$(call py_action,buildlist,%s)" % " ".join(args)])
        args = [
            mozpath.join("$(DEPTH)", obj.path),
            make_quote(shell_quote(str(obj.entry))),
        ]
        rule.add_commands(["$(call py_action,buildlist,%s)" % " ".join(args)])
        fragment.dump(backend_file.fh, removal_guard=False)

        self._no_skip["misc"].add(obj.relsrcdir)
+
+ def _write_manifests(self, dest, manifests):
+ man_dir = mozpath.join(self.environment.topobjdir, "_build_manifests", dest)
+
+ for k, manifest in manifests.items():
+ with self._write_file(mozpath.join(man_dir, k)) as fh:
+ manifest.write(fileobj=fh)
+
+ def _write_master_test_manifest(self, path, manifests):
+ with self._write_file(path) as master:
+ master.write(
+ "# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT MODIFY BY HAND.\n\n"
+ )
+
+ for manifest in sorted(manifests):
+ master.write("[include:%s]\n" % manifest)
+
    class Substitution(object):
        """BaseConfigSubstitution-like class for use with _create_makefile."""

        # Exactly the fields _create_makefile's preprocessor consumes.
        __slots__ = ("input_path", "output_path", "topsrcdir", "topobjdir", "config")
+
    def _create_makefile(self, obj, stub=False, extra=None):
        """Creates the given makefile. Makefiles are treated the same as
        config files, but some additional header and footer is added to the
        output.

        When the stub argument is True, no source file is used, and a stub
        makefile with the default header and footer only is created.

        ``extra`` is an optional dict of additional preprocessor context
        (e.g. substitution values) merged in before processing.
        """
        with self._get_preprocessor(obj) as pp:
            if extra:
                pp.context.update(extra)
            # Default the @autoconfmk@ substitution unless a caller set one.
            if not pp.context.get("autoconfmk", ""):
                pp.context["autoconfmk"] = "autoconf.mk"
            pp.handleLine(
                "# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT MODIFY BY HAND.\n"
            )
            pp.handleLine("DEPTH := @DEPTH@\n")
            pp.handleLine("topobjdir := @topobjdir@\n")
            pp.handleLine("topsrcdir := @top_srcdir@\n")
            pp.handleLine("srcdir := @srcdir@\n")
            pp.handleLine("srcdir_rel := @srcdir_rel@\n")
            pp.handleLine("relativesrcdir := @relativesrcdir@\n")
            pp.handleLine("include $(DEPTH)/config/@autoconfmk@\n")
            if not stub:
                pp.do_include(obj.input_path)
            # Empty line to avoid failures when last line in Makefile.in ends
            # with a backslash.
            pp.handleLine("\n")
            pp.handleLine("include $(topsrcdir)/config/recurse.mk\n")
        if not stub:
            # Adding the Makefile.in here has the desired side-effect
            # that if the Makefile.in disappears, this will force
            # moz.build traversal. This means that when we remove empty
            # Makefile.in files, the old file will get replaced with
            # the autogenerated one automatically.
            self.backend_input_files.add(obj.input_path)

        self._makefile_out_count += 1
+
+    def _handle_linked_rust_crates(self, obj, extern_crate_file):
+        # Record the generated extern-crate source so make can build the
+        # Rust static library from it.
+        backend_file = self._get_backend_file_for(obj)
+
+        backend_file.write("RS_STATICLIB_CRATE_SRC := %s\n" % extern_crate_file)
+
+    def _handle_ipdl_sources(
+        self,
+        ipdl_dir,
+        sorted_ipdl_sources,
+        sorted_nonstatic_ipdl_sources,
+        sorted_static_ipdl_sources,
+    ):
+        """Emit ipdlsrcs.mk describing all IPDL sources.
+
+        Non-static (preprocessed) sources get a per-file rule that runs the
+        preprocessor into ``ipdl_dir``; static sources are referenced from
+        their original directories.
+        """
+        # Write out a master list of all IPDL source files.
+        mk = Makefile()
+
+        sorted_nonstatic_ipdl_basenames = list()
+        for source in sorted_nonstatic_ipdl_sources:
+            basename = os.path.basename(source)
+            sorted_nonstatic_ipdl_basenames.append(basename)
+            rule = mk.create_rule([basename])
+            rule.add_dependencies([source])
+            rule.add_commands(
+                [
+                    # Remove first so a previously-symlinked static copy is
+                    # not written through.
+                    "$(RM) $@",
+                    "$(call py_action,preprocessor,$(DEFINES) $(ACDEFINES) "
+                    "$< -o $@)",
+                ]
+            )
+
+        mk.add_statement(
+            "ALL_IPDLSRCS := %s %s"
+            % (
+                " ".join(sorted_nonstatic_ipdl_basenames),
+                " ".join(sorted_static_ipdl_sources),
+            )
+        )
+
+        # Preprocessed ipdl files are generated in ipdl_dir.
+        mk.add_statement(
+            "IPDLDIRS := %s %s"
+            % (
+                ipdl_dir,
+                " ".join(
+                    sorted(set(mozpath.dirname(p) for p in sorted_static_ipdl_sources))
+                ),
+            )
+        )
+
+        with self._write_file(mozpath.join(ipdl_dir, "ipdlsrcs.mk")) as ipdls:
+            mk.dump(ipdls, removal_guard=False)
+
+    def _handle_webidl_build(
+        self,
+        bindings_dir,
+        unified_source_mapping,
+        webidls,
+        expected_build_output_files,
+        global_define_files,
+    ):
+        """Emit webidlsrcs.mk and register WebIDL outputs with make.
+
+        Generated headers landing under dist/include are added to the
+        dist_include install manifest as optional-exists entries.
+        """
+        include_dir = mozpath.join(self.environment.topobjdir, "dist", "include")
+        for f in expected_build_output_files:
+            if f.startswith(include_dir):
+                self._install_manifests["dist_include"].add_optional_exists(
+                    mozpath.relpath(f, include_dir)
+                )
+
+        # We pass WebIDL info to make via a completely generated make file.
+        mk = Makefile()
+        mk.add_statement(
+            "nonstatic_webidl_files := %s"
+            % " ".join(sorted(webidls.all_non_static_basenames()))
+        )
+        mk.add_statement(
+            "globalgen_sources := %s" % " ".join(sorted(global_define_files))
+        )
+        mk.add_statement(
+            "test_sources := %s"
+            % " ".join(sorted("%sBinding.cpp" % s for s in webidls.all_test_stems()))
+        )
+
+        # Add rules to preprocess bindings.
+        # This should ideally be using PP_TARGETS. However, since the input
+        # filenames match the output filenames, the existing PP_TARGETS rules
+        # result in circular dependencies and other make weirdness. One
+        # solution is to rename the input or output files repsectively. See
+        # bug 928195 comment 129.
+        for source in sorted(webidls.all_preprocessed_sources()):
+            basename = os.path.basename(source)
+            rule = mk.create_rule([basename])
+            # GLOBAL_DEPS would be used here, but due to the include order of
+            # our makefiles it's not set early enough to be useful, so we use
+            # WEBIDL_PP_DEPS, which has analagous content.
+            rule.add_dependencies([source, "$(WEBIDL_PP_DEPS)"])
+            rule.add_commands(
+                [
+                    # Remove the file before writing so bindings that go from
+                    # static to preprocessed don't end up writing to a symlink,
+                    # which would modify content in the source directory.
+                    "$(RM) $@",
+                    "$(call py_action,preprocessor,$(DEFINES) $(ACDEFINES) "
+                    "$< -o $@)",
+                ]
+            )
+
+        self._add_unified_build_rules(
+            mk,
+            unified_source_mapping,
+            unified_files_makefile_variable="unified_binding_cpp_files",
+        )
+
+        webidls_mk = mozpath.join(bindings_dir, "webidlsrcs.mk")
+        with self._write_file(webidls_mk) as fh:
+            mk.dump(fh, removal_guard=False)
+
+        # Add the test directory to the compile graph.
+        if self.environment.substs.get("ENABLE_TESTS"):
+            # NOTE(review): this bare subscript looks like a no-op, but
+            # presumably _compile_graph is a defaultdict whose lookup
+            # creates the node as a side effect — confirm against _init.
+            self._compile_graph[
+                mozpath.join(
+                    mozpath.relpath(bindings_dir, self.environment.topobjdir),
+                    "test",
+                    "target-objects",
+                )
+            ]
+
+    def _format_generated_file_input_name(self, path, obj):
+        """Return the make expression naming a GENERATED_FILES input.
+
+        For localized files, locale-specific inputs are rewritten to
+        MERGE_FILE/MERGE_RELATIVE_FILE calls so the locale merge dir is
+        consulted at build time; everything else becomes a pretty path.
+        """
+        if obj.localized:
+            # Localized generated files can have locale-specific inputs, which
+            # are indicated by paths starting with `en-US/` or containing
+            # `locales/en-US/`.
+            if "locales/en-US" in path:
+                # We need an "absolute source path" relative to
+                # topsrcdir, like "/source/path".
+                if not path.startswith("/"):
+                    path = "/" + mozpath.relpath(path.full_path, obj.topsrcdir)
+                e, f = path.split("locales/en-US/", 1)
+                assert f
+                return "$(call MERGE_RELATIVE_FILE,{},{}locales)".format(
+                    f, e if not e.startswith("/") else e[len("/") :]
+                )
+            elif path.startswith("en-US/"):
+                e, f = path.split("en-US/", 1)
+                assert not e
+                return "$(call MERGE_FILE,%s)" % f
+            return self._pretty_path(path, self._get_backend_file_for(obj))
+        else:
+            return self._pretty_path(path, self._get_backend_file_for(obj))
+
+    def _format_generated_file_output_name(self, path, obj):
+        # Outputs always live in the objdir; coerce plain strings into
+        # ObjDirPath before pretty-printing.
+        if not isinstance(path, Path):
+            path = ObjDirPath(obj._context, "!" + path)
+        return self._pretty_path(path, self._get_backend_file_for(obj))
diff --git a/python/mozbuild/mozbuild/backend/static_analysis.py b/python/mozbuild/mozbuild/backend/static_analysis.py
new file mode 100644
index 0000000000..2b3ce96e75
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/static_analysis.py
@@ -0,0 +1,52 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This module provides a backend static-analysis, like clang-tidy and coverity.
+# The main difference between this and the default database backend is that this one
+# tracks folders that can be built in the non-unified environment and generates
+# the coresponding build commands for the files.
+
+import os
+
+import mozpack.path as mozpath
+
+from mozbuild.compilation.database import CompileDBBackend
+
+
+class StaticAnalysisBackend(CompileDBBackend):
+    """Compile-DB backend variant for static analysis tooling.
+
+    Differs from CompileDBBackend in that it knows which directories can
+    be built outside the unified build and emits per-file (rather than
+    unified-file) compile commands for them.
+    """
+
+    def _init(self):
+        CompileDBBackend._init(self)
+        self.non_unified_build = []
+
+        # List of directories can be built outside of the unified build system.
+        with open(
+            mozpath.join(self.environment.topsrcdir, "build", "non-unified-compat")
+        ) as fh:
+            content = fh.readlines()
+            self.non_unified_build = [
+                mozpath.join(self.environment.topsrcdir, line.strip())
+                for line in content
+            ]
+
+    def _build_cmd(self, cmd, filename, unified):
+        # Copy so we never mutate the caller's command list.
+        cmd = list(cmd)
+        # Maybe the file is in non-unified environment or it resides under a directory
+        # that can also be built in non-unified environment
+        if unified is None or any(
+            filename.startswith(path) for path in self.non_unified_build
+        ):
+            cmd.append(filename)
+        else:
+            cmd.append(unified)
+
+        return cmd
+
+    def _outputfile_path(self):
+        database_path = os.path.join(self.environment.topobjdir, "static-analysis")
+
+        if not os.path.exists(database_path):
+            os.mkdir(database_path)
+
+        # Output the database (a JSON file) to objdir/static-analysis/compile_commands.json
+        return mozpath.join(database_path, "compile_commands.json")
diff --git a/python/mozbuild/mozbuild/backend/test_manifest.py b/python/mozbuild/mozbuild/backend/test_manifest.py
new file mode 100644
index 0000000000..ba1e5135f4
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/test_manifest.py
@@ -0,0 +1,110 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from collections import defaultdict
+
+import mozpack.path as mozpath
+import six
+import six.moves.cPickle as pickle
+
+from mozbuild.backend.base import PartialBackend
+from mozbuild.frontend.data import TestManifest
+
+
+class TestManifestBackend(PartialBackend):
+    """Partial backend that generates test metadata files."""
+
+    def _init(self):
+        # Metadata accumulated across consume_object calls, keyed by
+        # srcdir-relative path.
+        self.tests_by_path = defaultdict(list)
+        self.installs_by_path = defaultdict(list)
+        self.deferred_installs = set()
+        self.manifest_defaults = {}
+
+        # Add config.status so performing a build will invalidate this backend.
+        self.backend_input_files.add(
+            mozpath.join(self.environment.topobjdir, "config.status")
+        )
+
+    def consume_object(self, obj):
+        """Record metadata for TestManifest objects; ignore everything else."""
+        if not isinstance(obj, TestManifest):
+            return
+
+        self.backend_input_files.add(obj.path)
+        self.backend_input_files |= obj.context_all_paths
+        for source in obj.source_relpaths:
+            self.backend_input_files.add(mozpath.join(obj.topsrcdir, source))
+        try:
+            from reftest import ReftestManifest
+
+            if isinstance(obj.manifest, ReftestManifest):
+                # Mark included files as part of the build backend so changes
+                # result in re-config.
+                self.backend_input_files |= obj.manifest.manifests
+        except ImportError:
+            # Ignore errors caused by the reftest module not being present.
+            # This can happen when building SpiderMonkey standalone, for example.
+            pass
+
+        for test in obj.tests:
+            self.add(test, obj.flavor, obj.topsrcdir)
+        self.add_defaults(obj.manifest)
+        self.add_installs(obj, obj.topsrcdir)
+
+    def consume_finished(self):
+        """Serialize the accumulated test metadata as pickle files.
+
+        Produces all-tests.pkl, test-defaults.pkl and test-installs.pkl in
+        the objdir. Protocol 2 keeps the files readable by older consumers.
+        """
+        topobjdir = self.environment.topobjdir
+
+        with self._write_file(
+            mozpath.join(topobjdir, "all-tests.pkl"), readmode="rb"
+        ) as fh:
+            pickle.dump(dict(self.tests_by_path), fh, protocol=2)
+
+        with self._write_file(
+            mozpath.join(topobjdir, "test-defaults.pkl"), readmode="rb"
+        ) as fh:
+            pickle.dump(self.manifest_defaults, fh, protocol=2)
+
+        path = mozpath.join(topobjdir, "test-installs.pkl")
+        with self._write_file(path, readmode="rb") as fh:
+            pickle.dump(
+                {
+                    k: v
+                    for k, v in self.installs_by_path.items()
+                    if k in self.deferred_installs
+                },
+                fh,
+                protocol=2,
+            )
+
+    def add(self, t, flavor, topsrcdir):
+        """Register a single test dict ``t`` under its srcdir-relative path."""
+        # Copy before mutating: the caller may reuse the dict.
+        t = dict(t)
+        t["flavor"] = flavor
+
+        path = mozpath.normpath(t["path"])
+        manifest = mozpath.normpath(t["manifest"])
+        assert mozpath.basedir(path, [topsrcdir])
+        assert mozpath.basedir(manifest, [topsrcdir])
+
+        # Strip "<topsrcdir>/" to get the srcdir-relative key.
+        key = path[len(topsrcdir) + 1 :]
+        t["file_relpath"] = key
+        t["dir_relpath"] = mozpath.dirname(key)
+        t["srcdir_relpath"] = key
+        t["manifest_relpath"] = manifest[len(topsrcdir) + 1 :]
+
+        self.tests_by_path[key].append(t)
+
+    def add_defaults(self, manifest):
+        # Not all manifest flavors expose defaults.
+        if not hasattr(manifest, "manifest_defaults"):
+            return
+        for sub_manifest, defaults in manifest.manifest_defaults.items():
+            self.manifest_defaults[sub_manifest] = defaults
+
+    def add_installs(self, obj, topsrcdir):
+        """Record install mappings (plain, pattern and deferred) for obj."""
+        for src, (dest, _) in six.iteritems(obj.installs):
+            key = src[len(topsrcdir) + 1 :]
+            self.installs_by_path[key].append((src, dest))
+        for src, pat, dest in obj.pattern_installs:
+            key = mozpath.join(src[len(topsrcdir) + 1 :], pat)
+            self.installs_by_path[key].append((src, pat, dest))
+        for path in obj.deferred_installs:
+            # Paths look like "!/topobjdir/..."; drop the leading marker.
+            self.deferred_installs.add(path[2:])
diff --git a/python/mozbuild/mozbuild/backend/visualstudio.py b/python/mozbuild/mozbuild/backend/visualstudio.py
new file mode 100644
index 0000000000..b9b30804b8
--- /dev/null
+++ b/python/mozbuild/mozbuild/backend/visualstudio.py
@@ -0,0 +1,712 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This file contains a build backend for generating Visual Studio project
+# files.
+
+import errno
+import os
+import re
+import sys
+import uuid
+from pathlib import Path
+from xml.dom import getDOMImplementation
+
+from mozpack.files import FileFinder
+
+from mozbuild.base import ExecutionSummary
+
+from ..frontend.data import (
+ Defines,
+ HostProgram,
+ HostSources,
+ Library,
+ LocalInclude,
+ Program,
+ SandboxedWasmLibrary,
+ Sources,
+ UnifiedSources,
+)
+from .common import CommonBackend
+
+MSBUILD_NAMESPACE = "http://schemas.microsoft.com/developer/msbuild/2003"
+MSNATVIS_NAMESPACE = "http://schemas.microsoft.com/vstudio/debugger/natvis/2010"
+
+
+def get_id(name):
+    """Return a stable, upper-cased UUID5 string derived from ``name``."""
+    # Python 2 uuid5 requires bytes input.
+    if sys.version_info[0] == 2:
+        name = name.encode("utf-8")
+    return str(uuid.uuid5(uuid.NAMESPACE_URL, name)).upper()
+
+
+def visual_studio_product_to_solution_version(version):
+    """Map a VS product year to (solution format version, comment version)."""
+    if version == "2017":
+        return "12.00", "15"
+    elif version == "2019":
+        return "12.00", "16"
+    elif version == "2022":
+        return "12.00", "17"
+    else:
+        raise Exception("Unknown version seen: %s" % version)
+
+
+def visual_studio_product_to_platform_toolset_version(version):
+    """Map a VS product year to its platform toolset identifier (v14x)."""
+    if version == "2017":
+        return "v141"
+    elif version == "2019":
+        return "v142"
+    elif version == "2022":
+        return "v143"
+    else:
+        raise Exception("Unknown version seen: %s" % version)
+
+
+class VisualStudioBackend(CommonBackend):
+    """Generate Visual Studio project files.
+
+    This backend is used to produce Visual Studio projects and a solution
+    to foster developing Firefox with Visual Studio.
+
+    This backend is currently considered experimental. There are many things
+    not optimal about how it works.
+    """
+
+    def _init(self):
+        CommonBackend._init(self)
+
+        # These should eventually evolve into parameters.
+        self._out_dir = os.path.join(self.environment.topobjdir, "msvc")
+        self._projsubdir = "projects"
+
+        self._version = self.environment.substs.get("MSVS_VERSION", "2017")
+
+        # Per-relsrcdir accumulators filled in by consume_object.
+        self._paths_to_sources = {}
+        self._paths_to_includes = {}
+        self._paths_to_defines = {}
+        self._paths_to_configs = {}
+        self._libs_to_paths = {}
+        self._progs_to_paths = {}
+
+    def summary(self):
+        # Human-readable one-liner shown after backend generation.
+        return ExecutionSummary(
+            "VisualStudio backend executed in {execution_time:.2f}s\n"
+            "Generated Visual Studio solution at {path:s}",
+            execution_time=self._execution_time,
+            path=os.path.join(self._out_dir, "mozilla.sln"),
+        )
+
+    def consume_object(self, obj):
+        """Accumulate sources, defines and includes per source directory."""
+        reldir = getattr(obj, "relsrcdir", None)
+
+        # Remember the first config seen for each directory.
+        if hasattr(obj, "config") and reldir not in self._paths_to_configs:
+            self._paths_to_configs[reldir] = obj.config
+
+        if isinstance(obj, Sources):
+            self._add_sources(reldir, obj)
+
+        elif isinstance(obj, HostSources):
+            self._add_sources(reldir, obj)
+
+        elif isinstance(obj, UnifiedSources):
+            # XXX we should be letting CommonBackend.consume_object call this
+            # for us instead.
+            self._process_unified_sources(obj)
+
+        elif isinstance(obj, Library) and not isinstance(obj, SandboxedWasmLibrary):
+            self._libs_to_paths[obj.basename] = reldir
+
+        elif isinstance(obj, Program) or isinstance(obj, HostProgram):
+            self._progs_to_paths[obj.program] = reldir
+
+        elif isinstance(obj, Defines):
+            self._paths_to_defines.setdefault(reldir, {}).update(obj.defines)
+
+        elif isinstance(obj, LocalInclude):
+            includes = self._paths_to_includes.setdefault(reldir, [])
+            includes.append(obj.path.full_path)
+
+        # Just acknowledge everything.
+        return True
+
+    def _add_sources(self, reldir, obj):
+        # Merge obj.files into the per-directory source set.
+        s = self._paths_to_sources.setdefault(reldir, set())
+        s.update(obj.files)
+
+    def _process_unified_sources(self, obj):
+        # Unified sources are tracked the same way as plain sources here.
+        reldir = getattr(obj, "relsrcdir", None)
+
+        s = self._paths_to_sources.setdefault(reldir, set())
+        s.update(obj.files)
+
+    def consume_finished(self):
+        """Write all projects, helper scripts, props and the solution file."""
+        out_dir = self._out_dir
+        out_proj_dir = os.path.join(self._out_dir, self._projsubdir)
+
+        projects = self._write_projects_for_sources(
+            self._libs_to_paths, "library", out_proj_dir
+        )
+        projects.update(
+            self._write_projects_for_sources(
+                self._progs_to_paths, "binary", out_proj_dir
+            )
+        )
+
+        # Generate projects that can be used to build common targets.
+        for target in ("export", "binaries", "tools", "full"):
+            basename = "target_%s" % target
+            command = "$(SolutionDir)\\mach.bat build"
+            if target != "full":
+                command += " %s" % target
+
+            project_id = self._write_vs_project(
+                out_proj_dir,
+                basename,
+                target,
+                build_command=command,
+                clean_command="$(SolutionDir)\\mach.bat clobber",
+            )
+
+            projects[basename] = (project_id, basename, target)
+
+        # A project that can be used to regenerate the visual studio projects.
+        basename = "target_vs"
+        project_id = self._write_vs_project(
+            out_proj_dir,
+            basename,
+            "visual-studio",
+            build_command="$(SolutionDir)\\mach.bat build-backend -b VisualStudio",
+        )
+        projects[basename] = (project_id, basename, "visual-studio")
+
+        # Write out a shared property file with common variables.
+        props_path = os.path.join(out_proj_dir, "mozilla.props")
+        with self._write_file(props_path, readmode="rb") as fh:
+            self._write_props(fh)
+
+        # Generate some wrapper scripts that allow us to invoke mach inside
+        # a MozillaBuild-like environment. We currently only use the batch
+        # script. We'd like to use the PowerShell script. However, it seems
+        # to buffer output from within Visual Studio (surely this is
+        # configurable) and the default execution policy of PowerShell doesn't
+        # allow custom scripts to be executed.
+        with self._write_file(os.path.join(out_dir, "mach.bat"), readmode="rb") as fh:
+            self._write_mach_batch(fh)
+
+        with self._write_file(os.path.join(out_dir, "mach.ps1"), readmode="rb") as fh:
+            self._write_mach_powershell(fh)
+
+        # Write out a solution file to tie it all together.
+        solution_path = os.path.join(out_dir, "mozilla.sln")
+        with self._write_file(solution_path, readmode="rb") as fh:
+            self._write_solution(fh, projects)
+
+    def _write_projects_for_sources(self, sources, prefix, out_dir):
+        """Write one .vcxproj per entry in ``sources``.
+
+        ``sources`` maps an item name (library basename or program name) to
+        its relative source directory. Returns a dict mapping project
+        basenames to (project_id, basename, item) tuples.
+        """
+        projects = {}
+        for item, path in sorted(sources.items()):
+            config = self._paths_to_configs.get(path, None)
+            # NOTE: rebinds the ``sources`` parameter name to the per-path
+            # source set for the remainder of the loop body.
+            sources = self._paths_to_sources.get(path, set())
+            sources = set(os.path.join("$(TopSrcDir)", path, s) for s in sources)
+            sources = set(os.path.normpath(s) for s in sources)
+
+            finder = FileFinder(os.path.join(self.environment.topsrcdir, path))
+
+            headers = [t[0] for t in finder.find("*.h")]
+            headers = [
+                os.path.normpath(os.path.join("$(TopSrcDir)", path, f)) for f in headers
+            ]
+
+            includes = [
+                os.path.join("$(TopSrcDir)", path),
+                os.path.join("$(TopObjDir)", path),
+            ]
+            includes.extend(self._paths_to_includes.get(path, []))
+            includes.append("$(TopObjDir)\\dist\\include\\nss")
+            includes.append("$(TopObjDir)\\dist\\include")
+
+            # Harvest -I flags from well-known CFLAGS substs.
+            for v in (
+                "NSPR_CFLAGS",
+                "NSS_CFLAGS",
+                "MOZ_JPEG_CFLAGS",
+                "MOZ_PNG_CFLAGS",
+                "MOZ_ZLIB_CFLAGS",
+                "MOZ_PIXMAN_CFLAGS",
+            ):
+                if not config:
+                    break
+
+                args = config.substs.get(v, [])
+
+                for i, arg in enumerate(args):
+                    if arg.startswith("-I"):
+                        includes.append(os.path.normpath(arg[2:]))
+
+            # Pull in system defaults.
+            includes.append("$(DefaultIncludes)")
+
+            includes = [os.path.normpath(i) for i in includes]
+
+            defines = []
+            for k, v in self._paths_to_defines.get(path, {}).items():
+                if v is True:
+                    defines.append(k)
+                else:
+                    defines.append("%s=%s" % (k, v))
+
+            debugger = None
+            if prefix == "binary":
+                # The app binary gets launch arguments; other binaries don't.
+                if item.startswith(self.environment.substs["MOZ_APP_NAME"]):
+                    app_args = "-no-remote -profile $(TopObjDir)\\tmp\\profile-default"
+                    if self.environment.substs.get("MOZ_LAUNCHER_PROCESS", False):
+                        app_args += " -wait-for-browser"
+                    debugger = ("$(TopObjDir)\\dist\\bin\\%s" % item, app_args)
+                else:
+                    debugger = ("$(TopObjDir)\\dist\\bin\\%s" % item, "")
+
+            basename = "%s_%s" % (prefix, item)
+
+            project_id = self._write_vs_project(
+                out_dir,
+                basename,
+                item,
+                includes=includes,
+                forced_includes=["$(TopObjDir)\\dist\\include\\mozilla-config.h"],
+                defines=defines,
+                headers=headers,
+                sources=sources,
+                debugger=debugger,
+            )
+
+            projects[basename] = (project_id, basename, item)
+
+        return projects
+
+    def _write_solution(self, fh, projects):
+        """Write the mozilla.sln solution file tying all projects together."""
+        # Visual Studio appears to write out its current version in the
+        # solution file. Instead of trying to figure out what version it will
+        # write, try to parse the version out of the existing file and use it
+        # verbatim.
+        vs_version = None
+        try:
+            with open(fh.name, "rb") as sfh:
+                for line in sfh:
+                    if line.startswith(b"VisualStudioVersion = "):
+                        vs_version = line.split(b" = ", 1)[1].strip()
+        except IOError as e:
+            # A missing file is fine (first run); anything else is real.
+            if e.errno != errno.ENOENT:
+                raise
+
+        format_version, comment_version = visual_studio_product_to_solution_version(
+            self._version
+        )
+        # This is a Visual C++ Project type.
+        project_type = "8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942"
+
+        # Visual Studio seems to require this header.
+        fh.write(
+            "Microsoft Visual Studio Solution File, Format Version %s\r\n"
+            % format_version
+        )
+        fh.write("# Visual Studio %s\r\n" % comment_version)
+
+        if vs_version:
+            fh.write("VisualStudioVersion = %s\r\n" % vs_version)
+
+        # Corresponds to VS2013.
+        fh.write("MinimumVisualStudioVersion = 12.0.31101.0\r\n")
+
+        binaries_id = projects["target_binaries"][0]
+
+        # Write out entries for each project.
+        for key in sorted(projects):
+            project_id, basename, name = projects[key]
+            path = os.path.join(self._projsubdir, "%s.vcxproj" % basename)
+
+            fh.write(
+                'Project("{%s}") = "%s", "%s", "{%s}"\r\n'
+                % (project_type, name, path, project_id)
+            )
+
+            # Make all libraries depend on the binaries target.
+            if key.startswith("library_"):
+                fh.write("\tProjectSection(ProjectDependencies) = postProject\r\n")
+                fh.write("\t\t{%s} = {%s}\r\n" % (binaries_id, binaries_id))
+                fh.write("\tEndProjectSection\r\n")
+
+            fh.write("EndProject\r\n")
+
+        # Write out solution folders for organizing things.
+
+        # This is the UUID you use for solution folders.
+        container_id = "2150E333-8FDC-42A3-9474-1A3956D46DE8"
+
+        def write_container(desc):
+            # Emit a solution-folder project entry and return its id.
+            cid = get_id(desc)
+            fh.write(
+                'Project("{%s}") = "%s", "%s", "{%s}"\r\n'
+                % (container_id, desc, desc, cid)
+            )
+            fh.write("EndProject\r\n")
+
+            return cid
+
+        library_id = write_container("Libraries")
+        target_id = write_container("Build Targets")
+        binary_id = write_container("Binaries")
+
+        fh.write("Global\r\n")
+
+        # Make every project a member of our one configuration.
+        fh.write("\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n")
+        fh.write("\t\tBuild|Win32 = Build|Win32\r\n")
+        fh.write("\tEndGlobalSection\r\n")
+
+        # Set every project's active configuration to the one configuration and
+        # set up the default build project.
+        fh.write("\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n")
+        for name, project in sorted(projects.items()):
+            fh.write("\t\t{%s}.Build|Win32.ActiveCfg = Build|Win32\r\n" % project[0])
+
+            # Only build the full build target by default.
+            # It's important we don't write multiple entries here because they
+            # conflict!
+            if name == "target_full":
+                fh.write("\t\t{%s}.Build|Win32.Build.0 = Build|Win32\r\n" % project[0])
+
+        fh.write("\tEndGlobalSection\r\n")
+
+        fh.write("\tGlobalSection(SolutionProperties) = preSolution\r\n")
+        fh.write("\t\tHideSolutionNode = FALSE\r\n")
+        fh.write("\tEndGlobalSection\r\n")
+
+        # Associate projects with containers.
+        fh.write("\tGlobalSection(NestedProjects) = preSolution\r\n")
+        for key in sorted(projects):
+            project_id = projects[key][0]
+
+            if key.startswith("library_"):
+                container_id = library_id
+            elif key.startswith("target_"):
+                container_id = target_id
+            elif key.startswith("binary_"):
+                container_id = binary_id
+            else:
+                raise Exception("Unknown project type: %s" % key)
+
+            fh.write("\t\t{%s} = {%s}\r\n" % (project_id, container_id))
+        fh.write("\tEndGlobalSection\r\n")
+
+        fh.write("EndGlobal\r\n")
+
+    def _write_props(self, fh):
+        """Write mozilla.props with shared MSBuild macros (TopObjDir etc.)."""
+        impl = getDOMImplementation()
+        doc = impl.createDocument(MSBUILD_NAMESPACE, "Project", None)
+
+        project = doc.documentElement
+        project.setAttribute("xmlns", MSBUILD_NAMESPACE)
+        project.setAttribute("ToolsVersion", "4.0")
+
+        ig = project.appendChild(doc.createElement("ImportGroup"))
+        ig.setAttribute("Label", "PropertySheets")
+
+        pg = project.appendChild(doc.createElement("PropertyGroup"))
+        pg.setAttribute("Label", "UserMacros")
+
+        ig = project.appendChild(doc.createElement("ItemGroup"))
+
+        def add_var(k, v):
+            # Define the macro and expose it as a BuildMacro item so the
+            # IDE's macro editor can see it.
+            e = pg.appendChild(doc.createElement(k))
+            e.appendChild(doc.createTextNode(v))
+
+            e = ig.appendChild(doc.createElement("BuildMacro"))
+            e.setAttribute("Include", k)
+
+            e = e.appendChild(doc.createElement("Value"))
+            e.appendChild(doc.createTextNode("$(%s)" % k))
+
+        natvis = ig.appendChild(doc.createElement("Natvis"))
+        natvis.setAttribute("Include", "../../../toolkit/library/gecko.natvis")
+
+        add_var("TopObjDir", os.path.normpath(self.environment.topobjdir))
+        add_var("TopSrcDir", os.path.normpath(self.environment.topsrcdir))
+        add_var("PYTHON", "$(TopObjDir)\\_virtualenv\\Scripts\\python.exe")
+        add_var("MACH", "$(TopSrcDir)\\mach")
+
+        # From MozillaBuild.
+        add_var("DefaultIncludes", os.environ.get("INCLUDE", ""))
+
+        # UTF-8 BOM: Visual Studio expects it on project/props files.
+        fh.write(b"\xef\xbb\xbf")
+        doc.writexml(fh, addindent="  ", newl="\r\n")
+
+ def _create_natvis_type(
+ self, doc, visualizer, name, displayString, stringView=None
+ ):
+
+ t = visualizer.appendChild(doc.createElement("Type"))
+ t.setAttribute("Name", name)
+
+ ds = t.appendChild(doc.createElement("DisplayString"))
+ ds.appendChild(doc.createTextNode(displayString))
+
+ if stringView is not None:
+ sv = t.appendChild(doc.createElement("DisplayString"))
+ sv.appendChild(doc.createTextNode(stringView))
+
+    def _create_natvis_simple_string_type(self, doc, visualizer, name):
+        # Emit both char16_t and char instantiations; ",su" / ",s" are the
+        # natvis format specifiers for UTF-16 and narrow strings.
+        self._create_natvis_type(
+            doc, visualizer, name + "<char16_t>", "{mData,su}", "mData,su"
+        )
+        self._create_natvis_type(
+            doc, visualizer, name + "<char>", "{mData,s}", "mData,s"
+        )
+
+    def _create_natvis_string_tuple_type(self, doc, visualizer, chartype, formatstring):
+        """Emit a natvis <Type> for nsTSubstringTuple<chartype>.
+
+        Two conditional DisplayStrings: one for a tuple whose head is
+        another tuple (mHead != nullptr), one for a leaf pair of fragments.
+        """
+        t = visualizer.appendChild(doc.createElement("Type"))
+        t.setAttribute("Name", "nsTSubstringTuple<" + chartype + ">")
+
+        ds1 = t.appendChild(doc.createElement("DisplayString"))
+        ds1.setAttribute("Condition", "mHead != nullptr")
+        ds1.appendChild(
+            doc.createTextNode("{mHead,na} {mFragB->mData," + formatstring + "}")
+        )
+
+        ds2 = t.appendChild(doc.createElement("DisplayString"))
+        ds2.setAttribute("Condition", "mHead == nullptr")
+        ds2.appendChild(
+            doc.createTextNode(
+                "{mFragA->mData,"
+                + formatstring
+                + "} {mFragB->mData,"
+                + formatstring
+                + "}"
+            )
+        )
+
+    def _relevant_environment_variables(self):
+        """Yield (name, value) pairs worth propagating into the wrappers."""
+        # Write out the environment variables, presumably coming from
+        # MozillaBuild.
+        for k, v in sorted(os.environ.items()):
+            # Skip names that aren't simple identifiers, shell-internal
+            # variables, and anything starting with an underscore.
+            if not re.match("^[a-zA-Z0-9_]+$", k):
+                continue
+
+            if k in ("OLDPWD", "PS1"):
+                continue
+
+            if k.startswith("_"):
+                continue
+
+            yield k, v
+
+        yield "TOPSRCDIR", self.environment.topsrcdir
+        yield "TOPOBJDIR", self.environment.topobjdir
+
+    def _write_mach_powershell(self, fh):
+        """Write mach.ps1: a PowerShell wrapper that runs mach via bash."""
+        for k, v in self._relevant_environment_variables():
+            fh.write(b'$env:%s = "%s"\r\n' % (k.encode("utf-8"), v.encode("utf-8")))
+
+        # Use a relative path so no Windows->msys path munging is needed.
+        relpath = os.path.relpath(
+            self.environment.topsrcdir, self.environment.topobjdir
+        ).replace("\\", "/")
+
+        fh.write(
+            b'$bashargs = "%s/mach", "--log-no-times"\r\n' % relpath.encode("utf-8")
+        )
+        fh.write(b"$bashargs = $bashargs + $args\r\n")
+
+        fh.write(b"$expanded = $bashargs -join ' '\r\n")
+        fh.write(b'$procargs = "-c", $expanded\r\n')
+
+        # Newer MozillaBuild ships msys2; fall back to the old msys layout.
+        if (Path(os.environ["MOZILLABUILD"]) / "msys2").exists():
+            bash_path = rb"msys2\usr\bin\bash"
+        else:
+            bash_path = rb"msys\bin\bash"
+
+        fh.write(
+            b"Start-Process -WorkingDirectory $env:TOPOBJDIR "
+            b"-FilePath $env:MOZILLABUILD\\%b "
+            b"-ArgumentList $procargs "
+            b"-Wait -NoNewWindow\r\n" % bash_path
+        )
+
+    def _write_mach_batch(self, fh):
+        """Write out a batch script that builds the tree.
+
+        The script "bootstraps" into the MozillaBuild environment by setting
+        the environment variables that are active in the current MozillaBuild
+        environment. Then, it builds the tree.
+        """
+        for k, v in self._relevant_environment_variables():
+            fh.write(b'SET "%s=%s"\r\n' % (k.encode("utf-8"), v.encode("utf-8")))
+
+        fh.write(b"cd %TOPOBJDIR%\r\n")
+
+        # We need to convert Windows-native paths to msys paths. Easiest way is
+        # relative paths, since munging c:\ to /c/ is slightly more
+        # complicated.
+        relpath = os.path.relpath(
+            self.environment.topsrcdir, self.environment.topobjdir
+        ).replace("\\", "/")
+
+        # Newer MozillaBuild ships msys2; fall back to the old msys layout.
+        if (Path(os.environ["MOZILLABUILD"]) / "msys2").exists():
+            bash_path = rb"msys2\usr\bin\bash"
+        else:
+            bash_path = rb"msys\bin\bash"
+
+        # We go through mach because it has the logic for choosing the most
+        # appropriate build tool.
+        fh.write(
+            b'"%%MOZILLABUILD%%\\%b" '
+            b'-c "%s/mach --log-no-times %%1 %%2 %%3 %%4 %%5 %%6 %%7"'
+            % (bash_path, relpath.encode("utf-8"))
+        )
+
+    def _write_vs_project(self, out_dir, basename, name, **kwargs):
+        """Write <basename>.vcxproj (and an empty .user file); return its id."""
+        root = "%s.vcxproj" % basename
+        project_id = get_id(basename)
+
+        with self._write_file(os.path.join(out_dir, root), readmode="rb") as fh:
+            project_id, name = VisualStudioBackend.write_vs_project(
+                fh, self._version, project_id, name, **kwargs
+            )
+
+        # The .user file is a stub; Visual Studio fills it with per-user
+        # settings.
+        with self._write_file(
+            os.path.join(out_dir, "%s.user" % root), readmode="rb"
+        ) as fh:
+            fh.write('<?xml version="1.0" encoding="utf-8"?>\r\n')
+            fh.write('<Project ToolsVersion="4.0" xmlns="%s">\r\n' % MSBUILD_NAMESPACE)
+            fh.write("</Project>\r\n")
+
+        return project_id
+
+    @staticmethod
+    def write_vs_project(
+        fh,
+        version,
+        project_id,
+        name,
+        # NOTE(review): mutable default arguments; safe here because they
+        # are only iterated, never mutated.
+        includes=[],
+        forced_includes=[],
+        defines=[],
+        build_command=None,
+        clean_command=None,
+        debugger=None,
+        headers=[],
+        sources=[],
+    ):
+        """Serialize one NMake-style .vcxproj to ``fh``.
+
+        Returns (project_id, name). The project has a single Build|Win32
+        configuration; build/clean are delegated to the given commands.
+        """
+        impl = getDOMImplementation()
+        doc = impl.createDocument(MSBUILD_NAMESPACE, "Project", None)
+
+        project = doc.documentElement
+        project.setAttribute("DefaultTargets", "Build")
+        project.setAttribute("ToolsVersion", "4.0")
+        project.setAttribute("xmlns", MSBUILD_NAMESPACE)
+
+        ig = project.appendChild(doc.createElement("ItemGroup"))
+        ig.setAttribute("Label", "ProjectConfigurations")
+
+        pc = ig.appendChild(doc.createElement("ProjectConfiguration"))
+        pc.setAttribute("Include", "Build|Win32")
+
+        c = pc.appendChild(doc.createElement("Configuration"))
+        c.appendChild(doc.createTextNode("Build"))
+
+        p = pc.appendChild(doc.createElement("Platform"))
+        p.appendChild(doc.createTextNode("Win32"))
+
+        pg = project.appendChild(doc.createElement("PropertyGroup"))
+        pg.setAttribute("Label", "Globals")
+
+        n = pg.appendChild(doc.createElement("ProjectName"))
+        n.appendChild(doc.createTextNode(name))
+
+        k = pg.appendChild(doc.createElement("Keyword"))
+        k.appendChild(doc.createTextNode("MakeFileProj"))
+
+        g = pg.appendChild(doc.createElement("ProjectGuid"))
+        g.appendChild(doc.createTextNode("{%s}" % project_id))
+
+        rn = pg.appendChild(doc.createElement("RootNamespace"))
+        rn.appendChild(doc.createTextNode("mozilla"))
+
+        pts = pg.appendChild(doc.createElement("PlatformToolset"))
+        pts.appendChild(
+            doc.createTextNode(
+                visual_studio_product_to_platform_toolset_version(version)
+            )
+        )
+
+        i = project.appendChild(doc.createElement("Import"))
+        i.setAttribute("Project", "$(VCTargetsPath)\\Microsoft.Cpp.Default.props")
+
+        ig = project.appendChild(doc.createElement("ImportGroup"))
+        ig.setAttribute("Label", "ExtensionTargets")
+
+        ig = project.appendChild(doc.createElement("ImportGroup"))
+        ig.setAttribute("Label", "ExtensionSettings")
+
+        ig = project.appendChild(doc.createElement("ImportGroup"))
+        ig.setAttribute("Label", "PropertySheets")
+        i = ig.appendChild(doc.createElement("Import"))
+        i.setAttribute("Project", "mozilla.props")
+
+        pg = project.appendChild(doc.createElement("PropertyGroup"))
+        pg.setAttribute("Label", "Configuration")
+        ct = pg.appendChild(doc.createElement("ConfigurationType"))
+        ct.appendChild(doc.createTextNode("Makefile"))
+
+        pg = project.appendChild(doc.createElement("PropertyGroup"))
+        pg.setAttribute("Condition", "'$(Configuration)|$(Platform)'=='Build|Win32'")
+
+        if build_command:
+            n = pg.appendChild(doc.createElement("NMakeBuildCommandLine"))
+            n.appendChild(doc.createTextNode(build_command))
+
+        if clean_command:
+            n = pg.appendChild(doc.createElement("NMakeCleanCommandLine"))
+            n.appendChild(doc.createTextNode(clean_command))
+
+        if includes:
+            n = pg.appendChild(doc.createElement("NMakeIncludeSearchPath"))
+            n.appendChild(doc.createTextNode(";".join(includes)))
+
+        if forced_includes:
+            n = pg.appendChild(doc.createElement("NMakeForcedIncludes"))
+            n.appendChild(doc.createTextNode(";".join(forced_includes)))
+
+        if defines:
+            n = pg.appendChild(doc.createElement("NMakePreprocessorDefinitions"))
+            n.appendChild(doc.createTextNode(";".join(defines)))
+
+        if debugger:
+            # debugger is a (command, arguments) pair.
+            n = pg.appendChild(doc.createElement("LocalDebuggerCommand"))
+            n.appendChild(doc.createTextNode(debugger[0]))
+
+            n = pg.appendChild(doc.createElement("LocalDebuggerCommandArguments"))
+            n.appendChild(doc.createTextNode(debugger[1]))
+
+        # Sets IntelliSense to use c++17 Language Standard
+        n = pg.appendChild(doc.createElement("AdditionalOptions"))
+        n.appendChild(doc.createTextNode("/std:c++17"))
+
+        i = project.appendChild(doc.createElement("Import"))
+        i.setAttribute("Project", "$(VCTargetsPath)\\Microsoft.Cpp.props")
+
+        i = project.appendChild(doc.createElement("Import"))
+        i.setAttribute("Project", "$(VCTargetsPath)\\Microsoft.Cpp.targets")
+
+        # Now add files to the project.
+        ig = project.appendChild(doc.createElement("ItemGroup"))
+        for header in sorted(headers or []):
+            n = ig.appendChild(doc.createElement("ClInclude"))
+            n.setAttribute("Include", header)
+
+        ig = project.appendChild(doc.createElement("ItemGroup"))
+        for source in sorted(sources or []):
+            n = ig.appendChild(doc.createElement("ClCompile"))
+            n.setAttribute("Include", source)
+
+        # UTF-8 BOM: Visual Studio expects it on project files.
+        fh.write(b"\xef\xbb\xbf")
+        doc.writexml(fh, addindent="  ", newl="\r\n")
+
+        return project_id, name
diff --git a/python/mozbuild/mozbuild/base.py b/python/mozbuild/mozbuild/base.py
new file mode 100644
index 0000000000..9822a9b76e
--- /dev/null
+++ b/python/mozbuild/mozbuild/base.py
@@ -0,0 +1,1110 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import errno
+import io
+import json
+import logging
+import multiprocessing
+import os
+import subprocess
+import sys
+from pathlib import Path
+
+import mozpack.path as mozpath
+import six
+from mach.mixin.process import ProcessExecutionMixin
+from mozboot.mozconfig import MozconfigFindException
+from mozfile import which
+from mozversioncontrol import (
+ GitRepository,
+ HgRepository,
+ InvalidRepoPath,
+ MissingConfigureInfo,
+ MissingVCSTool,
+ get_repository_from_build_config,
+ get_repository_object,
+)
+
+from .backend.configenvironment import ConfigEnvironment, ConfigStatusFailure
+from .configure import ConfigureSandbox
+from .controller.clobber import Clobberer
+from .mozconfig import MozconfigLoader, MozconfigLoadException
+from .util import memoize, memoized_property
+
+try:
+ import psutil
+except Exception:
+ psutil = None
+
+
+# Root of the build-environment error hierarchy; callers catch this one type
+# to handle any "environment is not usable" condition uniformly.
+class BadEnvironmentException(Exception):
+    """Base class for errors raised when the build environment is not sane."""
+
+
+class BuildEnvironmentNotFoundException(BadEnvironmentException, AttributeError):
+    """Raised when we could not find a build environment.
+
+    NOTE(review): also derives from AttributeError — presumably so that
+    getattr()/hasattr() probes of config-dependent properties treat a missing
+    build environment as a missing attribute; confirm against callers.
+    """
+
+
+class ObjdirMismatchException(BadEnvironmentException):
+    """Raised when the current dir is an objdir and doesn't match the mozconfig."""
+
+    def __init__(self, objdir1, objdir2):
+        # Keep both candidate objdirs so the caller can report the conflict.
+        self.objdir1, self.objdir2 = objdir1, objdir2
+
+    def __str__(self):
+        return "Objdir mismatch: {} != {}".format(self.objdir1, self.objdir2)
+
+
+class BinaryNotFoundException(Exception):
+    """Raised when the binary is not found in the expected location."""
+
+    def __init__(self, path):
+        # The filesystem path where the binary was expected to live.
+        self.path = path
+
+    def __str__(self):
+        return f"Binary expected at {self.path} does not exist."
+
+    def help(self):
+        # Actionable hint shown to the user alongside the error.
+        return "It looks like your program isn't built. You can run |./mach build| to build it."
+
+
+class MozbuildObject(ProcessExecutionMixin):
+ """Base class providing basic functionality useful to many modules.
+
+ Modules in this package typically require common functionality such as
+ accessing the current config, getting the location of the source directory,
+ running processes, etc. This classes provides that functionality. Other
+ modules can inherit from this class to obtain this functionality easily.
+ """
+
+ def __init__(
+ self,
+ topsrcdir,
+ settings,
+ log_manager,
+ topobjdir=None,
+ mozconfig=MozconfigLoader.AUTODETECT,
+ virtualenv_name=None,
+ ):
+ """Create a new Mozbuild object instance.
+
+ Instances are bound to a source directory, a ConfigSettings instance,
+ and a LogManager instance. The topobjdir may be passed in as well. If
+ it isn't, it will be calculated from the active mozconfig.
+ """
+ self.topsrcdir = mozpath.realpath(topsrcdir)
+ self.settings = settings
+
+ self.populate_logger()
+ self.log_manager = log_manager
+
+ self._make = None
+ self._topobjdir = mozpath.realpath(topobjdir) if topobjdir else topobjdir
+ self._mozconfig = mozconfig
+ self._config_environment = None
+ self._virtualenv_name = virtualenv_name or "common"
+ self._virtualenv_manager = None
+
+    @classmethod
+    def from_environment(cls, cwd=None, detect_virtualenv_mozinfo=True, **kwargs):
+        """Create a MozbuildObject by detecting the proper one from the env.
+
+        This examines environment state like the current working directory and
+        creates a MozbuildObject from the found source directory, mozconfig, etc.
+
+        The role of this function is to identify a topsrcdir, topobjdir, and
+        mozconfig file.
+
+        If the current working directory is inside a known objdir, we always
+        use the topsrcdir and mozconfig associated with that objdir.
+
+        If the current working directory is inside a known srcdir, we use that
+        topsrcdir and look for mozconfigs using the default mechanism, which
+        looks inside environment variables.
+
+        If the current Python interpreter is running from a virtualenv inside
+        an objdir, we use that as our objdir.
+
+        If we're not inside a srcdir or objdir, an exception is raised.
+
+        detect_virtualenv_mozinfo determines whether we should look for a
+        mozinfo.json file relative to the virtualenv directory. This was
+        added to facilitate testing. Callers likely shouldn't change the
+        default.
+        """
+
+        cwd = os.path.realpath(cwd or os.getcwd())
+        topsrcdir = None
+        topobjdir = None
+        mozconfig = MozconfigLoader.AUTODETECT
+
+        def load_mozinfo(path):
+            # The locals below intentionally shadow the enclosing names; the
+            # values are handed back to the caller via the return tuple, not
+            # by rebinding the outer variables.
+            info = json.load(io.open(path, "rt", encoding="utf-8"))
+            topsrcdir = info.get("topsrcdir")
+            topobjdir = os.path.dirname(path)
+            mozconfig = info.get("mozconfig")
+            return topsrcdir, topobjdir, mozconfig
+
+        # Walk from cwd up to the filesystem root looking for an objdir marker.
+        for dir_path in [str(path) for path in [cwd] + list(Path(cwd).parents)]:
+            # If we find a mozinfo.json, we are in the objdir.
+            mozinfo_path = os.path.join(dir_path, "mozinfo.json")
+            if os.path.isfile(mozinfo_path):
+                topsrcdir, topobjdir, mozconfig = load_mozinfo(mozinfo_path)
+                break
+
+        if not topsrcdir:
+            # See if we're running from a Python virtualenv that's inside an objdir.
+            # sys.prefix would look like "$objdir/_virtualenvs/$virtualenv/".
+            # Note that virtualenv-based objdir detection does not work for
+            # instrumented builds, because they aren't created in the scoped
+            # "instrumented" objdir.
+            # However, working-directory-ancestor-based objdir resolution should fully
+            # cover that case.
+            mozinfo_path = os.path.join(sys.prefix, "..", "..", "mozinfo.json")
+            if detect_virtualenv_mozinfo and os.path.isfile(mozinfo_path):
+                topsrcdir, topobjdir, mozconfig = load_mozinfo(mozinfo_path)
+
+        if not topsrcdir:
+            # Fall back to the source tree this module lives in
+            # (python/mozbuild/mozbuild/base.py is four levels below it).
+            topsrcdir = str(Path(__file__).parent.parent.parent.parent.resolve())
+
+        topsrcdir = mozpath.realpath(topsrcdir)
+        if topobjdir:
+            topobjdir = mozpath.realpath(topobjdir)
+
+            if topsrcdir == topobjdir:
+                raise BadEnvironmentException(
+                    "The object directory appears "
+                    "to be the same as your source directory (%s). This build "
+                    "configuration is not supported." % topsrcdir
+                )
+
+        # If we can't resolve topobjdir, oh well. We'll figure out when we need
+        # one.
+        return cls(
+            topsrcdir, None, None, topobjdir=topobjdir, mozconfig=mozconfig, **kwargs
+        )
+
+    def resolve_mozconfig_topobjdir(self, default=None):
+        """Return the normalized objdir named by the mozconfig.
+
+        Falls back to *default* when the mozconfig does not name one, and
+        returns None when neither is available.
+        """
+        objdir = self.mozconfig.get("topobjdir") or default
+        if not objdir:
+            return None
+
+        # Expand the @CONFIG_GUESS@ placeholder to the detected target alias.
+        if "@CONFIG_GUESS@" in objdir:
+            guess = self.resolve_config_guess()
+            objdir = objdir.replace("@CONFIG_GUESS@", guess)
+
+        # A relative objdir is interpreted relative to the source directory.
+        if not os.path.isabs(objdir):
+            objdir = os.path.abspath(os.path.join(self.topsrcdir, objdir))
+
+        return mozpath.normsep(os.path.normpath(objdir))
+
+    def build_out_of_date(self, output, dep_file):
+        """Return True if *output* is out of date w.r.t. its dependencies.
+
+        *dep_file* is a text file listing one dependency path per line.  The
+        output is out of date when it (or the dep file) is missing, when any
+        listed dependency is missing, or when any dependency has a newer
+        mtime than the output.
+        """
+        if not os.path.isfile(output):
+            print(" Output reference file not found: %s" % output)
+            return True
+        if not os.path.isfile(dep_file):
+            print(" Dependency file not found: %s" % dep_file)
+            return True
+
+        with io.open(dep_file, "r", encoding="utf-8", newline="\n") as fh:
+            deps = fh.read().splitlines()
+
+        mtime = os.path.getmtime(output)
+        for f in deps:
+            try:
+                dep_mtime = os.path.getmtime(f)
+            except FileNotFoundError:
+                # A dependency disappearing forces a rebuild; any other
+                # OSError is unexpected and propagates to the caller.
+                print(" Input not found: %s" % f)
+                return True
+            if dep_mtime > mtime:
+                print(" %s is out of date with respect to %s" % (output, f))
+                return True
+        return False
+
+    def backend_out_of_date(self, backend_file):
+        """Return True if the build backend recorded in *backend_file*
+        needs to be regenerated."""
+        if not os.path.isfile(backend_file):
+            return True
+
+        # The backend file lists every file the backend generated; if any of
+        # those outputs has since been removed, regenerate.
+        with io.open(backend_file, "r", encoding="utf-8", newline="\n") as fh:
+            outputs = fh.read().splitlines()
+
+        if any(
+            not os.path.isfile(mozpath.join(self.topobjdir, output))
+            for output in outputs
+        ):
+            return True
+
+        # Otherwise defer to the generic mtime comparison against the
+        # backend's dependency file.
+        return self.build_out_of_date(backend_file, "%s.in" % backend_file)
+
+    @property
+    def topobjdir(self):
+        """The top-level object directory, resolved lazily on first access
+        from the mozconfig with "obj-@CONFIG_GUESS@" as the fallback."""
+        if self._topobjdir is None:
+            self._topobjdir = self.resolve_mozconfig_topobjdir(
+                default="obj-@CONFIG_GUESS@"
+            )
+
+        return self._topobjdir
+
+ @property
+ def virtualenv_manager(self):
+ from mach.site import CommandSiteManager
+ from mozboot.util import get_state_dir
+
+ if self._virtualenv_manager is None:
+ self._virtualenv_manager = CommandSiteManager.from_environment(
+ self.topsrcdir,
+ lambda: get_state_dir(
+ specific_to_topsrcdir=True, topsrcdir=self.topsrcdir
+ ),
+ self._virtualenv_name,
+ os.path.join(self.topobjdir, "_virtualenvs"),
+ )
+
+ return self._virtualenv_manager
+
+ @staticmethod
+ @memoize
+ def get_base_mozconfig_info(topsrcdir, path, env_mozconfig):
+ # env_mozconfig is only useful for unittests, which change the value of
+ # the environment variable, which has an impact on autodetection (when
+ # path is MozconfigLoader.AUTODETECT), and memoization wouldn't account
+ # for it without the explicit (unused) argument.
+ out = six.StringIO()
+ env = os.environ
+ if path and path != MozconfigLoader.AUTODETECT:
+ env = dict(env)
+ env["MOZCONFIG"] = path
+
+ # We use python configure to get mozconfig content and the value for
+ # --target (from mozconfig if necessary, guessed otherwise).
+
+ # Modified configure sandbox that replaces '--help' dependencies with
+ # `always`, such that depends functions with a '--help' dependency are
+ # not automatically executed when including files. We don't want all of
+ # those from init.configure to execute, only a subset.
+ class ReducedConfigureSandbox(ConfigureSandbox):
+ def depends_impl(self, *args, **kwargs):
+ args = tuple(
+ a
+ if not isinstance(a, six.string_types) or a != "--help"
+ else self._always.sandboxed
+ for a in args
+ )
+ return super(ReducedConfigureSandbox, self).depends_impl(
+ *args, **kwargs
+ )
+
+ # This may be called recursively from configure itself for $reasons,
+ # so avoid logging to the same logger (configure uses "moz.configure")
+ logger = logging.getLogger("moz.configure.reduced")
+ handler = logging.StreamHandler(out)
+ logger.addHandler(handler)
+ # If this were true, logging would still propagate to "moz.configure".
+ logger.propagate = False
+ sandbox = ReducedConfigureSandbox(
+ {},
+ environ=env,
+ argv=["mach"],
+ logger=logger,
+ )
+ base_dir = os.path.join(topsrcdir, "build", "moz.configure")
+ try:
+ sandbox.include_file(os.path.join(base_dir, "init.configure"))
+ # Force mozconfig options injection before getting the target.
+ sandbox._value_for(sandbox["mozconfig_options"])
+ return {
+ "mozconfig": sandbox._value_for(sandbox["mozconfig"]),
+ "target": sandbox._value_for(sandbox["real_target"]),
+ "project": sandbox._value_for(sandbox._options["project"]),
+ "artifact-builds": sandbox._value_for(
+ sandbox._options["artifact-builds"]
+ ),
+ }
+ except SystemExit:
+ print(out.getvalue())
+ raise
+
+ @property
+ def base_mozconfig_info(self):
+ return self.get_base_mozconfig_info(
+ self.topsrcdir, self._mozconfig, os.environ.get("MOZCONFIG")
+ )
+
+ @property
+ def mozconfig(self):
+ """Returns information about the current mozconfig file.
+
+ This a dict as returned by MozconfigLoader.read_mozconfig()
+ """
+ return self.base_mozconfig_info["mozconfig"]
+
+ @property
+ def config_environment(self):
+ """Returns the ConfigEnvironment for the current build configuration.
+
+ This property is only available once configure has executed.
+
+ If configure's output is not available, this will raise.
+ """
+ if self._config_environment:
+ return self._config_environment
+
+ config_status = os.path.join(self.topobjdir, "config.status")
+
+ if not os.path.exists(config_status) or not os.path.getsize(config_status):
+ raise BuildEnvironmentNotFoundException(
+ "config.status not available. Run configure."
+ )
+
+ try:
+ self._config_environment = ConfigEnvironment.from_config_status(
+ config_status
+ )
+ except ConfigStatusFailure as e:
+ six.raise_from(
+ BuildEnvironmentNotFoundException(
+ "config.status is outdated or broken. Run configure."
+ ),
+ e,
+ )
+
+ return self._config_environment
+
+ @property
+ def defines(self):
+ return self.config_environment.defines
+
+ @property
+ def substs(self):
+ return self.config_environment.substs
+
+ @property
+ def distdir(self):
+ return os.path.join(self.topobjdir, "dist")
+
+ @property
+ def bindir(self):
+ return os.path.join(self.topobjdir, "dist", "bin")
+
+ @property
+ def includedir(self):
+ return os.path.join(self.topobjdir, "dist", "include")
+
+ @property
+ def statedir(self):
+ return os.path.join(self.topobjdir, ".mozbuild")
+
+ @property
+ def platform(self):
+ """Returns current platform and architecture name"""
+ import mozinfo
+
+ platform_name = None
+ bits = str(mozinfo.info["bits"])
+ if mozinfo.isLinux:
+ platform_name = "linux" + bits
+ elif mozinfo.isWin:
+ platform_name = "win" + bits
+ elif mozinfo.isMac:
+ platform_name = "macosx" + bits
+
+ return platform_name, bits + "bit"
+
+ @memoized_property
+ def repository(self):
+ """Get a `mozversioncontrol.Repository` object for the
+ top source directory."""
+ # We try to obtain a repo using the configured VCS info first.
+ # If we don't have a configure context, fall back to auto-detection.
+ try:
+ return get_repository_from_build_config(self)
+ except (
+ BuildEnvironmentNotFoundException,
+ MissingConfigureInfo,
+ MissingVCSTool,
+ ):
+ pass
+
+ return get_repository_object(self.topsrcdir)
+
+ def reload_config_environment(self):
+ """Force config.status to be re-read and return the new value
+ of ``self.config_environment``.
+ """
+ self._config_environment = None
+ return self.config_environment
+
+ def mozbuild_reader(
+ self, config_mode="build", vcs_revision=None, vcs_check_clean=True
+ ):
+ """Obtain a ``BuildReader`` for evaluating moz.build files.
+
+ Given arguments, returns a ``mozbuild.frontend.reader.BuildReader``
+ that can be used to evaluate moz.build files for this repo.
+
+ ``config_mode`` is either ``build`` or ``empty``. If ``build``,
+ ``self.config_environment`` is used. This requires a configured build
+ system to work. If ``empty``, an empty config is used. ``empty`` is
+ appropriate for file-based traversal mode where ``Files`` metadata is
+ read.
+
+ If ``vcs_revision`` is defined, it specifies a version control revision
+ to use to obtain files content. The default is to use the filesystem.
+ This mode is only supported with Mercurial repositories.
+
+ If ``vcs_revision`` is not defined and the version control checkout is
+ sparse, this implies ``vcs_revision='.'``.
+
+ If ``vcs_revision`` is ``.`` (denotes the parent of the working
+ directory), we will verify that the working directory is clean unless
+ ``vcs_check_clean`` is False. This prevents confusion due to uncommitted
+ file changes not being reflected in the reader.
+ """
+ from mozpack.files import MercurialRevisionFinder
+
+ from mozbuild.frontend.reader import BuildReader, EmptyConfig, default_finder
+
+ if config_mode == "build":
+ config = self.config_environment
+ elif config_mode == "empty":
+ config = EmptyConfig(self.topsrcdir)
+ else:
+ raise ValueError("unknown config_mode value: %s" % config_mode)
+
+ try:
+ repo = self.repository
+ except InvalidRepoPath:
+ repo = None
+
+ if (
+ repo
+ and repo != "SOURCE"
+ and not vcs_revision
+ and repo.sparse_checkout_present()
+ ):
+ vcs_revision = "."
+
+ if vcs_revision is None:
+ finder = default_finder
+ else:
+ # If we failed to detect the repo prior, check again to raise its
+ # exception.
+ if not repo:
+ self.repository
+ assert False
+
+ if repo.name != "hg":
+ raise Exception("do not support VCS reading mode for %s" % repo.name)
+
+ if vcs_revision == "." and vcs_check_clean:
+ with repo:
+ if not repo.working_directory_clean():
+ raise Exception(
+ "working directory is not clean; "
+ "refusing to use a VCS-based finder"
+ )
+
+ finder = MercurialRevisionFinder(
+ self.topsrcdir, rev=vcs_revision, recognize_repo_paths=True
+ )
+
+ return BuildReader(config, finder=finder)
+
+ def is_clobber_needed(self):
+ if not os.path.exists(self.topobjdir):
+ return False
+ return Clobberer(self.topsrcdir, self.topobjdir).clobber_needed()
+
+    def get_binary_path(self, what="app", validate_exists=True, where="default"):
+        """Obtain the path to a compiled binary for this build configuration.
+
+        The what argument is the program or tool being sought after. See the
+        code implementation for supported values.
+
+        If validate_exists is True (the default), we will ensure the found path
+        exists before returning, raising BinaryNotFoundException if it doesn't.
+
+        If where is 'staged-package', we will return the path to the binary in
+        the package staging directory.
+
+        If no arguments are specified, we will return the main binary for the
+        configured XUL application.
+        """
+
+        if where not in ("default", "staged-package"):
+            raise Exception("Don't know location %s" % where)
+
+        substs = self.substs
+
+        stem = self.distdir
+        if where == "staged-package":
+            stem = os.path.join(stem, substs["MOZ_APP_NAME"])
+
+        # On macOS the binaries live inside the .app bundle.
+        if substs["OS_ARCH"] == "Darwin" and "MOZ_MACBUNDLE_NAME" in substs:
+            stem = os.path.join(stem, substs["MOZ_MACBUNDLE_NAME"], "Contents", "MacOS")
+        elif where == "default":
+            stem = os.path.join(stem, "bin")
+
+        # "app" resolves to the application's main binary; anything else is
+        # taken as a literal program name.
+        leaf = (substs["MOZ_APP_NAME"] if what == "app" else what) + substs[
+            "BIN_SUFFIX"
+        ]
+        path = os.path.join(stem, leaf)
+
+        if validate_exists and not os.path.exists(path):
+            raise BinaryNotFoundException(path)
+
+        return path
+
+ def resolve_config_guess(self):
+ return self.base_mozconfig_info["target"].alias
+
+ def notify(self, msg):
+ """Show a desktop notification with the supplied message
+
+ On Linux and Mac, this will show a desktop notification with the message,
+ but on Windows we can only flash the screen.
+ """
+ if "MOZ_NOSPAM" in os.environ or "MOZ_AUTOMATION" in os.environ:
+ return
+
+ try:
+ if sys.platform.startswith("darwin"):
+ notifier = which("terminal-notifier")
+ if not notifier:
+ raise Exception(
+ "Install terminal-notifier to get "
+ "a notification when the build finishes."
+ )
+ self.run_process(
+ [
+ notifier,
+ "-title",
+ "Mozilla Build System",
+ "-group",
+ "mozbuild",
+ "-message",
+ msg,
+ ],
+ ensure_exit_code=False,
+ )
+ elif sys.platform.startswith("win"):
+ from ctypes import POINTER, WINFUNCTYPE, Structure, sizeof, windll
+ from ctypes.wintypes import BOOL, DWORD, HANDLE, UINT
+
+ class FLASHWINDOW(Structure):
+ _fields_ = [
+ ("cbSize", UINT),
+ ("hwnd", HANDLE),
+ ("dwFlags", DWORD),
+ ("uCount", UINT),
+ ("dwTimeout", DWORD),
+ ]
+
+ FlashWindowExProto = WINFUNCTYPE(BOOL, POINTER(FLASHWINDOW))
+ FlashWindowEx = FlashWindowExProto(("FlashWindowEx", windll.user32))
+ FLASHW_CAPTION = 0x01
+ FLASHW_TRAY = 0x02
+ FLASHW_TIMERNOFG = 0x0C
+
+ # GetConsoleWindows returns NULL if no console is attached. We
+ # can't flash nothing.
+ console = windll.kernel32.GetConsoleWindow()
+ if not console:
+ return
+
+ params = FLASHWINDOW(
+ sizeof(FLASHWINDOW),
+ console,
+ FLASHW_CAPTION | FLASHW_TRAY | FLASHW_TIMERNOFG,
+ 3,
+ 0,
+ )
+ FlashWindowEx(params)
+ else:
+ notifier = which("notify-send")
+ if not notifier:
+ raise Exception(
+ "Install notify-send (usually part of "
+ "the libnotify package) to get a notification when "
+ "the build finishes."
+ )
+ self.run_process(
+ [
+ notifier,
+ "--app-name=Mozilla Build System",
+ "Mozilla Build System",
+ msg,
+ ],
+ ensure_exit_code=False,
+ )
+ except Exception as e:
+ self.log(
+ logging.WARNING,
+ "notifier-failed",
+ {"error": str(e)},
+ "Notification center failed: {error}",
+ )
+
+    def _ensure_objdir_exists(self):
+        """Create the objdir state directory if it doesn't already exist."""
+        # exist_ok avoids the racy isdir-then-makedirs dance.
+        os.makedirs(self.statedir, exist_ok=True)
+
+    def _ensure_state_subdir_exists(self, subdir):
+        """Create *subdir* under the state directory if it doesn't exist."""
+        # exist_ok avoids the racy isdir-then-makedirs dance.
+        os.makedirs(os.path.join(self.statedir, subdir), exist_ok=True)
+
+    def _get_state_filename(self, filename, subdir=None):
+        """Return the path of *filename* inside the state directory,
+        optionally under *subdir*."""
+        parts = [self.statedir]
+        if subdir:
+            parts.append(subdir)
+        parts.append(filename)
+        return os.path.join(*parts)
+
+ def _wrap_path_argument(self, arg):
+ return PathArgument(arg, self.topsrcdir, self.topobjdir)
+
+    def _run_make(
+        self,
+        directory=None,
+        filename=None,
+        target=None,
+        log=True,
+        srcdir=False,
+        line_handler=None,
+        append_env=None,
+        explicit_env=None,
+        ignore_errors=False,
+        ensure_exit_code=0,
+        silent=True,
+        print_directory=True,
+        pass_thru=False,
+        num_jobs=0,
+        job_size=0,
+        keep_going=False,
+    ):
+        """Invoke make.
+
+        directory -- Relative directory to look for Makefile in.
+        filename -- Explicit makefile to run.
+        target -- Makefile target(s) to make. Can be a string or iterable of
+            strings.
+        srcdir -- If True, invoke make from the source directory tree.
+            Otherwise, make will be invoked from the object directory.
+        silent -- If True (the default), run make in silent mode.
+        print_directory -- If True (the default), have make print directories
+            while doing traversal.
+        num_jobs -- Number of parallel jobs; 0 means derive from mozconfig
+            make_flags, or from CPU count and available memory.
+        job_size -- Estimated memory per job in GiB, used with psutil to cap
+            parallelism; 0 picks a compiler-dependent default.
+        keep_going -- If True, pass -k so make continues past errors.
+        """
+        self._ensure_objdir_exists()
+
+        args = [self.substs["GMAKE"]]
+
+        if directory:
+            args.extend(["-C", directory.replace(os.sep, "/")])
+
+        if filename:
+            args.extend(["-f", filename])
+
+        # Extract an explicit -j from mozconfig make_flags, forwarding any
+        # other flags to make untouched.
+        if num_jobs == 0 and self.mozconfig["make_flags"]:
+            flags = iter(self.mozconfig["make_flags"])
+            for flag in flags:
+                if flag == "-j":
+                    try:
+                        # "-j N": the job count is the next flag.
+                        # (was flags.next(), a Python 2 idiom that raises
+                        # AttributeError on Python 3)
+                        flag = next(flags)
+                    except StopIteration:
+                        break
+                    try:
+                        num_jobs = int(flag)
+                    except ValueError:
+                        args.append(flag)
+                elif flag.startswith("-j"):
+                    # "-jN" with the count attached.
+                    try:
+                        num_jobs = int(flag[2:])
+                    except (ValueError, IndexError):
+                        break
+                else:
+                    args.append(flag)
+
+        if num_jobs == 0:
+            if job_size == 0:
+                job_size = 2.0 if self.substs.get("CC_TYPE") == "gcc" else 1.0  # GiB
+
+            cpus = multiprocessing.cpu_count()
+            if not psutil or not job_size:
+                num_jobs = cpus
+            else:
+                # Cap parallelism so that estimated per-job memory use fits
+                # in physical RAM.
+                mem_gb = psutil.virtual_memory().total / 1024 ** 3
+                from_mem = round(mem_gb / job_size)
+                num_jobs = max(1, min(cpus, from_mem))
+                print(
+                    " Parallelism determined by memory: using %d jobs for %d cores "
+                    "based on %.1f GiB RAM and estimated job size of %.1f GiB"
+                    % (num_jobs, cpus, mem_gb, job_size)
+                )
+
+        args.append("-j%d" % num_jobs)
+
+        # NOTE(review): make's -k is "keep going" (same as keep_going below);
+        # "ignore errors" is -i. Confirm whether -k here is intentional.
+        if ignore_errors:
+            args.append("-k")
+
+        if silent:
+            args.append("-s")
+
+        # Print entering/leaving directory messages. Some consumers look at
+        # these to measure progress.
+        if print_directory:
+            args.append("-w")
+
+        if keep_going:
+            args.append("-k")
+
+        if isinstance(target, list):
+            args.extend(target)
+        elif target:
+            args.append(target)
+
+        fn = self._run_command_in_objdir
+
+        if srcdir:
+            fn = self._run_command_in_srcdir
+
+        append_env = dict(append_env or ())
+        append_env["MACH"] = "1"
+
+        params = {
+            "args": args,
+            "line_handler": line_handler,
+            "append_env": append_env,
+            "explicit_env": explicit_env,
+            "log_level": logging.INFO,
+            "require_unix_environment": False,
+            "ensure_exit_code": ensure_exit_code,
+            "pass_thru": pass_thru,
+            # Make manages its children, so mozprocess doesn't need to bother.
+            # Having mozprocess manage children can also have side-effects when
+            # building on Windows. See bug 796840.
+            "ignore_children": True,
+        }
+
+        if log:
+            params["log_name"] = "make"
+
+        return fn(**params)
+
+ def _run_command_in_srcdir(self, **args):
+ return self.run_process(cwd=self.topsrcdir, **args)
+
+ def _run_command_in_objdir(self, **args):
+ return self.run_process(cwd=self.topobjdir, **args)
+
+ def _is_windows(self):
+ return os.name in ("nt", "ce")
+
+ def _is_osx(self):
+ return "darwin" in str(sys.platform).lower()
+
+ def _spawn(self, cls):
+ """Create a new MozbuildObject-derived class instance from ourselves.
+
+ This is used as a convenience method to create other
+ MozbuildObject-derived class instances. It can only be used on
+ classes that have the same constructor arguments as us.
+ """
+
+ return cls(
+ self.topsrcdir, self.settings, self.log_manager, topobjdir=self.topobjdir
+ )
+
+ def activate_virtualenv(self):
+ self.virtualenv_manager.activate()
+
+ def _set_log_level(self, verbose):
+ self.log_manager.terminal_handler.setLevel(
+ logging.INFO if not verbose else logging.DEBUG
+ )
+
+ def _ensure_zstd(self):
+ try:
+ import zstandard # noqa: F401
+ except (ImportError, AttributeError):
+ self.activate_virtualenv()
+ self.virtualenv_manager.install_pip_requirements(
+ os.path.join(self.topsrcdir, "build", "zstandard_requirements.txt")
+ )
+
+
+class MachCommandBase(MozbuildObject):
+ """Base class for mach command providers that wish to be MozbuildObjects.
+
+ This provides a level of indirection so MozbuildObject can be refactored
+ without having to change everything that inherits from it.
+ """
+
+ def __init__(self, context, virtualenv_name=None, metrics=None, no_auto_log=False):
+ # Attempt to discover topobjdir through environment detection, as it is
+ # more reliable than mozconfig when cwd is inside an objdir.
+ topsrcdir = context.topdir
+ topobjdir = None
+ detect_virtualenv_mozinfo = True
+ if hasattr(context, "detect_virtualenv_mozinfo"):
+ detect_virtualenv_mozinfo = getattr(context, "detect_virtualenv_mozinfo")
+ try:
+ dummy = MozbuildObject.from_environment(
+ cwd=context.cwd, detect_virtualenv_mozinfo=detect_virtualenv_mozinfo
+ )
+ topsrcdir = dummy.topsrcdir
+ topobjdir = dummy._topobjdir
+ if topobjdir:
+ # If we're inside a objdir and the found mozconfig resolves to
+ # another objdir, we abort. The reasoning here is that if you
+ # are inside an objdir you probably want to perform actions on
+ # that objdir, not another one. This prevents accidental usage
+ # of the wrong objdir when the current objdir is ambiguous.
+ config_topobjdir = dummy.resolve_mozconfig_topobjdir()
+
+ if config_topobjdir and not Path(topobjdir).samefile(
+ Path(config_topobjdir)
+ ):
+ raise ObjdirMismatchException(topobjdir, config_topobjdir)
+ except BuildEnvironmentNotFoundException:
+ pass
+ except ObjdirMismatchException as e:
+ print(
+ "Ambiguous object directory detected. We detected that "
+ "both %s and %s could be object directories. This is "
+ "typically caused by having a mozconfig pointing to a "
+ "different object directory from the current working "
+ "directory. To solve this problem, ensure you do not have a "
+ "default mozconfig in searched paths." % (e.objdir1, e.objdir2)
+ )
+ sys.exit(1)
+
+ except MozconfigLoadException as e:
+ print(e)
+ sys.exit(1)
+
+ MozbuildObject.__init__(
+ self,
+ topsrcdir,
+ context.settings,
+ context.log_manager,
+ topobjdir=topobjdir,
+ virtualenv_name=virtualenv_name,
+ )
+
+ self._mach_context = context
+ self.metrics = metrics
+
+ # Incur mozconfig processing so we have unified error handling for
+ # errors. Otherwise, the exceptions could bubble back to mach's error
+ # handler.
+ try:
+ self.mozconfig
+
+ except MozconfigFindException as e:
+ print(e)
+ sys.exit(1)
+
+ except MozconfigLoadException as e:
+ print(e)
+ sys.exit(1)
+
+ # Always keep a log of the last command, but don't do that for mach
+ # invokations from scripts (especially not the ones done by the build
+ # system itself).
+ try:
+ fileno = getattr(sys.stdout, "fileno", lambda: None)()
+ except io.UnsupportedOperation:
+ fileno = None
+ if fileno and os.isatty(fileno) and not no_auto_log:
+ self._ensure_state_subdir_exists(".")
+ logfile = self._get_state_filename("last_log.json")
+ try:
+ fd = open(logfile, "wt")
+ self.log_manager.add_json_handler(fd)
+ except Exception as e:
+ self.log(
+ logging.WARNING,
+ "mach",
+ {"error": str(e)},
+ "Log will not be kept for this command: {error}.",
+ )
+
+    def _sub_mach(self, argv):
+        """Run a nested |mach| command in a subprocess; return its exit code."""
+        mach = os.path.join(self.topsrcdir, "mach")
+        return subprocess.call([sys.executable, mach] + argv)
+
+
+class MachCommandConditions(object):
+ """A series of commonly used condition functions which can be applied to
+ mach commands with providers deriving from MachCommandBase.
+ """
+
+ @staticmethod
+ def is_firefox(cls):
+ """Must have a Firefox build."""
+ if hasattr(cls, "substs"):
+ return cls.substs.get("MOZ_BUILD_APP") == "browser"
+ return False
+
+ @staticmethod
+ def is_jsshell(cls):
+ """Must have a jsshell build."""
+ if hasattr(cls, "substs"):
+ return cls.substs.get("MOZ_BUILD_APP") == "js"
+ return False
+
+ @staticmethod
+ def is_thunderbird(cls):
+ """Must have a Thunderbird build."""
+ if hasattr(cls, "substs"):
+ return cls.substs.get("MOZ_BUILD_APP") == "comm/mail"
+ return False
+
+ @staticmethod
+ def is_firefox_or_thunderbird(cls):
+ """Must have a Firefox or Thunderbird build."""
+ return MachCommandConditions.is_firefox(
+ cls
+ ) or MachCommandConditions.is_thunderbird(cls)
+
+ @staticmethod
+ def is_android(cls):
+ """Must have an Android build."""
+ if hasattr(cls, "substs"):
+ return cls.substs.get("MOZ_WIDGET_TOOLKIT") == "android"
+ return False
+
+ @staticmethod
+ def is_not_android(cls):
+ """Must not have an Android build."""
+ if hasattr(cls, "substs"):
+ return cls.substs.get("MOZ_WIDGET_TOOLKIT") != "android"
+ return False
+
+ @staticmethod
+ def is_firefox_or_android(cls):
+ """Must have a Firefox or Android build."""
+ return MachCommandConditions.is_firefox(
+ cls
+ ) or MachCommandConditions.is_android(cls)
+
+ @staticmethod
+ def has_build(cls):
+ """Must have a build."""
+ return MachCommandConditions.is_firefox_or_android(
+ cls
+ ) or MachCommandConditions.is_thunderbird(cls)
+
+ @staticmethod
+ def has_build_or_shell(cls):
+ """Must have a build or a shell build."""
+ return MachCommandConditions.has_build(cls) or MachCommandConditions.is_jsshell(
+ cls
+ )
+
+ @staticmethod
+ def is_hg(cls):
+ """Must have a mercurial source checkout."""
+ try:
+ return isinstance(cls.repository, HgRepository)
+ except InvalidRepoPath:
+ return False
+
+ @staticmethod
+ def is_git(cls):
+ """Must have a git source checkout."""
+ try:
+ return isinstance(cls.repository, GitRepository)
+ except InvalidRepoPath:
+ return False
+
+ @staticmethod
+ def is_artifact_build(cls):
+ """Must be an artifact build."""
+ if hasattr(cls, "substs"):
+ return getattr(cls, "substs", {}).get("MOZ_ARTIFACT_BUILDS")
+ return False
+
+ @staticmethod
+ def is_non_artifact_build(cls):
+ """Must not be an artifact build."""
+ if hasattr(cls, "substs"):
+ return not MachCommandConditions.is_artifact_build(cls)
+ return False
+
+ @staticmethod
+ def is_buildapp_in(cls, apps):
+ """Must have a build for one of the given app"""
+ for app in apps:
+ attr = getattr(MachCommandConditions, "is_{}".format(app), None)
+ if attr and attr(cls):
+ return True
+ return False
+
+
+class PathArgument(object):
+    """Parse a filesystem path argument and transform it in various ways."""
+
+    def __init__(self, arg, topsrcdir, topobjdir, cwd=None):
+        # arg may be absolute or relative to cwd.
+        self.arg = arg
+        self.topsrcdir = topsrcdir
+        self.topobjdir = topobjdir
+        self.cwd = os.getcwd() if cwd is None else cwd
+
+    def relpath(self):
+        """Return a path relative to the topsrcdir or topobjdir.
+
+        If the argument is a path to a location in one of the base directories
+        (topsrcdir or topobjdir), then strip off the base directory part and
+        just return the path within the base directory."""
+
+        abspath = os.path.abspath(os.path.join(self.cwd, self.arg))
+
+        # If that path is within topsrcdir or topobjdir, return an equivalent
+        # path relative to that base directory.
+        # NOTE(review): this is a plain string prefix test, so a sibling
+        # directory such as "<topobjdir>-other" would also match — confirm
+        # callers never pass such paths before tightening.
+        for base_dir in [self.topobjdir, self.topsrcdir]:
+            if abspath.startswith(os.path.abspath(base_dir)):
+                return mozpath.relpath(abspath, base_dir)
+
+        return mozpath.normsep(self.arg)
+
+    def srcdir_path(self):
+        # The argument resolved as an absolute path in the source directory.
+        return mozpath.join(self.topsrcdir, self.relpath())
+
+    def objdir_path(self):
+        # The argument resolved as an absolute path in the object directory.
+        return mozpath.join(self.topobjdir, self.relpath())
+
+
+class ExecutionSummary(dict):
+    """Helper for execution summaries.
+
+    Stores summary values as dict items while accumulating a format string;
+    str() renders the accumulated format against the stored values.  Items
+    are also readable as attributes.
+    """
+
+    def __init__(self, summary_format, **data):
+        self._summary_format = ""
+        # Every summary is expected to carry a timing entry.
+        assert "execution_time" in data
+        self.extend(summary_format, **data)
+
+    def extend(self, summary_format, **data):
+        """Append *summary_format* and merge *data* into the summary."""
+        self._summary_format += summary_format
+        self.update(data)
+
+    def __str__(self):
+        return self._summary_format.format(**self)
+
+    def __getattr__(self, key):
+        # Translate a missing key into AttributeError (not KeyError) so
+        # getattr()/hasattr() behave correctly on this object.
+        try:
+            return self[key]
+        except KeyError:
+            raise AttributeError(key)
diff --git a/python/mozbuild/mozbuild/bootstrap.py b/python/mozbuild/mozbuild/bootstrap.py
new file mode 100644
index 0000000000..60a307145c
--- /dev/null
+++ b/python/mozbuild/mozbuild/bootstrap.py
@@ -0,0 +1,61 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import functools
+import io
+import logging
+import os
+from pathlib import Path
+
+from mozbuild.configure import ConfigureSandbox
+
+
+def _raw_sandbox(extra_args=[]):
+ # Here, we don't want an existing mozconfig to interfere with what we
+ # do, neither do we want the default for --enable-bootstrap (which is not
+ # always on) to prevent this from doing something.
+ out = io.StringIO()
+ logger = logging.getLogger("moz.configure")
+ handler = logging.StreamHandler(out)
+ logger.addHandler(handler)
+ logger.propagate = False
+ sandbox = ConfigureSandbox(
+ {},
+ argv=["configure"]
+ + ["--enable-bootstrap", f"MOZCONFIG={os.devnull}"]
+ + extra_args,
+ logger=logger,
+ )
+ return sandbox
+
+
+@functools.lru_cache(maxsize=None)
+def _bootstrap_sandbox():
+ sandbox = _raw_sandbox()
+ moz_configure = (
+ Path(__file__).parent.parent.parent.parent / "build" / "moz.configure"
+ )
+ sandbox.include_file(str(moz_configure / "init.configure"))
+ # bootstrap_search_path_order has a dependency on developer_options, which
+ # is not defined in init.configure. Its value doesn't matter for us, though.
+ sandbox["developer_options"] = sandbox["always"]
+ sandbox.include_file(str(moz_configure / "bootstrap.configure"))
+ return sandbox
+
+
+def bootstrap_toolchain(toolchain_job):
+ # Expand the `bootstrap_path` template for the given toolchain_job, and execute the
+ # expanded function via `_value_for`, which will trigger autobootstrap.
+ # Returns the path to the toolchain.
+ sandbox = _bootstrap_sandbox()
+ return sandbox._value_for(sandbox["bootstrap_path"](toolchain_job))
+
+
+def bootstrap_all_toolchains_for(configure_args=[]):
+ sandbox = _raw_sandbox(configure_args)
+ moz_configure = Path(__file__).parent.parent.parent.parent / "moz.configure"
+ sandbox.include_file(str(moz_configure))
+ for depend in sandbox._depends.values():
+ if depend.name == "bootstrap_path":
+ depend.result()
diff --git a/python/mozbuild/mozbuild/build_commands.py b/python/mozbuild/mozbuild/build_commands.py
new file mode 100644
index 0000000000..47398dc3a0
--- /dev/null
+++ b/python/mozbuild/mozbuild/build_commands.py
@@ -0,0 +1,366 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import os
+import subprocess
+
+import mozpack.path as mozpath
+from mach.decorators import Command, CommandArgument
+
+from mozbuild.backend import backends
+from mozbuild.mozconfig import MozconfigLoader
+from mozbuild.util import MOZBUILD_METRICS_PATH
+
+BUILD_WHAT_HELP = """
+What to build. Can be a top-level make target or a relative directory. If
+multiple options are provided, they will be built serially. BUILDING ONLY PARTS
+OF THE TREE CAN RESULT IN BAD TREE STATE. USE AT YOUR OWN RISK.
+""".strip()
+
+
+def _set_priority(priority, verbose):
+ # Choose the Windows API structure to standardize on.
+ PRIO_CLASS_BY_KEY = {
+ "idle": "IDLE_PRIORITY_CLASS",
+ "less": "BELOW_NORMAL_PRIORITY_CLASS",
+ "normal": "NORMAL_PRIORITY_CLASS",
+ "more": "ABOVE_NORMAL_PRIORITY_CLASS",
+ "high": "HIGH_PRIORITY_CLASS",
+ }
+ try:
+ prio_class = PRIO_CLASS_BY_KEY[priority]
+ except KeyError:
+ raise KeyError(f"priority '{priority}' not in {list(PRIO_CLASS_BY_KEY)}")
+
+ if "nice" in dir(os):
+ # Translate the Windows priority classes into niceness values.
+ NICENESS_BY_PRIO_CLASS = {
+ "IDLE_PRIORITY_CLASS": 19,
+ "BELOW_NORMAL_PRIORITY_CLASS": 10,
+ "NORMAL_PRIORITY_CLASS": 0,
+ "ABOVE_NORMAL_PRIORITY_CLASS": -10,
+ "HIGH_PRIORITY_CLASS": -20,
+ }
+ niceness = NICENESS_BY_PRIO_CLASS[prio_class]
+
+ os.nice(niceness)
+ if verbose:
+ print(f"os.nice({niceness})")
+ return True
+
+ try:
+ import psutil
+
+ prio_class_val = getattr(psutil, prio_class)
+ except ModuleNotFoundError:
+ return False
+ except AttributeError:
+ return False
+
+ psutil.Process().nice(prio_class_val)
+ if verbose:
+ print(f"psutil.Process().nice(psutil.{prio_class})")
+ return True
+
+
+# Interface to build the tree.
+
+
+@Command(
+ "build",
+ category="build",
+ description="Build the tree.",
+ metrics_path=MOZBUILD_METRICS_PATH,
+ virtualenv_name="build",
+)
+@CommandArgument(
+ "--jobs",
+ "-j",
+ default="0",
+ metavar="jobs",
+ type=int,
+ help="Number of concurrent jobs to run. Default is based on the number of "
+ "CPUs and the estimated size of the jobs (see --job-size).",
+)
+@CommandArgument(
+ "--job-size",
+ default="0",
+ metavar="size",
+ type=float,
+ help="Estimated RAM required, in GiB, for each parallel job. Used to "
+ "compute a default number of concurrent jobs.",
+)
+@CommandArgument(
+ "-C",
+ "--directory",
+ default=None,
+ help="Change to a subdirectory of the build directory first.",
+)
+@CommandArgument("what", default=None, nargs="*", help=BUILD_WHAT_HELP)
+@CommandArgument(
+ "-v",
+ "--verbose",
+ action="store_true",
+ help="Verbose output for what commands the build is running.",
+)
+@CommandArgument(
+ "--keep-going",
+ action="store_true",
+ help="Keep building after an error has occurred",
+)
+@CommandArgument(
+ "--priority",
+ default="less",
+ metavar="priority",
+ type=str,
+ help="idle/less/normal/more/high. (Default less)",
+)
+def build(
+ command_context,
+ what=None,
+ jobs=0,
+ job_size=0,
+ directory=None,
+ verbose=False,
+ keep_going=False,
+ priority="less",
+):
+ """Build the source tree.
+
+ With no arguments, this will perform a full build.
+
+ Positional arguments define targets to build. These can be make targets
+ or patterns like "<dir>/<target>" to indicate a make target within a
+ directory.
+
+ There are a few special targets that can be used to perform a partial
+ build faster than what `mach build` would perform:
+
+ * binaries - compiles and links all C/C++ sources and produces shared
+ libraries and executables (binaries).
+
+ * faster - builds JavaScript, XUL, CSS, etc files.
+
+ "binaries" and "faster" almost fully complement each other. However,
+ there are build actions not captured by either. If things don't appear to
+ be rebuilding, perform a vanilla `mach build` to rebuild the world.
+ """
+ from mozbuild.controller.building import BuildDriver
+
+ command_context.log_manager.enable_all_structured_loggers()
+
+ loader = MozconfigLoader(command_context.topsrcdir)
+ mozconfig = loader.read_mozconfig(loader.AUTODETECT)
+ configure_args = mozconfig["configure_args"]
+ doing_pgo = configure_args and "MOZ_PGO=1" in configure_args
+ # Force verbosity on automation.
+ verbose = verbose or bool(os.environ.get("MOZ_AUTOMATION", False))
+ # Keep going by default on automation so that we exhaust as many errors as
+ # possible.
+ keep_going = keep_going or bool(os.environ.get("MOZ_AUTOMATION", False))
+ append_env = None
+
+ # By setting the current process's priority, by default our child processes
+ # will also inherit this same priority.
+ if not _set_priority(priority, verbose):
+ print("--priority not supported on this platform.")
+
+ if doing_pgo:
+ if what:
+ raise Exception("Cannot specify targets (%s) in MOZ_PGO=1 builds" % what)
+ instr = command_context._spawn(BuildDriver)
+ orig_topobjdir = instr._topobjdir
+ instr._topobjdir = mozpath.join(instr._topobjdir, "instrumented")
+
+ append_env = {"MOZ_PROFILE_GENERATE": "1"}
+ status = instr.build(
+ command_context.metrics,
+ what=what,
+ jobs=jobs,
+ job_size=job_size,
+ directory=directory,
+ verbose=verbose,
+ keep_going=keep_going,
+ mach_context=command_context._mach_context,
+ append_env=append_env,
+ virtualenv_topobjdir=orig_topobjdir,
+ )
+ if status != 0:
+ return status
+
+ # Packaging the instrumented build is required to get the jarlog
+ # data.
+ status = instr._run_make(
+ directory=".",
+ target="package",
+ silent=not verbose,
+ ensure_exit_code=False,
+ append_env=append_env,
+ )
+ if status != 0:
+ return status
+
+ pgo_env = os.environ.copy()
+ if instr.config_environment.substs.get("CC_TYPE") in ("clang", "clang-cl"):
+ pgo_env["LLVM_PROFDATA"] = instr.config_environment.substs.get(
+ "LLVM_PROFDATA"
+ )
+ pgo_env["JARLOG_FILE"] = mozpath.join(orig_topobjdir, "jarlog/en-US.log")
+ pgo_cmd = [
+ command_context.virtualenv_manager.python_path,
+ mozpath.join(command_context.topsrcdir, "build/pgo/profileserver.py"),
+ ]
+ subprocess.check_call(pgo_cmd, cwd=instr.topobjdir, env=pgo_env)
+
+ # Set the default build to MOZ_PROFILE_USE
+ append_env = {"MOZ_PROFILE_USE": "1"}
+
+ driver = command_context._spawn(BuildDriver)
+ return driver.build(
+ command_context.metrics,
+ what=what,
+ jobs=jobs,
+ job_size=job_size,
+ directory=directory,
+ verbose=verbose,
+ keep_going=keep_going,
+ mach_context=command_context._mach_context,
+ append_env=append_env,
+ )
+
+
+@Command(
+ "configure",
+ category="build",
+ description="Configure the tree (run configure and config.status).",
+ metrics_path=MOZBUILD_METRICS_PATH,
+ virtualenv_name="build",
+)
+@CommandArgument(
+ "options", default=None, nargs=argparse.REMAINDER, help="Configure options"
+)
+def configure(
+ command_context,
+ options=None,
+ buildstatus_messages=False,
+ line_handler=None,
+):
+ from mozbuild.controller.building import BuildDriver
+
+ command_context.log_manager.enable_all_structured_loggers()
+ driver = command_context._spawn(BuildDriver)
+
+ return driver.configure(
+ command_context.metrics,
+ options=options,
+ buildstatus_messages=buildstatus_messages,
+ line_handler=line_handler,
+ )
+
+
+@Command(
+ "resource-usage",
+ category="post-build",
+ description="Show information about system resource usage for a build.",
+ virtualenv_name="build",
+)
+@CommandArgument(
+ "--address",
+ default="localhost",
+ help="Address the HTTP server should listen on.",
+)
+@CommandArgument(
+ "--port",
+ type=int,
+ default=0,
+ help="Port number the HTTP server should listen on.",
+)
+@CommandArgument(
+ "--browser",
+ default="firefox",
+ help="Web browser to automatically open. See webbrowser Python module.",
+)
+@CommandArgument("--url", help="URL of JSON document to display")
+def resource_usage(command_context, address=None, port=None, browser=None, url=None):
+ import webbrowser
+
+ from mozbuild.html_build_viewer import BuildViewerServer
+
+ server = BuildViewerServer(address, port)
+
+ if url:
+ server.add_resource_json_url("url", url)
+ else:
+ last = command_context._get_state_filename("build_resources.json")
+ if not os.path.exists(last):
+ print(
+ "Build resources not available. If you have performed a "
+ "build and receive this message, the psutil Python package "
+ "likely failed to initialize properly."
+ )
+ return 1
+
+ server.add_resource_json_file("last", last)
+ try:
+ webbrowser.get(browser).open_new_tab(server.url)
+ except Exception:
+ print("Cannot get browser specified, trying the default instead.")
+ try:
+ browser = webbrowser.get().open_new_tab(server.url)
+ except Exception:
+ print("Please open %s in a browser." % server.url)
+
+ print("Hit CTRL+c to stop server.")
+ server.run()
+
+
+@Command(
+ "build-backend",
+ category="build",
+ description="Generate a backend used to build the tree.",
+ virtualenv_name="build",
+)
+@CommandArgument("-d", "--diff", action="store_true", help="Show a diff of changes.")
+# It would be nice to filter the choices below based on
+# conditions, but that is for another day.
+@CommandArgument(
+ "-b",
+ "--backend",
+ nargs="+",
+ choices=sorted(backends),
+ help="Which backend to build.",
+)
+@CommandArgument("-v", "--verbose", action="store_true", help="Verbose output.")
+@CommandArgument(
+ "-n",
+ "--dry-run",
+ action="store_true",
+ help="Do everything except writing files out.",
+)
+def build_backend(command_context, backend, diff=False, verbose=False, dry_run=False):
+ python = command_context.virtualenv_manager.python_path
+ config_status = os.path.join(command_context.topobjdir, "config.status")
+
+ if not os.path.exists(config_status):
+ print(
+ "config.status not found. Please run |mach configure| "
+ "or |mach build| prior to building the %s build backend." % backend
+ )
+ return 1
+
+ args = [python, config_status]
+ if backend:
+ args.append("--backend")
+ args.extend(backend)
+ if diff:
+ args.append("--diff")
+ if verbose:
+ args.append("--verbose")
+ if dry_run:
+ args.append("--dry-run")
+
+ return command_context._run_command_in_objdir(
+ args=args, pass_thru=True, ensure_exit_code=False
+ )
diff --git a/python/mozbuild/mozbuild/chunkify.py b/python/mozbuild/mozbuild/chunkify.py
new file mode 100644
index 0000000000..b2c1057450
--- /dev/null
+++ b/python/mozbuild/mozbuild/chunkify.py
@@ -0,0 +1,56 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This file is a direct clone of
+# https://github.com/bhearsum/chunkify/blob/master/chunkify/__init__.py
+# of version 1.2. Its license (MPL2) is contained in repo root LICENSE file.
+# Please make modifications there where possible.
+
+from itertools import islice
+
+
+class ChunkingError(Exception):
+ pass
+
+
+def split_evenly(n, chunks):
+ """Split an integer into evenly distributed list
+
+ >>> split_evenly(7, 3)
+ [3, 2, 2]
+
+ >>> split_evenly(12, 3)
+ [4, 4, 4]
+
+ >>> split_evenly(35, 10)
+ [4, 4, 4, 4, 4, 3, 3, 3, 3, 3]
+
+ >>> split_evenly(1, 2)
+ Traceback (most recent call last):
+ ...
+ ChunkingError: Number of chunks is greater than number
+
+ """
+ if n < chunks:
+ raise ChunkingError("Number of chunks is greater than number")
+ if n % chunks == 0:
+ # Either we can evenly split or only 1 chunk left
+ return [n // chunks] * chunks
+ # otherwise the current chunk should be a bit larger
+ max_size = n // chunks + 1
+ return [max_size] + split_evenly(n - max_size, chunks - 1)
+
+
+def chunkify(things, this_chunk, chunks):
+ if this_chunk > chunks:
+ raise ChunkingError("this_chunk is greater than total chunks")
+
+ dist = split_evenly(len(things), chunks)
+ start = sum(dist[: this_chunk - 1])
+ end = start + dist[this_chunk - 1]
+
+ try:
+ return things[start:end]
+ except TypeError:
+ return islice(things, start, end)
diff --git a/python/mozbuild/mozbuild/code_analysis/__init__.py b/python/mozbuild/mozbuild/code_analysis/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/code_analysis/__init__.py
diff --git a/python/mozbuild/mozbuild/code_analysis/mach_commands.py b/python/mozbuild/mozbuild/code_analysis/mach_commands.py
new file mode 100644
index 0000000000..ad6c352021
--- /dev/null
+++ b/python/mozbuild/mozbuild/code_analysis/mach_commands.py
@@ -0,0 +1,1976 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import concurrent.futures
+import json
+import logging
+import multiprocessing
+import ntpath
+import os
+import pathlib
+import posixpath
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+import xml.etree.ElementTree as ET
+from types import SimpleNamespace
+
+import mozpack.path as mozpath
+import six
+import yaml
+from mach.decorators import Command, CommandArgument, SubCommand
+from mach.main import Mach
+from mozversioncontrol import get_repository_object
+from six.moves import input
+
+from mozbuild import build_commands
+from mozbuild.controller.clobber import Clobberer
+from mozbuild.nodeutil import find_node_executable
+from mozbuild.util import memoize
+
+
+# Function used to run clang-format on a batch of files. It is a helper function
+# in order to integrate into the futures ecosystem clang-format.
+def run_one_clang_format_batch(args):
+ try:
+ subprocess.check_output(args)
+ except subprocess.CalledProcessError as e:
+ return e
+
+
+def build_repo_relative_path(abs_path, repo_path):
+ """Build path relative to repository root"""
+
+ if os.path.islink(abs_path):
+ abs_path = mozpath.realpath(abs_path)
+
+ return mozpath.relpath(abs_path, repo_path)
+
+
+def prompt_bool(prompt, limit=5):
+ """Prompts the user with prompt and requires a boolean value."""
+ from distutils.util import strtobool
+
+ for _ in range(limit):
+ try:
+ return strtobool(input(prompt + "[Y/N]\n"))
+ except ValueError:
+ print(
+ "ERROR! Please enter a valid option! Please use any of the following:"
+ " Y, N, True, False, 1, 0"
+ )
+ return False
+
+
+class StaticAnalysisSubCommand(SubCommand):
+ def __call__(self, func):
+ after = SubCommand.__call__(self, func)
+ args = [
+ CommandArgument(
+ "--verbose", "-v", action="store_true", help="Print verbose output."
+ )
+ ]
+ for arg in args:
+ after = arg(after)
+ return after
+
+
+class StaticAnalysisMonitor(object):
+ def __init__(self, srcdir, objdir, checks, total):
+ self._total = total
+ self._processed = 0
+ self._current = None
+ self._srcdir = srcdir
+
+ import copy
+
+ self._checks = copy.deepcopy(checks)
+
+ # Transform the configuration to support Regex
+ for item in self._checks:
+ if item["name"] == "-*":
+ continue
+ item["name"] = item["name"].replace("*", ".*")
+
+ from mozbuild.compilation.warnings import WarningsCollector, WarningsDatabase
+
+ self._warnings_database = WarningsDatabase()
+
+ def on_warning(warning):
+
+ # Output paths relative to repository root if the paths are under repo tree
+ warning["filename"] = build_repo_relative_path(
+ warning["filename"], self._srcdir
+ )
+
+ self._warnings_database.insert(warning)
+
+ self._warnings_collector = WarningsCollector(on_warning, objdir=objdir)
+
+ @property
+ def num_files(self):
+ return self._total
+
+ @property
+ def num_files_processed(self):
+ return self._processed
+
+ @property
+ def current_file(self):
+ return self._current
+
+ @property
+ def warnings_db(self):
+ return self._warnings_database
+
+ def on_line(self, line):
+ warning = None
+
+ try:
+ warning = self._warnings_collector.process_line(line)
+ except Exception:
+ pass
+
+ if line.find("clang-tidy") != -1:
+ filename = line.split(" ")[-1]
+ if os.path.isfile(filename):
+ self._current = build_repo_relative_path(filename, self._srcdir)
+ else:
+ self._current = None
+ self._processed = self._processed + 1
+ return (warning, False)
+ if warning is not None:
+
+ def get_check_config(checker_name):
+ # get the matcher from self._checks that is the 'name' field
+ for item in self._checks:
+ if item["name"] == checker_name:
+ return item
+
+ # We are using a regex in order to also match 'mozilla-.* like checkers'
+ matcher = re.match(item["name"], checker_name)
+ if matcher is not None and matcher.group(0) == checker_name:
+ return item
+
+ check_config = get_check_config(warning["flag"])
+ if check_config is not None:
+ warning["reliability"] = check_config.get("reliability", "low")
+ warning["reason"] = check_config.get("reason")
+ warning["publish"] = check_config.get("publish", True)
+ elif warning["flag"] == "clang-diagnostic-error":
+ # For a "warning" that is flagged as "clang-diagnostic-error"
+ # set it as "publish"
+ warning["publish"] = True
+
+ return (warning, True)
+
+
+# Utilities for running C++ static analysis checks and format.
+
+# List of file extension to consider (should start with dot)
+_format_include_extensions = (".cpp", ".c", ".cc", ".h", ".m", ".mm")
+# File containing all paths to exclude from formatting
+_format_ignore_file = ".clang-format-ignore"
+
+# (TOOLS) Function return codes
+TOOLS_SUCCESS = 0
+TOOLS_FAILED_DOWNLOAD = 1
+TOOLS_UNSUPORTED_PLATFORM = 2
+TOOLS_CHECKER_NO_TEST_FILE = 3
+TOOLS_CHECKER_RETURNED_NO_ISSUES = 4
+TOOLS_CHECKER_RESULT_FILE_NOT_FOUND = 5
+TOOLS_CHECKER_DIFF_FAILED = 6
+TOOLS_CHECKER_NOT_FOUND = 7
+TOOLS_CHECKER_FAILED_FILE = 8
+TOOLS_CHECKER_LIST_EMPTY = 9
+TOOLS_GRADLE_FAILED = 10
+
+
+@Command(
+ "clang-tidy",
+ category="devenv",
+ description="Convenience alias for the static-analysis command",
+)
+def clang_tidy(command_context):
+ # If no arguments are provided, just print a help message.
+ """Detailed documentation:
+ https://firefox-source-docs.mozilla.org/code-quality/static-analysis/index.html
+ """
+ mach = Mach(os.getcwd())
+
+ def populate_context(key=None):
+ if key == "topdir":
+ return command_context.topsrcdir
+
+ mach.populate_context_handler = populate_context
+ mach.run(["static-analysis", "--help"])
+
+
+@Command(
+ "static-analysis",
+ category="devenv",
+ description="Run C++ static analysis checks using clang-tidy",
+)
+def static_analysis(command_context):
+ # If no arguments are provided, just print a help message.
+ """Detailed documentation:
+ https://firefox-source-docs.mozilla.org/code-quality/static-analysis/index.html
+ """
+ mach = Mach(os.getcwd())
+
+ def populate_context(key=None):
+ if key == "topdir":
+ return command_context.topsrcdir
+
+ mach.populate_context_handler = populate_context
+ mach.run(["static-analysis", "--help"])
+
+
+@StaticAnalysisSubCommand(
+ "static-analysis", "check", "Run the checks using the helper tool"
+)
+@CommandArgument(
+ "source",
+ nargs="*",
+ default=[".*"],
+ help="Source files to be analyzed (regex on path). "
+ "Can be omitted, in which case the entire code base "
+ "is analyzed. The source argument is ignored if "
+ "there is anything fed through stdin, in which case "
+ "the analysis is only performed on the files changed "
+ "in the patch streamed through stdin. This is called "
+ "the diff mode.",
+)
+@CommandArgument(
+ "--checks",
+ "-c",
+ default="-*",
+ metavar="checks",
+ help="Static analysis checks to enable. By default, this enables only "
+ "checks that are published here: https://mzl.la/2DRHeTh, but can be any "
+ "clang-tidy checks syntax.",
+)
+@CommandArgument(
+ "--jobs",
+ "-j",
+ default="0",
+ metavar="jobs",
+ type=int,
+ help="Number of concurrent jobs to run. Default is the number of CPUs.",
+)
+@CommandArgument(
+ "--strip",
+ "-p",
+ default="1",
+ metavar="NUM",
+ help="Strip NUM leading components from file names in diff mode.",
+)
+@CommandArgument(
+ "--fix",
+ "-f",
+ default=False,
+ action="store_true",
+ help="Try to autofix errors detected by clang-tidy checkers.",
+)
+@CommandArgument(
+ "--header-filter",
+ "-h-f",
+ default="",
+ metavar="header_filter",
+ help="Regular expression matching the names of the headers to "
+ "output diagnostics from. Diagnostics from the main file "
+ "of each translation unit are always displayed",
+)
+@CommandArgument(
+ "--output", "-o", default=None, help="Write clang-tidy output in a file"
+)
+@CommandArgument(
+ "--format",
+ default="text",
+ choices=("text", "json"),
+ help="Output format to write in a file",
+)
+@CommandArgument(
+ "--outgoing",
+ default=False,
+ action="store_true",
+ help="Run static analysis checks on outgoing files from mercurial repository",
+)
+def check(
+ command_context,
+ source=None,
+ jobs=2,
+ strip=1,
+ verbose=False,
+ checks="-*",
+ fix=False,
+ header_filter="",
+ output=None,
+ format="text",
+ outgoing=False,
+):
+ from mozbuild.controller.building import (
+ StaticAnalysisFooter,
+ StaticAnalysisOutputManager,
+ )
+
+ command_context._set_log_level(verbose)
+ command_context.activate_virtualenv()
+ command_context.log_manager.enable_unstructured()
+
+ rc, clang_paths = get_clang_tools(command_context, verbose=verbose)
+ if rc != 0:
+ return rc
+
+ if not _is_version_eligible(command_context, clang_paths):
+ return 1
+
+ rc, _compile_db, compilation_commands_path = _build_compile_db(
+ command_context, verbose=verbose
+ )
+ rc = rc or _build_export(command_context, jobs=jobs, verbose=verbose)
+ if rc != 0:
+ return rc
+
+ # Use outgoing files instead of source files
+ if outgoing:
+ repo = get_repository_object(command_context.topsrcdir)
+ files = repo.get_outgoing_files()
+ source = get_abspath_files(command_context, files)
+
+ # Split in several chunks to avoid hitting Python's limit of 100 groups in re
+ compile_db = json.loads(open(_compile_db, "r").read())
+ total = 0
+ import re
+
+ chunk_size = 50
+ for offset in range(0, len(source), chunk_size):
+ source_chunks = [
+ re.escape(f) for f in source[offset : offset + chunk_size].copy()
+ ]
+ name_re = re.compile("(" + ")|(".join(source_chunks) + ")")
+ for f in compile_db:
+ if name_re.search(f["file"]):
+ total = total + 1
+
+ # Filter source to remove excluded files
+ source = _generate_path_list(command_context, source, verbose=verbose)
+
+ if not total or not source:
+ command_context.log(
+ logging.INFO,
+ "static-analysis",
+ {},
+ "There are no files eligible for analysis. Please note that 'header' files "
+ "cannot be used for analysis since they do not consist compilation units.",
+ )
+ return 0
+
+ # Escape the files from source
+ source = [re.escape(f) for f in source]
+
+ cwd = command_context.topobjdir
+
+ monitor = StaticAnalysisMonitor(
+ command_context.topsrcdir,
+ command_context.topobjdir,
+ get_clang_tidy_config(command_context).checks_with_data,
+ total,
+ )
+
+ footer = StaticAnalysisFooter(command_context.log_manager.terminal, monitor)
+
+ with StaticAnalysisOutputManager(
+ command_context.log_manager, monitor, footer
+ ) as output_manager:
+ import math
+
+ batch_size = int(math.ceil(float(len(source)) / multiprocessing.cpu_count()))
+ for i in range(0, len(source), batch_size):
+ args = _get_clang_tidy_command(
+ command_context,
+ clang_paths,
+ compilation_commands_path,
+ checks=checks,
+ header_filter=header_filter,
+ sources=source[i : (i + batch_size)],
+ jobs=jobs,
+ fix=fix,
+ )
+ rc = command_context.run_process(
+ args=args,
+ ensure_exit_code=False,
+ line_handler=output_manager.on_line,
+ cwd=cwd,
+ )
+
+ command_context.log(
+ logging.WARNING,
+ "warning_summary",
+ {"count": len(monitor.warnings_db)},
+ "{count} warnings present.",
+ )
+
+ # Write output file
+ if output is not None:
+ output_manager.write(output, format)
+
+ return rc
+
+
+def get_abspath_files(command_context, files):
+ return [mozpath.join(command_context.topsrcdir, f) for f in files]
+
+
+def get_files_with_commands(command_context, compile_db, source):
+ """
+ Returns an array of dictionaries having file_path with build command
+ """
+
+ compile_db = json.load(open(compile_db, "r"))
+
+ commands_list = []
+
+ for f in source:
+ # It must be a C/C++ file
+ _, ext = os.path.splitext(f)
+
+ if ext.lower() not in _format_include_extensions:
+ command_context.log(
+ logging.INFO, "static-analysis", {}, "Skipping {}".format(f)
+ )
+ continue
+ file_with_abspath = os.path.join(command_context.topsrcdir, f)
+ for f in compile_db:
+            # Found the compile command for the file we are looking for
+ if file_with_abspath == f["file"]:
+ commands_list.append(f)
+
+ return commands_list
+
+
+@memoize
+def get_clang_tidy_config(command_context):
+ from mozbuild.code_analysis.utils import ClangTidyConfig
+
+ return ClangTidyConfig(command_context.topsrcdir)
+
+
+def _get_required_version(command_context):
+ version = get_clang_tidy_config(command_context).version
+ if version is None:
+ command_context.log(
+ logging.ERROR,
+ "static-analysis",
+ {},
+ "ERROR: Unable to find 'package_version' in config.yml",
+ )
+ return version
+
+
+def _get_current_version(command_context, clang_paths):
+ # Because the fact that we ship together clang-tidy and clang-format
+ # we are sure that these two will always share the same version.
+ # Thus in order to determine that the version is compatible we only
+ # need to check one of them, going with clang-format
+ cmd = [clang_paths._clang_format_path, "--version"]
+ version_info = None
+ try:
+ version_info = (
+ subprocess.check_output(cmd, stderr=subprocess.STDOUT)
+ .decode("utf-8")
+ .strip()
+ )
+
+ if "MOZ_AUTOMATION" in os.environ:
+ # Only show it in the CI
+ command_context.log(
+ logging.INFO,
+ "static-analysis",
+ {},
+ "{} Version = {} ".format(clang_paths._clang_format_path, version_info),
+ )
+
+ except subprocess.CalledProcessError as e:
+ command_context.log(
+ logging.ERROR,
+ "static-analysis",
+ {},
+ "Error determining the version clang-tidy/format binary, please see the "
+ "attached exception: \n{}".format(e.output),
+ )
+ return version_info
+
+
+def _is_version_eligible(command_context, clang_paths, log_error=True):
+ version = _get_required_version(command_context)
+ if version is None:
+ return False
+
+ current_version = _get_current_version(command_context, clang_paths)
+ if current_version is None:
+ return False
+ version = "clang-format version " + version
+ if version in current_version:
+ return True
+
+ if log_error:
+ command_context.log(
+ logging.ERROR,
+ "static-analysis",
+ {},
+ "ERROR: You're using an old or incorrect version ({}) of clang-format binary. "
+ "Please update to a more recent one (at least > {}) "
+ "by running: './mach bootstrap' ".format(
+ _get_current_version(command_context, clang_paths),
+ _get_required_version(command_context),
+ ),
+ )
+
+ return False
+
+
+def _get_clang_tidy_command(
+ command_context,
+ clang_paths,
+ compilation_commands_path,
+ checks,
+ header_filter,
+ sources,
+ jobs,
+ fix,
+):
+
+ if checks == "-*":
+ checks = ",".join(get_clang_tidy_config(command_context).checks)
+
+ common_args = [
+ "-clang-tidy-binary",
+ clang_paths._clang_tidy_path,
+ "-clang-apply-replacements-binary",
+ clang_paths._clang_apply_replacements,
+ "-checks=%s" % checks,
+ "-extra-arg=-DMOZ_CLANG_PLUGIN",
+ ]
+
+ # Flag header-filter is passed in order to limit the diagnostic messages only
+ # to the specified header files. When no value is specified the default value
+ # is considered to be the source in order to limit the diagnostic message to
+ # the source files or folders.
+ common_args += [
+ "-header-filter=%s"
+ % (header_filter if len(header_filter) else "|".join(sources))
+ ]
+
+ # From our configuration file, config.yaml, we build the configuration list, for
+ # the checkers that are used. These configuration options are used to better fit
+ # the checkers to our code.
+ cfg = get_clang_tidy_config(command_context).checks_config
+ if cfg:
+ common_args += ["-config=%s" % yaml.dump(cfg)]
+
+ if fix:
+ common_args += ["-fix"]
+
+ return (
+ [
+ command_context.virtualenv_manager.python_path,
+ clang_paths._run_clang_tidy_path,
+ "-j",
+ str(jobs),
+ "-p",
+ compilation_commands_path,
+ ]
+ + common_args
+ + sources
+ )
+
+
+@StaticAnalysisSubCommand(
+ "static-analysis",
+ "autotest",
+ "Run the auto-test suite in order to determine that"
+ " the analysis did not regress.",
+)
+@CommandArgument(
+ "--dump-results",
+ "-d",
+ default=False,
+ action="store_true",
+ help="Generate the baseline for the regression test. Based on"
+ " this baseline we will test future results.",
+)
+@CommandArgument(
+ "--intree-tool",
+ "-i",
+ default=False,
+ action="store_true",
+ help="Use a pre-aquired in-tree clang-tidy package from the automation env."
+ " This option is only valid on automation environments.",
+)
+@CommandArgument(
+ "checker_names",
+ nargs="*",
+ default=[],
+ help="Checkers that are going to be auto-tested.",
+)
+def autotest(
+ command_context,
+ verbose=False,
+ dump_results=False,
+ intree_tool=False,
+ checker_names=[],
+):
+    # If 'dump_results' is True then we just want to generate the issues files for each
+    # checker in particular and thus 'force_download' becomes 'False' since we want to
+ # do this on a local trusted clang-tidy package.
+ command_context._set_log_level(verbose)
+ command_context.activate_virtualenv()
+ dump_results = dump_results
+
+ force_download = not dump_results
+
+ # Configure the tree or download clang-tidy package, depending on the option that we choose
+ if intree_tool:
+ clang_paths = SimpleNamespace()
+ if "MOZ_AUTOMATION" not in os.environ:
+ command_context.log(
+ logging.INFO,
+ "static-analysis",
+ {},
+ "The `autotest` with `--intree-tool` can only be ran in automation.",
+ )
+ return 1
+ if "MOZ_FETCHES_DIR" not in os.environ:
+ command_context.log(
+ logging.INFO,
+ "static-analysis",
+ {},
+ "`MOZ_FETCHES_DIR` is missing from the environment variables.",
+ )
+ return 1
+
+ _, config, _ = _get_config_environment(command_context)
+ clang_tools_path = os.environ["MOZ_FETCHES_DIR"]
+ clang_paths._clang_tidy_path = mozpath.join(
+ clang_tools_path,
+ "clang-tidy",
+ "bin",
+ "clang-tidy" + config.substs.get("HOST_BIN_SUFFIX", ""),
+ )
+ clang_paths._clang_format_path = mozpath.join(
+ clang_tools_path,
+ "clang-tidy",
+ "bin",
+ "clang-format" + config.substs.get("HOST_BIN_SUFFIX", ""),
+ )
+ clang_paths._clang_apply_replacements = mozpath.join(
+ clang_tools_path,
+ "clang-tidy",
+ "bin",
+ "clang-apply-replacements" + config.substs.get("HOST_BIN_SUFFIX", ""),
+ )
+ clang_paths._run_clang_tidy_path = mozpath.join(
+ clang_tools_path, "clang-tidy", "bin", "run-clang-tidy"
+ )
+ clang_paths._clang_format_diff = mozpath.join(
+ clang_tools_path, "clang-tidy", "share", "clang", "clang-format-diff.py"
+ )
+
+ # Ensure that clang-tidy is present
+ rc = not os.path.exists(clang_paths._clang_tidy_path)
+ else:
+ rc, clang_paths = get_clang_tools(
+ command_context, force=force_download, verbose=verbose
+ )
+
+ if rc != 0:
+ command_context.log(
+ logging.ERROR,
+ "ERROR: static-analysis",
+ {},
+ "ERROR: clang-tidy unable to locate package.",
+ )
+ return TOOLS_FAILED_DOWNLOAD
+
+ clang_paths._clang_tidy_base_path = mozpath.join(
+ command_context.topsrcdir, "tools", "clang-tidy"
+ )
+
+ # For each checker run it
+ platform, _ = command_context.platform
+
+ if platform not in get_clang_tidy_config(command_context).platforms:
+ command_context.log(
+ logging.ERROR,
+ "static-analysis",
+ {},
+ "ERROR: RUNNING: clang-tidy autotest for platform {} not supported.".format(
+ platform
+ ),
+ )
+ return TOOLS_UNSUPORTED_PLATFORM
+
+ max_workers = multiprocessing.cpu_count()
+
+ command_context.log(
+ logging.INFO,
+ "static-analysis",
+ {},
+ "RUNNING: clang-tidy autotest for platform {0} with {1} workers.".format(
+ platform, max_workers
+ ),
+ )
+
+ # List all available checkers
+ cmd = [clang_paths._clang_tidy_path, "-list-checks", "-checks=*"]
+ clang_output = subprocess.check_output(cmd, stderr=subprocess.STDOUT).decode(
+ "utf-8"
+ )
+ available_checks = clang_output.split("\n")[1:]
+ clang_tidy_checks = [c.strip() for c in available_checks if c]
+
+ # Build the dummy compile_commands.json
+ compilation_commands_path = _create_temp_compilation_db(command_context)
+ checkers_test_batch = []
+ checkers_results = []
+ with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
+ futures = []
+ for item in get_clang_tidy_config(command_context).checks_with_data:
+ # Skip if any of the following statements is true:
+ # 1. Checker attribute 'publish' is False.
+ not_published = not bool(item.get("publish", True))
+ # 2. Checker has restricted-platforms and current platform is not of them.
+ ignored_platform = (
+ "restricted-platforms" in item
+ and platform not in item["restricted-platforms"]
+ )
+ # 3. Checker name is mozilla-* or -*.
+ ignored_checker = item["name"] in ["mozilla-*", "-*"]
+ # 4. List checker_names is passed and the current checker is not part of the
+ # list or 'publish' is False
+ checker_not_in_list = checker_names and (
+ item["name"] not in checker_names or not_published
+ )
+ if (
+ not_published
+ or ignored_platform
+ or ignored_checker
+ or checker_not_in_list
+ ):
+ continue
+ checkers_test_batch.append(item["name"])
+ futures.append(
+ executor.submit(
+ _verify_checker,
+ command_context,
+ clang_paths,
+ compilation_commands_path,
+ dump_results,
+ clang_tidy_checks,
+ item,
+ checkers_results,
+ )
+ )
+
+ error_code = TOOLS_SUCCESS
+ for future in concurrent.futures.as_completed(futures):
+ # Wait for every task to finish
+ ret_val = future.result()
+ if ret_val != TOOLS_SUCCESS:
+ # We are interested only in one error and we don't break
+ # the execution of for loop since we want to make sure that all
+ # tasks finished.
+ error_code = ret_val
+
+ if error_code != TOOLS_SUCCESS:
+
+ command_context.log(
+ logging.INFO,
+ "static-analysis",
+ {},
+ "FAIL: the following clang-tidy check(s) failed:",
+ )
+ for failure in checkers_results:
+ checker_error = failure["checker-error"]
+ checker_name = failure["checker-name"]
+ info1 = failure["info1"]
+ info2 = failure["info2"]
+ info3 = failure["info3"]
+
+ message_to_log = ""
+ if checker_error == TOOLS_CHECKER_NOT_FOUND:
+ message_to_log = (
+ "\tChecker "
+ "{} not present in this clang-tidy version.".format(
+ checker_name
+ )
+ )
+ elif checker_error == TOOLS_CHECKER_NO_TEST_FILE:
+ message_to_log = (
+ "\tChecker "
+ "{0} does not have a test file - {0}.cpp".format(checker_name)
+ )
+ elif checker_error == TOOLS_CHECKER_RETURNED_NO_ISSUES:
+ message_to_log = (
+ "\tChecker {0} did not find any issues in its test file, "
+ "clang-tidy output for the run is:\n{1}"
+ ).format(checker_name, info1)
+ elif checker_error == TOOLS_CHECKER_RESULT_FILE_NOT_FOUND:
+ message_to_log = (
+ "\tChecker {0} does not have a result file - {0}.json"
+ ).format(checker_name)
+ elif checker_error == TOOLS_CHECKER_DIFF_FAILED:
+ message_to_log = (
+ "\tChecker {0}\nExpected: {1}\n"
+ "Got: {2}\n"
+ "clang-tidy output for the run is:\n"
+ "{3}"
+ ).format(checker_name, info1, info2, info3)
+
+ print("\n" + message_to_log)
+
+ # Also delete the tmp folder
+ shutil.rmtree(compilation_commands_path)
+ return error_code
+
+ # Run the analysis on all checkers at the same time only if we don't dump results.
+ if not dump_results:
+ ret_val = _run_analysis_batch(
+ command_context,
+ clang_paths,
+ compilation_commands_path,
+ checkers_test_batch,
+ )
+ if ret_val != TOOLS_SUCCESS:
+ shutil.rmtree(compilation_commands_path)
+ return ret_val
+
+ command_context.log(
+ logging.INFO, "static-analysis", {}, "SUCCESS: clang-tidy all tests passed."
+ )
+ # Also delete the tmp folder
+ shutil.rmtree(compilation_commands_path)
+
+
def _run_analysis(
    command_context,
    clang_paths,
    compilation_commands_path,
    checks,
    header_filter,
    sources,
    jobs=1,
    fix=False,
    print_out=False,
):
    """Run clang-tidy over *sources* and parse its diagnostics.

    Returns a ``(issues, clang_output)`` tuple: the structured issue list
    produced by ``_parse_issues`` and the raw decoded tool output. On a
    failed run ``issues`` is ``None``.

    ``print_out`` is accepted for interface compatibility but is currently
    unused by this function.
    """
    cmd = _get_clang_tidy_command(
        command_context,
        clang_paths,
        compilation_commands_path,
        checks=checks,
        header_filter=header_filter,
        sources=sources,
        jobs=jobs,
        fix=fix,
    )

    try:
        clang_output = subprocess.check_output(cmd, stderr=subprocess.STDOUT).decode(
            "utf-8"
        )
    except subprocess.CalledProcessError as e:
        print(e.output)
        # Bug fix: every caller unpacks two values
        # (`issues, clang_output = _run_analysis(...)`), so returning a bare
        # None raised TypeError before the `issues is None` check could run.
        return None, e.output
    return _parse_issues(command_context, clang_output), clang_output
+
+
def _run_analysis_batch(command_context, clang_paths, compilation_commands_path, items):
    """Run clang-tidy once over all checkers' test files in a single batch.

    *items* is the list of checker names. Every <checker>.cpp under
    tools/clang-tidy/test is analyzed with exactly those checks enabled and
    the produced issues are verified to contain each checker's .json
    baseline entries. Returns ``TOOLS_SUCCESS`` or a ``TOOLS_*`` error code.
    """
    command_context.log(
        logging.INFO,
        "static-analysis",
        {},
        "RUNNING: clang-tidy checker batch analysis.",
    )
    # Idiom fix: test emptiness directly instead of `not len(items)`.
    if not items:
        command_context.log(
            logging.ERROR,
            "static-analysis",
            {},
            "ERROR: clang-tidy checker list is empty!",
        )
        return TOOLS_CHECKER_LIST_EMPTY

    issues, clang_output = _run_analysis(
        command_context,
        clang_paths,
        compilation_commands_path,
        checks="-*," + ",".join(items),
        header_filter="",
        sources=[
            mozpath.join(clang_paths._clang_tidy_base_path, "test", checker) + ".cpp"
            for checker in items
        ],
        print_out=True,
    )

    if issues is None:
        return TOOLS_CHECKER_FAILED_FILE

    failed_checks = []
    failed_checks_baseline = []
    for checker in items:
        test_file_path_json = (
            mozpath.join(clang_paths._clang_tidy_base_path, "test", checker) + ".json"
        )
        # Read the pre-determined issues
        baseline_issues = _get_autotest_stored_issues(test_file_path_json)

        # We also stored the 'reliability' index so strip that from the baseline_issues
        baseline_issues[:] = [
            item for item in baseline_issues if "reliability" not in item
        ]

        # Idiom fix: pass a generator to all() instead of materializing a list.
        found = all(element_base in issues for element_base in baseline_issues)

        if not found:
            failed_checks.append(checker)
            failed_checks_baseline.append(baseline_issues)

    if len(failed_checks) > 0:
        command_context.log(
            logging.ERROR,
            "static-analysis",
            {},
            "ERROR: The following check(s) failed for bulk analysis: "
            + " ".join(failed_checks),
        )

        for failed_check, baseline_issue in zip(failed_checks, failed_checks_baseline):
            print(
                "\tChecker {0} expect following results: \n\t\t{1}".format(
                    failed_check, baseline_issue
                )
            )

        print(
            "This is the output generated by clang-tidy for the bulk build:\n{}".format(
                clang_output
            )
        )
        return TOOLS_CHECKER_DIFF_FAILED

    return TOOLS_SUCCESS
+
+
def _create_temp_compilation_db(command_context):
    """Create a temporary dummy compile_commands.json for the checker tests.

    One compilation entry is generated per configured checker, pointing at
    its tools/clang-tidy/test/<checker>.cpp file. Returns the path of the
    temporary directory holding the database; the caller is responsible for
    deleting it.
    """
    directory = tempfile.mkdtemp(prefix="cc")
    with open(mozpath.join(directory, "compile_commands.json"), "w") as file_handler:
        compile_commands = []
        test_dir = mozpath.join(
            command_context.topsrcdir, "tools", "clang-tidy", "test"
        )
        for check in get_clang_tidy_config(command_context).checks:
            # The meta entries have no dedicated test file.
            if check in ["-*", "mozilla-*"]:
                continue
            # Renamed from `file`, which shadowed the builtin.
            source_name = check + ".cpp"
            compile_commands.append(
                {
                    "directory": test_dir,
                    "command": "cpp -std=c++17 " + source_name,
                    "file": mozpath.join(test_dir, source_name),
                }
            )

        json.dump(compile_commands, file_handler)
        file_handler.flush()

    return directory
+
+
@StaticAnalysisSubCommand(
    "static-analysis", "install", "Install the static analysis helper tool"
)
@CommandArgument(
    "source",
    nargs="?",
    type=str,
    help="Where to fetch a local archive containing the static-analysis and "
    "format helper tool."
    "It will be installed in ~/.mozbuild/clang-tools."
    "Can be omitted, in which case the latest clang-tools "
    "helper for the platform would be automatically detected and installed.",
)
@CommandArgument(
    "--skip-cache",
    action="store_true",
    help="Skip all local caches to force re-fetching the helper tool.",
    default=False,
)
@CommandArgument(
    "--force",
    action="store_true",
    help="Force re-install even though the tool exists in mozbuild.",
    default=False,
)
def install(
    command_context,
    source=None,
    skip_cache=False,
    force=False,
    verbose=False,
):
    """Install (or re-install) the clang-tools helper package."""
    command_context._set_log_level(verbose)
    status, _ = get_clang_tools(
        command_context,
        force=force,
        skip_cache=skip_cache,
        source=source,
        verbose=verbose,
    )
    return status
+
+
@StaticAnalysisSubCommand(
    "static-analysis",
    "clear-cache",
    "Delete local helpers and reset static analysis helper tool cache",
)
def clear_cache(command_context, verbose=False):
    """Force a fresh download of the helpers, then clear the artifact cache."""
    command_context._set_log_level(verbose)
    status, _ = get_clang_tools(
        command_context,
        force=True,
        download_if_needed=True,
        skip_cache=True,
        verbose=verbose,
    )

    if status:
        return status

    from mozbuild.artifact_commands import artifact_clear_cache

    return artifact_clear_cache(command_context)
+
+
@StaticAnalysisSubCommand(
    "static-analysis",
    "print-checks",
    "Print a list of the static analysis checks performed by default",
)
def print_checks(command_context, verbose=False):
    """Ask clang-tidy to list the checks enabled by our configuration."""
    command_context._set_log_level(verbose)
    status, clang_paths = get_clang_tools(command_context, verbose=verbose)

    if status:
        return status

    enabled_checks = get_clang_tidy_config(command_context).checks
    return command_context.run_process(
        args=[
            clang_paths._clang_tidy_path,
            "-list-checks",
            "-checks=%s" % enabled_checks,
        ],
        pass_thru=True,
    )
+
+
@Command(
    "prettier-format",
    category="misc",
    description="Run prettier on current changes",
)
@CommandArgument(
    "--path",
    "-p",
    nargs=1,
    required=True,
    help="Specify the path to reformat to stdout.",
)
@CommandArgument(
    "--assume-filename",
    "-a",
    nargs=1,
    required=True,
    help="This option is usually used in the context of hg-formatsource."
    "When reading from stdin, Prettier assumes this "
    "filename to decide which style and parser to use.",
)
def prettier_format(command_context, path, assume_filename):
    """Pipe a single file through prettier, emitting the result on stdout."""
    # With assume_filename we want to have stdout clean since the result of the
    # format will be redirected to stdout.

    node_binary, _ = find_node_executable()
    prettier_script = os.path.join(
        command_context.topsrcdir, "node_modules", "prettier", "bin-prettier.js"
    )
    target_file = os.path.join(command_context.topsrcdir, path[0])

    # Bug 1564824. Prettier fails on patches with moved files where the
    # original directory also does not exist.
    assume_dir = os.path.dirname(
        os.path.join(command_context.topsrcdir, assume_filename[0])
    )
    stdin_filepath = assume_filename[0] if os.path.isdir(assume_dir) else target_file

    # We use --stdin-filepath in order to better determine the path for
    # the prettier formatter when it is ran outside of the repo, for example
    # by the extension hg-formatsource.
    prettier_cmd = [node_binary, prettier_script, "--stdin-filepath", stdin_filepath]

    proc = subprocess.Popen(prettier_cmd, stdin=subprocess.PIPE)
    with open(target_file, "rb") as source:
        proc.stdin.write(source.read())
    proc.stdin.close()
    proc.wait()
    return proc.returncode
+
+
@Command(
    "clang-format",
    category="misc",
    description="Run clang-format on current changes",
)
@CommandArgument(
    "--show",
    "-s",
    action="store_const",
    const="stdout",
    dest="output_path",
    help="Show diff output on stdout instead of applying changes",
)
@CommandArgument(
    "--assume-filename",
    "-a",
    nargs=1,
    default=None,
    help="This option is usually used in the context of hg-formatsource."
    "When reading from stdin, clang-format assumes this "
    "filename to look for a style config file (with "
    "-style=file) and to determine the language. When "
    "specifying this option only one file should be used "
    "as an input and the output will be forwarded to stdin. "
    "This option also impairs the download of the clang-tools "
    "and assumes the package is already located in it's default "
    "location",
)
@CommandArgument(
    "--path", "-p", nargs="+", default=None, help="Specify the path(s) to reformat"
)
@CommandArgument(
    "--commit",
    "-c",
    default=None,
    help="Specify a commit to reformat from. "
    "For git you can also pass a range of commits (foo..bar) "
    "to format all of them at the same time.",
)
@CommandArgument(
    "--output",
    "-o",
    default=None,
    dest="output_path",
    help="Specify a file handle to write clang-format raw output instead of "
    "applying changes. This can be stdout or a file path.",
)
@CommandArgument(
    "--format",
    "-f",
    choices=("diff", "json"),
    default="diff",
    dest="output_format",
    help="Specify the output format used: diff is the raw patch provided by "
    "clang-format, json is a list of atomic changes to process.",
)
@CommandArgument(
    "--outgoing",
    default=False,
    action="store_true",
    help="Run clang-format on outgoing files from mercurial repository.",
)
def clang_format(
    command_context,
    assume_filename,
    path,
    commit,
    output_path=None,
    output_format="diff",
    verbose=False,
    outgoing=False,
):
    """Run clang-format over local changes, specific paths, or a commit.

    Dispatches to one of three helpers depending on the arguments:
    ``_run_clang_format_diff`` (no explicit paths: format the VCS diff),
    ``_run_clang_format_in_console`` (``assume_filename``: stdin/stdout
    formatting for hg-formatsource), or ``_run_clang_format_path``
    (explicit paths/directories). Returns the helper's exit code.
    """
    # Run clang-format or clang-format-diff on the local changes
    # or files/directories
    if path is None and outgoing:
        repo = get_repository_object(command_context.topsrcdir)
        path = repo.get_outgoing_files()

    if path:
        # Create the full path list
        def path_maker(f_name):
            return os.path.join(command_context.topsrcdir, f_name)

        # Note: `path` becomes a lazy map object consumed by the helper below.
        path = map(path_maker, path)

    os.chdir(command_context.topsrcdir)

    # Load output file handle, either stdout or a file handle in write mode
    # NOTE(review): a file handle opened here is handed to the helpers and is
    # not explicitly closed in this function.
    output = None
    if output_path is not None:
        output = sys.stdout if output_path == "stdout" else open(output_path, "w")

    # With assume_filename we want to have stdout clean since the result of the
    # format will be redirected to stdout. Only in case of errror we
    # write something to stdout.
    # We don't actually want to get the clang-tools here since we want in some
    # scenarios to do this in parallel so we relay on the fact that the tools
    # have already been downloaded via './mach bootstrap' or directly via
    # './mach static-analysis install'
    if assume_filename:
        rc, clang_paths = _set_clang_tools_paths(command_context)
        if rc != 0:
            print("clang-format: Unable to set path to clang-format tools.")
            return rc

        if not _do_clang_tools_exist(clang_paths):
            print("clang-format: Unable to set locate clang-format tools.")
            return 1

        if not _is_version_eligible(command_context, clang_paths):
            return 1
    else:
        rc, clang_paths = get_clang_tools(command_context, verbose=verbose)
        if rc != 0:
            return rc

    if path is None:
        return _run_clang_format_diff(
            command_context,
            clang_paths._clang_format_diff,
            clang_paths._clang_format_path,
            commit,
            output,
        )

    if assume_filename:
        return _run_clang_format_in_console(
            command_context, clang_paths._clang_format_path, path, assume_filename
        )

    return _run_clang_format_path(
        command_context, clang_paths._clang_format_path, path, output, output_format
    )
+
+
def _verify_checker(
    command_context,
    clang_paths,
    compilation_commands_path,
    dump_results,
    clang_tidy_checks,
    item,
    checkers_results,
):
    """Run one clang-tidy checker against its dedicated test file.

    Called from `autotest` worker threads. On failure a structured error
    record is appended to the shared ``checkers_results`` list supplied by
    the caller. With ``dump_results`` the found issues are written out as the
    new baseline instead of being compared against the stored one.

    Returns ``TOOLS_SUCCESS`` or a ``TOOLS_CHECKER_*`` error code.
    """
    check = item["name"]
    test_file_path = mozpath.join(clang_paths._clang_tidy_base_path, "test", check)
    test_file_path_cpp = test_file_path + ".cpp"
    test_file_path_json = test_file_path + ".json"

    command_context.log(
        logging.INFO,
        "static-analysis",
        {},
        "RUNNING: clang-tidy checker {}.".format(check),
    )

    # Structured information in case a checker fails
    checker_error = {
        "checker-name": check,
        "checker-error": "",
        "info1": "",
        "info2": "",
        "info3": "",
    }

    # Verify if this checker actually exists
    if check not in clang_tidy_checks:
        checker_error["checker-error"] = TOOLS_CHECKER_NOT_FOUND
        checkers_results.append(checker_error)
        return TOOLS_CHECKER_NOT_FOUND

    # Verify if the test file exists for this checker
    if not os.path.exists(test_file_path_cpp):
        checker_error["checker-error"] = TOOLS_CHECKER_NO_TEST_FILE
        checkers_results.append(checker_error)
        return TOOLS_CHECKER_NO_TEST_FILE

    # Run clang-tidy with only this checker enabled on its test file.
    issues, clang_output = _run_analysis(
        command_context,
        clang_paths,
        compilation_commands_path,
        checks="-*," + check,
        header_filter="",
        sources=[test_file_path_cpp],
    )
    if issues is None:
        return TOOLS_CHECKER_FAILED_FILE

    # Verify to see if we got any issues, if not raise exception
    if not issues:
        checker_error["checker-error"] = TOOLS_CHECKER_RETURNED_NO_ISSUES
        checker_error["info1"] = clang_output
        checkers_results.append(checker_error)
        return TOOLS_CHECKER_RETURNED_NO_ISSUES

    # Also store the 'reliability' index for this checker
    issues.append({"reliability": item["reliability"]})

    if dump_results:
        _build_autotest_result(test_file_path_json, json.dumps(issues))
    else:
        if not os.path.exists(test_file_path_json):
            # Result file for test not found maybe regenerate it?
            checker_error["checker-error"] = TOOLS_CHECKER_RESULT_FILE_NOT_FOUND
            checkers_results.append(checker_error)
            return TOOLS_CHECKER_RESULT_FILE_NOT_FOUND

        # Read the pre-determined issues
        baseline_issues = _get_autotest_stored_issues(test_file_path_json)

        # Compare the two lists
        if issues != baseline_issues:
            checker_error["checker-error"] = TOOLS_CHECKER_DIFF_FAILED
            checker_error["info1"] = baseline_issues
            checker_error["info2"] = issues
            checker_error["info3"] = clang_output
            checkers_results.append(checker_error)
            return TOOLS_CHECKER_DIFF_FAILED

    return TOOLS_SUCCESS
+
+
+def _build_autotest_result(file, issues):
+ with open(file, "w") as f:
+ f.write(issues)
+
+
+def _get_autotest_stored_issues(file):
+ with open(file) as f:
+ return json.load(f)
+
+
+def _parse_issues(command_context, clang_output):
+ """
+ Parse clang-tidy output into structured issues
+ """
+
+ # Limit clang output parsing to 'Enabled checks:'
+ end = re.search(r"^Enabled checks:\n", clang_output, re.MULTILINE)
+ if end is not None:
+ clang_output = clang_output[: end.start() - 1]
+
+ platform, _ = command_context.platform
+ re_strip_colors = re.compile(r"\x1b\[[\d;]+m", re.MULTILINE)
+ filtered = re_strip_colors.sub("", clang_output)
+ # Starting with clang 8, for the diagnostic messages we have multiple `LF CR`
+ # in order to be compatiable with msvc compiler format, and for this
+ # we are not interested to match the end of line.
+ regex_string = r"(.+):(\d+):(\d+): (warning|error): ([^\[\]\n]+)(?: \[([\.\w-]+)\])"
+
+ # For non 'win' based platforms we also need the 'end of the line' regex
+ if platform not in ("win64", "win32"):
+ regex_string += "?$"
+
+ regex_header = re.compile(regex_string, re.MULTILINE)
+
+ # Sort headers by positions
+ headers = sorted(regex_header.finditer(filtered), key=lambda h: h.start())
+ issues = []
+ for _, header in enumerate(headers):
+ header_group = header.groups()
+ element = [header_group[3], header_group[4], header_group[5]]
+ issues.append(element)
+ return issues
+
+
def _get_config_environment(command_context):
    """Return ``(rc, config, ran_configure)`` for the current build tree.

    When the tree is not configured yet, runs configure (prompting for a
    clobber first if one is needed) and retries fetching the config.
    """
    try:
        # Fast path: the tree is already configured.
        return (0, command_context.config_environment, False)
    except Exception:
        pass

    command_context.log(
        logging.WARNING,
        "static-analysis",
        {},
        "Looks like configure has not run yet, running it now...",
    )

    clobberer = Clobberer(command_context.topsrcdir, command_context.topobjdir)

    if clobberer.clobber_needed():
        proceed = prompt_bool(
            "Configuration has changed and Clobber is needed. "
            "Do you want to proceed?"
        )
        if not proceed:
            command_context.log(
                logging.ERROR,
                "static-analysis",
                {},
                "ERROR: Without Clobber we cannot continue execution!",
            )
            return (1, None, None)
        os.environ["AUTOCLOBBER"] = "1"

    rc = build_commands.configure(command_context)
    if rc != 0:
        return (rc, None, False)

    try:
        config = command_context.config_environment
    except Exception:
        config = None

    return (0, config, True)
+
+
def _build_compile_db(command_context, verbose=False):
    """Ensure a clang compilation database exists for the objdir.

    Returns ``(rc, compile_db_path, static_analysis_dir)``; generates the
    database via the StaticAnalysis build backend when it is missing.
    """
    commands_dir = mozpath.join(command_context.topobjdir, "static-analysis")
    compile_db = mozpath.join(commands_dir, "compile_commands.json")

    if os.path.exists(compile_db):
        return 0, compile_db, commands_dir

    rc, config, ran_configure = _get_config_environment(command_context)
    if rc != 0:
        return rc, compile_db, commands_dir

    if ran_configure:
        # Configure may have created the compilation database if the
        # mozconfig enables building the CompileDB backend by default,
        # So we recurse to see if the file exists once again.
        return _build_compile_db(command_context, verbose=verbose)

    if config:
        print(
            "Looks like a clang compilation database has not been "
            "created yet, creating it now..."
        )
        backend_rc = build_commands.build_backend(
            command_context, ["StaticAnalysis"], verbose=verbose
        )
        if backend_rc != 0:
            return backend_rc, compile_db, commands_dir
        assert os.path.exists(compile_db)
    return 0, compile_db, commands_dir
+
+
def _build_export(command_context, jobs, verbose=False):
    """Run the build steps (pre-export, export, pre-compile) needed before
    static analysis can process generated headers.

    Returns 0 on success, otherwise the first failing make return code.
    """
    # NOTE(review): a local `on_line` log handler used to be defined here but
    # was never wired into the make invocations (line_handler=None below), so
    # it has been removed as dead code.

    # First install what we can through install manifests.
    rc = command_context._run_make(
        directory=command_context.topobjdir,
        target="pre-export",
        line_handler=None,
        silent=not verbose,
    )
    if rc != 0:
        return rc

    # Then build the rest of the build dependencies by running the full
    # export target, because we can't do anything better.
    for target in ("export", "pre-compile"):
        rc = command_context._run_make(
            directory=command_context.topobjdir,
            target=target,
            line_handler=None,
            silent=not verbose,
            num_jobs=jobs,
        )
        if rc != 0:
            return rc

    return 0
+
+
def _set_clang_tools_paths(command_context):
    """Compute the expected on-disk locations of the clang-tools binaries.

    Returns ``(rc, clang_paths)`` where ``clang_paths`` is a SimpleNamespace
    holding the tool paths under ~/.mozbuild/clang-tools.
    """
    rc, config, _ = _get_config_environment(command_context)

    clang_paths = SimpleNamespace()

    if rc != 0:
        return rc, clang_paths

    host_suffix = config.substs.get("HOST_BIN_SUFFIX", "")
    tools_root = mozpath.join(command_context._mach_context.state_dir, "clang-tools")

    clang_paths._clang_tools_path = tools_root
    clang_paths._clang_tidy_path = mozpath.join(
        tools_root, "clang-tidy", "bin", "clang-tidy" + host_suffix
    )
    clang_paths._clang_format_path = mozpath.join(
        tools_root, "clang-tidy", "bin", "clang-format" + host_suffix
    )
    clang_paths._clang_apply_replacements = mozpath.join(
        tools_root, "clang-tidy", "bin", "clang-apply-replacements" + host_suffix
    )
    clang_paths._run_clang_tidy_path = mozpath.join(
        tools_root, "clang-tidy", "bin", "run-clang-tidy"
    )
    clang_paths._clang_format_diff = mozpath.join(
        tools_root, "clang-tidy", "share", "clang", "clang-format-diff.py"
    )
    return 0, clang_paths
+
+
+def _do_clang_tools_exist(clang_paths):
+ return (
+ os.path.exists(clang_paths._clang_tidy_path)
+ and os.path.exists(clang_paths._clang_format_path)
+ and os.path.exists(clang_paths._clang_apply_replacements)
+ and os.path.exists(clang_paths._run_clang_tidy_path)
+ )
+
+
def get_clang_tools(
    command_context,
    force=False,
    skip_cache=False,
    source=None,
    download_if_needed=True,
    verbose=False,
):
    """Locate — and when needed download — the clang-tools package.

    Returns ``(rc, clang_paths)``. Existing, version-eligible tools are
    reused unless ``force`` is set; otherwise the tools directory is wiped
    and repopulated from ``source`` or the bootstrap toolchain.
    """
    rc, clang_paths = _set_clang_tools_paths(command_context)

    if rc != 0:
        return rc, clang_paths

    if (
        _do_clang_tools_exist(clang_paths)
        and _is_version_eligible(command_context, clang_paths, log_error=False)
        and not force
    ):
        return 0, clang_paths

    if os.path.isdir(clang_paths._clang_tools_path) and download_if_needed:
        # The directory exists, perhaps it's corrupted? Delete it
        # and start from scratch.
        shutil.rmtree(clang_paths._clang_tools_path)
        return get_clang_tools(
            command_context,
            force=force,
            skip_cache=skip_cache,
            source=source,
            verbose=verbose,
            download_if_needed=download_if_needed,
        )

    # Create base directory where we store clang binary.
    # Bug fix: use exist_ok so an already-existing directory (reachable when
    # download_if_needed is False, since the rmtree branch above is skipped)
    # no longer raises FileExistsError.
    os.makedirs(clang_paths._clang_tools_path, exist_ok=True)

    if source:
        return _get_clang_tools_from_source(command_context, clang_paths, source)

    if not download_if_needed:
        return 0, clang_paths

    from mozbuild.bootstrap import bootstrap_toolchain

    bootstrap_toolchain("clang-tools/clang-tidy")

    return 0 if _is_version_eligible(command_context, clang_paths) else 1, clang_paths
+
+
def _get_clang_tools_from_source(command_context, clang_paths, filename):
    """Install the clang tools by extracting the local archive *filename*
    into the clang-tools state directory.

    Returns ``(0, clang_paths)`` on success; raises when the archive does
    not contain the expected layout.
    """
    from mozbuild.action.tooltool import unpack_file

    clang_tidy_path = mozpath.join(
        command_context._mach_context.state_dir, "clang-tools"
    )

    previous_cwd = os.getcwd()
    os.chdir(clang_tidy_path)
    try:
        unpack_file(filename)
    finally:
        # Bug fix: restore the working directory even when extraction fails,
        # so a failed unpack does not leave the process chdir'd elsewhere.
        os.chdir(previous_cwd)

    clang_path = mozpath.join(clang_tidy_path, "clang")

    if not os.path.isdir(clang_path):
        raise Exception("Extracted the archive but didn't find the expected output")

    assert os.path.exists(clang_paths._clang_tidy_path)
    assert os.path.exists(clang_paths._clang_format_path)
    assert os.path.exists(clang_paths._clang_apply_replacements)
    assert os.path.exists(clang_paths._run_clang_tidy_path)
    return 0, clang_paths
+
+
def _get_clang_format_diff_command(command_context, commit):
    """Build the VCS diff command whose output feeds clang-format-diff.

    Honors the formattable extensions and the third-party ignore list for
    both mercurial and git checkouts.
    """
    if command_context.repository.name == "hg":
        args = ["hg", "diff", "-U0"]
        args += ["-c", commit] if commit else ["-r", ".^"]
        for dot_extension in _format_include_extensions:
            args += ["--include", "glob:**{0}".format(dot_extension)]
        args += ["--exclude", "listfile:{0}".format(_format_ignore_file)]
        return args

    # git checkout
    commit_range = "HEAD"  # All uncommitted changes.
    if commit:
        commit_range = commit if ".." in commit else "{}~..{}".format(commit, commit)
    args = ["git", "diff", "--no-color", "-U0", commit_range, "--"]
    for dot_extension in _format_include_extensions:
        args += ["*{0}".format(dot_extension)]
    # git-diff doesn't support an 'exclude-from-files' param, but
    # allow to add individual exclude pattern since v1.9, see
    # https://git-scm.com/docs/gitglossary#gitglossary-aiddefpathspecapathspec
    with open(_format_ignore_file, "rb") as exclude_pattern_file:
        for raw_pattern in exclude_pattern_file:
            pattern = six.ensure_str(raw_pattern.rstrip())
            pattern = pattern.replace(".*", "**")
            if not pattern or pattern.startswith("#"):
                continue  # empty or comment
            magics = ["exclude"]
            if pattern.startswith("^"):
                magics += ["top"]
                pattern = pattern[1:]
            args += [":({0}){1}".format(",".join(magics), pattern)]
    return args
+
+
def _run_clang_format_diff(
    command_context, clang_format_diff, clang_format, commit, output_file
):
    """Pipe the VCS diff through clang-format-diff.

    Applies the formatting in place unless *output_file* is given, in which
    case the raw patch is printed there instead. Returns 0 on success or the
    clang-format-diff exit code on failure.
    """
    # Run clang-format on the diff
    # Note that this will potentially miss a lot things
    from subprocess import PIPE, CalledProcessError, Popen, check_output

    diff_process = Popen(
        _get_clang_format_diff_command(command_context, commit), stdout=PIPE
    )
    args = [sys.executable, clang_format_diff, "-p1", "-binary=%s" % clang_format]

    if not output_file:
        args.append("-i")
    try:
        output = check_output(args, stdin=diff_process.stdout)
        if output_file:
            # We want to print the diffs
            print(output, file=output_file)

        return 0
    except CalledProcessError as e:
        # Something wrong happened
        # (typo fix: "occured" -> "occurred" in the user-facing message)
        print("clang-format: An error occurred while running clang-format-diff.")
        return e.returncode
    finally:
        # Bug fix: close our end of the pipe and reap the diff process so it
        # does not leak a file descriptor or linger as a zombie.
        diff_process.stdout.close()
        diff_process.wait()
+
+
+def _is_ignored_path(command_context, ignored_dir_re, f):
+ # path needs to be relative to the src root
+ root_dir = command_context.topsrcdir + os.sep
+ if f.startswith(root_dir):
+ f = f[len(root_dir) :]
+ # the ignored_dir_re regex uses / on all platforms
+ return re.match(ignored_dir_re, f.replace(os.sep, "/"))
+
+
def _generate_path_list(command_context, paths, verbose=True):
    """Expand *paths* (files and/or directories) into the list of individual
    formattable files, honoring the third-party ignore list and the
    supported file extensions.
    """
    ignore_file_path = os.path.join(command_context.topsrcdir, _format_ignore_file)
    ignored_dir = []
    with open(ignore_file_path, "r") as fh:
        for line in fh:
            # Remove comments and empty lines
            if line.startswith("#") or len(line.strip()) == 0:
                continue
            # The regexp is to make sure we are managing relative paths
            ignored_dir.append(r"^[\./]*" + line.rstrip())

    # Generates the list of regexp
    ignored_dir_re = "(%s)" % "|".join(ignored_dir)
    extensions = _format_include_extensions

    path_list = []
    for candidate in paths:
        if _is_ignored_path(command_context, ignored_dir_re, candidate):
            # Early exit if we have provided an ignored directory
            if verbose:
                print(
                    "static-analysis: Ignored third party code '{0}'".format(candidate)
                )
            continue

        if not os.path.isdir(candidate):
            # Make sure that the file exists and it has a supported extension
            if os.path.isfile(candidate) and candidate.endswith(extensions):
                path_list.append(candidate)
            continue

        # Processing a directory, generate the file list
        for folder, subs, files in os.walk(candidate):
            subs.sort()
            for filename in sorted(files):
                f_in_dir = posixpath.join(pathlib.Path(folder).as_posix(), filename)
                if f_in_dir.endswith(extensions) and not _is_ignored_path(
                    command_context, ignored_dir_re, f_in_dir
                ):
                    # Supported extension and accepted path
                    path_list.append(f_in_dir)

    return path_list
+
+
def _run_clang_format_in_console(command_context, clang_format, paths, assume_filename):
    """Format the single file ``paths[0]`` via clang-format, writing the
    result to stdout (used by hg-formatsource).

    Returns clang-format's exit code, or 0 when the assumed filename is
    ignored third-party code.
    """
    path_list = _generate_path_list(command_context, assume_filename, False)

    if path_list == []:
        return 0

    # We use -assume-filename in order to better determine the path for
    # the .clang-format when it is ran outside of the repo, for example
    # by the extension hg-formatsource
    args = [clang_format, "-assume-filename={}".format(assume_filename[0])]

    process = subprocess.Popen(args, stdin=subprocess.PIPE)
    # Bug fix: the subprocess stdin pipe is binary; the file must be opened in
    # "rb" mode — writing str to it raised TypeError (compare prettier_format,
    # which already reads the source in binary mode).
    with open(paths[0], "rb") as fin:
        process.stdin.write(fin.read())
        process.stdin.close()
        process.wait()
    return process.returncode
+
+
def _get_clang_format_cfg(command_context, current_dir):
    """Walk upwards from *current_dir* looking for a .clang-format file.

    Stops at the source root; returns the config path or None.
    """
    directory = current_dir
    while True:
        candidate = mozpath.join(directory, ".clang-format")
        if os.path.exists(candidate):
            # Return found path for .clang-format
            return candidate
        if directory == command_context.topsrcdir:
            # Reached the top of the tree without finding a config.
            return None
        # Go to parent directory
        directory = os.path.split(directory)[0]
+
+
+def _copy_clang_format_for_show_diff(
+ command_context, current_dir, cached_clang_format_cfg, tmpdir
+):
+ # Lookup for .clang-format first in cache
+ clang_format_cfg = cached_clang_format_cfg.get(current_dir, None)
+
+ if clang_format_cfg is None:
+ # Go through top directories
+ clang_format_cfg = _get_clang_format_cfg(command_context, current_dir)
+
+ # This is unlikely to happen since we must have .clang-format from
+ # command_context.topsrcdir but in any case we should handle a potential error
+ if clang_format_cfg is None:
+ print("Cannot find corresponding .clang-format.")
+ return 1
+
+ # Cache clang_format_cfg for potential later usage
+ cached_clang_format_cfg[current_dir] = clang_format_cfg
+
+ # Copy .clang-format to the tmp dir where the formatted file is copied
+ shutil.copy(clang_format_cfg, tmpdir)
+ return 0
+
+
+def _run_clang_format_path(
+ command_context, clang_format, paths, output_file, output_format
+):
+
+ # Run clang-format on files or directories directly
+ from subprocess import CalledProcessError, check_output
+
+ if output_format == "json":
+ # Get replacements in xml, then process to json
+ args = [clang_format, "-output-replacements-xml"]
+ else:
+ args = [clang_format, "-i"]
+
+ if output_file:
+ # We just want to show the diff, we create the directory to copy it
+ tmpdir = os.path.join(command_context.topobjdir, "tmp")
+ if not os.path.exists(tmpdir):
+ os.makedirs(tmpdir)
+
+ path_list = _generate_path_list(command_context, paths)
+
+ if path_list == []:
+ return
+
+ print("Processing %d file(s)..." % len(path_list))
+
+ if output_file:
+ patches = {}
+ cached_clang_format_cfg = {}
+ for i in range(0, len(path_list)):
+ l = path_list[i : (i + 1)]
+
+ # Copy the files into a temp directory
+ # and run clang-format on the temp directory
+ # and show the diff
+ original_path = l[0]
+ local_path = ntpath.basename(original_path)
+ current_dir = ntpath.dirname(original_path)
+ target_file = os.path.join(tmpdir, local_path)
+ faketmpdir = os.path.dirname(target_file)
+ if not os.path.isdir(faketmpdir):
+ os.makedirs(faketmpdir)
+ shutil.copy(l[0], faketmpdir)
+ l[0] = target_file
+
+ ret = _copy_clang_format_for_show_diff(
+ command_context, current_dir, cached_clang_format_cfg, faketmpdir
+ )
+ if ret != 0:
+ return ret
+
+ # Run clang-format on the list
+ try:
+ output = check_output(args + l)
+ if output and output_format == "json":
+ # Output a relative path in json patch list
+ relative_path = os.path.relpath(
+ original_path, command_context.topsrcdir
+ )
+ patches[relative_path] = _parse_xml_output(original_path, output)
+ except CalledProcessError as e:
+            # Something went wrong
+ print("clang-format: An error occured while running clang-format.")
+ return e.returncode
+
+ # show the diff
+ if output_format == "diff":
+ diff_command = ["diff", "-u", original_path, target_file]
+ try:
+ output = check_output(diff_command)
+ except CalledProcessError as e:
+ # diff -u returns 0 when no change
+ # here, we expect changes. if we are here, this means that
+ # there is a diff to show
+ if e.output:
+ # Replace the temp path by the path relative to the repository to
+ # display a valid patch
+ relative_path = os.path.relpath(
+ original_path, command_context.topsrcdir
+ )
+ # We must modify the paths in order to be compatible with the
+ # `diff` format.
+ original_path_diff = os.path.join("a", relative_path)
+ target_path_diff = os.path.join("b", relative_path)
+ e.output = e.output.decode("utf-8")
+ patch = e.output.replace(
+ "+++ {}".format(target_file),
+ "+++ {}".format(target_path_diff),
+ ).replace(
+ "-- {}".format(original_path),
+ "-- {}".format(original_path_diff),
+ )
+ patches[original_path] = patch
+
+ if output_format == "json":
+ output = json.dumps(patches, indent=4)
+ else:
+ # Display all the patches at once
+ output = "\n".join(patches.values())
+
+ # Output to specified file or stdout
+ print(output, file=output_file)
+
+ shutil.rmtree(tmpdir)
+ return 0
+
+ # Run clang-format in parallel trying to saturate all of the available cores.
+ import math
+
+ max_workers = multiprocessing.cpu_count()
+
+ # To maximize CPU usage when there are few items to handle,
+ # underestimate the number of items per batch, then dispatch
+ # outstanding items across workers. Per definition, each worker will
+ # handle at most one outstanding item.
+ batch_size = int(math.floor(float(len(path_list)) / max_workers))
+ outstanding_items = len(path_list) - batch_size * max_workers
+
+ batches = []
+
+ i = 0
+ while i < len(path_list):
+ num_items = batch_size + (1 if outstanding_items > 0 else 0)
+ batches.append(args + path_list[i : (i + num_items)])
+
+ outstanding_items -= 1
+ i += num_items
+
+ error_code = None
+
+ with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
+ futures = []
+ for batch in batches:
+ futures.append(executor.submit(run_one_clang_format_batch, batch))
+
+ for future in concurrent.futures.as_completed(futures):
+ # Wait for every task to finish
+ ret_val = future.result()
+ if ret_val is not None:
+ error_code = ret_val
+
+ if error_code is not None:
+ return error_code
+ return 0
+
+
+def _parse_xml_output(path, clang_output):
+    """
+    Parse the clang-format XML output and convert it into a JSON-compatible
+    list of patches, calculating line-level information from the
+    character-level changes that clang-format provides.
+    """
+ content = six.ensure_str(open(path, "r").read())
+
+ def _nb_of_lines(start, end):
+ return len(content[start:end].splitlines())
+
+ def _build(replacement):
+ offset = int(replacement.attrib["offset"])
+ length = int(replacement.attrib["length"])
+ last_line = content.rfind("\n", 0, offset)
+ return {
+ "replacement": replacement.text,
+ "char_offset": offset,
+ "char_length": length,
+ "line": _nb_of_lines(0, offset),
+ "line_offset": last_line != -1 and (offset - last_line) or 0,
+ "lines_modified": _nb_of_lines(offset, offset + length),
+ }
+
+ return [
+ _build(replacement)
+ for replacement in ET.fromstring(clang_output).findall("replacement")
+ ]
diff --git a/python/mozbuild/mozbuild/code_analysis/moz.build b/python/mozbuild/mozbuild/code_analysis/moz.build
new file mode 100644
index 0000000000..bb49fbcd2f
--- /dev/null
+++ b/python/mozbuild/mozbuild/code_analysis/moz.build
@@ -0,0 +1,8 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+with Files("**"):
+ BUG_COMPONENT = ("Firefox Build System", "Source Code Analysis")
diff --git a/python/mozbuild/mozbuild/code_analysis/utils.py b/python/mozbuild/mozbuild/code_analysis/utils.py
new file mode 100644
index 0000000000..e3931aa7e4
--- /dev/null
+++ b/python/mozbuild/mozbuild/code_analysis/utils.py
@@ -0,0 +1,138 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import logging
+
+import mozpack.path as mozpath
+import yaml
+
+from mozbuild.util import memoized_property
+
+
+class ClangTidyConfig(object):
+ def __init__(self, mozilla_src):
+ self._clang_tidy_config = self._get_clang_tidy_config(mozilla_src)
+
+ def _get_clang_tidy_config(self, mozilla_src):
+ try:
+ file_handler = open(
+ mozpath.join(mozilla_src, "tools", "clang-tidy", "config.yaml")
+ )
+ config = yaml.safe_load(file_handler)
+ except Exception:
+ self.log(
+ logging.ERROR,
+ "clang-tidy-config",
+ {},
+ "Looks like config.yaml is not valid, we are going to use default"
+ " values for the rest of the analysis for clang-tidy.",
+ )
+ return None
+ return config
+
+ @memoized_property
+ def checks(self):
+ """
+ Returns a list with all activated checks
+ """
+
+ checks = ["-*"]
+ try:
+ config = self._clang_tidy_config
+ for item in config["clang_checkers"]:
+ if item.get("publish", True):
+ checks.append(item["name"])
+ except Exception:
+ self.log(
+ logging.ERROR,
+ "clang-tidy-config",
+ {},
+ "Looks like config.yaml is not valid, so we are unable to "
+ "determine default checkers, using '-checks=-*,mozilla-*'",
+ )
+ checks.append("mozilla-*")
+ finally:
+ return checks
+
+ @memoized_property
+ def checks_with_data(self):
+ """
+ Returns a list with all activated checks plus metadata for each check
+ """
+
+ checks_with_data = [{"name": "-*"}]
+ try:
+ config = self._clang_tidy_config
+ for item in config["clang_checkers"]:
+ if item.get("publish", True):
+ checks_with_data.append(item)
+ except Exception:
+ self.log(
+ logging.ERROR,
+ "clang-tidy-config",
+ {},
+ "Looks like config.yaml is not valid, so we are unable to "
+ "determine default checkers, using '-checks=-*,mozilla-*'",
+ )
+ checks_with_data.append({"name": "mozilla-*", "reliability": "high"})
+ finally:
+ return checks_with_data
+
+ @memoized_property
+ def checks_config(self):
+ """
+        Returns the configuration for all checks
+ """
+
+ config_list = []
+ checks_config = {}
+ try:
+ config = self._clang_tidy_config
+ for checker in config["clang_checkers"]:
+ if checker.get("publish", True) and "config" in checker:
+ for checker_option in checker["config"]:
+ # Verify if the format of the Option is correct,
+ # possibilities are:
+ # 1. CheckerName.Option
+ # 2. Option -> that will become CheckerName.Option
+ if not checker_option["key"].startswith(checker["name"]):
+ checker_option["key"] = "{}.{}".format(
+ checker["name"], checker_option["key"]
+ )
+ config_list += checker["config"]
+ checks_config["CheckOptions"] = config_list
+ except Exception:
+ self.log(
+ logging.ERROR,
+ "clang-tidy-config",
+ {},
+ "Looks like config.yaml is not valid, so we are unable to "
+ "determine configuration for checkers, so using default",
+ )
+ checks_config = None
+ finally:
+ return checks_config
+
+ @memoized_property
+ def version(self):
+ """
+ Returns version of clang-tidy suitable for this configuration file
+ """
+
+ if "package_version" in self._clang_tidy_config:
+ return self._clang_tidy_config["package_version"]
+ self.log(
+ logging.ERROR,
+ "clang-tidy-confis",
+ {},
+ "Unable to find 'package_version' in the config.yml",
+ )
+ return None
+
+ @memoized_property
+ def platforms(self):
+ """
+ Returns a list of platforms suitable to work with `clang-tidy`
+ """
+ return self._clang_tidy_config.get("platforms", [])
diff --git a/python/mozbuild/mozbuild/codecoverage/__init__.py b/python/mozbuild/mozbuild/codecoverage/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/codecoverage/__init__.py
diff --git a/python/mozbuild/mozbuild/codecoverage/chrome_map.py b/python/mozbuild/mozbuild/codecoverage/chrome_map.py
new file mode 100644
index 0000000000..79cedd2faf
--- /dev/null
+++ b/python/mozbuild/mozbuild/codecoverage/chrome_map.py
@@ -0,0 +1,175 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+import os
+import re
+
+import mozpack.path as mozpath
+import six
+from mach.config import ConfigSettings
+from mach.logging import LoggingManager
+from mozpack.copier import FileRegistry
+from mozpack.files import PreprocessedFile
+from mozpack.manifests import InstallManifest
+
+from mozbuild.backend.common import CommonBackend
+from mozbuild.base import MozbuildObject
+from mozbuild.frontend.data import (
+ ChromeManifestEntry,
+ FinalTargetFiles,
+ FinalTargetPreprocessedFiles,
+ JARManifest,
+)
+
+from .manifest_handler import ChromeManifestHandler
+
+_line_comment_re = re.compile('^//@line (\d+) "(.+)"$')
+
+
+def generate_pp_info(path, topsrcdir):
+ with open(path, encoding="utf-8") as fh:
+ # (start, end) -> (included_source, start)
+ section_info = dict()
+
+ this_section = None
+
+ def finish_section(pp_end):
+ pp_start, inc_source, inc_start = this_section
+ section_info[str(pp_start) + "," + str(pp_end)] = inc_source, inc_start
+
+ for count, line in enumerate(fh):
+ # Regex are quite slow, so bail out early.
+ if not line.startswith("//@line"):
+ continue
+ m = re.match(_line_comment_re, line)
+ if m:
+ if this_section:
+ finish_section(count + 1)
+ inc_start, inc_source = m.groups()
+
+ # Special case to handle $SRCDIR prefixes
+ src_dir_prefix = "$SRCDIR"
+ parts = mozpath.split(inc_source)
+ if parts[0] == src_dir_prefix:
+ inc_source = mozpath.join(*parts[1:])
+ else:
+ inc_source = mozpath.relpath(inc_source, topsrcdir)
+
+ pp_start = count + 2
+ this_section = pp_start, inc_source, int(inc_start)
+
+ if this_section:
+ finish_section(count + 2)
+
+ return section_info
+
+
+# This build backend assumes the build has already happened, as it is parsing
+# built preprocessed files to generate data mapping them to the original sources.
+
+
+class ChromeMapBackend(CommonBackend):
+ def _init(self):
+ CommonBackend._init(self)
+
+ log_manager = LoggingManager()
+ self._cmd = MozbuildObject(
+ self.environment.topsrcdir,
+ ConfigSettings(),
+ log_manager,
+ self.environment.topobjdir,
+ )
+ self._install_mapping = {}
+ self.manifest_handler = ChromeManifestHandler()
+
+ def consume_object(self, obj):
+ if isinstance(obj, JARManifest):
+ self._consume_jar_manifest(obj)
+ if isinstance(obj, ChromeManifestEntry):
+ self.manifest_handler.handle_manifest_entry(obj.entry)
+ if isinstance(obj, (FinalTargetFiles, FinalTargetPreprocessedFiles)):
+ self._handle_final_target_files(obj)
+ return True
+
+ def _handle_final_target_files(self, obj):
+ for path, files in obj.files.walk():
+ for f in files:
+ dest = mozpath.join(obj.install_target, path, f.target_basename)
+ obj_path = mozpath.join(self.environment.topobjdir, dest)
+ if obj_path.endswith(".in"):
+ obj_path = obj_path[:-3]
+ if isinstance(obj, FinalTargetPreprocessedFiles):
+ assert os.path.exists(obj_path), "%s should exist" % obj_path
+ pp_info = generate_pp_info(obj_path, obj.topsrcdir)
+ else:
+ pp_info = None
+
+ base = (
+ obj.topobjdir
+ if f.full_path.startswith(obj.topobjdir)
+ else obj.topsrcdir
+ )
+ self._install_mapping[dest] = (
+ mozpath.relpath(f.full_path, base),
+ pp_info,
+ )
+
+ def consume_finished(self):
+ mp = os.path.join(
+ self.environment.topobjdir, "_build_manifests", "install", "_tests"
+ )
+ install_manifest = InstallManifest(mp)
+ reg = FileRegistry()
+ install_manifest.populate_registry(reg)
+
+ for dest, src in reg:
+ if not hasattr(src, "path"):
+ continue
+
+ if not os.path.isabs(dest):
+ dest = "_tests/" + dest
+
+ obj_path = mozpath.join(self.environment.topobjdir, dest)
+ if isinstance(src, PreprocessedFile):
+ assert os.path.exists(obj_path), "%s should exist" % obj_path
+ pp_info = generate_pp_info(obj_path, self.environment.topsrcdir)
+ else:
+ pp_info = None
+
+ rel_src = mozpath.relpath(src.path, self.environment.topsrcdir)
+ self._install_mapping[dest] = rel_src, pp_info
+
+ # Our result has four parts:
+ # A map from url prefixes to objdir directories:
+ # { "chrome://mozapps/content/": [ "dist/bin/chrome/toolkit/content/mozapps" ], ... }
+ # A map of overrides.
+ # A map from objdir paths to sourcedir paths, and an object storing mapping
+ # information for preprocessed files:
+ # { "dist/bin/browser/chrome/browser/content/browser/aboutSessionRestore.js":
+ # [ "$topsrcdir/browser/components/sessionstore/content/aboutSessionRestore.js", {} ],
+ # ... }
+ # An object containing build configuration information.
+ outputfile = os.path.join(self.environment.topobjdir, "chrome-map.json")
+ with self._write_file(outputfile) as fh:
+ chrome_mapping = self.manifest_handler.chrome_mapping
+ overrides = self.manifest_handler.overrides
+ json.dump(
+ [
+ {k: list(v) for k, v in six.iteritems(chrome_mapping)},
+ overrides,
+ self._install_mapping,
+ {
+ "topobjdir": mozpath.normpath(self.environment.topobjdir),
+ "MOZ_APP_NAME": self.environment.substs.get("MOZ_APP_NAME"),
+ "OMNIJAR_NAME": self.environment.substs.get("OMNIJAR_NAME"),
+ "MOZ_MACBUNDLE_NAME": self.environment.substs.get(
+ "MOZ_MACBUNDLE_NAME"
+ ),
+ },
+ ],
+ fh,
+ sort_keys=True,
+ indent=2,
+ )
diff --git a/python/mozbuild/mozbuild/codecoverage/lcov_rewriter.py b/python/mozbuild/mozbuild/codecoverage/lcov_rewriter.py
new file mode 100644
index 0000000000..fd83efdb3e
--- /dev/null
+++ b/python/mozbuild/mozbuild/codecoverage/lcov_rewriter.py
@@ -0,0 +1,777 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+import os
+import sys
+from argparse import ArgumentParser
+
+try:
+ import urlparse
+except ImportError:
+ import urllib.parse as urlparse
+
+import mozpack.path as mozpath
+from mozpack.chrome.manifest import parse_manifest
+from six import viewitems
+
+from .manifest_handler import ChromeManifestHandler
+
+
+class LcovRecord(object):
+ __slots__ = (
+ "test_name",
+ "source_file",
+ "functions",
+ "function_exec_counts",
+ "function_count",
+ "covered_function_count",
+ "branches",
+ "branch_count",
+ "covered_branch_count",
+ "lines",
+ "line_count",
+ "covered_line_count",
+ )
+
+ def __init__(self):
+ self.functions = {}
+ self.function_exec_counts = {}
+ self.branches = {}
+ self.lines = {}
+
+ def __iadd__(self, other):
+
+ # These shouldn't differ.
+ self.source_file = other.source_file
+ if hasattr(other, "test_name"):
+ self.test_name = other.test_name
+ self.functions.update(other.functions)
+
+ for name, count in viewitems(other.function_exec_counts):
+ self.function_exec_counts[name] = count + self.function_exec_counts.get(
+ name, 0
+ )
+
+ for key, taken in viewitems(other.branches):
+ self.branches[key] = taken + self.branches.get(key, 0)
+
+ for line, (exec_count, checksum) in viewitems(other.lines):
+ new_exec_count = exec_count
+ if line in self.lines:
+ old_exec_count, _ = self.lines[line]
+ new_exec_count += old_exec_count
+ self.lines[line] = new_exec_count, checksum
+
+ self.resummarize()
+ return self
+
+ def resummarize(self):
+ # Re-calculate summaries after generating or splitting a record.
+ self.function_count = len(self.functions.keys())
+ # Function records may have moved between files, so filter here.
+ self.function_exec_counts = {
+ fn_name: count
+ for fn_name, count in viewitems(self.function_exec_counts)
+ if fn_name in self.functions.values()
+ }
+ self.covered_function_count = len(
+ [c for c in self.function_exec_counts.values() if c]
+ )
+ self.line_count = len(self.lines)
+ self.covered_line_count = len([c for c, _ in self.lines.values() if c])
+ self.branch_count = len(self.branches)
+ self.covered_branch_count = len([c for c in self.branches.values() if c])
+
+
+class RecordRewriter(object):
+    # Helper class for rewriting/splitting individual lcov records according
+ # to what the preprocessor did.
+ def __init__(self):
+ self._ranges = None
+
+ def _get_range(self, line):
+ for start, end in self._ranges:
+ if line < start:
+ return None
+ if line < end:
+ return start, end
+ return None
+
+ def _get_mapped_line(self, line, r):
+ inc_source, inc_start = self._current_pp_info[r]
+ start, end = r
+ offs = line - start
+ return inc_start + offs
+
+ def _get_record(self, inc_source):
+ if inc_source in self._additions:
+ gen_rec = self._additions[inc_source]
+ else:
+ gen_rec = LcovRecord()
+ gen_rec.source_file = inc_source
+ self._additions[inc_source] = gen_rec
+ return gen_rec
+
+ def _rewrite_lines(self, record):
+ rewritten_lines = {}
+ for ln, line_info in viewitems(record.lines):
+ r = self._get_range(ln)
+ if r is None:
+ rewritten_lines[ln] = line_info
+ continue
+ new_ln = self._get_mapped_line(ln, r)
+ inc_source, _ = self._current_pp_info[r]
+
+ if inc_source != record.source_file:
+ gen_rec = self._get_record(inc_source)
+ gen_rec.lines[new_ln] = line_info
+ continue
+
+ # Move exec_count to the new lineno.
+ rewritten_lines[new_ln] = line_info
+
+ record.lines = rewritten_lines
+
+ def _rewrite_functions(self, record):
+ rewritten_fns = {}
+
+ # Sometimes we get multiple entries for a named function ("top-level", for
+ # instance). It's not clear the records that result are well-formed, but
+ # we act as though if a function has multiple FN's, the corresponding
+ # FNDA's are all the same.
+ for ln, fn_name in viewitems(record.functions):
+ r = self._get_range(ln)
+ if r is None:
+ rewritten_fns[ln] = fn_name
+ continue
+ new_ln = self._get_mapped_line(ln, r)
+ inc_source, _ = self._current_pp_info[r]
+ if inc_source != record.source_file:
+ gen_rec = self._get_record(inc_source)
+ gen_rec.functions[new_ln] = fn_name
+ if fn_name in record.function_exec_counts:
+ gen_rec.function_exec_counts[fn_name] = record.function_exec_counts[
+ fn_name
+ ]
+ continue
+ rewritten_fns[new_ln] = fn_name
+ record.functions = rewritten_fns
+
+ def _rewrite_branches(self, record):
+ rewritten_branches = {}
+ for (ln, block_number, branch_number), taken in viewitems(record.branches):
+ r = self._get_range(ln)
+ if r is None:
+ rewritten_branches[ln, block_number, branch_number] = taken
+ continue
+ new_ln = self._get_mapped_line(ln, r)
+ inc_source, _ = self._current_pp_info[r]
+ if inc_source != record.source_file:
+ gen_rec = self._get_record(inc_source)
+ gen_rec.branches[(new_ln, block_number, branch_number)] = taken
+ continue
+ rewritten_branches[(new_ln, block_number, branch_number)] = taken
+
+ record.branches = rewritten_branches
+
+ def rewrite_record(self, record, pp_info):
+ # Rewrite the lines in the given record according to preprocessor info
+ # and split to additional records when pp_info has included file info.
+ self._current_pp_info = dict(
+ [(tuple([int(l) for l in k.split(",")]), v) for k, v in pp_info.items()]
+ )
+ self._ranges = sorted(self._current_pp_info.keys())
+ self._additions = {}
+ self._rewrite_lines(record)
+ self._rewrite_functions(record)
+ self._rewrite_branches(record)
+
+ record.resummarize()
+
+ generated_records = self._additions.values()
+ for r in generated_records:
+ r.resummarize()
+ return generated_records
+
+
+class LcovFile(object):
+ # Simple parser/pretty-printer for lcov format.
+ # lcov parsing based on http://ltp.sourceforge.net/coverage/lcov/geninfo.1.php
+
+ # TN:<test name>
+ # SF:<absolute path to the source file>
+ # FN:<line number of function start>,<function name>
+ # FNDA:<execution count>,<function name>
+ # FNF:<number of functions found>
+    # FNH:<number of functions hit>
+ # BRDA:<line number>,<block number>,<branch number>,<taken>
+ # BRF:<number of branches found>
+ # BRH:<number of branches hit>
+ # DA:<line number>,<execution count>[,<checksum>]
+ # LF:<number of instrumented lines>
+ # LH:<number of lines with a non-zero execution count>
+ # end_of_record
+ PREFIX_TYPES = {
+ "TN": 0,
+ "SF": 0,
+ "FN": 1,
+ "FNDA": 1,
+ "FNF": 0,
+ "FNH": 0,
+ "BRDA": 3,
+ "BRF": 0,
+ "BRH": 0,
+ "DA": 2,
+ "LH": 0,
+ "LF": 0,
+ }
+
+ def __init__(self, lcov_paths):
+ self.lcov_paths = lcov_paths
+
+ def iterate_records(self, rewrite_source=None):
+ current_source_file = None
+ current_pp_info = None
+ current_lines = []
+ for lcov_path in self.lcov_paths:
+ with open(lcov_path, "r", encoding="utf-8") as lcov_fh:
+ for line in lcov_fh:
+ line = line.rstrip()
+ if not line:
+ continue
+
+ if line == "end_of_record":
+ # We skip records that we couldn't rewrite, that is records for which
+ # rewrite_url returns None.
+ if current_source_file is not None:
+ yield (current_source_file, current_pp_info, current_lines)
+ current_source_file = None
+ current_pp_info = None
+ current_lines = []
+ continue
+
+ colon = line.find(":")
+ prefix = line[:colon]
+
+ if prefix == "SF":
+ sf = line[(colon + 1) :]
+ res = (
+ rewrite_source(sf)
+ if rewrite_source is not None
+ else (sf, None)
+ )
+ if res is None:
+ current_lines.append(line)
+ else:
+ current_source_file, current_pp_info = res
+ current_lines.append("SF:" + current_source_file)
+ else:
+ current_lines.append(line)
+
+ def parse_record(self, record_content):
+ self.current_record = LcovRecord()
+
+ for line in record_content:
+ colon = line.find(":")
+
+ prefix = line[:colon]
+
+ # We occasionally end up with multi-line scripts in data:
+ # uris that will trip up the parser, just skip them for now.
+ if colon < 0 or prefix not in self.PREFIX_TYPES:
+ continue
+
+ args = line[(colon + 1) :].split(",", self.PREFIX_TYPES[prefix])
+
+ def try_convert(a):
+ try:
+ return int(a)
+ except ValueError:
+ return a
+
+ args = [try_convert(a) for a in args]
+
+ try:
+ LcovFile.__dict__["parse_" + prefix](self, *args)
+ except ValueError:
+ print("Encountered an error in %s:\n%s" % (self.lcov_fh.name, line))
+ raise
+ except KeyError:
+ print("Invalid lcov line start in %s:\n%s" % (self.lcov_fh.name, line))
+ raise
+ except TypeError:
+ print("Invalid lcov line start in %s:\n%s" % (self.lcov_fh.name, line))
+ raise
+
+ ret = self.current_record
+ self.current_record = LcovRecord()
+ return ret
+
+ def print_file(self, fh, rewrite_source, rewrite_record):
+ for source_file, pp_info, record_content in self.iterate_records(
+ rewrite_source
+ ):
+ if pp_info is not None:
+ record = self.parse_record(record_content)
+ for r in rewrite_record(record, pp_info):
+ fh.write(self.format_record(r))
+ fh.write(self.format_record(record))
+ else:
+ fh.write("\n".join(record_content) + "\nend_of_record\n")
+
+ def format_record(self, record):
+ out_lines = []
+ for name in LcovRecord.__slots__:
+ if hasattr(record, name):
+ out_lines.append(LcovFile.__dict__["format_" + name](self, record))
+ return "\n".join(out_lines) + "\nend_of_record\n"
+
+ def format_test_name(self, record):
+ return "TN:%s" % record.test_name
+
+ def format_source_file(self, record):
+ return "SF:%s" % record.source_file
+
+ def format_functions(self, record):
+ # Sorting results gives deterministic output (and is a lot faster than
+ # using OrderedDict).
+ fns = []
+ for start_lineno, fn_name in sorted(viewitems(record.functions)):
+ fns.append("FN:%s,%s" % (start_lineno, fn_name))
+ return "\n".join(fns)
+
+ def format_function_exec_counts(self, record):
+ fndas = []
+ for name, exec_count in sorted(viewitems(record.function_exec_counts)):
+ fndas.append("FNDA:%s,%s" % (exec_count, name))
+ return "\n".join(fndas)
+
+ def format_function_count(self, record):
+ return "FNF:%s" % record.function_count
+
+ def format_covered_function_count(self, record):
+ return "FNH:%s" % record.covered_function_count
+
+ def format_branches(self, record):
+ brdas = []
+ for key in sorted(record.branches):
+ taken = record.branches[key]
+ taken = "-" if taken == 0 else taken
+ brdas.append("BRDA:%s" % ",".join(map(str, list(key) + [taken])))
+ return "\n".join(brdas)
+
+ def format_branch_count(self, record):
+ return "BRF:%s" % record.branch_count
+
+ def format_covered_branch_count(self, record):
+ return "BRH:%s" % record.covered_branch_count
+
+ def format_lines(self, record):
+ das = []
+ for line_no, (exec_count, checksum) in sorted(viewitems(record.lines)):
+ s = "DA:%s,%s" % (line_no, exec_count)
+ if checksum:
+ s += ",%s" % checksum
+ das.append(s)
+ return "\n".join(das)
+
+ def format_line_count(self, record):
+ return "LF:%s" % record.line_count
+
+ def format_covered_line_count(self, record):
+ return "LH:%s" % record.covered_line_count
+
+ def parse_TN(self, test_name):
+ self.current_record.test_name = test_name
+
+ def parse_SF(self, source_file):
+ self.current_record.source_file = source_file
+
+ def parse_FN(self, start_lineno, fn_name):
+ self.current_record.functions[start_lineno] = fn_name
+
+ def parse_FNDA(self, exec_count, fn_name):
+ self.current_record.function_exec_counts[fn_name] = exec_count
+
+ def parse_FNF(self, function_count):
+ self.current_record.function_count = function_count
+
+ def parse_FNH(self, covered_function_count):
+ self.current_record.covered_function_count = covered_function_count
+
+ def parse_BRDA(self, line_number, block_number, branch_number, taken):
+ taken = 0 if taken == "-" else taken
+ self.current_record.branches[(line_number, block_number, branch_number)] = taken
+
+ def parse_BRF(self, branch_count):
+ self.current_record.branch_count = branch_count
+
+ def parse_BRH(self, covered_branch_count):
+ self.current_record.covered_branch_count = covered_branch_count
+
+ def parse_DA(self, line_number, execution_count, checksum=None):
+ self.current_record.lines[line_number] = (execution_count, checksum)
+
+ def parse_LH(self, covered_line_count):
+ self.current_record.covered_line_count = covered_line_count
+
+ def parse_LF(self, line_count):
+ self.current_record.line_count = line_count
+
+
+class UrlFinderError(Exception):
+ pass
+
+
+class UrlFinder(object):
+ # Given a "chrome://" or "resource://" url, uses data from the UrlMapBackend
+ # and install manifests to find a path to the source file and the corresponding
+ # (potentially pre-processed) file in the objdir.
+ def __init__(self, chrome_map_path, appdir, gredir, extra_chrome_manifests):
+ # Cached entries
+ self._final_mapping = {}
+
+ try:
+ with open(chrome_map_path, "r", encoding="utf-8") as fh:
+ url_prefixes, overrides, install_info, buildconfig = json.load(fh)
+ except IOError:
+ print(
+ "Error reading %s. Run |./mach build-backend -b ChromeMap| to "
+ "populate the ChromeMap backend." % chrome_map_path
+ )
+ raise
+
+ self.topobjdir = buildconfig["topobjdir"]
+ self.MOZ_APP_NAME = buildconfig["MOZ_APP_NAME"]
+ self.OMNIJAR_NAME = buildconfig["OMNIJAR_NAME"]
+
+ # These are added dynamically in nsIResProtocolHandler, we might
+ # need to get them at run time.
+ if "resource:///" not in url_prefixes:
+ url_prefixes["resource:///"] = [appdir]
+ if "resource://gre/" not in url_prefixes:
+ url_prefixes["resource://gre/"] = [gredir]
+
+ self._url_prefixes = url_prefixes
+ self._url_overrides = overrides
+
+ self._respath = None
+
+ mac_bundle_name = buildconfig["MOZ_MACBUNDLE_NAME"]
+ if mac_bundle_name:
+ self._respath = mozpath.join(
+ "dist", mac_bundle_name, "Contents", "Resources"
+ )
+
+ if not extra_chrome_manifests:
+ extra_path = os.path.join(self.topobjdir, "_tests", "extra.manifest")
+ if os.path.isfile(extra_path):
+ extra_chrome_manifests = [extra_path]
+
+ if extra_chrome_manifests:
+ self._populate_chrome(extra_chrome_manifests)
+
+ self._install_mapping = install_info
+
+ def _populate_chrome(self, manifests):
+ handler = ChromeManifestHandler()
+ for m in manifests:
+ path = os.path.abspath(m)
+ for e in parse_manifest(None, path):
+ handler.handle_manifest_entry(e)
+ self._url_overrides.update(handler.overrides)
+ self._url_prefixes.update(handler.chrome_mapping)
+
+ def _find_install_prefix(self, objdir_path):
+ def _prefix(s):
+ for p in mozpath.split(s):
+ if "*" not in p:
+ yield p + "/"
+
+ offset = 0
+ for leaf in reversed(mozpath.split(objdir_path)):
+ offset += len(leaf)
+ if objdir_path[:-offset] in self._install_mapping:
+ pattern_prefix, is_pp = self._install_mapping[objdir_path[:-offset]]
+ full_leaf = objdir_path[len(objdir_path) - offset :]
+ src_prefix = "".join(_prefix(pattern_prefix))
+ self._install_mapping[objdir_path] = (
+ mozpath.join(src_prefix, full_leaf),
+ is_pp,
+ )
+ break
+ offset += 1
+
+ def _install_info(self, objdir_path):
+ if objdir_path not in self._install_mapping:
+ # If our path is missing, some prefix of it may be in the install
+ # mapping mapped to a wildcard.
+ self._find_install_prefix(objdir_path)
+ if objdir_path not in self._install_mapping:
+ raise UrlFinderError("Couldn't find entry in manifest for %s" % objdir_path)
+ return self._install_mapping[objdir_path]
+
+ def _abs_objdir_install_info(self, term):
+ obj_relpath = term[len(self.topobjdir) + 1 :]
+ res = self._install_info(obj_relpath)
+
+ # Some urls on osx will refer to paths in the mac bundle, so we
+ # re-interpret them as being their original location in dist/bin.
+ if not res and self._respath and obj_relpath.startswith(self._respath):
+ obj_relpath = obj_relpath.replace(self._respath, "dist/bin")
+ res = self._install_info(obj_relpath)
+
+ if not res:
+ raise UrlFinderError("Couldn't find entry in manifest for %s" % obj_relpath)
+ return res
+
    def find_files(self, url):
        """Return a (source file, pp_info) tuple for *url*.

        Handles "resource:", "chrome:", and "file:" uris: chrome overrides
        are applied first, then absolute objdir paths are resolved
        directly, then each registered url prefix/destination pair is
        tried; chrome/resource destinations are resolved recursively.
        Raises UrlFinderError when no objdir path can be derived.
        """
        term = url
        if term in self._url_overrides:
            term = self._url_overrides[term]

        # Absolute paths inside the objdir resolve straight through the
        # install mapping.
        if os.path.isabs(term) and term.startswith(self.topobjdir):
            source_path, pp_info = self._abs_objdir_install_info(term)
            return source_path, pp_info

        for prefix, dests in viewitems(self._url_prefixes):
            if term.startswith(prefix):
                for dest in dests:
                    if not dest.endswith("/"):
                        dest += "/"
                    objdir_path = term.replace(prefix, dest)

                    while objdir_path.startswith("//"):
                        # The mochitest harness produces some wonky file:// uris
                        # that need to be fixed.
                        objdir_path = objdir_path[1:]

                    try:
                        if os.path.isabs(objdir_path) and objdir_path.startswith(
                            self.topobjdir
                        ):
                            return self._abs_objdir_install_info(objdir_path)
                        else:
                            src_path, pp_info = self._install_info(objdir_path)
                            return mozpath.normpath(src_path), pp_info
                    except UrlFinderError:
                        # This destination didn't resolve; try the next one.
                        pass

                    # A destination may itself be a chrome/resource url;
                    # recurse to resolve the redirected form.
                    if dest.startswith("resource://") or dest.startswith("chrome://"):
                        result = self.find_files(term.replace(prefix, dest))
                        if result:
                            return result

        raise UrlFinderError("No objdir path for %s" % term)
+
    def rewrite_url(self, url):
        """Resolve *url* to a (source file, pp_info) tuple, or None.

        Applies one-off rules and returns None for urls that we aren't
        going to be able to resolve to a source file ("about:" urls, for
        instance, or eval/Function frames and vanished profile files).
        Successful resolutions are memoized in self._final_mapping.
        """
        if url in self._final_mapping:
            return self._final_mapping[url]
        if url.endswith("> eval"):
            return None
        if url.endswith("> Function"):
            return None
        if " -> " in url:
            # Keep only the right-hand side of "wrapper -> wrapped" urls.
            url = url.split(" -> ")[1].rstrip()
        if "?" in url:
            # Drop any query string.
            url = url.split("?")[0]

        url_obj = urlparse.urlparse(url)
        if url_obj.scheme == "jar":
            app_name = self.MOZ_APP_NAME
            omnijar_name = self.OMNIJAR_NAME

            if app_name in url:
                if omnijar_name in url:
                    # e.g. file:///home/worker/workspace/build/application/firefox/omni.ja!/components/MainProcessSingleton.js # noqa
                    parts = url_obj.path.split(omnijar_name + "!", 1)
                elif ".xpi!" in url:
                    # e.g. file:///home/worker/workspace/build/application/firefox/browser/features/e10srollout@mozilla.org.xpi!/bootstrap.js # noqa
                    parts = url_obj.path.split(".xpi!", 1)
                else:
                    # We don't know how to handle this jar: path, so return it to the
                    # caller to make it print a warning.
                    return url_obj.path, None

                # Re-root the in-archive path under objdir/dist/bin.
                dir_parts = parts[0].rsplit(app_name + "/", 1)
                url = mozpath.normpath(
                    mozpath.join(
                        self.topobjdir,
                        "dist",
                        "bin",
                        dir_parts[1].lstrip("/"),
                        parts[1].lstrip("/"),
                    )
                )
        elif ".xpi!" in url:
            # This matching mechanism is quite brittle and based on examples seen in the wild.
            # There's no rule to match the XPI name to the path in dist/xpi-stage.
            parts = url_obj.path.split(".xpi!", 1)
            addon_name = os.path.basename(parts[0])
            if "-test@mozilla.org" in addon_name:
                addon_name = addon_name[: -len("-test@mozilla.org")]
            elif addon_name.endswith("@mozilla.org"):
                addon_name = addon_name[: -len("@mozilla.org")]
            url = mozpath.normpath(
                mozpath.join(
                    self.topobjdir,
                    "dist",
                    "xpi-stage",
                    addon_name,
                    parts[1].lstrip("/"),
                )
            )
        elif url_obj.scheme == "file" and os.path.isabs(url_obj.path):
            path = url_obj.path
            if not os.path.isfile(path):
                # This may have been in a profile directory that no
                # longer exists.
                return None
            if not path.startswith(self.topobjdir):
                return path, None
            url = url_obj.path
        elif url_obj.scheme in ("http", "https", "javascript", "data", "about"):
            return None

        result = self.find_files(url)
        self._final_mapping[url] = result
        return result
+
+
class LcovFileRewriter(object):
    """Partial parser of the LCOV format that rewrites records in place.

    Resolves source-file urls (resource:, chrome:, file:) back to source
    paths and remaps lines in preprocessed files to their original
    locations.
    """

    def __init__(
        self,
        chrome_map_path,
        appdir="dist/bin/browser/",
        gredir="dist/bin/",
        extra_chrome_manifests=None,
    ):
        """Create a rewriter backed by the given chrome-map.json.

        *extra_chrome_manifests* previously defaulted to a mutable []
        (shared across calls -- the classic mutable-default pitfall);
        None behaves identically because UrlFinder only truth-tests it.
        """
        self.url_finder = UrlFinder(
            chrome_map_path, appdir, gredir, extra_chrome_manifests or []
        )
        self.pp_rewriter = RecordRewriter()

    def rewrite_files(self, in_paths, output_file, output_suffix):
        """Rewrite the given lcov files.

        When *output_file* is non-empty all inputs are merged into it;
        otherwise each input is rewritten to <input><output_suffix>.
        Prints a warning when no record resolved to a valid source.
        """
        unknowns = set()
        # One-element list so the nested closure below can mutate it.
        found_valid = [False]

        def rewrite_source(url):
            try:
                res = self.url_finder.rewrite_url(url)
                if res is None:
                    return None
            except Exception as e:
                if url not in unknowns:
                    # The exception can contain random filename used by
                    # test cases, and there can be character that cannot be
                    # encoded with the stdout encoding.
                    sys.stdout.buffer.write(
                        (
                            "Error: %s.\nCouldn't find source info for %s, removing record"
                            % (e, url)
                        ).encode(sys.stdout.encoding, errors="replace")
                    )
                    unknowns.add(url)
                return None

            source_file, pp_info = res
            # We can't assert that the file exists here, because we don't have the source
            # checkout available on test machines. We can bring back this assertion when
            # bug 1432287 is fixed.
            # assert os.path.isfile(source_file), "Couldn't find mapped source file %s at %s!" % (
            #     url, source_file)

            found_valid[0] = True

            return res

        in_paths = [os.path.abspath(in_path) for in_path in in_paths]

        if output_file:
            lcov_file = LcovFile(in_paths)
            with open(output_file, "w+", encoding="utf-8") as out_fh:
                lcov_file.print_file(
                    out_fh, rewrite_source, self.pp_rewriter.rewrite_record
                )
        else:
            for in_path in in_paths:
                lcov_file = LcovFile([in_path])
                with open(in_path + output_suffix, "w+", encoding="utf-8") as out_fh:
                    lcov_file.print_file(
                        out_fh, rewrite_source, self.pp_rewriter.rewrite_record
                    )

        if not found_valid[0]:
            print("WARNING: No valid records found in %s" % in_paths)
            return
+
+
def main():
    """Command-line driver: parse arguments and rewrite the given files."""
    arg_parser = ArgumentParser(
        description="Given a set of gcov .info files produced "
        "by spidermonkey's code coverage, re-maps file urls "
        "back to source files and lines in preprocessed files "
        "back to their original locations."
    )
    arg_parser.add_argument(
        "--chrome-map-path",
        default="chrome-map.json",
        help="Path to the chrome-map.json file.",
    )
    arg_parser.add_argument(
        "--app-dir",
        default="dist/bin/browser/",
        help="Prefix of the appdir in use. This is used to map "
        "urls starting with resource:///. It may differ by "
        "app, but defaults to the valid value for firefox.",
    )
    arg_parser.add_argument(
        "--gre-dir",
        default="dist/bin/",
        help="Prefix of the gre dir in use. This is used to map "
        "urls starting with resource://gre. It may differ by "
        "app, but defaults to the valid value for firefox.",
    )
    arg_parser.add_argument(
        "--output-suffix", default=".out", help="The suffix to append to output files."
    )
    arg_parser.add_argument(
        "--extra-chrome-manifests",
        nargs="+",
        help="Paths to files containing extra chrome registration.",
    )
    arg_parser.add_argument(
        "--output-file",
        default="",
        help="The output file where the results are merged. Leave empty to make the rewriter not "
        "merge files.",
    )
    arg_parser.add_argument("files", nargs="+", help="The set of files to process.")

    opts = arg_parser.parse_args()

    rewriter = LcovFileRewriter(
        opts.chrome_map_path, opts.app_dir, opts.gre_dir, opts.extra_chrome_manifests
    )

    # Directories given on the command line are expanded to their
    # immediate entries.
    inputs = []
    for given in opts.files:
        if os.path.isdir(given):
            inputs.extend(os.path.join(given, entry) for entry in os.listdir(given))
        else:
            inputs.append(given)

    rewriter.rewrite_files(inputs, opts.output_file, opts.output_suffix)


if __name__ == "__main__":
    main()
diff --git a/python/mozbuild/mozbuild/codecoverage/manifest_handler.py b/python/mozbuild/mozbuild/codecoverage/manifest_handler.py
new file mode 100644
index 0000000000..1f67b4089c
--- /dev/null
+++ b/python/mozbuild/mozbuild/codecoverage/manifest_handler.py
@@ -0,0 +1,52 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from collections import defaultdict
+
+try:
+ import urlparse
+except ImportError:
+ import urllib.parse as urlparse
+
+import mozpack.path as mozpath
+from mozpack.chrome.manifest import (
+ Manifest,
+ ManifestChrome,
+ ManifestOverride,
+ ManifestResource,
+ parse_manifest,
+)
+
+
class ChromeManifestHandler(object):
    """Accumulates url mappings from parsed chrome manifest entries.

    overrides maps overridden chrome urls to their replacements;
    chrome_mapping maps chrome://...:/resource://... url prefixes to the
    set of destination paths they can resolve to.
    """

    def __init__(self):
        self.overrides = {}
        self.chrome_mapping = defaultdict(set)

    def handle_manifest_entry(self, entry):
        """Record the mapping described by a single manifest entry.

        Sub-manifests (Manifest entries) are parsed and handled
        recursively.
        """
        format_strings = {
            "content": "chrome://%s/content/",
            "resource": "resource://%s/",
            "locale": "chrome://%s/locale/",
            "skin": "chrome://%s/skin/",
        }

        if isinstance(entry, (ManifestChrome, ManifestResource)):
            if isinstance(entry, ManifestResource):
                dest = entry.target
                url = urlparse.urlparse(dest)
                if not url.scheme:
                    # Relative target: resolve against the manifest's base.
                    dest = mozpath.normpath(mozpath.join(entry.base, dest))
                if url.scheme == "file":
                    dest = mozpath.normpath(url.path)
            else:
                dest = mozpath.normpath(entry.path)

            base_uri = format_strings[entry.type] % entry.name
            self.chrome_mapping[base_uri].add(dest)
        if isinstance(entry, ManifestOverride):
            self.overrides[entry.overloaded] = entry.overload
        if isinstance(entry, Manifest):
            for e in parse_manifest(None, entry.path):
                self.handle_manifest_entry(e)
diff --git a/python/mozbuild/mozbuild/codecoverage/packager.py b/python/mozbuild/mozbuild/codecoverage/packager.py
new file mode 100644
index 0000000000..92254a96f5
--- /dev/null
+++ b/python/mozbuild/mozbuild/codecoverage/packager.py
@@ -0,0 +1,71 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import errno
+import json
+import sys
+
+import buildconfig
+import mozpack.path as mozpath
+from mozpack.copier import FileRegistry, Jarrer
+from mozpack.files import FileFinder, GeneratedFile
+from mozpack.manifests import InstallManifest, UnreadableInstallManifest
+
+
def describe_install_manifest(manifest, dest_dir):
    """Return a dict mapping installed objdir paths to srcdir-relative sources.

    *manifest* is the path of an InstallManifest file and *dest_dir* the
    objdir-relative directory it installs into.  Only entries carrying a
    source path are included.  Raises IOError when the manifest cannot
    be parsed.
    """
    try:
        install_manifest = InstallManifest(manifest)
    except UnreadableInstallManifest:
        raise IOError(errno.EINVAL, "Error parsing manifest file", manifest)

    registry = FileRegistry()
    install_manifest.populate_registry(registry)

    absolute_dest = mozpath.join(buildconfig.topobjdir, dest_dir)
    mapping = {}
    for dest_file, src in registry:
        if hasattr(src, "path"):
            mapping[mozpath.join(absolute_dest, dest_file)] = mozpath.relpath(
                src.path, buildconfig.topsrcdir
            )

    return mapping
+
+
def package_coverage_data(root, output_file):
    """Package coverage data found under *root* into a jar at *output_file*.

    The archive contains every **/*.gcno file below *root* plus a
    linked-files-map.json describing where installed dist/include files
    came from in the source tree.
    """
    jar = Jarrer()
    for path, entry in FileFinder(root).find("**/*.gcno"):
        jar.add(path, entry)

    dist_include_manifest = mozpath.join(
        buildconfig.topobjdir, "_build_manifests", "install", "dist_include"
    )
    linked_files = describe_install_manifest(dist_include_manifest, "dist/include")
    jar.add(
        "linked-files-map.json",
        GeneratedFile(json.dumps(linked_files, sort_keys=True)),
    )
    jar.copy(output_file)
+
+
def cli(args=None):
    """Command-line entry point for packaging code-coverage data.

    *args* defaults to sys.argv[1:], read at call time.  (The old
    signature used ``args=sys.argv[1:]`` as the default, which bound the
    argv snapshot at import time.)  Returns the result of
    package_coverage_data.
    """
    if args is None:
        args = sys.argv[1:]
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-o", "--output-file", dest="output_file", help="Path to save packaged data to."
    )
    parser.add_argument(
        "--root", dest="root", default=None, help="Root directory to search from."
    )
    args = parser.parse_args(args)

    if not args.root:
        from buildconfig import topobjdir

        args.root = topobjdir

    return package_coverage_data(args.root, args.output_file)


if __name__ == "__main__":
    sys.exit(cli())
diff --git a/python/mozbuild/mozbuild/compilation/__init__.py b/python/mozbuild/mozbuild/compilation/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/compilation/__init__.py
diff --git a/python/mozbuild/mozbuild/compilation/codecomplete.py b/python/mozbuild/mozbuild/compilation/codecomplete.py
new file mode 100644
index 0000000000..b5a466b729
--- /dev/null
+++ b/python/mozbuild/mozbuild/compilation/codecomplete.py
@@ -0,0 +1,55 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
# This module provides functionality for dealing with code completion.
+
+from mach.decorators import Command, CommandArgument
+
+from mozbuild.shellutil import quote as shell_quote
+from mozbuild.shellutil import split as shell_split
+
+
# Introspection commands.
+
+
@Command(
    "compileflags",
    category="devenv",
    description="Display the compilation flags for a given source file",
)
@CommandArgument(
    "what", default=None, help="Source file to display compilation flags for"
)
def compileflags(command_context, what):
    """Mach command: print the compile flags used for source file *what*.

    Resolves the file to its make directory/target, queries the make
    build variables, then prints the C or C++ flags (chosen from the
    file extension) with the compiler pathname dropped and -Xclang
    pairs filtered out.  Returns 1 when the tree is not built or the
    target cannot be resolved.
    """
    from mozbuild.compilation import util
    from mozbuild.util import resolve_target_to_make

    if not util.check_top_objdir(command_context.topobjdir):
        return 1

    path_arg = command_context._wrap_path_argument(what)

    make_dir, make_target = resolve_target_to_make(
        command_context.topobjdir, path_arg.relpath()
    )

    if make_dir is None and make_target is None:
        return 1

    build_vars = util.get_build_vars(make_dir, command_context)

    # Anything that doesn't end in .c is routed through the C++ variables.
    if what.endswith(".c"):
        cc = "CC"
        name = "COMPILE_CFLAGS"
    else:
        cc = "CXX"
        name = "COMPILE_CXXFLAGS"

    if name not in build_vars:
        return

    # Drop the first flag since that is the pathname of the compiler.
    flags = (shell_split(build_vars[cc]) + shell_split(build_vars[name]))[1:]

    print(" ".join(shell_quote(arg) for arg in util.sanitize_cflags(flags)))
diff --git a/python/mozbuild/mozbuild/compilation/database.py b/python/mozbuild/mozbuild/compilation/database.py
new file mode 100644
index 0000000000..e741c88a81
--- /dev/null
+++ b/python/mozbuild/mozbuild/compilation/database.py
@@ -0,0 +1,244 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
# This module provides functionality for generating a compilation database
# (compile_commands.json).
+
+import os
+from collections import OrderedDict, defaultdict
+
+import mozpack.path as mozpath
+
+from mozbuild.backend.common import CommonBackend
+from mozbuild.frontend.data import (
+ ComputedFlags,
+ DirectoryTraversal,
+ PerSourceFlag,
+ Sources,
+ VariablePassthru,
+)
+from mozbuild.shellutil import quote as shell_quote
+from mozbuild.util import expand_variables
+
+
class CompileDBBackend(CommonBackend):
    """Build backend that emits a clang compilation database.

    Consumes frontend tree metadata and writes
    objdir/compile_commands.json, describing the command used to compile
    each source file.
    """

    def _init(self):
        CommonBackend._init(self)

        # The database we're going to dump out to.
        # Keyed by (objdir, filename, unified); values are the command
        # argument lists, still containing $(...) placeholders.
        self._db = OrderedDict()

        # The cache for per-directory flags
        self._flags = {}

        self._envs = {}
        self._local_flags = defaultdict(dict)
        self._per_source_flags = defaultdict(list)

    def _build_cmd(self, cmd, filename, unified):
        # Append the file actually handed to the compiler: the unified
        # source when there is one, otherwise the file itself.
        cmd = list(cmd)
        if unified is None:
            cmd.append(filename)
        else:
            cmd.append(unified)

        return cmd

    def consume_object(self, obj):
        """Record flags and sources carried by one frontend object."""
        # Those are difficult directories, that will be handled later.
        if obj.relsrcdir in (
            "build/unix/elfhack",
            "build/unix/elfhack/inject",
            "build/clang-plugin",
            "build/clang-plugin/tests",
        ):
            return True

        consumed = CommonBackend.consume_object(self, obj)

        if consumed:
            return True

        if isinstance(obj, DirectoryTraversal):
            self._envs[obj.objdir] = obj.config

        elif isinstance(obj, Sources):
            # For other sources, include each source file.
            for f in obj.files:
                self._build_db_line(
                    obj.objdir, obj.relsrcdir, obj.config, f, obj.canonical_suffix
                )

        elif isinstance(obj, VariablePassthru):
            for var in ("MOZBUILD_CMFLAGS", "MOZBUILD_CMMFLAGS"):
                if var in obj.variables:
                    self._local_flags[obj.objdir][var] = obj.variables[var]

        elif isinstance(obj, PerSourceFlag):
            self._per_source_flags[obj.file_name].extend(obj.flags)

        elif isinstance(obj, ComputedFlags):
            for var, flags in obj.get_flags():
                self._local_flags[obj.objdir]["COMPUTED_%s" % var] = flags

        return True

    def consume_finished(self):
        """Expand variables in every recorded command and write the JSON."""
        CommonBackend.consume_finished(self)

        db = []

        for (directory, filename, unified), cmd in self._db.items():
            env = self._envs[directory]
            cmd = self._build_cmd(cmd, filename, unified)
            variables = {
                "DIST": mozpath.join(env.topobjdir, "dist"),
                "DEPTH": env.topobjdir,
                "MOZILLA_DIR": env.topsrcdir,
                "topsrcdir": env.topsrcdir,
                "topobjdir": env.topobjdir,
            }
            variables.update(self._local_flags[directory])
            c = []
            for a in cmd:
                accum = ""
                for word in expand_variables(a, variables).split():
                    # We can't just split() the output of expand_variables since
                    # there can be spaces enclosed by quotes, e.g. '"foo bar"'.
                    # Handle that case by checking whether there are an even
                    # number of double-quotes in the word and appending it to
                    # the accumulator if not. Meanwhile, shlex.split() and
                    # mozbuild.shellutil.split() aren't able to properly handle
                    # this and break in various ways, so we can't use something
                    # off-the-shelf.
                    has_quote = bool(word.count('"') % 2)
                    if accum and has_quote:
                        c.append(accum + " " + word)
                        accum = ""
                    elif accum and not has_quote:
                        accum += " " + word
                    elif not accum and has_quote:
                        accum = word
                    else:
                        c.append(word)
            # Tell clangd to keep parsing to the end of a file, regardless of
            # how many errors are encountered. (Unified builds mean that we
            # encounter a lot of errors parsing some files.)
            c.insert(-1, "-ferror-limit=0")

            per_source_flags = self._per_source_flags.get(filename)
            if per_source_flags is not None:
                c.extend(per_source_flags)
            db.append(
                {
                    "directory": directory,
                    "command": " ".join(shell_quote(a) for a in c),
                    "file": mozpath.join(directory, filename),
                }
            )

        import json

        outputfile = self._outputfile_path()
        with self._write_file(outputfile) as jsonout:
            json.dump(db, jsonout, indent=0)

    def _outputfile_path(self):
        # Output the database (a JSON file) to objdir/compile_commands.json
        return os.path.join(self.environment.topobjdir, "compile_commands.json")

    def _process_unified_sources_without_mapping(self, obj):
        # No unified mapping available: record every file individually.
        for f in list(sorted(obj.files)):
            self._build_db_line(
                obj.objdir, obj.relsrcdir, obj.config, f, obj.canonical_suffix
            )

    def _process_unified_sources(self, obj):
        if not obj.have_unified_mapping:
            return self._process_unified_sources_without_mapping(obj)

        # For unified sources, only include the unified source file.
        # Note that unified sources are never used for host sources.
        for f in obj.unified_source_mapping:
            self._build_db_line(
                obj.objdir, obj.relsrcdir, obj.config, f[0], obj.canonical_suffix
            )
            for entry in f[1]:
                self._build_db_line(
                    obj.objdir,
                    obj.relsrcdir,
                    obj.config,
                    entry,
                    obj.canonical_suffix,
                    unified=f[0],
                )

    def _handle_idl_manager(self, idl_manager):
        # IDL generation contributes no compile commands.
        pass

    def _handle_ipdl_sources(
        self,
        ipdl_dir,
        sorted_ipdl_sources,
        sorted_nonstatic_ipdl_sources,
        sorted_static_ipdl_sources,
    ):
        # IPDL generation contributes no compile commands.
        pass

    def _handle_webidl_build(
        self,
        bindings_dir,
        unified_source_mapping,
        webidls,
        expected_build_output_files,
        global_define_files,
    ):
        for f in unified_source_mapping:
            self._build_db_line(bindings_dir, None, self.environment, f[0], ".cpp")

    # Maps canonical source suffixes to the substs variable naming the
    # compiler used for them.
    COMPILERS = {
        ".c": "CC",
        ".cpp": "CXX",
        ".m": "CC",
        ".mm": "CXX",
    }

    # Maps canonical source suffixes to the matching flags-variable family.
    CFLAGS = {
        ".c": "CFLAGS",
        ".cpp": "CXXFLAGS",
        ".m": "CFLAGS",
        ".mm": "CXXFLAGS",
    }

    def _get_compiler_args(self, cenv, canonical_suffix):
        # Returns None for suffixes we don't know how to compile.
        if canonical_suffix not in self.COMPILERS:
            return None
        return cenv.substs[self.COMPILERS[canonical_suffix]].split()

    def _build_db_line(
        self, objdir, reldir, cenv, filename, canonical_suffix, unified=None
    ):
        """Record one compilation unit in self._db.

        NOTE(review): setdefault returns the already-stored list on
        repeated keys, and flags are appended unconditionally below --
        this appears to assume each (objdir, filename, unified) key is
        seen only once; confirm against callers.
        """
        compiler_args = self._get_compiler_args(cenv, canonical_suffix)
        if compiler_args is None:
            return
        db = self._db.setdefault(
            (objdir, filename, unified),
            compiler_args + ["-o", "/dev/null", "-c"],
        )
        reldir = reldir or mozpath.relpath(objdir, cenv.topobjdir)

        def append_var(name):
            value = cenv.substs.get(name)
            if not value:
                return
            if isinstance(value, str):
                value = value.split()
            db.extend(value)

        db.append("$(COMPUTED_%s)" % self.CFLAGS[canonical_suffix])
        if canonical_suffix == ".m":
            append_var("OS_COMPILE_CMFLAGS")
            db.append("$(MOZBUILD_CMFLAGS)")
        elif canonical_suffix == ".mm":
            append_var("OS_COMPILE_CMMFLAGS")
            db.append("$(MOZBUILD_CMMFLAGS)")
diff --git a/python/mozbuild/mozbuild/compilation/util.py b/python/mozbuild/mozbuild/compilation/util.py
new file mode 100644
index 0000000000..fc06382a3b
--- /dev/null
+++ b/python/mozbuild/mozbuild/compilation/util.py
@@ -0,0 +1,64 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+
+
def check_top_objdir(topobjdir):
    """Return True when *topobjdir* looks like a built tree.

    A tree counts as built when a top-level Makefile exists; otherwise a
    hint to run |mach build| is printed and False is returned.
    """
    if os.path.exists(os.path.join(topobjdir, "Makefile")):
        return True
    print(
        "Your tree has not been built yet. Please run "
        "|mach build| with no arguments."
    )
    return False
+
+
def get_build_vars(directory, cmd):
    """Return the make variables reported by |make showbuild| in *directory*.

    *cmd* is a mach command object providing ``log_manager`` and
    ``_run_make``.  Lines of the form "NAME = value" are parsed into a
    dict; all other lines are ignored.  Terminal logging is suppressed
    for the duration of the make invocation and always restored.
    """
    build_vars = {}

    def on_line(line):
        elements = [s.strip() for s in line.split("=", 1)]

        if len(elements) != 2:
            return

        build_vars[elements[0]] = elements[1]

    # Swap the terminal handler out *before* entering the try block.  The
    # previous code did this inside the try, so if replace_terminal_handler
    # itself raised, the finally clause referenced an unbound old_logger
    # and raised UnboundLocalError, masking the real error.
    old_logger = cmd.log_manager.replace_terminal_handler(None)
    try:
        cmd._run_make(
            directory=directory,
            target="showbuild",
            log=False,
            print_directory=False,
            num_jobs=1,
            silent=True,
            line_handler=on_line,
        )
    finally:
        cmd.log_manager.replace_terminal_handler(old_logger)

    return build_vars
+
+
def sanitize_cflags(flags):
    """Return *flags* with every -Xclang pair removed.

    Clang-based tools typically choke on -Xclang arguments, which tell
    the clang driver to pass whatever comes after them straight down to
    clang cc1; both the -Xclang token and the argument immediately
    following it are dropped.  For example, the following two
    invocations pass |-foo -bar -baz| to cc1:
        clang -cc1 -foo -bar -baz
        clang -Xclang -foo -Xclang -bar -Xclang -baz
    """
    remaining = iter(flags)
    sanitized = []
    for flag in remaining:
        if flag == "-Xclang":
            # Swallow the argument that -Xclang would have forwarded.
            next(remaining, None)
        else:
            sanitized.append(flag)
    return sanitized
diff --git a/python/mozbuild/mozbuild/compilation/warnings.py b/python/mozbuild/mozbuild/compilation/warnings.py
new file mode 100644
index 0000000000..4f0ef57e51
--- /dev/null
+++ b/python/mozbuild/mozbuild/compilation/warnings.py
@@ -0,0 +1,392 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
# This module provides functionality for dealing with compiler warnings.
+
+import errno
+import io
+import json
+import os
+import re
+
+import mozpack.path as mozpath
+import six
+
+from mozbuild.util import hash_file
+
# Regular expression to strip ANSI color sequences from a string. This is
# needed to properly analyze Clang compiler output, which may be colorized.
# It assumes ANSI escape sequences.
RE_STRIP_COLORS = re.compile(r"\x1b\[[\d;]+m")

# This captures Clang diagnostics with the standard formatting.
# Named groups: file, line, column, type (warning|error), message, flag.
RE_CLANG_WARNING_AND_ERROR = re.compile(
    r"""
    (?P<file>[^:]+)
    :
    (?P<line>\d+)
    :
    (?P<column>\d+)
    :
    \s(?P<type>warning|error):\s
    (?P<message>.+)
    \[(?P<flag>[^\]]+)
    """,
    re.X,
)

# This captures Clang-cl warning format, which uses MSVC-style
# "file(line,column):" locations; same group names as above.
RE_CLANG_CL_WARNING_AND_ERROR = re.compile(
    r"""
    (?P<file>.*)
    \((?P<line>\d+),(?P<column>\d+)\)
    \s?:\s+(?P<type>warning|error):\s
    (?P<message>.*)
    \[(?P<flag>[^\]]+)
    """,
    re.X,
)

# Prefix Clang emits before the path of an including file; consumed by
# WarningsCollector to resolve relative filenames.
IN_FILE_INCLUDED_FROM = "In file included from "
+
+
class CompilerWarning(dict):
    """Represents an individual compiler warning.

    Backed by a dict so instances serialize to/from JSON trivially.
    Ordering and equality are defined by (filename, line, column).
    """

    def __init__(self):
        dict.__init__(self)

        self["filename"] = None
        self["line"] = None
        self["column"] = None
        self["message"] = None
        self["flag"] = None

    def copy(self):
        """Returns a copy of this compiler warning."""
        w = CompilerWarning()
        w.update(self)
        return w

    # Since we inherit from dict, functools.total_ordering gets confused.
    # Thus, we define a key function, a generic comparison, and then
    # implement all the rich operators with those; approach is from:
    # http://regebro.wordpress.com/2010/12/13/python-implementing-rich-comparison-the-correct-way/
    def _cmpkey(self):
        return (self["filename"], self["line"], self["column"])

    def _compare(self, other, func):
        if not isinstance(other, CompilerWarning):
            return NotImplemented

        return func(self._cmpkey(), other._cmpkey())

    def __eq__(self, other):
        return self._compare(other, lambda s, o: s == o)

    # This was previously spelled __neq__, which is not a special method
    # name -- Python never called it and instead used the __ne__ derived
    # from __eq__.  __ne__ is the correct name.
    def __ne__(self, other):
        return self._compare(other, lambda s, o: s != o)

    def __lt__(self, other):
        return self._compare(other, lambda s, o: s < o)

    def __le__(self, other):
        return self._compare(other, lambda s, o: s <= o)

    def __gt__(self, other):
        return self._compare(other, lambda s, o: s > o)

    def __ge__(self, other):
        return self._compare(other, lambda s, o: s >= o)

    def __hash__(self):
        """Define so this can exist inside a set, etc."""
        # NOTE(review): the hash depends on mutable contents; instances
        # must not be mutated while stored in a set/dict key.
        return hash(tuple(sorted(self.items())))
+
+
class WarningsDatabase(object):
    """Holds a collection of warnings.

    The warnings database is a semi-intelligent container that holds warnings
    encountered during builds.

    The warnings database is backed by a JSON file. But, that is transparent
    to consumers.

    Under most circumstances, the warnings database is insert only. When a
    warning is encountered, the caller simply blindly inserts it into the
    database. The database figures out whether it is a dupe, etc.

    During the course of development, it is common for warnings to change
    slightly as source code changes. For example, line numbers will disagree.
    The WarningsDatabase handles this by storing the hash of a file a warning
    occurred in. At warning insert time, if the hash of the file does not match
    what is stored in the database, the existing warnings for that file are
    purged from the database.

    Callers should periodically prune old, invalid warnings from the database
    by calling prune(). A good time to do this is at the end of a build.
    """

    def __init__(self):
        """Create an empty database."""
        # Maps filename -> {"hash": <file hash or None>, "warnings": set()}.
        self._files = {}

    def __len__(self):
        # Total number of warnings across all files.
        i = 0
        for value in self._files.values():
            i += len(value["warnings"])

        return i

    def __iter__(self):
        for value in self._files.values():
            for warning in value["warnings"]:
                yield warning

    def __contains__(self, item):
        for value in self._files.values():
            for warning in value["warnings"]:
                if warning == item:
                    return True

        return False

    @property
    def warnings(self):
        """All the CompilerWarning instances in this database."""
        for value in self._files.values():
            for w in value["warnings"]:
                yield w

    def type_counts(self, dirpath=None):
        """Returns a mapping of warning types to their counts.

        When *dirpath* is given, only warnings whose (separator-normalized)
        filename starts with it are counted.
        """

        types = {}
        for value in self._files.values():
            for warning in value["warnings"]:
                if dirpath and not mozpath.normsep(warning["filename"]).startswith(
                    dirpath
                ):
                    continue
                flag = warning["flag"]
                count = types.get(flag, 0)
                count += 1

                types[flag] = count

        return types

    def has_file(self, filename):
        """Whether we have any warnings for the specified file."""
        return filename in self._files

    def warnings_for_file(self, filename):
        """Obtain the warnings for the specified file."""
        f = self._files.get(filename, {"warnings": []})

        for warning in f["warnings"]:
            yield warning

    def insert(self, warning, compute_hash=True):
        """Insert a CompilerWarning.

        When *compute_hash* is True, the warning's file is hashed; if the
        hash differs from what is stored, existing warnings for that file
        are discarded first (they refer to stale content).
        """
        assert isinstance(warning, CompilerWarning)

        filename = warning["filename"]

        new_hash = None

        if compute_hash:
            new_hash = hash_file(filename)

        # File content changed since its warnings were recorded: purge.
        if filename in self._files:
            if new_hash != self._files[filename]["hash"]:
                del self._files[filename]

        value = self._files.get(
            filename,
            {
                "hash": new_hash,
                "warnings": set(),
            },
        )

        value["warnings"].add(warning)

        self._files[filename] = value

    def prune(self):
        """Prune the contents of the database.

        This removes warnings that are no longer valid. A warning is no longer
        valid if the file it was in no longer exists or if the content has
        changed.

        The check for changed content catches the case where a file previously
        contained warnings but no longer does.
        """

        # Need to calculate up front since we are mutating original object.
        filenames = list(six.iterkeys(self._files))
        for filename in filenames:
            if not os.path.exists(filename):
                del self._files[filename]
                continue

            # Entries inserted with compute_hash=False are never pruned by
            # content change.
            if self._files[filename]["hash"] is None:
                continue

            current_hash = hash_file(filename)
            if current_hash != self._files[filename]["hash"]:
                del self._files[filename]
                continue

    def serialize(self, fh):
        """Serialize the database to an open file handle."""
        obj = {"files": {}}

        # All this hackery because JSON can't handle sets.
        for k, v in six.iteritems(self._files):
            obj["files"][k] = {}

            for k2, v2 in six.iteritems(v):
                normalized = v2
                if isinstance(v2, set):
                    normalized = list(v2)
                obj["files"][k][k2] = normalized

        to_write = six.ensure_text(json.dumps(obj, indent=2))
        fh.write(to_write)

    def deserialize(self, fh):
        """Load serialized content from a handle into the current instance."""
        obj = json.load(fh)

        self._files = obj["files"]

        # Normalize data types: warning dicts back into CompilerWarning
        # instances held in sets.
        for filename, value in six.iteritems(self._files):
            if "warnings" in value:
                normalized = set()
                for d in value["warnings"]:
                    w = CompilerWarning()
                    w.update(d)
                    normalized.add(w)

                self._files[filename]["warnings"] = normalized

    def load_from_file(self, filename):
        """Load the database from a file."""
        with io.open(filename, "r", encoding="utf-8") as fh:
            self.deserialize(fh)

    def save_to_file(self, filename):
        """Save the database to a file."""
        try:
            # Ensure the directory exists
            os.makedirs(os.path.dirname(filename))
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise
        with io.open(filename, "w", encoding="utf-8", newline="\n") as fh:
            self.serialize(fh)
+
+
class WarningsCollector(object):
    """Parses compiler warnings out of text data.

    Instances receive data (usually compiler output) incrementally, one
    line at a time, and invoke a callback for every warning parsed.
    State is carried between lines so "In file included from" preambles
    can later be used to resolve relative filenames.
    """

    def __init__(self, cb, objdir=None):
        """Initialize a new collector.

        ``cb`` is a callable invoked with a ``CompilerWarning`` instance
        whenever a new warning is parsed.

        ``objdir`` is the object directory, used for normalizing paths.
        """
        self.cb = cb
        self.objdir = objdir
        self.included_from = []

    def process_line(self, line):
        """Take a line of text and process it for a warning."""
        filtered = RE_STRIP_COLORS.sub("", line)

        # Clang warnings in files included from the one(s) being compiled
        # start with "In file included from /path/to/file:line:".  Record
        # the including file's path for later relative-path resolution.
        if filtered.startswith(IN_FILE_INCLUDED_FROM):
            origin = filtered[len(IN_FILE_INCLUDED_FROM) :]
            self.included_from.append(origin.split(":")[0])
            return

        # Try the standard Clang format first, then the clang-cl format.
        # Both patterns expose identical group names, so one branch
        # handles either match.
        match = RE_CLANG_WARNING_AND_ERROR.match(
            filtered
        ) or RE_CLANG_CL_WARNING_AND_ERROR.match(filtered)
        if not match:
            self.included_from = []
            return None

        fields = match.groupdict()
        warning = CompilerWarning()
        warning["type"] = fields["type"]
        warning["line"] = int(fields["line"])
        warning["column"] = int(fields["column"])
        warning["flag"] = fields["flag"]
        warning["message"] = fields["message"].rstrip()

        filename = os.path.normpath(fields["file"])

        # Sometimes we get relative includes. These typically point to files in
        # the object directory. We try to resolve the relative path.
        if not os.path.isabs(filename):
            filename = self._normalize_relative_path(filename)

        warning["filename"] = filename

        self.cb(warning)

        return warning

    def _normalize_relative_path(self, filename):
        # Special case files in dist/include.
        idx = filename.find("/dist/include")
        if idx != -1:
            return self.objdir + filename[idx:]

        # Otherwise, try resolving against the directories of the files
        # recorded from "In file included from" lines.
        for origin in self.included_from:
            candidate = os.path.normpath(
                os.path.join(os.path.dirname(origin), filename)
            )
            if os.path.exists(candidate):
                return candidate

        return filename
diff --git a/python/mozbuild/mozbuild/config_status.py b/python/mozbuild/mozbuild/config_status.py
new file mode 100644
index 0000000000..8e8a7f625b
--- /dev/null
+++ b/python/mozbuild/mozbuild/config_status.py
@@ -0,0 +1,184 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# Combined with build/autoconf/config.status.m4, ConfigStatus is an almost
+# drop-in replacement for autoconf 2.13's config.status, with features
+# borrowed from autoconf > 2.5, and additional features.
+
+import logging
+import os
+import sys
+import time
+from argparse import ArgumentParser
+from itertools import chain
+
+from mach.logging import LoggingManager
+
+from mozbuild.backend import backends, get_backend_class
+from mozbuild.backend.configenvironment import ConfigEnvironment
+from mozbuild.base import MachCommandConditions
+from mozbuild.frontend.emitter import TreeMetadataEmitter
+from mozbuild.frontend.reader import BuildReader
+from mozbuild.mozinfo import write_mozinfo
+from mozbuild.util import FileAvoidWrite, process_time
+
# Shared logging manager; config_status() wires terminal logging through it.
log_manager = LoggingManager()


# Notice printed at the end of a GeckoView build pointing developers at the
# Android Studio / Gradle workflow.
ANDROID_IDE_ADVERTISEMENT = """
=============
ADVERTISEMENT

You are building GeckoView. After your build completes, you can open
the top source directory in Android Studio directly and build using Gradle.
See the documentation at

https://firefox-source-docs.mozilla.org/mobile/android/geckoview/contributor/geckoview-quick-start.html#build-using-android-studio
=============
""".strip()
+
+
def config_status(
    topobjdir=".",
    topsrcdir=".",
    defines=None,
    substs=None,
    source=None,
    mozconfig=None,
    args=None,
):
    """Main function, providing config.status functionality.

    Contrary to config.status, it doesn't use CONFIG_FILES or CONFIG_HEADERS
    variables.

    Without the -n option, this program acts as config.status and considers
    the current directory as the top object directory, even when config.status
    is in a different directory. It will, however, treat the directory
    containing config.status as the top object directory with the -n option.

    The options to this function are passed when creating the
    ConfigEnvironment. These lists, as well as the actual wrapper script
    around this function, are meant to be generated by configure.
    See build/autoconf/config.status.m4.

    :param args: command line arguments to parse; defaults to sys.argv[1:],
        resolved at call time (rather than captured once at import time,
        which the previous `args=sys.argv[1:]` default did).
    """
    if args is None:
        args = sys.argv[1:]

    if "CONFIG_FILES" in os.environ:
        raise Exception(
            "Using the CONFIG_FILES environment variable is not " "supported."
        )
    if "CONFIG_HEADERS" in os.environ:
        raise Exception(
            "Using the CONFIG_HEADERS environment variable is not " "supported."
        )

    if not os.path.isabs(topsrcdir):
        raise Exception(
            "topsrcdir must be defined as an absolute directory: " "%s" % topsrcdir
        )

    # Fix: the previous code assigned a literal default that was immediately
    # overwritten by this lookup; keep only the effective statement.
    default_backends = (substs or {}).get("BUILD_BACKENDS", ["RecursiveMake"])

    parser = ArgumentParser()
    parser.add_argument(
        "-v",
        "--verbose",
        dest="verbose",
        action="store_true",
        help="display verbose output",
    )
    parser.add_argument(
        "-n",
        dest="not_topobjdir",
        action="store_true",
        help="do not consider current directory as top object directory",
    )
    parser.add_argument(
        "-d", "--diff", action="store_true", help="print diffs of changed files."
    )
    parser.add_argument(
        "-b",
        "--backend",
        nargs="+",
        choices=sorted(backends),
        default=default_backends,
        help="what backend to build (default: %s)." % " ".join(default_backends),
    )
    parser.add_argument(
        "--dry-run", action="store_true", help="do everything except writing files out."
    )
    options = parser.parse_args(args)

    # Without -n, the current directory is meant to be the top object directory
    if not options.not_topobjdir:
        topobjdir = os.path.realpath(".")

    env = ConfigEnvironment(
        topsrcdir,
        topobjdir,
        defines=defines,
        substs=substs,
        source=source,
        mozconfig=mozconfig,
    )

    # FileAvoidWrite only touches mozinfo.json when its content changes,
    # avoiding spurious rebuild triggers.
    with FileAvoidWrite(os.path.join(topobjdir, "mozinfo.json")) as f:
        write_mozinfo(f, env, os.environ)

    cpu_start = process_time()
    time_start = time.monotonic()

    # Make appropriate backend instances, defaulting to RecursiveMakeBackend,
    # or what is in BUILD_BACKENDS.
    selected_backends = [get_backend_class(b)(env) for b in options.backend]

    if options.dry_run:
        for b in selected_backends:
            b.dry_run = True

    reader = BuildReader(env)
    emitter = TreeMetadataEmitter(env)
    # This won't actually do anything because of the magic of generators.
    definitions = emitter.emit(reader.read_topsrcdir())

    log_level = logging.DEBUG if options.verbose else logging.INFO
    log_manager.add_terminal_logging(level=log_level)
    log_manager.enable_unstructured()

    print("Reticulating splines...", file=sys.stderr)
    if len(selected_backends) > 1:
        # A generator can only be consumed once; materialize it when more
        # than one backend needs to see the definitions.
        definitions = list(definitions)

    for the_backend in selected_backends:
        the_backend.consume(definitions)

    execution_time = 0.0
    for obj in chain((reader, emitter), selected_backends):
        summary = obj.summary()
        print(summary, file=sys.stderr)
        execution_time += summary.execution_time
        if hasattr(obj, "gyp_summary"):
            summary = obj.gyp_summary()
            print(summary, file=sys.stderr)

    cpu_time = process_time() - cpu_start
    wall_time = time.monotonic() - time_start
    # NOTE(review): the 100 fallback renders as "10000%" under {:.0%} when
    # wall_time is 0 -- preserved as-is; confirm whether 1.0 was intended.
    efficiency = cpu_time / wall_time if wall_time else 100
    untracked = wall_time - execution_time

    print(
        "Total wall time: {:.2f}s; CPU time: {:.2f}s; Efficiency: "
        "{:.0%}; Untracked: {:.2f}s".format(wall_time, cpu_time, efficiency, untracked),
        file=sys.stderr,
    )

    if options.diff:
        for the_backend in selected_backends:
            for path, diff in sorted(the_backend.file_diffs.items()):
                print("\n".join(diff))

    # Advertise Android Studio if it is appropriate.
    if MachCommandConditions.is_android(env):
        print(ANDROID_IDE_ADVERTISEMENT)
diff --git a/python/mozbuild/mozbuild/configure/__init__.py b/python/mozbuild/mozbuild/configure/__init__.py
new file mode 100644
index 0000000000..f60f179d6b
--- /dev/null
+++ b/python/mozbuild/mozbuild/configure/__init__.py
@@ -0,0 +1,1311 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import codecs
+import inspect
+import logging
+import os
+import re
+import sys
+import types
+from collections import OrderedDict
+from contextlib import contextmanager
+from functools import wraps
+
+import mozpack.path as mozpath
+import six
+from six.moves import builtins as __builtin__
+
+from mozbuild.configure.help import HelpFormatter
+from mozbuild.configure.options import (
+ HELP_OPTIONS_CATEGORY,
+ CommandLineHelper,
+ ConflictingOptionError,
+ InvalidOptionError,
+ Option,
+ OptionValue,
+)
+from mozbuild.configure.util import ConfigureOutputHandler, LineIO, getpreferredencoding
+from mozbuild.util import (
+ ReadOnlyDict,
+ ReadOnlyNamespace,
+ exec_,
+ memoize,
+ memoized_property,
+ system_encoding,
+)
+
# TRACE logging level, below (thus more verbose than) DEBUG
TRACE = 5


class ConfigureError(Exception):
    """Error raised for invalid use of the configure sandbox primitives."""

    pass
+
+
class SandboxDependsFunction(object):
    """Sandbox-visible representation of @depends functions.

    Acts as an opaque proxy inside the sandbox: it cannot be called or
    compared; only `|`, `&` and attribute access are forwarded to the
    underlying (unsandboxed) DependsFunction.
    """

    def __init__(self, unsandboxed):
        # Capture only the operations the sandbox is allowed to use on the
        # proxy; everything else raises.
        self._or = unsandboxed.__or__
        self._and = unsandboxed.__and__
        self._getattr = unsandboxed.__getattr__

    def __call__(self, *arg, **kwargs):
        # __name__ is copied onto this proxy by functools.wraps() in
        # DependsFunction.__init__.
        raise ConfigureError("The `%s` function may not be called" % self.__name__)

    def __or__(self, other):
        # `a | b` combines two @depends functions (lazy "or").
        if not isinstance(other, SandboxDependsFunction):
            raise ConfigureError(
                "Can only do binary arithmetic operations "
                "with another @depends function."
            )
        return self._or(other).sandboxed

    def __and__(self, other):
        # `a & b` combines two @depends functions (lazy "and").
        if not isinstance(other, SandboxDependsFunction):
            raise ConfigureError(
                "Can only do binary arithmetic operations "
                "with another @depends function."
            )
        return self._and(other).sandboxed

    # All comparison operators raise, so @depends results cannot be compared
    # directly in the sandbox; identity-based hashing remains available so
    # proxies can still be used as dict keys.
    def __cmp__(self, other):
        raise ConfigureError("Cannot compare @depends functions.")

    def __eq__(self, other):
        raise ConfigureError("Cannot compare @depends functions.")

    def __hash__(self):
        return object.__hash__(self)

    def __ne__(self, other):
        raise ConfigureError("Cannot compare @depends functions.")

    def __lt__(self, other):
        raise ConfigureError("Cannot compare @depends functions.")

    def __le__(self, other):
        raise ConfigureError("Cannot compare @depends functions.")

    def __gt__(self, other):
        raise ConfigureError("Cannot compare @depends functions.")

    def __ge__(self, other):
        raise ConfigureError("Cannot compare @depends functions.")

    def __getattr__(self, key):
        # Attribute access yields another sandboxed @depends proxy.
        return self._getattr(key).sandboxed

    def __nonzero__(self):
        # Python 2 truthiness hook. NOTE(review): Python 3 uses __bool__,
        # which is not defined here -- confirm blocking bool() is still
        # effective/intended on py3.
        raise ConfigureError("Cannot do boolean operations on @depends functions.")
+
+
class DependsFunction(object):
    """Unsandboxed implementation behind a @depends function.

    Holds the wrapped function (or literal value), its resolved
    dependencies and optional `when` condition, and registers itself with
    the owning sandbox.
    """

    __slots__ = (
        "_func",
        "_name",
        "dependencies",
        "when",
        "sandboxed",
        "sandbox",
        "_result",
    )

    def __init__(self, sandbox, func, dependencies, when=None):
        assert isinstance(sandbox, ConfigureSandbox)
        assert not inspect.isgeneratorfunction(func)
        # Allow non-functions when there are no dependencies. This is equivalent
        # to passing a lambda that returns the given value.
        if not (inspect.isroutine(func) or not dependencies):
            print(func)
        assert inspect.isroutine(func) or not dependencies
        self._func = func
        self._name = getattr(func, "__name__", None)
        self.dependencies = dependencies
        # wraps() copies the function's metadata (e.g. __name__) onto the
        # sandbox-visible proxy.
        self.sandboxed = wraps(func)(SandboxDependsFunction(self))
        self.sandbox = sandbox
        self.when = when
        sandbox._depends[self.sandboxed] = self

        # Only @depends functions with a dependency on '--help' are executed
        # immediately. Everything else is queued for later execution.
        if sandbox._help_option in dependencies:
            sandbox._value_for(self)
        elif not sandbox._help:
            sandbox._execution_queue.append((sandbox._value_for, (self,)))

    @property
    def name(self):
        # Name of the wrapped function; may be overridden when the sandboxed
        # proxy is assigned to a sandbox global (see ConfigureSandbox.__setitem__).
        return self._name

    @name.setter
    def name(self, value):
        self._name = value

    @property
    def sandboxed_dependencies(self):
        # Dependencies as the sandbox sees them: proxies instead of the
        # unsandboxed DependsFunction objects.
        return [
            d.sandboxed if isinstance(d, DependsFunction) else d
            for d in self.dependencies
        ]

    @memoize
    def result(self):
        """Compute (at most once) the value of this function.

        Returns None without calling the function when the `when`
        condition resolves falsy.
        """
        if self.when and not self.sandbox._value_for(self.when):
            return None

        if inspect.isroutine(self._func):
            resolved_args = [self.sandbox._value_for(d) for d in self.dependencies]
            return self._func(*resolved_args)
        # Literal value wrapped without dependencies.
        return self._func

    def __repr__(self):
        return "<%s %s(%s)>" % (
            self.__class__.__name__,
            self.name,
            ", ".join(repr(d) for d in self.dependencies),
        )

    def __or__(self, other):
        # Build a combined function evaluating `self or other` lazily.
        if isinstance(other, SandboxDependsFunction):
            other = self.sandbox._depends.get(other)
        assert isinstance(other, DependsFunction)
        assert self.sandbox is other.sandbox
        return CombinedDependsFunction(self.sandbox, self.or_impl, (self, other))

    @staticmethod
    def or_impl(iterable):
        # Applies "or" to all the items of iterable.
        # e.g. if iterable contains a, b and c, returns `a or b or c`.
        for i in iterable:
            if i:
                return i
        return i

    def __and__(self, other):
        # Build a combined function evaluating `self and other` lazily.
        if isinstance(other, SandboxDependsFunction):
            other = self.sandbox._depends.get(other)
        assert isinstance(other, DependsFunction)
        assert self.sandbox is other.sandbox
        return CombinedDependsFunction(self.sandbox, self.and_impl, (self, other))

    @staticmethod
    def and_impl(iterable):
        # Applies "and" to all the items of iterable.
        # e.g. if iterable contains a, b and c, returns `a and b and c`.
        for i in iterable:
            if not i:
                return i
        return i

    def __getattr__(self, key):
        if key.startswith("_"):
            # NOTE(review): object does not define __getattr__; this super()
            # call would raise AttributeError for missing private attributes
            # -- confirm intended.
            return super(DependsFunction, self).__getattr__(key)
        # Our function may return None or an object that simply doesn't have
        # the wanted key. In that case, just return None.
        return TrivialDependsFunction(
            self.sandbox, lambda x: getattr(x, key, None), [self], self.when
        )
+
+
class TrivialDependsFunction(DependsFunction):
    """Like a DependsFunction, but the linter won't expect it to have a
    dependency on --help ever.

    Used for the attribute-access results created by
    DependsFunction.__getattr__."""
+
+
class CombinedDependsFunction(DependsFunction):
    """A DependsFunction applying `func` to the values of several other
    @depends functions (used by `|`, `&` and combined `when` conditions)."""

    def __init__(self, sandbox, func, dependencies):
        # Flatten nested combinations using the same `func`, preserving
        # first-seen order and dropping duplicates.
        unique = []
        for dep in dependencies:
            if isinstance(dep, CombinedDependsFunction) and dep._func is func:
                nested = dep.dependencies
            else:
                nested = (dep,)
            for candidate in nested:
                if candidate not in unique:
                    unique.append(candidate)

        super(CombinedDependsFunction, self).__init__(sandbox, func, unique)

    @memoize
    def result(self):
        """Apply `func` to the lazily-resolved dependency values."""
        values = (self.sandbox._value_for(d) for d in self.dependencies)
        return self._func(values)

    def __eq__(self, other):
        # Equal when combining the same function over the same set of
        # dependencies.
        if not isinstance(other, self.__class__):
            return False
        if self._func is not other._func:
            return False
        return set(self.dependencies) == set(other.dependencies)

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        return object.__hash__(self)
+
+
class SandboxedGlobal(dict):
    """Identifiable dict type for use as function global.

    A plain dict subclass; its only purpose is to be distinguishable (via
    isinstance) from ordinary dicts."""
+
+
def forbidden_import(*args, **kwargs):
    """Stand-in for __import__ in the sandbox builtins: always refuses."""
    raise ImportError("Importing modules is forbidden")
+
+
+class ConfigureSandbox(dict):
+ """Represents a sandbox for executing Python code for build configuration.
+ This is a different kind of sandboxing than the one used for moz.build
+ processing.
+
+ The sandbox has 9 primitives:
+ - option
+ - depends
+ - template
+ - imports
+ - include
+ - set_config
+ - set_define
+ - imply_option
+ - only_when
+
+ `option`, `include`, `set_config`, `set_define` and `imply_option` are
+ functions. `depends`, `template`, and `imports` are decorators. `only_when`
+ is a context_manager.
+
+ These primitives are declared as name_impl methods to this class and
+ the mapping name -> name_impl is done automatically in __getitem__.
+
+ Additional primitives should be frowned upon to keep the sandbox itself as
+ simple as possible. Instead, helpers should be created within the sandbox
+ with the existing primitives.
+
+ The sandbox is given, at creation, a dict where the yielded configuration
+ will be stored.
+
+ config = {}
+ sandbox = ConfigureSandbox(config)
+ sandbox.run(path)
+ do_stuff(config)
+ """
+
    # The default set of builtins. We expose unicode as str to make sandboxed
    # files more python3-ready.
    # Only this conservative whitelist is reachable from sandboxed code;
    # __import__ is replaced so `import` statements raise.
    BUILTINS = ReadOnlyDict(
        {
            b: getattr(__builtin__, b, None)
            for b in (
                "AssertionError",
                "False",
                "None",
                "True",
                "__build_class__",  # will be None on py2
                "all",
                "any",
                "bool",
                "dict",
                "enumerate",
                "getattr",
                "hasattr",
                "int",
                "isinstance",
                "len",
                "list",
                "max",
                "min",
                "range",
                "set",
                "sorted",
                "tuple",
                "zip",
            )
        },
        __import__=forbidden_import,
        str=six.text_type,
    )

    # Expose a limited set of functions from os.path
    # The mozpath variant is preferred when one exists for a given name.
    OS = ReadOnlyNamespace(
        path=ReadOnlyNamespace(
            **{
                k: getattr(mozpath, k, getattr(os.path, k))
                for k in (
                    "abspath",
                    "basename",
                    "dirname",
                    "isabs",
                    "join",
                    "normcase",
                    "normpath",
                    "realpath",
                    "relpath",
                )
            }
        )
    )
+
    def __init__(
        self,
        config,
        environ=os.environ,
        argv=sys.argv,
        stdout=sys.stdout,
        stderr=sys.stderr,
        logger=None,
    ):
        """Create a sandbox storing its results in `config` (a dict).

        `environ`/`argv` default to the process environment and command
        line; `stdout`/`stderr` are used by the default logger, and
        `logger` may override it entirely.
        """
        dict.__setitem__(self, "__builtins__", self.BUILTINS)

        self._environ = dict(environ)

        # Stack of currently-included files, and set of every file ever
        # included (used to reject duplicate includes).
        self._paths = []
        self._all_paths = set()
        self._templates = set()
        # Associate SandboxDependsFunctions to DependsFunctions.
        self._depends = OrderedDict()
        self._seen = set()
        # Store the @imports added to a given function.
        self._imports = {}

        self._options = OrderedDict()
        # Store raw option (as per command line or environment) for each Option
        self._raw_options = OrderedDict()

        # Store options added with `imply_option`, and the reason they were
        # added (which can either have been given to `imply_option`, or
        # inferred. Their order matters, so use a list.
        self._implied_options = []

        # Store all results from _prepare_function
        self._prepared_functions = set()

        # Queue of functions to execute, with their arguments
        self._execution_queue = []

        # Store the `when`s associated to some options.
        self._conditions = {}

        # A list of conditions to apply as a default `when` for every *_impl()
        self._default_conditions = []

        self._helper = CommandLineHelper(environ, argv)

        assert isinstance(config, dict)
        self._config = config

        # Tracks how many templates "deep" we are in the stack.
        self._template_depth = 0

        logging.addLevelName(TRACE, "TRACE")
        if logger is None:
            # Default logger: route through ConfigureOutputHandler, which
            # writes to stdout/stderr and supports queueing debug output.
            logger = moz_logger = logging.getLogger("moz.configure")
            logger.setLevel(logging.DEBUG)
            formatter = logging.Formatter("%(levelname)s: %(message)s")
            handler = ConfigureOutputHandler(stdout, stderr)
            handler.setFormatter(formatter)
            queue_debug = handler.queue_debug
            logger.addHandler(handler)

        else:
            assert isinstance(logger, logging.Logger)
            moz_logger = None

            # With an externally-provided logger, queue_debug is a no-op
            # context manager.
            @contextmanager
            def queue_debug():
                yield

        self._logger = logger

        # Some callers will manage to log a bytestring with characters in it
        # that can't be converted to ascii. Make our log methods robust to this
        # by detecting the encoding that a producer is likely to have used.
        encoding = getpreferredencoding()

        def wrapped_log_method(logger, key):
            method = getattr(logger, key)

            def wrapped(*args, **kwargs):
                # Decode any bytes arguments before handing them to logging.
                out_args = [
                    six.ensure_text(arg, encoding=encoding or "utf-8")
                    if isinstance(arg, six.binary_type)
                    else arg
                    for arg in args
                ]
                return method(*out_args, **kwargs)

            return wrapped

        log_namespace = {
            k: wrapped_log_method(logger, k)
            for k in ("debug", "info", "warning", "error")
        }
        log_namespace["queue_debug"] = queue_debug
        self.log_impl = ReadOnlyNamespace(**log_namespace)

        self._help = None
        self._help_option = self.option_impl(
            "--help", help="print this message", category=HELP_OPTIONS_CATEGORY
        )
        self._seen.add(self._help_option)

        # Always-true / always-false conditions used by _normalize_when().
        self._always = DependsFunction(self, lambda: True, [])
        self._never = DependsFunction(self, lambda: False, [])

        if self._value_for(self._help_option):
            # --help requested: collect options for display instead of
            # running the full configure.
            self._help = HelpFormatter(argv[0])
            self._help.add(self._help_option)
        elif moz_logger:
            # Not in help mode: also log everything to config.log.
            handler = logging.FileHandler(
                "config.log", mode="w", delay=True, encoding="utf-8"
            )
            handler.setFormatter(formatter)
            logger.addHandler(handler)
+
+ def include_file(self, path):
+ """Include one file in the sandbox. Users of this class probably want
+ to use `run` instead.
+
+ Note: this will execute all template invocations, as well as @depends
+ functions that depend on '--help', but nothing else.
+ """
+
+ if self._paths:
+ path = mozpath.join(mozpath.dirname(self._paths[-1]), path)
+ path = mozpath.normpath(path)
+ if not mozpath.basedir(path, (mozpath.dirname(self._paths[0]),)):
+ raise ConfigureError(
+ "Cannot include `%s` because it is not in a subdirectory "
+ "of `%s`" % (path, mozpath.dirname(self._paths[0]))
+ )
+ else:
+ path = mozpath.realpath(mozpath.abspath(path))
+ if path in self._all_paths:
+ raise ConfigureError(
+ "Cannot include `%s` because it was included already." % path
+ )
+ self._paths.append(path)
+ self._all_paths.add(path)
+
+ with open(path, "rb") as fh:
+ source = fh.read()
+
+ code = compile(source, path, "exec")
+
+ exec_(code, self)
+
+ self._paths.pop(-1)
+
    def run(self, path=None):
        """Executes the given file within the sandbox, as well as everything
        pending from any other included file, and ensure the overall
        consistency of the executed script(s)."""
        if path:
            self.include_file(path)

        for option in six.itervalues(self._options):
            # All options must be referenced by some @depends function
            if option not in self._seen:
                raise ConfigureError(
                    "Option `%s` is not handled ; reference it with a @depends"
                    % option.option
                )

            self._value_for(option)

        # All implied options should exist.
        for implied_option in self._implied_options:
            value = self._resolve(implied_option.value)
            if value is not None:
                # There are two ways to end up here: either the implied option
                # is unknown, or it's known but there was a dependency loop
                # that prevented the implication from being applied.
                option = self._options.get(implied_option.name)
                if not option:
                    raise ConfigureError(
                        "`%s`, emitted from `%s` line %d, is unknown."
                        % (
                            implied_option.option,
                            implied_option.caller[1],
                            implied_option.caller[2],
                        )
                    )
                # If the option is known, check that the implied value doesn't
                # conflict with what value was attributed to the option.
                if implied_option.when and not self._value_for(implied_option.when):
                    continue
                option_value = self._value_for_option(option)
                if value != option_value:
                    reason = implied_option.reason
                    if isinstance(reason, Option):
                        # Prefer the raw command-line spelling of the option
                        # for the error message, without its `=value` part.
                        reason = self._raw_options.get(reason) or reason.option
                        reason = reason.split("=", 1)[0]
                    value = OptionValue.from_(value)
                    raise InvalidOptionError(
                        "'%s' implied by '%s' conflicts with '%s' from the %s"
                        % (
                            value.format(option.option),
                            reason,
                            option_value.format(option.option),
                            option_value.origin,
                        )
                    )

        # All options should have been removed (handled) by now.
        for arg in self._helper:
            without_value = arg.split("=", 1)[0]
            msg = "Unknown option: %s" % without_value
            if self._help:
                # In help mode, unknown options only produce a warning.
                self._logger.warning(msg)
            else:
                raise InvalidOptionError(msg)

        # Run the execution queue
        for func, args in self._execution_queue:
            func(*args)

        if self._help:
            with LineIO(self.log_impl.info) as out:
                self._help.usage(out)
+
+ def __getitem__(self, key):
+ impl = "%s_impl" % key
+ func = getattr(self, impl, None)
+ if func:
+ return func
+
+ return super(ConfigureSandbox, self).__getitem__(key)
+
    def __setitem__(self, key, value):
        # Sandbox globals may only be set to @depends results, @template
        # functions, plain functions (which get sandboxed), or Exception
        # subclasses; builtins and *_impl-backed names are immutable.
        if (
            key in self.BUILTINS
            or key == "__builtins__"
            or hasattr(self, "%s_impl" % key)
        ):
            raise KeyError("Cannot reassign builtins")

        if inspect.isfunction(value) and value not in self._templates:
            # Plain function assigned in the sandbox: prepare (sandbox) it.
            value = self._prepare_function(value)

        elif (
            not isinstance(value, SandboxDependsFunction)
            and value not in self._templates
            and not (inspect.isclass(value) and issubclass(value, Exception))
        ):
            raise KeyError(
                "Cannot assign `%s` because it is neither a "
                "@depends nor a @template" % key
            )

        if isinstance(value, SandboxDependsFunction):
            # Give the underlying DependsFunction the assigned name for
            # better diagnostics.
            self._depends[value].name = key

        return super(ConfigureSandbox, self).__setitem__(key, value)
+
+ def _resolve(self, arg):
+ if isinstance(arg, SandboxDependsFunction):
+ return self._value_for_depends(self._depends[arg])
+ return arg
+
+ def _value_for(self, obj):
+ if isinstance(obj, SandboxDependsFunction):
+ assert obj in self._depends
+ return self._value_for_depends(self._depends[obj])
+
+ elif isinstance(obj, DependsFunction):
+ return self._value_for_depends(obj)
+
+ elif isinstance(obj, Option):
+ return self._value_for_option(obj)
+
+ assert False
+
    @memoize
    def _value_for_depends(self, obj):
        # Compute and trace the value of a DependsFunction; memoized so each
        # function is resolved at most once per sandbox.
        value = obj.result()
        self._logger.log(TRACE, "%r = %r", obj, value)
        return value
+
    @memoize
    def _value_for_option(self, option):
        # Resolve the value of an Option, applying pending imply_option()s
        # that target it and honoring its `when` condition. Memoized.
        implied = {}
        matching_implied_options = [
            o for o in self._implied_options if o.name in (option.name, option.env)
        ]
        # Update self._implied_options before going into the loop with the non-matching
        # options.
        self._implied_options = [
            o for o in self._implied_options if o.name not in (option.name, option.env)
        ]

        for implied_option in matching_implied_options:
            if implied_option.when and not self._value_for(implied_option.when):
                continue

            value = self._resolve(implied_option.value)

            if value is not None:
                # Feed the implied value to the command-line helper with an
                # "implied" origin so it participates in conflict detection.
                value = OptionValue.from_(value)
                opt = value.format(implied_option.option)
                self._helper.add(opt, "implied")
                implied[opt] = implied_option

        try:
            value, option_string = self._helper.handle(option)
        except ConflictingOptionError as e:
            reason = implied[e.arg].reason
            if isinstance(reason, Option):
                reason = self._raw_options.get(reason) or reason.option
                reason = reason.split("=", 1)[0]
            raise InvalidOptionError(
                "'%s' implied by '%s' conflicts with '%s' from the %s"
                % (e.arg, reason, e.old_arg, e.old_origin)
            )

        if value.origin == "implied":
            # Detect dependency loops: if resolving the implied value
            # recursed into this very option, the memoization cache
            # (stored under "__value_for_option") already has an entry.
            recursed_value = getattr(self, "__value_for_option").get((option,))
            if recursed_value is not None:
                _, filename, line, _, _, _ = implied[value.format(option.option)].caller
                raise ConfigureError(
                    "'%s' appears somewhere in the direct or indirect dependencies when "
                    "resolving imply_option at %s:%d" % (option.option, filename, line)
                )

        if option_string:
            self._raw_options[option] = option_string

        when = self._conditions.get(option)
        # If `when` resolves to a false-ish value, we always return None.
        # This makes option(..., when='--foo') equivalent to
        # option(..., when=depends('--foo')(lambda x: x)).
        if when and not self._value_for(when) and value is not None:
            # If the option was passed explicitly, we throw an error that
            # the option is not available. Except when the option was passed
            # from the environment, because that would be too cumbersome.
            if value.origin not in ("default", "environment"):
                raise InvalidOptionError(
                    "%s is not available in this configuration"
                    % option_string.split("=", 1)[0]
                )
            self._logger.log(TRACE, "%r = None", option)
            return None

        self._logger.log(TRACE, "%r = %r", option, value)
        return value
+
+ def _dependency(self, arg, callee_name, arg_name=None):
+ if isinstance(arg, six.string_types):
+ prefix, name, values = Option.split_option(arg)
+ if values != ():
+ raise ConfigureError("Option must not contain an '='")
+ if name not in self._options:
+ raise ConfigureError(
+ "'%s' is not a known option. " "Maybe it's declared too late?" % arg
+ )
+ arg = self._options[name]
+ self._seen.add(arg)
+ elif isinstance(arg, SandboxDependsFunction):
+ assert arg in self._depends
+ arg = self._depends[arg]
+ else:
+ raise TypeError(
+ "Cannot use object of type '%s' as %sargument to %s"
+ % (
+ type(arg).__name__,
+ "`%s` " % arg_name if arg_name else "",
+ callee_name,
+ )
+ )
+ return arg
+
+ def _normalize_when(self, when, callee_name):
+ if when is True:
+ when = self._always
+ elif when is False:
+ when = self._never
+ elif when is not None:
+ when = self._dependency(when, callee_name, "when")
+
+ if self._default_conditions:
+ # Create a pseudo @depends function for the combination of all
+ # default conditions and `when`.
+ dependencies = [when] if when else []
+ dependencies.extend(self._default_conditions)
+ if len(dependencies) == 1:
+ return dependencies[0]
+ return CombinedDependsFunction(self, all, dependencies)
+ return when
+
    @contextmanager
    def only_when_impl(self, when):
        """Implementation of only_when()

        `only_when` is a context manager that essentially makes calls to
        other sandbox functions within the context block ignored.
        """
        when = self._normalize_when(when, "only_when")
        # Avoid pushing the same condition twice in a row (e.g. directly
        # nested only_when blocks with an identical `when`).
        if when and self._default_conditions[-1:] != [when]:
            self._default_conditions.append(when)
            yield
            self._default_conditions.pop()
        else:
            yield
+
    def option_impl(self, *args, **kwargs):
        """Implementation of option()
        This function creates and returns an Option() object, passing it the
        resolved arguments (uses the result of functions when functions are
        passed). In most cases, the result of this function is not expected to
        be used.
        Command line argument/environment variable parsing for this Option is
        handled here.
        """
        when = self._normalize_when(kwargs.get("when"), "option")
        args = [self._resolve(arg) for arg in args]
        kwargs = {k: self._resolve(v) for k, v in six.iteritems(kwargs) if k != "when"}
        # The Option constructor needs to look up the stack to infer a category
        # for the Option, since the category is based on the filename where the
        # Option is defined. However, if the Option is defined in a template, we
        # want the category to reference the caller of the template rather than
        # the caller of the option() function.
        kwargs["define_depth"] = self._template_depth * 3
        option = Option(*args, **kwargs)
        if when:
            self._conditions[option] = when
        # Register under both the command-line name and the environment
        # variable name, rejecting duplicates of either.
        if option.name in self._options:
            raise ConfigureError("Option `%s` already defined" % option.option)
        if option.env in self._options:
            raise ConfigureError("Option `%s` already defined" % option.env)
        if option.name:
            self._options[option.name] = option
        if option.env:
            self._options[option.env] = option

        # In help mode, only advertise options whose condition holds.
        if self._help and (when is None or self._value_for(when)):
            self._help.add(option)

        return option
+
    def depends_impl(self, *args, **kwargs):
        """Implementation of @depends()
        This function is a decorator. It returns a function that subsequently
        takes a function and returns a dummy function. The dummy function
        identifies the actual function for the sandbox, while preventing
        further function calls from within the sandbox.

        @depends() takes a variable number of option strings or dummy function
        references. The decorated function is called as soon as the decorator
        is called, and the arguments it receives are the OptionValue or
        function results corresponding to each of the arguments to @depends.
        As an exception, when a HelpFormatter is attached, only functions that
        have '--help' in their @depends argument list are called.

        The decorated function is altered to use a different global namespace
        for its execution. This different global namespace exposes a limited
        set of functions from os.path.
        """
        # `when` is the only accepted keyword argument.
        for k in kwargs:
            if k != "when":
                raise TypeError(
                    "depends_impl() got an unexpected keyword argument '%s'" % k
                )

        when = self._normalize_when(kwargs.get("when"), "@depends")

        if not when and not args:
            raise ConfigureError("@depends needs at least one argument")

        dependencies = tuple(self._dependency(arg, "@depends") for arg in args)

        # Options that carry their own `when` must be used with that same
        # `when` on the @depends function.
        conditions = [
            self._conditions[d]
            for d in dependencies
            if d in self._conditions and isinstance(d, Option)
        ]
        for c in conditions:
            if c != when:
                raise ConfigureError(
                    "@depends function needs the same `when` "
                    "as options it depends on"
                )

        def decorator(func):
            if inspect.isgeneratorfunction(func):
                raise ConfigureError(
                    "Cannot decorate generator functions with @depends"
                )
            if inspect.isroutine(func):
                if func in self._templates:
                    raise TypeError("Cannot use a @template function here")
                # Sandbox the function (restricted globals, @imports).
                func = self._prepare_function(func)
            elif isinstance(func, SandboxDependsFunction):
                raise TypeError("Cannot nest @depends functions")
            elif dependencies:
                # Literal (non-function) values are only allowed when there
                # are no dependencies.
                raise TypeError(
                    "Cannot wrap literal values in @depends with dependencies"
                )
            depends = DependsFunction(self, func, dependencies, when=when)
            return depends.sandboxed

        return decorator
+
+ def include_impl(self, what, when=None):
+ """Implementation of include().
+ Allows to include external files for execution in the sandbox.
+ It is possible to use a @depends function as argument, in which case
+ the result of the function is the file name to include. This latter
+ feature is only really meant for --enable-application/--enable-project.
+ """
+ with self.only_when_impl(when):
+ what = self._resolve(what)
+ if what:
+ if not isinstance(what, six.string_types):
+ raise TypeError("Unexpected type: '%s'" % type(what).__name__)
+ self.include_file(what)
+
    def template_impl(self, func):
        """Implementation of @template.
        This function is a decorator. Template functions are called
        immediately. They are altered so that their global namespace exposes
        a limited set of functions from os.path, as well as `depends` and
        `option`.
        Templates allow to simplify repetitive constructs, or to implement
        helper decorators and somesuch.
        """

        def update_globals(glob):
            # Expose every sandbox primitive (except template itself), plus
            # the current sandbox globals, to the template.
            glob.update(
                (k[: -len("_impl")], getattr(self, k))
                for k in dir(self)
                if k.endswith("_impl") and k != "template_impl"
            )
            glob.update((k, v) for k, v in six.iteritems(self) if k not in glob)

        template = self._prepare_function(func, update_globals)

        # Any function argument to the template must be prepared to be sandboxed.
        # If the template itself returns a function (in which case, it's very
        # likely a decorator), that function must be prepared to be sandboxed as
        # well.
        def wrap_template(template):
            isfunction = inspect.isfunction

            def maybe_prepare_function(obj):
                if isfunction(obj):
                    return self._prepare_function(obj)
                return obj

            # The following function may end up being prepared to be sandboxed,
            # so it mustn't depend on anything from the global scope in this
            # file. It can however depend on variables from the closure, thus
            # maybe_prepare_function and isfunction are declared above to be
            # available there.
            @self.wraps(template)
            def wrapper(*args, **kwargs):
                args = [maybe_prepare_function(arg) for arg in args]
                kwargs = {k: maybe_prepare_function(v) for k, v in kwargs.items()}
                # Track template nesting depth; option_impl uses it to
                # attribute a category to Options defined inside templates.
                self._template_depth += 1
                ret = template(*args, **kwargs)
                self._template_depth -= 1
                if isfunction(ret):
                    # We can't expect the sandboxed code to think about all the
                    # details of implementing decorators, so do some of the
                    # work for them. If the function takes exactly one function
                    # as argument and returns a function, it must be a
                    # decorator, so mark the returned function as wrapping the
                    # function passed in.
                    if len(args) == 1 and not kwargs and isfunction(args[0]):
                        ret = self.wraps(args[0])(ret)
                    return wrap_template(ret)
                return ret

            return wrapper

        wrapper = wrap_template(template)
        self._templates.add(wrapper)
        return wrapper
+
+ def wraps(self, func):
+ return wraps(func)
+
+ RE_MODULE = re.compile("^[a-zA-Z0-9_\.]+$")
+
+ def imports_impl(self, _import, _from=None, _as=None):
+ """Implementation of @imports.
+ This decorator imports the given _import from the given _from module
+ optionally under a different _as name.
+ The options correspond to the various forms for the import builtin.
+
+ @imports('sys')
+ @imports(_from='mozpack', _import='path', _as='mozpath')
+ """
+ for value, required in ((_import, True), (_from, False), (_as, False)):
+
+ if not isinstance(value, six.string_types) and (
+ required or value is not None
+ ):
+ raise TypeError("Unexpected type: '%s'" % type(value).__name__)
+ if value is not None and not self.RE_MODULE.match(value):
+ raise ValueError("Invalid argument to @imports: '%s'" % value)
+ if _as and "." in _as:
+ raise ValueError("Invalid argument to @imports: '%s'" % _as)
+
+ def decorator(func):
+ if func in self._templates:
+ raise ConfigureError("@imports must appear after @template")
+ if func in self._depends:
+ raise ConfigureError("@imports must appear after @depends")
+ # For the imports to apply in the order they appear in the
+ # .configure file, we accumulate them in reverse order and apply
+ # them later.
+ imports = self._imports.setdefault(func, [])
+ imports.insert(0, (_from, _import, _as))
+ return func
+
+ return decorator
+
+ def _apply_imports(self, func, glob):
+ for _from, _import, _as in self._imports.pop(func, ()):
+ self._get_one_import(_from, _import, _as, glob)
+
+ def _handle_wrapped_import(self, _from, _import, _as, glob):
+ """Given the name of a module, "import" a mocked package into the glob
+ iff the module is one that we wrap (either for the sandbox or for the
+ purpose of testing). Applies if the wrapped module is exposed by an
+ attribute of `self`.
+
+ For example, if the import statement is `from os import environ`, then
+ this function will set
+ glob['environ'] = self._wrapped_os.environ.
+
+ Iff this function handles the given import, return True.
+ """
+ module = (_from or _import).split(".")[0]
+ attr = "_wrapped_" + module
+ wrapped = getattr(self, attr, None)
+ if wrapped:
+ if _as or _from:
+ obj = self._recursively_get_property(
+ module, (_from + "." if _from else "") + _import, wrapped
+ )
+ glob[_as or _import] = obj
+ else:
+ glob[module] = wrapped
+ return True
+ else:
+ return False
+
+ def _recursively_get_property(self, module, what, wrapped):
+ """Traverse the wrapper object `wrapped` (which represents the module
+ `module`) and return the property represented by `what`, which may be a
+ series of nested attributes.
+
+ For example, if `module` is 'os' and `what` is 'os.path.join',
+ return `wrapped.path.join`.
+ """
+ if what == module:
+ return wrapped
+ assert what.startswith(module + ".")
+ attrs = what[len(module + ".") :].split(".")
+ for attr in attrs:
+ wrapped = getattr(wrapped, attr)
+ return wrapped
+
+ @memoized_property
+ def _wrapped_os(self):
+ wrapped_os = {}
+ exec_("from os import *", {}, wrapped_os)
+ # Special case os and os.environ so that os.environ is our copy of
+ # the environment.
+ wrapped_os["environ"] = self._environ
+ # Also override some os.path functions with ours.
+ wrapped_path = {}
+ exec_("from os.path import *", {}, wrapped_path)
+ wrapped_path.update(self.OS.path.__dict__)
+ wrapped_os["path"] = ReadOnlyNamespace(**wrapped_path)
+ return ReadOnlyNamespace(**wrapped_os)
+
+ @memoized_property
+ def _wrapped_subprocess(self):
+ wrapped_subprocess = {}
+ exec_("from subprocess import *", {}, wrapped_subprocess)
+
+ def wrap(function):
+ def wrapper(*args, **kwargs):
+ if kwargs.get("env") is None and self._environ:
+ kwargs["env"] = dict(self._environ)
+
+ return function(*args, **kwargs)
+
+ return wrapper
+
+ for f in ("call", "check_call", "check_output", "Popen", "run"):
+ # `run` is new to python 3.5. In case this still runs from python2
+ # code, avoid failing here.
+ if f in wrapped_subprocess:
+ wrapped_subprocess[f] = wrap(wrapped_subprocess[f])
+
+ return ReadOnlyNamespace(**wrapped_subprocess)
+
+ @memoized_property
+ def _wrapped_six(self):
+ if six.PY3:
+ return six
+ wrapped_six = {}
+ exec_("from six import *", {}, wrapped_six)
+ wrapped_six_moves = {}
+ exec_("from six.moves import *", {}, wrapped_six_moves)
+ wrapped_six_moves_builtins = {}
+ exec_("from six.moves.builtins import *", {}, wrapped_six_moves_builtins)
+
+ # Special case for the open() builtin, because otherwise, using it
+ # fails with "IOError: file() constructor not accessible in
+ # restricted mode". We also make open() look more like python 3's,
+ # decoding to unicode strings unless the mode says otherwise.
+ def wrapped_open(name, mode=None, buffering=None):
+ args = (name,)
+ kwargs = {}
+ if buffering is not None:
+ kwargs["buffering"] = buffering
+ if mode is not None:
+ args += (mode,)
+ if "b" in mode:
+ return open(*args, **kwargs)
+ kwargs["encoding"] = system_encoding
+ return codecs.open(*args, **kwargs)
+
+ wrapped_six_moves_builtins["open"] = wrapped_open
+ wrapped_six_moves["builtins"] = ReadOnlyNamespace(**wrapped_six_moves_builtins)
+ wrapped_six["moves"] = ReadOnlyNamespace(**wrapped_six_moves)
+
+ return ReadOnlyNamespace(**wrapped_six)
+
+ def _get_one_import(self, _from, _import, _as, glob):
+ """Perform the given import, placing the result into the dict glob."""
+ if not _from and _import == "__builtin__":
+ glob[_as or "__builtin__"] = __builtin__
+ return
+ if _from == "__builtin__":
+ _from = "six.moves.builtins"
+ # The special `__sandbox__` module gives access to the sandbox
+ # instance.
+ if not _from and _import == "__sandbox__":
+ glob[_as or _import] = self
+ return
+ if self._handle_wrapped_import(_from, _import, _as, glob):
+ return
+ # If we've gotten this far, we should just do a normal import.
+ # Until this proves to be a performance problem, just construct an
+ # import statement and execute it.
+ import_line = "%simport %s%s" % (
+ ("from %s " % _from) if _from else "",
+ _import,
+ (" as %s" % _as) if _as else "",
+ )
+ exec_(import_line, {}, glob)
+
+ def _resolve_and_set(self, data, name, value, when=None):
+ # Don't set anything when --help was on the command line
+ if self._help:
+ return
+ if when and not self._value_for(when):
+ return
+ name = self._resolve(name)
+ if name is None:
+ return
+ if not isinstance(name, six.string_types):
+ raise TypeError("Unexpected type: '%s'" % type(name).__name__)
+ if name in data:
+ raise ConfigureError(
+ "Cannot add '%s' to configuration: Key already " "exists" % name
+ )
+ value = self._resolve(value)
+ if value is not None:
+ if self._logger.isEnabledFor(TRACE):
+ if data is self._config:
+ self._logger.log(TRACE, "set_config(%s, %r)", name, value)
+ elif data is self._config.get("DEFINES"):
+ self._logger.log(TRACE, "set_define(%s, %r)", name, value)
+ data[name] = value
+
+ def set_config_impl(self, name, value, when=None):
+ """Implementation of set_config().
+ Set the configuration items with the given name to the given value.
+ Both `name` and `value` can be references to @depends functions,
+ in which case the result from these functions is used. If the result
+ of either function is None, the configuration item is not set.
+ """
+ when = self._normalize_when(when, "set_config")
+
+ self._execution_queue.append(
+ (self._resolve_and_set, (self._config, name, value, when))
+ )
+
+ def set_define_impl(self, name, value, when=None):
+ """Implementation of set_define().
+ Set the define with the given name to the given value. Both `name` and
+ `value` can be references to @depends functions, in which case the
+ result from these functions is used. If the result of either function
+ is None, the define is not set. If the result is False, the define is
+ explicitly undefined (-U).
+ """
+ when = self._normalize_when(when, "set_define")
+
+ defines = self._config.setdefault("DEFINES", {})
+ self._execution_queue.append(
+ (self._resolve_and_set, (defines, name, value, when))
+ )
+
+ def imply_option_impl(self, option, value, reason=None, when=None):
+ """Implementation of imply_option().
+ Injects additional options as if they had been passed on the command
+ line. The `option` argument is a string as in option()'s `name` or
+ `env`. The option must be declared after `imply_option` references it.
+ The `value` argument indicates the value to pass to the option.
+ It can be:
+ - True. In this case `imply_option` injects the positive option
+
+ (--enable-foo/--with-foo).
+ imply_option('--enable-foo', True)
+ imply_option('--disable-foo', True)
+
+ are both equivalent to `--enable-foo` on the command line.
+
+ - False. In this case `imply_option` injects the negative option
+
+ (--disable-foo/--without-foo).
+ imply_option('--enable-foo', False)
+ imply_option('--disable-foo', False)
+
+ are both equivalent to `--disable-foo` on the command line.
+
+ - None. In this case `imply_option` does nothing.
+ imply_option('--enable-foo', None)
+ imply_option('--disable-foo', None)
+
+ are both equivalent to not passing any flag on the command line.
+
+ - a string or a tuple. In this case `imply_option` injects the positive
+ option with the given value(s).
+
+ imply_option('--enable-foo', 'a')
+ imply_option('--disable-foo', 'a')
+
+ are both equivalent to `--enable-foo=a` on the command line.
+ imply_option('--enable-foo', ('a', 'b'))
+ imply_option('--disable-foo', ('a', 'b'))
+
+ are both equivalent to `--enable-foo=a,b` on the command line.
+
+ Because imply_option('--disable-foo', ...) can be misleading, it is
+ recommended to use the positive form ('--enable' or '--with') for
+ `option`.
+
+ The `value` argument can also be (and usually is) a reference to a
+ @depends function, in which case the result of that function will be
+        used as per the described mapping above.
+
+ The `reason` argument indicates what caused the option to be implied.
+ It is necessary when it cannot be inferred from the `value`.
+ """
+
+ when = self._normalize_when(when, "imply_option")
+
+ # Don't do anything when --help was on the command line
+ if self._help:
+ return
+ if not reason and isinstance(value, SandboxDependsFunction):
+ deps = self._depends[value].dependencies
+ possible_reasons = [d for d in deps if d != self._help_option]
+ if len(possible_reasons) == 1:
+ if isinstance(possible_reasons[0], Option):
+ reason = possible_reasons[0]
+ if not reason and (
+ isinstance(value, (bool, tuple)) or isinstance(value, six.string_types)
+ ):
+ # A reason can be provided automatically when imply_option
+ # is called with an immediate value.
+ _, filename, line, _, _, _ = inspect.stack()[1]
+ reason = "imply_option at %s:%s" % (filename, line)
+
+ if not reason:
+ raise ConfigureError(
+ "Cannot infer what implies '%s'. Please add a `reason` to "
+ "the `imply_option` call." % option
+ )
+
+ prefix, name, values = Option.split_option(option)
+ if values != ():
+ raise ConfigureError("Implied option must not contain an '='")
+
+ self._implied_options.append(
+ ReadOnlyNamespace(
+ option=option,
+ prefix=prefix,
+ name=name,
+ value=value,
+ caller=inspect.stack()[1],
+ reason=reason,
+ when=when,
+ )
+ )
+
+ def _prepare_function(self, func, update_globals=None):
+ """Alter the given function global namespace with the common ground
+ for @depends, and @template.
+ """
+ if not inspect.isfunction(func):
+ raise TypeError("Unexpected type: '%s'" % type(func).__name__)
+ if func in self._prepared_functions:
+ return func
+
+ glob = SandboxedGlobal(
+ (k, v)
+ for k, v in six.iteritems(func.__globals__)
+ if (inspect.isfunction(v) and v not in self._templates)
+ or (inspect.isclass(v) and issubclass(v, Exception))
+ )
+ glob.update(
+ __builtins__=self.BUILTINS,
+ __file__=self._paths[-1] if self._paths else "",
+ __name__=self._paths[-1] if self._paths else "",
+ os=self.OS,
+ log=self.log_impl,
+ namespace=ReadOnlyNamespace,
+ )
+ if update_globals:
+ update_globals(glob)
+
+ # The execution model in the sandbox doesn't guarantee the execution
+ # order will always be the same for a given function, and if it uses
+ # variables from a closure that are changed after the function is
+ # declared, depending when the function is executed, the value of the
+ # variable can differ. For consistency, we force the function to use
+ # the value from the earliest it can be run, which is at declaration.
+ # Note this is not entirely bullet proof (if the value is e.g. a list,
+ # the list contents could have changed), but covers the bases.
+ closure = None
+ if func.__closure__:
+
+ def makecell(content):
+ def f():
+ content
+
+ return f.__closure__[0]
+
+ closure = tuple(makecell(cell.cell_contents) for cell in func.__closure__)
+
+ new_func = self.wraps(func)(
+ types.FunctionType(
+ func.__code__, glob, func.__name__, func.__defaults__, closure
+ )
+ )
+
+ @self.wraps(new_func)
+ def wrapped(*args, **kwargs):
+ if func in self._imports:
+ self._apply_imports(func, glob)
+ return new_func(*args, **kwargs)
+
+ self._prepared_functions.add(wrapped)
+ return wrapped
diff --git a/python/mozbuild/mozbuild/configure/check_debug_ranges.py b/python/mozbuild/mozbuild/configure/check_debug_ranges.py
new file mode 100644
index 0000000000..f82624c14f
--- /dev/null
+++ b/python/mozbuild/mozbuild/configure/check_debug_ranges.py
@@ -0,0 +1,68 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This script returns the number of items for the DW_AT_ranges corresponding
+# to a given compilation unit. This is used as a helper to find a bug in some
+# versions of GNU ld.
+
+import re
+import subprocess
+import sys
+
+
+def get_range_for(compilation_unit, debug_info):
+ """Returns the range offset for a given compilation unit
+ in a given debug_info."""
+ name = ranges = ""
+ search_cu = False
+ for nfo in debug_info.splitlines():
+ if "DW_TAG_compile_unit" in nfo:
+ search_cu = True
+ elif "DW_TAG_" in nfo or not nfo.strip():
+ if name == compilation_unit and ranges != "":
+ return int(ranges, 16)
+ name = ranges = ""
+ search_cu = False
+ if search_cu:
+ if "DW_AT_name" in nfo:
+ name = nfo.rsplit(None, 1)[1]
+ elif "DW_AT_ranges" in nfo:
+ ranges = nfo.rsplit(None, 1)[1]
+ return None
+
+
+def get_range_length(range, debug_ranges):
+ """Returns the number of items in the range starting at the
+ given offset."""
+ length = 0
+ for line in debug_ranges.splitlines():
+ m = re.match("\s*([0-9a-fA-F]+)\s+([0-9a-fA-F]+)\s+([0-9a-fA-F]+)", line)
+ if m and int(m.group(1), 16) == range:
+ length += 1
+ return length
+
+
+def main(bin, compilation_unit):
+ p = subprocess.Popen(
+ ["objdump", "-W", bin],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ universal_newlines=True,
+ )
+ (out, err) = p.communicate()
+ sections = re.split("\n(Contents of the|The section) ", out)
+ debug_info = [s for s in sections if s.startswith(".debug_info")]
+ debug_ranges = [s for s in sections if s.startswith(".debug_ranges")]
+ if not debug_ranges or not debug_info:
+ return 0
+
+ range = get_range_for(compilation_unit, debug_info[0])
+ if range is not None:
+ return get_range_length(range, debug_ranges[0])
+
+ return -1
+
+
+if __name__ == "__main__":
+ print(main(*sys.argv[1:]))
diff --git a/python/mozbuild/mozbuild/configure/constants.py b/python/mozbuild/mozbuild/configure/constants.py
new file mode 100644
index 0000000000..a36152651d
--- /dev/null
+++ b/python/mozbuild/mozbuild/configure/constants.py
@@ -0,0 +1,131 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from collections import OrderedDict
+
+from mozbuild.util import EnumString
+
+CompilerType = EnumString.subclass(
+ "clang",
+ "clang-cl",
+ "gcc",
+ "msvc",
+)
+
+OS = EnumString.subclass(
+ "Android",
+ "DragonFly",
+ "FreeBSD",
+ "GNU",
+ "NetBSD",
+ "OpenBSD",
+ "OSX",
+ "SunOS",
+ "WINNT",
+ "WASI",
+)
+
+Kernel = EnumString.subclass(
+ "Darwin",
+ "DragonFly",
+ "FreeBSD",
+ "kFreeBSD",
+ "Linux",
+ "NetBSD",
+ "OpenBSD",
+ "SunOS",
+ "WINNT",
+ "WASI",
+)
+
+CPU_bitness = {
+ "aarch64": 64,
+ "Alpha": 64,
+ "arm": 32,
+ "hppa": 32,
+ "ia64": 64,
+ "loongarch64": 64,
+ "m68k": 32,
+ "mips32": 32,
+ "mips64": 64,
+ "ppc": 32,
+ "ppc64": 64,
+ "riscv64": 64,
+ "s390": 32,
+ "s390x": 64,
+ "sh4": 32,
+ "sparc": 32,
+ "sparc64": 64,
+ "x86": 32,
+ "x86_64": 64,
+ "wasm32": 32,
+}
+
+CPU = EnumString.subclass(*CPU_bitness.keys())
+
+Endianness = EnumString.subclass(
+ "big",
+ "little",
+)
+
+WindowsBinaryType = EnumString.subclass(
+ "win32",
+ "win64",
+)
+
+Abi = EnumString.subclass(
+ "msvc",
+ "mingw",
+)
+
+# The order of those checks matter
+CPU_preprocessor_checks = OrderedDict(
+ (
+ ("x86", "__i386__ || _M_IX86"),
+ ("x86_64", "__x86_64__ || _M_X64"),
+ ("arm", "__arm__ || _M_ARM"),
+ ("aarch64", "__aarch64__ || _M_ARM64"),
+ ("ia64", "__ia64__"),
+ ("s390x", "__s390x__"),
+ ("s390", "__s390__"),
+ ("ppc64", "__powerpc64__"),
+ ("ppc", "__powerpc__"),
+ ("Alpha", "__alpha__"),
+ ("hppa", "__hppa__"),
+ ("sparc64", "__sparc__ && __arch64__"),
+ ("sparc", "__sparc__"),
+ ("m68k", "__m68k__"),
+ ("mips64", "__mips64"),
+ ("mips32", "__mips__"),
+ ("riscv64", "__riscv && __riscv_xlen == 64"),
+ ("loongarch64", "__loongarch64"),
+ ("sh4", "__sh__"),
+ ("wasm32", "__wasm32__"),
+ )
+)
+
+assert sorted(CPU_preprocessor_checks.keys()) == sorted(CPU.POSSIBLE_VALUES)
+
+kernel_preprocessor_checks = {
+ "Darwin": "__APPLE__",
+ "DragonFly": "__DragonFly__",
+ "FreeBSD": "__FreeBSD__",
+ "kFreeBSD": "__FreeBSD_kernel__",
+ "Linux": "__linux__",
+ "NetBSD": "__NetBSD__",
+ "OpenBSD": "__OpenBSD__",
+ "SunOS": "__sun__",
+ "WINNT": "_WIN32 || __CYGWIN__",
+ "WASI": "__wasi__",
+}
+
+assert sorted(kernel_preprocessor_checks.keys()) == sorted(Kernel.POSSIBLE_VALUES)
+
+OS_preprocessor_checks = {
+ "Android": "__ANDROID__",
+}
+
+# We intentionally don't include all possible OSes in our checks, because we
+# only care about OS mismatches for specific target OSes.
+# assert sorted(OS_preprocessor_checks.keys()) == sorted(OS.POSSIBLE_VALUES)
diff --git a/python/mozbuild/mozbuild/configure/help.py b/python/mozbuild/mozbuild/configure/help.py
new file mode 100644
index 0000000000..bfd5e6ad6d
--- /dev/null
+++ b/python/mozbuild/mozbuild/configure/help.py
@@ -0,0 +1,90 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import re
+from collections import defaultdict
+
+from mozbuild.configure.options import Option
+
+
+class HelpFormatter(object):
+ def __init__(self, argv0):
+ self.intro = ["Usage: %s [options]" % os.path.basename(argv0)]
+ self.options = []
+
+ def add(self, option):
+ assert isinstance(option, Option)
+ if option.possible_origins == ("implied",):
+ # Don't display help if our option can only be implied.
+ return
+ self.options.append(option)
+
+ def format_options_by_category(self, options_by_category):
+ ret = []
+ for category, options in sorted(
+ options_by_category.items(), key=lambda x: x[0]
+ ):
+ ret.append(" " + category + ":")
+ for option in sorted(options, key=lambda opt: opt.option):
+ opt = option.option
+ if option.choices:
+ opt += "={%s}" % ",".join(option.choices)
+ help = self.format_help(option)
+ if len(option.default):
+ if help:
+ help += " "
+ help += "[%s]" % ",".join(option.default)
+
+ if len(opt) > 24 or not help:
+ ret.append(" %s" % opt)
+ if help:
+ ret.append("%s%s" % (" " * 30, help))
+ else:
+ ret.append(" %-24s %s" % (opt, help))
+ ret.append("")
+ return ret
+
+ RE_FORMAT = re.compile(r"{([^|}]*)\|([^|}]*)}")
+
+ # Return formatted help text for --{enable,disable,with,without}-* options.
+ #
+ # Format is the following syntax:
+ # {String for --enable or --with|String for --disable or --without}
+ #
+ # For example, '{Enable|Disable} optimizations' will be formatted to
+    # 'Enable optimizations' if the option's prefix is 'enable' or 'with',
+    # and formatted to 'Disable optimizations' if the option's prefix is
+ # 'disable' or 'without'.
+ def format_help(self, option):
+ if not option.help:
+ return ""
+
+ if option.prefix in ("enable", "with"):
+ replacement = r"\1"
+ elif option.prefix in ("disable", "without"):
+ replacement = r"\2"
+ else:
+ return option.help
+
+ return self.RE_FORMAT.sub(replacement, option.help)
+
+ def usage(self, out):
+ options_by_category = defaultdict(list)
+ env_by_category = defaultdict(list)
+ for option in self.options:
+ target = options_by_category if option.name else env_by_category
+ target[option.category].append(option)
+ options_formatted = [
+ "Options: [defaults in brackets after descriptions]"
+ ] + self.format_options_by_category(options_by_category)
+ env_formatted = ["Environment variables:"] + self.format_options_by_category(
+ env_by_category
+ )
+ print(
+ "\n\n".join(
+ "\n".join(t) for t in (self.intro, options_formatted, env_formatted)
+ ),
+ file=out,
+ )
diff --git a/python/mozbuild/mozbuild/configure/lint.py b/python/mozbuild/mozbuild/configure/lint.py
new file mode 100644
index 0000000000..7ea379b1ef
--- /dev/null
+++ b/python/mozbuild/mozbuild/configure/lint.py
@@ -0,0 +1,348 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import inspect
+import re
+import types
+from dis import Bytecode
+from functools import wraps
+from io import StringIO
+
+from mozbuild.util import memoize
+
+from . import (
+ CombinedDependsFunction,
+ ConfigureError,
+ ConfigureSandbox,
+ DependsFunction,
+ SandboxDependsFunction,
+ SandboxedGlobal,
+ TrivialDependsFunction,
+)
+from .help import HelpFormatter
+
+
+class LintSandbox(ConfigureSandbox):
+ def __init__(self, environ=None, argv=None, stdout=None, stderr=None):
+ out = StringIO()
+ stdout = stdout or out
+ stderr = stderr or out
+ environ = environ or {}
+ argv = argv or []
+ self._wrapped = {}
+ self._has_imports = set()
+ self._bool_options = []
+ self._bool_func_options = []
+ self.LOG = ""
+ super(LintSandbox, self).__init__(
+ {}, environ=environ, argv=argv, stdout=stdout, stderr=stderr
+ )
+
+ def run(self, path=None):
+ if path:
+ self.include_file(path)
+
+ for dep in self._depends.values():
+ self._check_dependencies(dep)
+
+ def _raise_from(self, exception, obj, line=0):
+ """
+ Raises the given exception as if it were emitted from the given
+ location.
+
+ The location is determined from the values of obj and line.
+ - `obj` can be a function or DependsFunction, in which case
+ `line` corresponds to the line within the function the exception
+ will be raised from (as an offset from the function's firstlineno).
+ - `obj` can be a stack frame, in which case `line` is ignored.
+ """
+
+ def thrower(e):
+ raise e
+
+ if isinstance(obj, DependsFunction):
+ obj, _ = self.unwrap(obj._func)
+
+ if inspect.isfunction(obj):
+ funcname = obj.__name__
+ filename = obj.__code__.co_filename
+ firstline = obj.__code__.co_firstlineno
+ line += firstline
+ elif inspect.isframe(obj):
+ funcname = obj.f_code.co_name
+ filename = obj.f_code.co_filename
+ firstline = obj.f_code.co_firstlineno
+ line = obj.f_lineno
+ else:
+ # Don't know how to handle the given location, still raise the
+ # exception.
+ raise exception
+
+ # Create a new function from the above thrower that pretends
+ # the `def` line is on the first line of the function given as
+ # argument, and the `raise` line is on the line given as argument.
+
+ offset = line - firstline
+ # co_lnotab is a string where each pair of consecutive character is
+ # (chr(byte_increment), chr(line_increment)), mapping bytes in co_code
+ # to line numbers relative to co_firstlineno.
+ # If the offset we need to encode is larger than what fits in a 8-bit
+ # signed integer, we need to split it.
+ co_lnotab = bytes([0, 127] * (offset // 127) + [0, offset % 127])
+ code = thrower.__code__
+ codetype_args = [
+ code.co_argcount,
+ code.co_kwonlyargcount,
+ code.co_nlocals,
+ code.co_stacksize,
+ code.co_flags,
+ code.co_code,
+ code.co_consts,
+ code.co_names,
+ code.co_varnames,
+ filename,
+ funcname,
+ firstline,
+ co_lnotab,
+ ]
+ if hasattr(code, "co_posonlyargcount"):
+ # co_posonlyargcount was introduced in Python 3.8.
+ codetype_args.insert(1, code.co_posonlyargcount)
+
+ code = types.CodeType(*codetype_args)
+ thrower = types.FunctionType(
+ code,
+ thrower.__globals__,
+ funcname,
+ thrower.__defaults__,
+ thrower.__closure__,
+ )
+ thrower(exception)
+
+ def _check_dependencies(self, obj):
+ if isinstance(obj, CombinedDependsFunction) or obj in (
+ self._always,
+ self._never,
+ ):
+ return
+ if not inspect.isroutine(obj._func):
+ return
+ func, glob = self.unwrap(obj._func)
+ func_args = inspect.getfullargspec(func)
+ if func_args.varkw:
+ e = ConfigureError(
+ "Keyword arguments are not allowed in @depends functions"
+ )
+ self._raise_from(e, func)
+
+ all_args = list(func_args.args)
+ if func_args.varargs:
+ all_args.append(func_args.varargs)
+ used_args = set()
+
+ for instr in Bytecode(func):
+ if instr.opname in ("LOAD_FAST", "LOAD_CLOSURE"):
+ if instr.argval in all_args:
+ used_args.add(instr.argval)
+
+ for num, arg in enumerate(all_args):
+ if arg not in used_args:
+ dep = obj.dependencies[num]
+ if dep != self._help_option or not self._need_help_dependency(obj):
+ if isinstance(dep, DependsFunction):
+ dep = dep.name
+ else:
+ dep = dep.option
+ e = ConfigureError("The dependency on `%s` is unused" % dep)
+ self._raise_from(e, func)
+
+ def _need_help_dependency(self, obj):
+ if isinstance(obj, (CombinedDependsFunction, TrivialDependsFunction)):
+ return False
+ if isinstance(obj, DependsFunction):
+ if obj in (self._always, self._never) or not inspect.isroutine(obj._func):
+ return False
+ func, glob = self.unwrap(obj._func)
+ # We allow missing --help dependencies for functions that:
+ # - don't use @imports
+ # - don't have a closure
+ # - don't use global variables
+ if func in self._has_imports or func.__closure__:
+ return True
+ for instr in Bytecode(func):
+ if instr.opname in ("LOAD_GLOBAL", "STORE_GLOBAL"):
+ # There is a fake os module when one is not imported,
+ # and it's allowed for functions without a --help
+ # dependency.
+ if instr.argval == "os" and glob.get("os") is self.OS:
+ continue
+ if instr.argval in self.BUILTINS:
+ continue
+ if instr.argval in "namespace":
+ continue
+ return True
+ return False
+
+ def _missing_help_dependency(self, obj):
+ if isinstance(obj, DependsFunction) and self._help_option in obj.dependencies:
+ return False
+ return self._need_help_dependency(obj)
+
+ @memoize
+ def _value_for_depends(self, obj):
+ with_help = self._help_option in obj.dependencies
+ if with_help:
+ for arg in obj.dependencies:
+ if self._missing_help_dependency(arg):
+ e = ConfigureError(
+ "Missing '--help' dependency because `%s` depends on "
+ "'--help' and `%s`" % (obj.name, arg.name)
+ )
+ self._raise_from(e, arg)
+ elif self._missing_help_dependency(obj):
+ e = ConfigureError("Missing '--help' dependency")
+ self._raise_from(e, obj)
+ return super(LintSandbox, self)._value_for_depends(obj)
+
+ def option_impl(self, *args, **kwargs):
+ result = super(LintSandbox, self).option_impl(*args, **kwargs)
+ when = self._conditions.get(result)
+ if when:
+ self._value_for(when)
+
+ self._check_option(result, *args, **kwargs)
+
+ return result
+
+ def _check_option(self, option, *args, **kwargs):
+ if "default" not in kwargs:
+ return
+ if len(args) == 0:
+ return
+
+ self._check_prefix_for_bool_option(*args, **kwargs)
+ self._check_help_for_option_with_func_default(option, *args, **kwargs)
+
+ def _check_prefix_for_bool_option(self, *args, **kwargs):
+ name = args[0]
+ default = kwargs["default"]
+
+ if type(default) != bool:
+ return
+
+ table = {
+ True: {
+ "enable": "disable",
+ "with": "without",
+ },
+ False: {
+ "disable": "enable",
+ "without": "with",
+ },
+ }
+ for prefix, replacement in table[default].items():
+ if name.startswith("--{}-".format(prefix)):
+ frame = inspect.currentframe()
+ while frame and frame.f_code.co_name != self.option_impl.__name__:
+ frame = frame.f_back
+ e = ConfigureError(
+ "{} should be used instead of "
+ "{} with default={}".format(
+ name.replace(
+ "--{}-".format(prefix), "--{}-".format(replacement)
+ ),
+ name,
+ default,
+ )
+ )
+ self._raise_from(e, frame.f_back if frame else None)
+
+ def _check_help_for_option_with_func_default(self, option, *args, **kwargs):
+ default = kwargs["default"]
+
+ if not isinstance(default, SandboxDependsFunction):
+ return
+
+ if not option.prefix:
+ return
+
+ default = self._resolve(default)
+ if type(default) is str:
+ return
+
+ help = kwargs["help"]
+ match = re.search(HelpFormatter.RE_FORMAT, help)
+ if match:
+ return
+
+ if option.prefix in ("enable", "disable"):
+ rule = "{Enable|Disable}"
+ else:
+ rule = "{With|Without}"
+
+ frame = inspect.currentframe()
+ while frame and frame.f_code.co_name != self.option_impl.__name__:
+ frame = frame.f_back
+ e = ConfigureError(
+ '`help` should contain "{}" because of non-constant default'.format(rule)
+ )
+ self._raise_from(e, frame.f_back if frame else None)
+
+ def unwrap(self, func):
+ glob = func.__globals__
+ while func in self._wrapped:
+ if isinstance(func.__globals__, SandboxedGlobal):
+ glob = func.__globals__
+ func = self._wrapped[func]
+ return func, glob
+
+ def wraps(self, func):
+ def do_wraps(wrapper):
+ self._wrapped[wrapper] = func
+ return wraps(func)(wrapper)
+
+ return do_wraps
+
+ def imports_impl(self, _import, _from=None, _as=None):
+ wrapper = super(LintSandbox, self).imports_impl(_import, _from=_from, _as=_as)
+
+ def decorator(func):
+ self._has_imports.add(func)
+ return wrapper(func)
+
+ return decorator
+
+ def _prepare_function(self, func, update_globals=None):
+ wrapped = super(LintSandbox, self)._prepare_function(func, update_globals)
+ _, glob = self.unwrap(wrapped)
+ imports = set()
+ for _from, _import, _as in self._imports.get(func, ()):
+ if _as:
+ imports.add(_as)
+ else:
+ what = _import.split(".")[0]
+ imports.add(what)
+ if _from == "__builtin__" and _import in glob["__builtins__"]:
+ e = NameError(
+ "builtin '{}' doesn't need to be imported".format(_import)
+ )
+ self._raise_from(e, func)
+ for instr in Bytecode(func):
+ code = func.__code__
+ if (
+ instr.opname == "LOAD_GLOBAL"
+ and instr.argval not in glob
+ and instr.argval not in imports
+ and instr.argval not in glob["__builtins__"]
+ and instr.argval not in code.co_varnames[: code.co_argcount]
+ ):
+ # Raise the same kind of error as what would happen during
+ # execution.
+ e = NameError("global name '{}' is not defined".format(instr.argval))
+ if instr.starts_line is None:
+ self._raise_from(e, func)
+ else:
+ self._raise_from(e, func, instr.starts_line - code.co_firstlineno)
+
+ return wrapped
diff --git a/python/mozbuild/mozbuild/configure/options.py b/python/mozbuild/mozbuild/configure/options.py
new file mode 100644
index 0000000000..cc3b4516ea
--- /dev/null
+++ b/python/mozbuild/mozbuild/configure/options.py
@@ -0,0 +1,614 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import inspect
+import os
+import sys
+from collections import OrderedDict
+
+import six
+
# Predefined category name; always part of the whitelist below.
HELP_OPTIONS_CATEGORY = "Help options"
# List of whitelisted option categories. If you want to add a new category,
# simply add it to this list; however, exercise discretion as
# "./configure --help" becomes less useful if there are an excessive number of
# categories.
_ALL_CATEGORIES = (HELP_OPTIONS_CATEGORY,)
+
+
def _infer_option_category(define_depth):
    """Derive a default help category from the file that defined the option.

    Walks 3 + `define_depth` frames up the call stack (past the option
    machinery itself) to find the file whose option() call is being
    processed, and names the category after that file.
    """
    stack_frame = inspect.stack(0)[3 + define_depth]
    try:
        path = os.path.relpath(stack_frame[0].f_code.co_filename)
    except ValueError:
        # If this call fails, it means the relative path couldn't be determined
        # (e.g. because this file is on a different drive than the cwd on a
        # Windows machine). That's fine, just use the absolute filename.
        path = stack_frame[0].f_code.co_filename
    return "Options from " + path
+
+
def istupleofstrings(obj):
    """Return whether `obj` is a non-empty tuple containing only strings."""
    if not isinstance(obj, tuple) or not obj:
        return False
    return all(isinstance(item, six.string_types) for item in obj)
+
+
class OptionValue(tuple):
    """Represents the value of a configure option.

    This class is not meant to be used directly. Use its subclasses instead.

    The `origin` attribute holds where the option comes from (e.g. environment,
    command line, or default)
    """

    def __new__(cls, values=(), origin="unknown"):
        return super(OptionValue, cls).__new__(cls, values)

    def __init__(self, values=(), origin="unknown"):
        # `values` is consumed by tuple.__new__; only record the origin here.
        self.origin = origin

    def format(self, option):
        """Return the command-line (or VAR=value) form of this value for the
        given option string, or '' when the value turns off a flag that has
        no prefix to negate."""
        if option.startswith("--"):
            prefix, name, values = Option.split_option(option)
            assert values == ()
            # Select the prefix variant matching our truthiness:
            # index 0 is the negative form, index 1 the positive form.
            for prefix_set in (
                ("disable", "enable"),
                ("without", "with"),
            ):
                if prefix in prefix_set:
                    prefix = prefix_set[int(bool(self))]
                    break
            if prefix:
                option = "--%s-%s" % (prefix, name)
            elif self:
                option = "--%s" % name
            else:
                return ""
            if len(self):
                return "%s=%s" % (option, ",".join(self))
            return option
        elif self and not len(self):
            # Environment variable that is "on" without explicit values.
            return "%s=1" % option
        return "%s=%s" % (option, ",".join(self))

    def __eq__(self, other):
        # This is to catch naive comparisons against strings and other
        # types in moz.configure files, as it is really easy to write
        # value == 'foo'. We only raise a TypeError for instances that
        # have content, because value-less instances (like PositiveOptionValue
        # and NegativeOptionValue) are common and it is trivial to
        # compare these.
        if not isinstance(other, tuple) and len(self):
            # Fix: the format arguments were swapped; `self` is the populated
            # OptionValue, `other` is the foreign type being compared against.
            raise TypeError(
                "cannot compare a populated %s against an %s; "
                "OptionValue instances are tuples - did you mean to "
                "compare against member elements using [x]?"
                % (type(self).__name__, type(other).__name__)
            )

        # Allow explicit tuples to be compared.
        if type(other) == tuple:
            return tuple.__eq__(self, other)
        elif isinstance(other, bool):
            return bool(self) == other
        # Else we're likely an OptionValue class.
        elif type(other) != type(self):
            return False
        else:
            return super(OptionValue, self).__eq__(other)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __repr__(self):
        return "%s%s" % (self.__class__.__name__, super(OptionValue, self).__repr__())

    @staticmethod
    def from_(value):
        """Coerce `value` (bool, string, tuple or OptionValue) into an
        OptionValue instance.  Raises TypeError for any other type."""
        if isinstance(value, OptionValue):
            return value
        elif value is True:
            return PositiveOptionValue()
        elif value is False or value == ():
            return NegativeOptionValue()
        elif isinstance(value, six.string_types):
            return PositiveOptionValue((value,))
        elif isinstance(value, tuple):
            return PositiveOptionValue(value)
        else:
            raise TypeError("Unexpected type: '%s'" % type(value).__name__)
+
+
class PositiveOptionValue(OptionValue):
    """Represents the value for a positive option (--enable/--with/--foo)
    in the form of a tuple for when values are given to the option (in the
    form --option=value[,value2...]).
    """

    # Always truthy, even when empty (no values given).
    def __nonzero__(self):  # py2
        return True

    def __bool__(self):  # py3
        return True
+
+
class NegativeOptionValue(OptionValue):
    """Represents the value for a negative option (--disable/--without)

    This is effectively an empty tuple with an `origin` attribute.
    """

    def __new__(cls, origin="unknown"):
        # No values are ever stored; only the origin varies.
        return super(NegativeOptionValue, cls).__new__(cls, origin=origin)

    def __init__(self, origin="unknown"):
        super(NegativeOptionValue, self).__init__(origin=origin)
+
+
class InvalidOptionError(Exception):
    """Raised when an option is misdefined or given an invalid value."""

    pass
+
+
class ConflictingOptionError(InvalidOptionError):
    """Raised when conflicting values are given for the same option.

    Keyword arguments used to format `message` are also set as attributes
    on the exception instance for later inspection.
    """

    def __init__(self, message, **format_data):
        if format_data:
            message = message.format(**format_data)
        super(ConflictingOptionError, self).__init__(message)
        for k, v in six.iteritems(format_data):
            setattr(self, k, v)
+
+
class Option(object):
    """Represents a configure option

    A configure option can be a command line flag or an environment variable
    or both.

    - `name` is the full command line flag (e.g. --enable-foo).
    - `env` is the environment variable name (e.g. ENV)
    - `nargs` is the number of arguments the option may take. It can be a
      number or the special values '?' (0 or 1), '*' (0 or more), or '+' (1 or
      more).
    - `default` can be used to give a default value to the option. When the
      `name` of the option starts with '--enable-' or '--with-', the implied
      default is an empty PositiveOptionValue. When it starts with '--disable-'
      or '--without-', the implied default is a NegativeOptionValue.
    - `choices` restricts the set of values that can be given to the option.
    - `help` is the option description for use in the --help output.
    - `possible_origins` is a tuple of strings that are origins accepted for
      this option. Example origins are 'mozconfig', 'implied', and 'environment'.
    - `category` is a human-readable string used only for categorizing command-
      line options when displaying the output of `configure --help`. If not
      supplied, the script will attempt to infer an appropriate category based
      on the name of the file where the option was defined. If supplied it must
      be in the _ALL_CATEGORIES list above.
    - `define_depth` should generally only be used by templates that are used
      to instantiate an option indirectly. Set this to a positive integer to
      force the script to look into a deeper stack frame when inferring the
      `category`.
    """

    __slots__ = (
        "id",
        "prefix",
        "name",
        "env",
        "nargs",
        "default",
        "choices",
        "help",
        "possible_origins",
        "category",
        "define_depth",
    )

    def __init__(
        self,
        name=None,
        env=None,
        nargs=None,
        default=None,
        possible_origins=None,
        choices=None,
        category=None,
        help=None,
        define_depth=0,
    ):
        # Validate the raw arguments before deriving anything from them.
        if not name and not env:
            raise InvalidOptionError(
                "At least an option name or an environment variable name must "
                "be given"
            )
        if name:
            if not isinstance(name, six.string_types):
                raise InvalidOptionError("Option must be a string")
            if not name.startswith("--"):
                raise InvalidOptionError("Option must start with `--`")
            if "=" in name:
                raise InvalidOptionError("Option must not contain an `=`")
            if not name.islower():
                raise InvalidOptionError("Option must be all lowercase")
        if env:
            if not isinstance(env, six.string_types):
                raise InvalidOptionError("Environment variable name must be a string")
            if not env.isupper():
                raise InvalidOptionError(
                    "Environment variable name must be all uppercase"
                )
        if nargs not in (None, "?", "*", "+") and not (
            isinstance(nargs, int) and nargs >= 0
        ):
            raise InvalidOptionError(
                "nargs must be a positive integer, '?', '*' or '+'"
            )
        if (
            not isinstance(default, six.string_types)
            and not isinstance(default, (bool, type(None)))
            and not istupleofstrings(default)
        ):
            raise InvalidOptionError(
                "default must be a bool, a string or a tuple of strings"
            )
        if choices and not istupleofstrings(choices):
            raise InvalidOptionError("choices must be a tuple of strings")
        if category and not isinstance(category, six.string_types):
            raise InvalidOptionError("Category must be a string")
        if category and category not in _ALL_CATEGORIES:
            raise InvalidOptionError(
                "Category must either be inferred or in the _ALL_CATEGORIES "
                "list in options.py: %s" % ", ".join(_ALL_CATEGORIES)
            )
        if not isinstance(define_depth, int):
            raise InvalidOptionError("DefineDepth must be an integer")
        if not help:
            raise InvalidOptionError("A help string must be provided")
        if possible_origins and not istupleofstrings(possible_origins):
            raise InvalidOptionError("possible_origins must be a tuple of strings")
        self.possible_origins = possible_origins

        if name:
            prefix, name, values = self.split_option(name)
            assert values == ()

            # --disable and --without options mean the default is enabled.
            # --enable and --with options mean the default is disabled.
            # However, we allow a default to be given so that the default
            # can be affected by other factors.
            if prefix:
                if default is None:
                    default = prefix in ("disable", "without")
                elif default is False:
                    # Flip the prefix so the displayed flag matches the
                    # non-default state.
                    prefix = {
                        "disable": "enable",
                        "without": "with",
                    }.get(prefix, prefix)
                elif default is True:
                    prefix = {
                        "enable": "disable",
                        "with": "without",
                    }.get(prefix, prefix)
        else:
            prefix = ""

        self.prefix = prefix
        self.name = name
        self.env = env
        # Normalize `default` into an OptionValue subclass instance.
        if default in (None, False):
            self.default = NegativeOptionValue(origin="default")
        elif isinstance(default, tuple):
            self.default = PositiveOptionValue(default, origin="default")
        elif default is True:
            self.default = PositiveOptionValue(origin="default")
        else:
            self.default = PositiveOptionValue((default,), origin="default")
        # When nargs is not given explicitly, infer it from the default
        # value and the presence of choices.
        if nargs is None:
            nargs = 0
            if len(self.default) == 1:
                nargs = "?"
            elif len(self.default) > 1:
                nargs = "*"
            elif choices:
                nargs = 1
        self.nargs = nargs
        has_choices = choices is not None
        # Cross-validate default against nargs and choices.
        if isinstance(self.default, PositiveOptionValue):
            if has_choices and len(self.default) == 0:
                raise InvalidOptionError(
                    "A `default` must be given along with `choices`"
                )
            if not self._validate_nargs(len(self.default)):
                raise InvalidOptionError("The given `default` doesn't satisfy `nargs`")
            if has_choices and not all(d in choices for d in self.default):
                raise InvalidOptionError(
                    "The `default` value must be one of %s"
                    % ", ".join("'%s'" % c for c in choices)
                )
        elif has_choices:
            maxargs = self.maxargs
            if len(choices) < maxargs and maxargs != sys.maxsize:
                raise InvalidOptionError("Not enough `choices` for `nargs`")
        self.choices = choices
        self.help = help
        self.category = category or _infer_option_category(define_depth)

    @staticmethod
    def split_option(option):
        """Split a flag or variable into a prefix, a name and values

        Variables come in the form NAME=values (no prefix).
        Flags come in the form --name=values or --prefix-name=values
        where prefix is one of 'with', 'without', 'enable' or 'disable'.
        The '=values' part is optional. Values are separated with commas.
        """
        if not isinstance(option, six.string_types):
            raise InvalidOptionError("Option must be a string")

        elements = option.split("=", 1)
        name = elements[0]
        values = tuple(elements[1].split(",")) if len(elements) == 2 else ()
        if name.startswith("--"):
            name = name[2:]
            if not name.islower():
                raise InvalidOptionError("Option must be all lowercase")
            elements = name.split("-", 1)
            prefix = elements[0]
            if len(elements) == 2 and prefix in (
                "enable",
                "disable",
                "with",
                "without",
            ):
                return prefix, elements[1], values
        else:
            # No leading '--': this must be an environment variable name.
            if name.startswith("-"):
                raise InvalidOptionError(
                    "Option must start with two dashes instead of one"
                )
            if name.islower():
                raise InvalidOptionError(
                    'Environment variable name "%s" must be all uppercase' % name
                )
        return "", name, values

    @staticmethod
    def _join_option(prefix, name):
        # The constraints around name and env in __init__ make it so that
        # we can distinguish between flags and environment variables with
        # islower/isupper.
        if name.isupper():
            assert not prefix
            return name
        elif prefix:
            return "--%s-%s" % (prefix, name)
        return "--%s" % name

    @property
    def option(self):
        """The full command line flag, or the environment variable name
        for environment-only options."""
        if self.prefix or self.name:
            return self._join_option(self.prefix, self.name)
        else:
            return self.env

    @property
    def minargs(self):
        """The minimum number of values this option accepts."""
        if isinstance(self.nargs, int):
            return self.nargs
        return 1 if self.nargs == "+" else 0

    @property
    def maxargs(self):
        """The maximum number of values this option accepts
        (sys.maxsize for unbounded)."""
        if isinstance(self.nargs, int):
            return self.nargs
        return 1 if self.nargs == "?" else sys.maxsize

    def _validate_nargs(self, num):
        """Return whether `num` values satisfies this option's nargs."""
        minargs, maxargs = self.minargs, self.maxargs
        return num >= minargs and num <= maxargs

    def get_value(self, option=None, origin="unknown"):
        """Given a full command line option (e.g. --enable-foo=bar) or a
        variable assignment (FOO=bar), returns the corresponding OptionValue.

        Note: variable assignments can come from either the environment or
        from the command line (e.g. `../configure CFLAGS=-O2`)
        """
        if not option:
            return self.default

        if self.possible_origins and origin not in self.possible_origins:
            raise InvalidOptionError(
                "%s can not be set by %s. Values are accepted from: %s"
                % (option, origin, ", ".join(self.possible_origins))
            )

        prefix, name, values = self.split_option(option)
        option = self._join_option(prefix, name)

        assert name in (self.name, self.env)

        if prefix in ("disable", "without"):
            if values != ():
                raise InvalidOptionError("Cannot pass a value to %s" % option)
            return NegativeOptionValue(origin=origin)

        if name == self.env:
            # Conventional environment-variable spellings: VAR= disables,
            # VAR=1 enables (for options that can take no value).
            if values == ("",):
                return NegativeOptionValue(origin=origin)
            if self.nargs in (0, "?", "*") and values == ("1",):
                return PositiveOptionValue(origin=origin)

        values = PositiveOptionValue(values, origin=origin)

        if not self._validate_nargs(len(values)):
            raise InvalidOptionError(
                "%s takes %s value%s"
                % (
                    option,
                    {
                        "?": "0 or 1",
                        "*": "0 or more",
                        "+": "1 or more",
                    }.get(self.nargs, str(self.nargs)),
                    "s" if (not isinstance(self.nargs, int) or self.nargs != 1) else "",
                )
            )

        if len(values) and self.choices:
            relative_result = None
            for val in values:
                if self.nargs in ("+", "*"):
                    # '+value'/'-value' add to / remove from the default
                    # set instead of replacing it wholesale.
                    if val.startswith(("+", "-")):
                        if relative_result is None:
                            relative_result = list(self.default)
                        sign = val[0]
                        val = val[1:]
                        if sign == "+":
                            if val not in relative_result:
                                relative_result.append(val)
                        else:
                            try:
                                relative_result.remove(val)
                            except ValueError:
                                pass

                if val not in self.choices:
                    raise InvalidOptionError(
                        "'%s' is not one of %s"
                        % (val, ", ".join("'%s'" % c for c in self.choices))
                    )

            if relative_result is not None:
                values = PositiveOptionValue(relative_result, origin=origin)

        return values

    def __repr__(self):
        return "<%s [%s]>" % (self.__class__.__name__, self.option)
+
+
class CommandLineHelper(object):
    """Helper class to handle the various ways options can be given either
    on the command line or through the environment.

    For instance, an Option('--foo', env='FOO') can be passed as --foo on the
    command line, or as FOO=1 in the environment *or* on the command line.

    If multiple variants are given, command line is preferred over the
    environment, and if different values are given on the command line, the
    last one wins. (This mimics the behavior of autoconf, avoiding to break
    existing mozconfigs using valid options in weird ways)

    Extra options can be added afterwards through API calls. For those,
    conflicting values will raise an exception.
    """

    def __init__(self, environ=os.environ, argv=sys.argv):
        self._environ = dict(environ)
        # Arguments seen on the command line, keyed by option/variable name.
        self._args = OrderedDict()
        # Arguments added programmatically via add(), kept separate so that
        # conflicts among them can be detected.
        self._extra_args = OrderedDict()
        self._origins = {}
        # Monotonic counter ordering arguments by insertion.
        self._last = 0

        assert argv and not argv[0].startswith("--")
        for arg in argv[1:]:
            self.add(arg, "command-line", self._args)

    def add(self, arg, origin="command-line", args=None):
        """Register `arg` (e.g. '--enable-foo=bar' or 'FOO=1') with its
        origin, in the extra set by default.

        Raises ConflictingOptionError when an extra argument differs from
        one previously added for the same option.
        """
        assert origin != "default"
        prefix, name, values = Option.split_option(arg)
        if args is None:
            args = self._extra_args
        if args is self._extra_args and name in self._extra_args:
            old_arg = self._extra_args[name][0]
            old_prefix, _, old_values = Option.split_option(old_arg)
            if prefix != old_prefix or values != old_values:
                raise ConflictingOptionError(
                    "Cannot add '{arg}' to the {origin} set because it "
                    "conflicts with '{old_arg}' that was added earlier",
                    arg=arg,
                    origin=origin,
                    old_arg=old_arg,
                    old_origin=self._origins[old_arg],
                )
        self._last += 1
        args[name] = arg, self._last
        self._origins[arg] = origin

    def _prepare(self, option, args):
        """Find, consume and return the (arg, origin) pair in `args` that
        applies to `option`, falling back to the environment when looking
        at the command-line set."""
        arg = None
        origin = "command-line"
        from_name = args.get(option.name)
        from_env = args.get(option.env)
        if from_name and from_env:
            arg1, pos1 = from_name
            arg2, pos2 = from_env
            # The occurrence given last wins.
            arg, pos = (arg1, pos1) if abs(pos1) > abs(pos2) else (arg2, pos2)
            if args is self._extra_args and (
                option.get_value(arg1) != option.get_value(arg2)
            ):
                origin = self._origins[arg]
                old_arg = arg2 if abs(pos1) > abs(pos2) else arg1
                raise ConflictingOptionError(
                    "Cannot add '{arg}' to the {origin} set because it "
                    "conflicts with '{old_arg}' that was added earlier",
                    arg=arg,
                    origin=origin,
                    old_arg=old_arg,
                    old_origin=self._origins[old_arg],
                )
        elif from_name or from_env:
            arg, pos = from_name if from_name else from_env
        elif option.env and args is self._args:
            env = self._environ.get(option.env)
            if env is not None:
                arg = "%s=%s" % (option.env, env)
                origin = "environment"

        origin = self._origins.get(arg, origin)

        # Each option is handled at most once; drop it from the pending set.
        for k in (option.name, option.env):
            try:
                del args[k]
            except KeyError:
                pass

        return arg, origin

    def handle(self, option):
        """Return the OptionValue corresponding to the given Option instance,
        depending on the command line, environment, and extra arguments, and
        the actual option or variable that set it.
        Only works once for a given Option.
        """
        assert isinstance(option, Option)

        arg, origin = self._prepare(option, self._args)
        ret = option.get_value(arg, origin)

        extra_arg, extra_origin = self._prepare(option, self._extra_args)
        extra_ret = option.get_value(extra_arg, extra_origin)

        if extra_ret.origin == "default":
            return ret, arg

        if ret.origin != "default" and extra_ret != ret:
            raise ConflictingOptionError(
                "Cannot add '{arg}' to the {origin} set because it conflicts "
                "with {old_arg} from the {old_origin} set",
                arg=extra_arg,
                origin=extra_ret.origin,
                old_arg=arg,
                old_origin=ret.origin,
            )

        return extra_ret, extra_arg

    def __iter__(self):
        # Yield every known argument: command-line set first, then extras,
        # each in insertion order.
        for d in (self._args, self._extra_args):
            for arg, pos in six.itervalues(d):
                yield arg
diff --git a/python/mozbuild/mozbuild/configure/util.py b/python/mozbuild/mozbuild/configure/util.py
new file mode 100644
index 0000000000..a58dc4d3f4
--- /dev/null
+++ b/python/mozbuild/mozbuild/configure/util.py
@@ -0,0 +1,235 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import codecs
+import io
+import itertools
+import locale
+import logging
+import os
+import sys
+from collections import deque
+from contextlib import contextmanager
+
+import six
+from looseversion import LooseVersion
+
+
def getpreferredencoding():
    """Return the locale's preferred encoding, or None when it cannot be
    determined."""
    # locale._parse_localename makes locale.getpreferredencoding
    # return None when LC_ALL is C, instead of e.g. 'US-ASCII' or
    # 'ANSI_X3.4-1968' when it uses nl_langinfo.
    try:
        return locale.getpreferredencoding()
    except ValueError:
        # On english OSX, LC_ALL is UTF-8 (not en-US.UTF-8), and that
        # throws off locale._parse_localename, which ends up being used
        # on e.g. homebrew python.
        if os.environ.get("LC_ALL", "").upper() == "UTF-8":
            return "utf-8"
        return None
+
+
class Version(LooseVersion):
    """A simple subclass of looseversion.LooseVersion.
    Adds attributes for `major`, `minor`, `patch` for the first three
    version components so users can easily pull out major/minor
    versions, like:

    v = Version('1.2b')
    v.major == 1
    v.minor == 2
    v.patch == 0
    """

    def __init__(self, version):
        # Can't use super, LooseVersion's base class is not a new-style class.
        LooseVersion.__init__(self, version)
        # Collect leading integer components, stopping at the first
        # non-integer, then pad with zeroes so there are always three.
        numeric = []
        for component in self.version:
            if not isinstance(component, int):
                break
            numeric.append(component)
        numeric.extend((0, 0, 0))
        self.major, self.minor, self.patch = numeric[:3]
+
+
class ConfigureOutputHandler(logging.Handler):
    """A logging handler class that sends info messages to stdout and other
    messages to stderr.

    Messages sent to stdout are not formatted with the attached Formatter.
    Additionally, if they end with '... ', no newline character is printed,
    making the next message printed follow the '... '.

    Only messages above log level INFO (included) are logged.

    Messages below that level can be kept until an ERROR message is received,
    at which point the last `maxlen` accumulated messages below INFO are
    printed out. This feature is only enabled under the `queue_debug` context
    manager.
    """

    def __init__(self, stdout=sys.stdout, stderr=sys.stderr, maxlen=20):
        super(ConfigureOutputHandler, self).__init__()

        # Python has this feature where it sets the encoding of pipes to
        # ascii, which blatantly fails when trying to print out non-ascii.
        def fix_encoding(fh):
            if six.PY3:
                return fh
            try:
                isatty = fh.isatty()
            except AttributeError:
                isatty = True

            if not isatty:
                encoding = getpreferredencoding()
                if encoding:
                    return codecs.getwriter(encoding)(fh)
            return fh

        self._stdout = fix_encoding(stdout)
        self._stderr = fix_encoding(stderr) if stdout != stderr else self._stdout
        # Determine whether stdout and stderr refer to the same underlying
        # output; this drives how '... ' continuations interleave with
        # stderr messages in emit().
        try:
            fd1 = self._stdout.fileno()
            fd2 = self._stderr.fileno()
            self._same_output = self._is_same_output(fd1, fd2)
        except (AttributeError, io.UnsupportedOperation):
            self._same_output = self._stdout == self._stderr
        self._stdout_waiting = None
        # Ring buffer of sub-INFO records; one extra slot lets _emit_queue
        # detect that older entries were dropped.
        self._debug = deque(maxlen=maxlen + 1)
        self._keep_if_debug = self.THROW
        self._queue_is_active = False

    @staticmethod
    def _is_same_output(fd1, fd2):
        # Two descriptors are "the same output" if they are equal or point
        # at the same inode on the same device.
        if fd1 == fd2:
            return True
        stat1 = os.fstat(fd1)
        stat2 = os.fstat(fd2)
        return stat1.st_ino == stat2.st_ino and stat1.st_dev == stat2.st_dev

    # possible values for _stdout_waiting
    WAITING = 1
    INTERRUPTED = 2

    # possible values for _keep_if_debug
    THROW = 0
    KEEP = 1
    PRINT = 2

    def emit(self, record):
        """Route `record`: INFO goes raw to stdout (honoring '... '
        continuations), sub-INFO is queued or dropped depending on
        _keep_if_debug, everything else is formatted to stderr."""
        try:
            if record.levelno == logging.INFO:
                stream = self._stdout
                msg = six.ensure_text(record.getMessage())
                if self._stdout_waiting == self.INTERRUPTED and self._same_output:
                    msg = " ... %s" % msg
                # NOTE(review): this assignment is dead; it is immediately
                # overwritten by the if/else just below.
                self._stdout_waiting = msg.endswith("... ")
                if msg.endswith("... "):
                    self._stdout_waiting = self.WAITING
                else:
                    self._stdout_waiting = None
                    msg = "%s\n" % msg
            elif record.levelno < logging.INFO and self._keep_if_debug != self.PRINT:
                if self._keep_if_debug == self.KEEP:
                    self._debug.append(record)
                return
            else:
                # An error flushes the queued debug messages first.
                if record.levelno >= logging.ERROR and len(self._debug):
                    self._emit_queue()

                if self._stdout_waiting == self.WAITING and self._same_output:
                    self._stdout_waiting = self.INTERRUPTED
                    self._stdout.write("\n")
                    self._stdout.flush()
                stream = self._stderr
                msg = "%s\n" % self.format(record)
            stream.write(msg)
            stream.flush()
        except (KeyboardInterrupt, SystemExit, IOError):
            raise
        except Exception:
            self.handleError(record)

    @contextmanager
    def queue_debug(self):
        """Context manager enabling accumulation of sub-INFO messages,
        which are flushed if an exception (or an ERROR record) occurs.
        Re-entrant: nested uses are no-ops."""
        if self._queue_is_active:
            yield
            return
        self._queue_is_active = True
        self._keep_if_debug = self.KEEP
        try:
            yield
        except Exception:
            self._emit_queue()
            # The exception will be handled and very probably printed out by
            # something upper in the stack.
            raise
        finally:
            self._queue_is_active = False
            self._keep_if_debug = self.THROW
            self._debug.clear()

    def _emit_queue(self):
        """Flush the accumulated sub-INFO records, prefixing with a
        truncation notice when the ring buffer overflowed."""
        self._keep_if_debug = self.PRINT
        if len(self._debug) == self._debug.maxlen:
            r = self._debug.popleft()
            self.emit(
                logging.LogRecord(
                    r.name,
                    r.levelno,
                    r.pathname,
                    r.lineno,
                    "<truncated - see config.log for full output>",
                    (),
                    None,
                )
            )
        while True:
            try:
                self.emit(self._debug.popleft())
            except IndexError:
                break
        self._keep_if_debug = self.KEEP
+
+
class LineIO(object):
    """File-like class that sends each line of the written data to a callback
    (without carriage returns).
    """

    def __init__(self, callback, errors="strict"):
        # Invoked once per complete line, without the trailing newline.
        self._callback = callback
        # Holds a trailing partial line until it is completed or closed.
        self._buf = ""
        self._encoding = getpreferredencoding()
        self._errors = errors

    def write(self, buf):
        """Buffer `buf`, invoking the callback once per complete line."""
        buf = six.ensure_text(buf, encoding=self._encoding or "utf-8")
        lines = buf.splitlines()
        if not lines:
            return
        # Prepend any partial line left over from the previous write.
        if self._buf:
            lines[0] = self._buf + lines[0]
            self._buf = ""
        # Without a trailing newline the last line is incomplete; keep it
        # buffered for the next write.
        if not buf.endswith("\n"):
            self._buf = lines.pop()

        for line in lines:
            self._callback(line)

    def close(self):
        """Flush any buffered partial line to the callback."""
        if self._buf:
            self._callback(self._buf)
            self._buf = ""

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()
diff --git a/python/mozbuild/mozbuild/controller/__init__.py b/python/mozbuild/mozbuild/controller/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/controller/__init__.py
diff --git a/python/mozbuild/mozbuild/controller/building.py b/python/mozbuild/mozbuild/controller/building.py
new file mode 100644
index 0000000000..de6c01afe4
--- /dev/null
+++ b/python/mozbuild/mozbuild/controller/building.py
@@ -0,0 +1,1872 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import errno
+import getpass
+import io
+import json
+import logging
+import os
+import re
+import shutil
+import subprocess
+import sys
+import time
+from collections import Counter, OrderedDict, namedtuple
+from textwrap import TextWrapper
+
+import six
+from mach.site import CommandSiteManager
+
+try:
+ import psutil
+except Exception:
+ psutil = None
+
+import mozfile
+import mozpack.path as mozpath
+from mach.mixin.logging import LoggingMixin
+from mach.util import get_state_dir
+from mozsystemmonitor.resourcemonitor import SystemResourceMonitor
+from mozterm.widgets import Footer
+
+from ..backend import get_backend_class
+from ..base import MozbuildObject
+from ..compilation.warnings import WarningsCollector, WarningsDatabase
+from ..testing import install_test_files
+from ..util import FileAvoidWrite, mkdir, resolve_target_to_make
+from .clobber import Clobberer
+
# Warning shown after a build during which the macOS Finder consumed a lot
# of CPU; %f is interpolated with the measured average CPU percentage.
FINDER_SLOW_MESSAGE = """
===================
PERFORMANCE WARNING

The OS X Finder application (file indexing used by Spotlight) used a lot of CPU
during the build - an average of %f%% (100%% is 1 core). This made your build
slower.

Consider adding ".noindex" to the end of your object directory name to have
Finder ignore it. Or, add an indexing exclusion through the Spotlight System
Preferences.
===================
""".strip()


# Message shown when tests can't be installed because the CLOBBER file was
# updated; each paragraph is wrapped to the default TextWrapper width.
INSTALL_TESTS_CLOBBER = "".join(
    [
        TextWrapper().fill(line) + "\n"
        for line in """
The build system was unable to install tests because the CLOBBER file has \
been updated. This means if you edited any test files, your changes may not \
be picked up until a full/clobber build is performed.

The easiest and fastest way to perform a clobber build is to run:

 $ mach clobber
 $ mach build

If you did not modify any test files, it is safe to ignore this message \
and proceed with running tests. To do this run:

 $ touch {clobber_file}
""".splitlines()
    ]
)

# Informational message shown when the CLOBBER file was updated before the
# build even though a clobber wasn't expected to be required.
CLOBBER_REQUESTED_MESSAGE = """
===================
The CLOBBER file was updated prior to this build. A clobber build may be
required to succeed, but we weren't expecting it to.

Please consider filing a bug for this failure if you have reason to believe
this is a clobber bug and not due to local changes.
===================
""".strip()


# Result of parsing one line of build output: `warning` is a parsed warning
# object or None, `state_changed` says whether the build system changed
# state, and `message` is an optional string to display to the user.
BuildOutputResult = namedtuple(
    "BuildOutputResult", ("warning", "state_changed", "message")
)
+
+
class TierStatus(object):
    """Represents the state and progress of tier traversal.

    The build system is organized into linear phases called tiers. Each tier
    executes in the order it was defined, 1 at a time.
    """

    def __init__(self, resources):
        """Accepts a SystemResourceMonitor to record results against."""
        # Per-tier timing info, in definition order.
        self.tiers = OrderedDict()
        # Per-tier status: None (not started), 'active' or 'finished'.
        self.tier_status = OrderedDict()
        self.resources = resources

    def set_tiers(self, tiers):
        """Record the set of known tiers."""
        for tier in tiers:
            self.tiers[tier] = dict(
                begin_time=None,
                finish_time=None,
                duration=None,
            )
            self.tier_status[tier] = None

    def begin_tier(self, tier):
        """Record that execution of a tier has begun."""
        self.tier_status[tier] = "active"
        t = self.tiers[tier]
        t["begin_time"] = time.monotonic()
        self.resources.begin_phase(tier)

    def finish_tier(self, tier):
        """Record that execution of a tier has finished."""
        self.tier_status[tier] = "finished"
        t = self.tiers[tier]
        t["finish_time"] = time.monotonic()
        t["duration"] = self.resources.finish_phase(tier)

    def tiered_resource_usage(self):
        """Obtains an object containing resource usage for tiers.

        The returned object is suitable for serialization.
        """
        o = []

        for tier, state in self.tiers.items():
            t_entry = dict(
                name=tier,
                start=state["begin_time"],
                end=state["finish_time"],
                duration=state["duration"],
            )

            self.add_resources_to_dict(t_entry, phase=tier)

            o.append(t_entry)

        return o

    def add_resources_to_dict(self, entry, start=None, end=None, phase=None):
        """Helper function to append resource information to a dict."""
        cpu_percent = self.resources.aggregate_cpu_percent(
            start=start, end=end, phase=phase, per_cpu=False
        )
        cpu_times = self.resources.aggregate_cpu_times(
            start=start, end=end, phase=phase, per_cpu=False
        )
        io = self.resources.aggregate_io(start=start, end=end, phase=phase)

        # No CPU data means no resource data was collected for this span;
        # leave `entry` untouched.
        if cpu_percent is None:
            return entry

        entry["cpu_percent"] = cpu_percent
        entry["cpu_times"] = list(cpu_times)
        entry["io"] = list(io)

        return entry

    def add_resource_fields_to_dict(self, d):
        """Record the field names of the resource tuples into `d` so
        serialized data is self-describing."""
        for usage in self.resources.range_usage():
            cpu_times = self.resources.aggregate_cpu_times(per_cpu=False)

            d["cpu_times_fields"] = list(cpu_times._fields)
            d["io_fields"] = list(usage.io._fields)
            d["virt_fields"] = list(usage.virt._fields)
            d["swap_fields"] = list(usage.swap._fields)

        return d
+
+
class BuildMonitor(MozbuildObject):
    """Monitors the output of the build."""

    def init(self, warnings_path):
        """Create a new monitor.

        warnings_path is a path of a warnings database to use.

        This is a separate method (rather than __init__) because instances
        are created via MozbuildObject._spawn(), which controls construction.
        """
        self._warnings_path = warnings_path
        self.resources = SystemResourceMonitor(poll_interval=1.0)
        self._resources_started = False

        self.tiers = TierStatus(self.resources)

        self.warnings_database = WarningsDatabase()
        if os.path.exists(warnings_path):
            try:
                self.warnings_database.load_from_file(warnings_path)
            except ValueError:
                # A corrupt database is discarded rather than failing the
                # build; it will be repopulated by this invocation.
                os.remove(warnings_path)

        # Contains warnings unique to this invocation. Not populated with old
        # warnings.
        self.instance_warnings = WarningsDatabase()

        def on_warning(warning):
            # Skip `errors`
            if warning["type"] == "error":
                return

            filename = warning["filename"]

            if not os.path.exists(filename):
                raise Exception("Could not find file containing warning: %s" % filename)

            self.warnings_database.insert(warning)
            # Make a copy so mutations don't impact other database.
            self.instance_warnings.insert(warning.copy())

        self._warnings_collector = WarningsCollector(on_warning, objdir=self.topobjdir)
        self._build_tasks = []

        self.build_objects = []
        self.build_dirs = set()

    def start(self):
        """Record the start of the build."""
        self.start_time = time.monotonic()
        self._finder_start_cpu = self._get_finder_cpu_usage()

    def start_resource_recording(self):
        """Begin polling system resource usage in the background."""
        # This should be merged into start() once bug 892342 lands.
        self.resources.start()
        self._resources_started = True

    def on_line(self, line):
        """Consume a line of output from the build system.

        This will parse the line for state and determine whether more action is
        needed.

        Returns a BuildOutputResult instance.

        In this named tuple, warning will be an object describing a new parsed
        warning. Otherwise it will be None.

        state_changed indicates whether the build system changed state with
        this line. If the build system changed state, the caller may want to
        query this instance for the current state in order to update UI, etc.

        message is either None, or the content of a message to be
        displayed to the user.
        """
        message = None

        if line.startswith("BUILDSTATUS"):
            args = line.split()[1:]

            action = args.pop(0)
            update_needed = True

            if action == "TIERS":
                self.tiers.set_tiers(args)
                update_needed = False
            elif action == "TIER_START":
                tier = args[0]
                self.tiers.begin_tier(tier)
            elif action == "TIER_FINISH":
                (tier,) = args
                self.tiers.finish_tier(tier)
            elif action == "OBJECT_FILE":
                self.build_objects.append(args[0])
                update_needed = False
            elif action == "BUILD_VERBOSE":
                build_dir = args[0]
                # Only surface each directory the first time it is seen.
                if build_dir not in self.build_dirs:
                    self.build_dirs.add(build_dir)
                    message = build_dir
                update_needed = False
            else:
                raise Exception("Unknown build status: %s" % action)

            return BuildOutputResult(None, update_needed, message)
        elif line.startswith("BUILDTASK"):
            _, data = line.split(maxsplit=1)
            # Check that we can parse the JSON. Skip this line if we can't;
            # we'll be missing data, but that's not a huge deal.
            try:
                json.loads(data)
                self._build_tasks.append(data)
            except json.decoder.JSONDecodeError:
                pass
            return BuildOutputResult(None, False, None)

        warning = None

        try:
            warning = self._warnings_collector.process_line(line)
            message = line
        except Exception:
            # Best-effort warning parsing; a malformed line is still echoed
            # (message stays None only if process_line raised before
            # assignment, i.e. never here -- message is set after success).
            pass

        return BuildOutputResult(warning, False, message)

    def stop_resource_recording(self):
        """Stop the resource poller if it was started; safe to call twice."""
        if self._resources_started:
            self.resources.stop()

        self._resources_started = False

    def finish(self, record_usage=True):
        """Record the end of the build."""
        self.stop_resource_recording()
        self.end_time = time.monotonic()
        self._finder_end_cpu = self._get_finder_cpu_usage()
        self.elapsed = self.end_time - self.start_time

        self.warnings_database.prune()
        self.warnings_database.save_to_file(self._warnings_path)

        if "MOZ_AUTOMATION" not in os.environ:
            build_tasks_path = self._get_state_filename("build_tasks.json")
            with io.open(build_tasks_path, "w", encoding="utf-8", newline="\n") as fh:
                fh.write("[")
                first = True
                for task in self._build_tasks:
                    # We've already verified all of these are valid JSON, so we
                    # can write the data out to the file directly.
                    fh.write("%s\n %s" % ("," if not first else "", task))
                    first = False
                fh.write("\n]\n")

        # Record usage.
        if not record_usage:
            return

        try:
            usage = self.get_resource_usage()
            if not usage:
                return

            self.log_resource_usage(usage)
            # When running on automation, we store the resource usage data in
            # the upload path, alongside, for convenience, a copy of the HTML
            # viewer.
            if "MOZ_AUTOMATION" in os.environ and "UPLOAD_PATH" in os.environ:
                build_resources_path = os.path.join(
                    os.environ["UPLOAD_PATH"], "build_resources.json"
                )
                shutil.copy(
                    os.path.join(
                        self.topsrcdir,
                        "python",
                        "mozbuild",
                        "mozbuild",
                        "resources",
                        "html-build-viewer",
                        "build_resources.html",
                    ),
                    os.environ["UPLOAD_PATH"],
                )
            else:
                build_resources_path = self._get_state_filename("build_resources.json")
            with io.open(
                build_resources_path, "w", encoding="utf-8", newline="\n"
            ) as fh:
                to_write = six.ensure_text(
                    json.dumps(self.resources.as_dict(), indent=2)
                )
                fh.write(to_write)
        except Exception as e:
            # Failing to persist resource data must never fail the build.
            self.log(
                logging.WARNING,
                "build_resources_error",
                {"msg": str(e)},
                "Exception when writing resource usage file: {msg}",
            )

    def _get_finder_cpu_usage(self):
        """Obtain the CPU usage of the Finder app on OS X.

        This is used to detect high CPU usage.
        """
        if not sys.platform.startswith("darwin"):
            return None

        if not psutil:
            return None

        for proc in psutil.process_iter():
            # NOTE(review): in recent psutil releases `name`, `username` and
            # `exe` are methods (not attributes) and `get_cpu_times()` was
            # renamed `cpu_times()` -- confirm which psutil version this code
            # is pinned to, otherwise these comparisons never match.
            if proc.name != "Finder":
                continue

            if proc.username != getpass.getuser():
                continue

            # Try to isolate system finder as opposed to other "Finder"
            # processes.
            if not proc.exe.endswith("CoreServices/Finder.app/Contents/MacOS/Finder"):
                continue

            return proc.get_cpu_times()

        return None

    def have_high_finder_usage(self):
        """Determine whether there was high Finder CPU usage during the build.

        Returns a 2-tuple (high_usage, finder_percent): high_usage is True if
        Finder used more than 25% of one core, False if it didn't, and both
        values are None if there is nothing to report.
        """
        if not self._finder_start_cpu:
            return None, None

        # We only measure if the measured range is sufficiently long.
        if self.elapsed < 15:
            return None, None

        if not self._finder_end_cpu:
            return None, None

        start = self._finder_start_cpu
        end = self._finder_end_cpu

        start_total = start.user + start.system
        end_total = end.user + end.system

        cpu_seconds = end_total - start_total

        # If Finder used more than 25% of 1 core during the build, report an
        # error.
        finder_percent = cpu_seconds / self.elapsed * 100

        return finder_percent > 25, finder_percent

    def have_excessive_swapping(self):
        """Determine whether there was excessive swapping during the build.

        Returns a tuple of (excessive, swap_in, swap_out). All values are None
        if no swap information is available.
        """
        if not self.have_resource_usage:
            return None, None, None

        swap_in = sum(m.swap.sin for m in self.resources.measurements)
        swap_out = sum(m.swap.sout for m in self.resources.measurements)

        # The threshold of 512 MB has been arbitrarily chosen.
        #
        # Choosing a proper value that is ideal for everyone is hard. We will
        # likely iterate on the logic until people are generally satisfied.
        # If a value is too low, the eventual warning produced does not carry
        # much meaning. If the threshold is too high, people may not see the
        # warning and the warning will thus be ineffective.
        excessive = swap_in > 512 * 1048576 or swap_out > 512 * 1048576
        return excessive, swap_in, swap_out

    @property
    def have_resource_usage(self):
        """Whether resource usage is available."""
        return self.resources.start_time is not None

    def get_resource_usage(self):
        """Produce a data structure containing the low-level resource usage information.

        This data structure can e.g. be serialized into JSON and saved for
        subsequent analysis.

        If no resource usage is available, None is returned.
        """
        if not self.have_resource_usage:
            return None

        cpu_percent = self.resources.aggregate_cpu_percent(phase=None, per_cpu=False)
        cpu_times = self.resources.aggregate_cpu_times(phase=None, per_cpu=False)
        io = self.resources.aggregate_io(phase=None)

        o = dict(
            version=3,
            argv=sys.argv,
            start=self.start_time,
            end=self.end_time,
            duration=self.end_time - self.start_time,
            resources=[],
            cpu_percent=cpu_percent,
            cpu_times=cpu_times,
            io=io,
            objects=self.build_objects,
        )

        o["tiers"] = self.tiers.tiered_resource_usage()

        self.tiers.add_resource_fields_to_dict(o)

        # One entry per sampling interval, annotated with per-tier data.
        for usage in self.resources.range_usage():
            cpu_percent = self.resources.aggregate_cpu_percent(
                usage.start, usage.end, per_cpu=False
            )
            cpu_times = self.resources.aggregate_cpu_times(
                usage.start, usage.end, per_cpu=False
            )

            entry = dict(
                start=usage.start,
                end=usage.end,
                virt=list(usage.virt),
                swap=list(usage.swap),
            )

            self.tiers.add_resources_to_dict(entry, start=usage.start, end=usage.end)

            o["resources"].append(entry)

        # If the imports for this file ran before the in-tree virtualenv
        # was bootstrapped (for instance, for a clobber build in automation),
        # psutil might not be available.
        #
        # Treat psutil as optional to avoid an outright failure to log resources
        # TODO: it would be nice to collect data on the storage device as well
        # in this case.
        o["system"] = {}
        if psutil:
            o["system"].update(
                dict(
                    logical_cpu_count=psutil.cpu_count(),
                    physical_cpu_count=psutil.cpu_count(logical=False),
                    swap_total=psutil.swap_memory()[0],
                    vmem_total=psutil.virtual_memory()[0],
                )
            )

        return o

    def log_resource_usage(self, usage):
        """Summarize the resource usage of this build in a log message."""

        if not usage:
            return

        params = dict(
            duration=self.end_time - self.start_time,
            cpu_percent=usage["cpu_percent"],
            io_read_bytes=usage["io"].read_bytes,
            io_write_bytes=usage["io"].write_bytes,
            io_read_time=usage["io"].read_time,
            io_write_time=usage["io"].write_time,
        )

        message = (
            "Overall system resources - Wall time: {duration:.0f}s; "
            "CPU: {cpu_percent:.0f}%; "
            "Read bytes: {io_read_bytes}; Write bytes: {io_write_bytes}; "
            "Read time: {io_read_time}; Write time: {io_write_time}"
        )

        self.log(logging.WARNING, "resource_usage", params, message)

        excessive, sin, sout = self.have_excessive_swapping()
        if excessive is not None and (sin or sout):
            # Convert bytes to MB for display.
            sin /= 1048576
            sout /= 1048576
            self.log(
                logging.WARNING,
                "swap_activity",
                {"sin": sin, "sout": sout},
                "Swap in/out (MB): {sin}/{sout}",
            )

    def ccache_stats(self, ccache=None):
        """Return a CCacheStats snapshot, or None if ccache is unavailable.

        ccache is an optional path to the ccache binary; when omitted it is
        looked up on PATH.
        """
        ccache_stats = None

        if ccache is None:
            ccache = mozfile.which("ccache")
        if ccache:
            # With CCache v3.7+ we can use --print-stats
            has_machine_format = CCacheStats.check_version_3_7_or_newer(ccache)
            try:
                output = subprocess.check_output(
                    [ccache, "--print-stats" if has_machine_format else "-s"],
                    universal_newlines=True,
                )
                ccache_stats = CCacheStats(output, has_machine_format)
            except ValueError as e:
                self.log(logging.WARNING, "ccache", {"msg": str(e)}, "{msg}")
        return ccache_stats
+
+
class TerminalLoggingHandler(logging.Handler):
    """Logging handler that cooperates with terminal window dressing.

    This class should probably live elsewhere, like the mach core. Consider
    this a proving ground for its usefulness.
    """

    def __init__(self):
        logging.Handler.__init__(self)

        self.fh = sys.stdout
        self.footer = None

    def flush(self):
        """Flush the underlying stream while holding the handler lock."""
        self.acquire()
        try:
            self.fh.flush()
        finally:
            self.release()

    def emit(self, record):
        """Write a formatted record, clearing and redrawing any footer."""
        text = self.format(record)

        self.acquire()
        try:
            if self.footer:
                self.footer.clear()

            self.fh.write(text + "\n")

            if self.footer:
                self.footer.draw()

            # Without an explicit flush, the footer may not get drawn.
            self.fh.flush()
        finally:
            self.release()
+
+
class BuildProgressFooter(Footer):
    """Handles display of a build progress indicator in a terminal.

    When mach builds inside a blessed-supported terminal, it will render
    progress information collected from a BuildMonitor. This class converts the
    state of BuildMonitor into terminal output.
    """

    def __init__(self, terminal, monitor):
        Footer.__init__(self, terminal)
        self.tiers = six.viewitems(monitor.tiers.tier_status)

    def draw(self):
        """Draws this footer in the terminal."""

        if not self.tiers:
            return

        # Rendered output looks something like:
        # TIER: static export libs tools

        parts = [("bold", "TIER:")]
        for name, state in self.tiers:
            if state is None:
                parts.append(name)
            elif state == "finished":
                parts.append(("green", name))
            else:
                parts.append(("underline_yellow", name))

        self.write(parts)
+
+
class OutputManager(LoggingMixin):
    """Handles writing job output to a terminal or log."""

    def __init__(self, log_manager, footer):
        self.populate_logger()

        self.footer = None
        terminal = log_manager.terminal

        # TODO convert terminal footer to config file setting.
        # No terminal (or an Emacs shell) means no window dressing at all.
        if not terminal or os.environ.get("INSIDE_EMACS", None):
            return

        if os.environ.get("MACH_NO_TERMINAL_FOOTER", None):
            footer = None

        self.t = terminal
        self.footer = footer

        self._handler = TerminalLoggingHandler()
        self._handler.setFormatter(log_manager.terminal_formatter)
        self._handler.footer = self.footer

        previous_handler = log_manager.replace_terminal_handler(self._handler)
        self._handler.level = previous_handler.level

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        if self.footer:
            self.footer.clear()
            # Prevents the footer from being redrawn if logging occurs.
            self._handler.footer = None

    def write_line(self, line):
        """Print a line, clearing and redrawing the footer around it."""
        if self.footer:
            self.footer.clear()

        print(line)

        if self.footer:
            self.footer.draw()

    def refresh(self):
        """Redraw the footer, if one is active."""
        if self.footer:
            self.footer.clear()
            self.footer.draw()
+
+
class BuildOutputManager(OutputManager):
    """Handles writing build output to a terminal, to logs, etc."""

    def __init__(self, log_manager, monitor, footer):
        self.monitor = monitor
        OutputManager.__init__(self, log_manager, footer)

    def __exit__(self, exc_type, exc_value, traceback):
        OutputManager.__exit__(self, exc_type, exc_value, traceback)

        # Stop the resource monitor unconditionally: a still-running
        # collection child process could otherwise keep this process from
        # exiting.
        self.monitor.stop_resource_recording()

    def on_line(self, line):
        """Feed one line of build output to the monitor and render it."""
        warning, state_changed, message = self.monitor.on_line(line)

        if message:
            self.log(logging.INFO, "build_output", {"line": message}, "{line}")
        elif state_changed:
            # NOTE(review): this guards on `self.handler`, while __init__
            # assigns `self._handler` -- confirm which attribute is intended.
            handler = getattr(self, "handler", None)
            if handler:
                handler.acquire()
            try:
                self.refresh()
            finally:
                if handler:
                    handler.release()
+
+
class StaticAnalysisFooter(Footer):
    """Handles display of a static analysis progress indicator in a terminal."""

    def __init__(self, terminal, monitor):
        Footer.__init__(self, terminal)
        self.monitor = monitor

    def draw(self):
        """Draws this footer in the terminal."""

        monitor = self.monitor
        processed = monitor.num_files_processed
        total = monitor.num_files
        percent = "(%.2f%%)" % (processed * 100.0 / total)

        parts = [
            ("bright_black", "Processing"),
            ("yellow", str(processed)),
            ("bright_black", "of"),
            ("yellow", str(total)),
            ("bright_black", "files"),
            ("green", percent),
        ]
        current = monitor.current_file
        if current:
            parts.append(("bold", current))

        self.write(parts)
+
+
class StaticAnalysisOutputManager(OutputManager):
    """Handles writing static analysis output to a terminal or file."""

    def __init__(self, log_manager, monitor, footer):
        self.monitor = monitor
        self.raw = ""
        OutputManager.__init__(self, log_manager, footer)

    def on_line(self, line):
        """Feed one line of analysis output to the monitor and render it."""
        warning, relevant = self.monitor.on_line(line)

        # Accumulate relevant lines so write() can dump them as text later.
        if relevant:
            self.raw += line + "\n"

        if warning:
            self.log(
                logging.INFO,
                "compiler_warning",
                warning,
                "Warning: {flag} in (unknown): {message}",
            )

        if relevant:
            self.log(logging.INFO, "build_output", {"line": line}, "{line}")
        else:
            # NOTE(review): this guards on `self.handler`, while OutputManager
            # assigns `self._handler` -- confirm which attribute is intended.
            handler = getattr(self, "handler", None)
            if handler:
                handler.acquire()
            try:
                self.refresh()
            finally:
                if handler:
                    handler.release()

    def write(self, path, output_format):
        """Persist collected output to *path* as ``text`` or ``json``."""
        assert output_format in ("text", "json"), "Invalid output format {}".format(
            output_format
        )
        path = os.path.realpath(path)

        if output_format == "json":
            self.monitor._warnings_database.save_to_file(path)
        else:
            with io.open(path, "w", encoding="utf-8", newline="\n") as f:
                f.write(self.raw)

        self.log(
            logging.INFO,
            "write_output",
            {"path": path, "format": output_format},
            "Wrote {format} output in {path}",
        )
+
+
class CCacheStats(object):
    """Holds statistics from ccache.

    Instances can be subtracted from each other to obtain differences.
    print() or str() the object to show a ``ccache -s`` like output
    of the captured stats.

    """

    # Human-readable (ccache -s) output: map internal key -> the line prefix
    # (or tuple of historical prefixes) that identifies it.
    STATS_KEYS = [
        # (key, description)
        # Refer to stats.c in ccache project for all the descriptions.
        ("stats_zeroed", ("stats zeroed", "stats zero time")),
        ("stats_updated", "stats updated"),
        ("cache_hit_direct", "cache hit (direct)"),
        ("cache_hit_preprocessed", "cache hit (preprocessed)"),
        ("cache_hit_rate", "cache hit rate"),
        ("cache_miss", "cache miss"),
        ("link", "called for link"),
        ("preprocessing", "called for preprocessing"),
        ("multiple", "multiple source files"),
        ("stdout", "compiler produced stdout"),
        ("no_output", "compiler produced no output"),
        ("empty_output", "compiler produced empty output"),
        ("failed", "compile failed"),
        ("error", "ccache internal error"),
        ("preprocessor_error", "preprocessor error"),
        ("cant_use_pch", "can't use precompiled header"),
        ("compiler_missing", "couldn't find the compiler"),
        ("cache_file_missing", "cache file missing"),
        ("bad_args", "bad compiler arguments"),
        ("unsupported_lang", "unsupported source language"),
        ("compiler_check_failed", "compiler check failed"),
        ("autoconf", "autoconf compile/link"),
        ("unsupported_code_directive", "unsupported code directive"),
        ("unsupported_compiler_option", "unsupported compiler option"),
        ("out_stdout", "output to stdout"),
        ("out_device", "output to a non-regular file"),
        ("no_input", "no input file"),
        ("bad_extra_file", "error hashing extra file"),
        ("num_cleanups", "cleanups performed"),
        ("cache_files", "files in cache"),
        ("cache_size", "cache size"),
        ("cache_max_size", "max cache size"),
    ]

    # Informational lines in human-readable output that carry no statistic.
    SKIP_LINES = (
        "cache directory",
        "primary config",
        "secondary config",
    )

    # Machine-readable (--print-stats, ccache 3.7+) output: map the emitted
    # key to the internal key used above.
    STATS_KEYS_3_7_PLUS = {
        "stats_zeroed_timestamp": "stats_zeroed",
        "stats_updated_timestamp": "stats_updated",
        "direct_cache_hit": "cache_hit_direct",
        "preprocessed_cache_hit": "cache_hit_preprocessed",
        # "cache_hit_rate" is not provided
        "cache_miss": "cache_miss",
        "called_for_link": "link",
        "called_for_preprocessing": "preprocessing",
        "multiple_source_files": "multiple",
        "compiler_produced_stdout": "stdout",
        "compiler_produced_no_output": "no_output",
        "compiler_produced_empty_output": "empty_output",
        "compile_failed": "failed",
        "internal_error": "error",
        "preprocessor_error": "preprocessor_error",
        "could_not_use_precompiled_header": "cant_use_pch",
        "could_not_find_compiler": "compiler_missing",
        "missing_cache_file": "cache_file_missing",
        "bad_compiler_arguments": "bad_args",
        "unsupported_source_language": "unsupported_lang",
        "compiler_check_failed": "compiler_check_failed",
        "autoconf_test": "autoconf",
        "unsupported_code_directive": "unsupported_code_directive",
        "unsupported_compiler_option": "unsupported_compiler_option",
        "output_to_stdout": "out_stdout",
        "output_to_a_non_file": "out_device",
        "no_input_file": "no_input",
        "error_hashing_extra_file": "bad_extra_file",
        "cleanups_performed": "num_cleanups",
        "files_in_cache": "cache_files",
        "cache_size_kibibyte": "cache_size",
        # "cache_max_size" is obsolete and not printed anymore
    }

    # Keys that are point-in-time values rather than counters; subtraction
    # keeps the left operand's value instead of differencing.
    ABSOLUTE_KEYS = {"cache_files", "cache_size", "cache_max_size"}
    # Keys rendered as human-readable sizes by __str__.
    FORMAT_KEYS = {"cache_size", "cache_max_size"}

    GiB = 1024 ** 3
    MiB = 1024 ** 2
    KiB = 1024

    def __init__(self, output=None, has_machine_format=False):
        """Construct an instance from the output of ccache -s."""
        self._values = {}

        if not output:
            return

        if has_machine_format:
            self._parse_machine_format(output)
        else:
            self._parse_human_format(output)

    def _parse_machine_format(self, output):
        """Parse ``--print-stats`` output (tab-separated key/value lines)."""
        for line in output.splitlines():
            line = line.strip()
            key, _, value = line.partition("\t")
            stat_key = self.STATS_KEYS_3_7_PLUS.get(key)
            if stat_key:
                value = int(value)
                if key.endswith("_kibibyte"):
                    # Normalize KiB-reported sizes to bytes.
                    value *= 1024
                self._values[stat_key] = value

        # The machine format doesn't include the hit rate; derive it.
        (direct, preprocessed, miss) = self.hit_rates()
        self._values["cache_hit_rate"] = (direct + preprocessed) * 100

    def _parse_human_format(self, output):
        """Parse ``ccache -s`` output line by line."""
        for line in output.splitlines():
            line = line.strip()
            if line:
                self._parse_line(line)

    def _parse_line(self, line):
        """Match one human-format line against STATS_KEYS and record it.

        Raises ValueError for unrecognized lines not listed in SKIP_LINES.
        """
        line = six.ensure_text(line)
        for stat_key, stat_description in self.STATS_KEYS:
            if line.startswith(stat_description):
                raw_value = self._strip_prefix(line, stat_description)
                self._values[stat_key] = self._parse_value(raw_value)
                break
        else:
            if not line.startswith(self.SKIP_LINES):
                raise ValueError("Failed to parse ccache stats output: %s" % line)

    @staticmethod
    def _strip_prefix(line, prefix):
        """Strip *prefix* (a string or tuple of alternatives) from *line*."""
        if isinstance(prefix, tuple):
            for p in prefix:
                line = CCacheStats._strip_prefix(line, p)
            return line
        return line[len(prefix) :].strip() if line.startswith(prefix) else line

    @staticmethod
    def _parse_value(raw_value):
        """Convert a raw stat value to an int.

        Accepts a strftime('%c') timestamp (returned as epoch seconds),
        the literal "never" (0), a bare number, or a number with a size
        unit (GB/MB/KB), which is normalized to bytes.
        """
        try:
            # ccache calls strftime with '%c' (src/stats.c)
            ts = time.strptime(raw_value, "%c")
            return int(time.mktime(ts))
        except ValueError:
            if raw_value == "never":
                return 0
            pass

        value = raw_value.split()
        unit = ""
        if len(value) == 1:
            numeric = value[0]
        elif len(value) == 2:
            numeric, unit = value
        else:
            raise ValueError("Failed to parse ccache stats value: %s" % raw_value)

        if "." in numeric:
            numeric = float(numeric)
        else:
            numeric = int(numeric)

        if unit in ("GB", "Gbytes"):
            unit = CCacheStats.GiB
        elif unit in ("MB", "Mbytes"):
            unit = CCacheStats.MiB
        elif unit in ("KB", "Kbytes"):
            unit = CCacheStats.KiB
        else:
            unit = 1

        return int(numeric * unit)

    def hit_rate_message(self):
        """Return a one-line human-readable summary of the hit rates."""
        return (
            "ccache (direct) hit rate: {:.1%}; (preprocessed) hit rate: {:.1%};"
            " miss rate: {:.1%}".format(*self.hit_rates())
        )

    def hit_rates(self):
        """Return (direct, preprocessed, miss) as fractions of all requests.

        If there were no requests at all, the raw (zero) counts are returned.
        """
        direct = self._values["cache_hit_direct"]
        preprocessed = self._values["cache_hit_preprocessed"]
        miss = self._values["cache_miss"]
        total = float(direct + preprocessed + miss)

        if total > 0:
            direct /= total
            preprocessed /= total
            miss /= total

        return (direct, preprocessed, miss)

    def __sub__(self, other):
        """Return the stat deltas self - other (ABSOLUTE_KEYS keep self's value)."""
        result = CCacheStats()

        for k, prefix in self.STATS_KEYS:
            if k not in self._values and k not in other._values:
                continue

            our_value = self._values.get(k, 0)
            other_value = other._values.get(k, 0)

            if k in self.ABSOLUTE_KEYS:
                result._values[k] = our_value
            else:
                result._values[k] = our_value - other_value

        return result

    def __str__(self):
        """Render the stats in a ``ccache -s``-like aligned table."""
        LEFT_ALIGN = 34
        lines = []

        for stat_key, stat_description in self.STATS_KEYS:
            if stat_key not in self._values:
                continue

            value = self._values[stat_key]

            if stat_key in self.FORMAT_KEYS:
                value = "%15s" % self._format_value(value)
            else:
                value = "%8u" % value

            if isinstance(stat_description, tuple):
                stat_description = stat_description[0]

            lines.append("%s%s" % (stat_description.ljust(LEFT_ALIGN), value))

        return "\n".join(lines)

    def __nonzero__(self):
        # True when the relative (counter) stats are all non-negative and at
        # least one is positive, i.e. the delta represents real activity.
        relative_values = [
            v for k, v in self._values.items() if k not in self.ABSOLUTE_KEYS
        ]
        return all(v >= 0 for v in relative_values) and any(
            v > 0 for v in relative_values
        )

    def __bool__(self):
        # Python 3 truthiness delegates to the Python 2-era __nonzero__.
        return self.__nonzero__()

    @staticmethod
    def _format_value(v):
        """Format a byte count using ccache's Gbytes/Mbytes/Kbytes style."""
        if v > CCacheStats.GiB:
            return "%.1f Gbytes" % (float(v) / CCacheStats.GiB)
        elif v > CCacheStats.MiB:
            return "%.1f Mbytes" % (float(v) / CCacheStats.MiB)
        else:
            return "%.1f Kbytes" % (float(v) / CCacheStats.KiB)

    @staticmethod
    def check_version_3_7_or_newer(ccache):
        """Run ``ccache --version`` and report whether it is 3.7 or newer."""
        output_version = subprocess.check_output(
            [ccache, "--version"], universal_newlines=True
        )
        return CCacheStats._is_version_3_7_or_newer(output_version)

    @staticmethod
    def _is_version_3_7_or_newer(output):
        """Parse ``ccache --version`` output; True for version >= 3.7."""
        if "ccache version" not in output:
            return False

        major = 0
        minor = 0

        for line in output.splitlines():
            # NOTE(review): the "." between the groups is an unescaped regex
            # dot, so it matches any character; harmless for version strings.
            version = re.search(r"ccache version (\d+).(\d+).*", line)
            if version:
                major = int(version.group(1))
                minor = int(version.group(2))
                break

        return ((major << 8) + minor) >= ((3 << 8) + 7)
+
+
+class BuildDriver(MozbuildObject):
+ """Provides a high-level API for build actions."""
+
+ def __init__(self, *args, **kwargs):
+ MozbuildObject.__init__(self, *args, virtualenv_name="build", **kwargs)
+ self.metrics = None
+ self.mach_context = None
+
+ def build(
+ self,
+ metrics,
+ what=None,
+ jobs=0,
+ job_size=0,
+ directory=None,
+ verbose=False,
+ keep_going=False,
+ mach_context=None,
+ append_env=None,
+ virtualenv_topobjdir=None,
+ ):
+ """Invoke the build backend.
+
+ ``what`` defines the thing to build. If not defined, the default
+ target is used.
+ """
+ self.metrics = metrics
+ self.mach_context = mach_context
+ warnings_path = self._get_state_filename("warnings.json")
+ monitor = self._spawn(BuildMonitor)
+ monitor.init(warnings_path)
+ footer = BuildProgressFooter(self.log_manager.terminal, monitor)
+
+ # Disable indexing in objdir because it is not necessary and can slow
+ # down builds.
+ mkdir(self.topobjdir, not_indexed=True)
+
+ with BuildOutputManager(self.log_manager, monitor, footer) as output:
+ monitor.start()
+
+ if directory is not None and not what:
+ print("Can only use -C/--directory with an explicit target " "name.")
+ return 1
+
+ if directory is not None:
+ directory = mozpath.normsep(directory)
+ if directory.startswith("/"):
+ directory = directory[1:]
+
+ monitor.start_resource_recording()
+
+ if self._check_clobber(self.mozconfig, os.environ):
+ return 1
+
+ self.mach_context.command_attrs["clobber"] = False
+ self.metrics.mozbuild.clobber.set(False)
+ config = None
+ try:
+ config = self.config_environment
+ except Exception:
+ # If we don't already have a config environment this is either
+ # a fresh objdir or $OBJDIR/config.status has been removed for
+ # some reason, which indicates a clobber of sorts.
+ self.mach_context.command_attrs["clobber"] = True
+ self.metrics.mozbuild.clobber.set(True)
+
+ # Record whether a clobber was requested so we can print
+ # a special message later if the build fails.
+ clobber_requested = False
+
+ # Write out any changes to the current mozconfig in case
+ # they should invalidate configure.
+ self._write_mozconfig_json()
+
+ previous_backend = None
+ if config is not None:
+ previous_backend = config.substs.get("BUILD_BACKENDS", [None])[0]
+
+ config_rc = None
+ # Even if we have a config object, it may be out of date
+ # if something that influences its result has changed.
+ if config is None or self.build_out_of_date(
+ mozpath.join(self.topobjdir, "config.status"),
+ mozpath.join(self.topobjdir, "config_status_deps.in"),
+ ):
+ if previous_backend and "Make" not in previous_backend:
+ clobber_requested = self._clobber_configure()
+
+ if config is None:
+ print(" Config object not found by mach.")
+
+ config_rc = self.configure(
+ metrics,
+ buildstatus_messages=True,
+ line_handler=output.on_line,
+ append_env=append_env,
+ virtualenv_topobjdir=virtualenv_topobjdir,
+ )
+
+ if config_rc != 0:
+ return config_rc
+
+ config = self.reload_config_environment()
+
+ if config.substs.get("MOZ_USING_CCACHE"):
+ ccache = config.substs.get("CCACHE")
+ ccache_start = monitor.ccache_stats(ccache)
+ else:
+ ccache_start = None
+
+ # Collect glean metrics
+ substs = config.substs
+ mozbuild_metrics = metrics.mozbuild
+ mozbuild_metrics.compiler.set(substs.get("CC_TYPE", None))
+
+ def get_substs_flag(name):
+ return bool(substs.get(name, None))
+
+ mozbuild_metrics.artifact.set(get_substs_flag("MOZ_ARTIFACT_BUILDS"))
+ mozbuild_metrics.debug.set(get_substs_flag("MOZ_DEBUG"))
+ mozbuild_metrics.opt.set(get_substs_flag("MOZ_OPTIMIZE"))
+ mozbuild_metrics.ccache.set(get_substs_flag("CCACHE"))
+ using_sccache = get_substs_flag("MOZ_USING_SCCACHE")
+ mozbuild_metrics.sccache.set(using_sccache)
+ mozbuild_metrics.icecream.set(get_substs_flag("CXX_IS_ICECREAM"))
+ mozbuild_metrics.project.set(substs.get("MOZ_BUILD_APP", ""))
+
+ all_backends = config.substs.get("BUILD_BACKENDS", [None])
+ active_backend = all_backends[0]
+
+ status = None
+
+ if not config_rc and any(
+ [
+ self.backend_out_of_date(
+ mozpath.join(self.topobjdir, "backend.%sBackend" % backend)
+ )
+ for backend in all_backends
+ ]
+ ):
+ print("Build configuration changed. Regenerating backend.")
+ args = [
+ config.substs["PYTHON3"],
+ mozpath.join(self.topobjdir, "config.status"),
+ ]
+ self.run_process(args, cwd=self.topobjdir, pass_thru=True)
+
+ if jobs == 0:
+ for param in self.mozconfig.get("make_extra") or []:
+ key, value = param.split("=", 1)
+ if key == "MOZ_PARALLEL_BUILD":
+ jobs = int(value)
+
+ if "Make" not in active_backend:
+ backend_cls = get_backend_class(active_backend)(config)
+ status = backend_cls.build(self, output, jobs, verbose, what)
+
+ if status and clobber_requested:
+ for line in CLOBBER_REQUESTED_MESSAGE.splitlines():
+ self.log(
+ logging.WARNING, "clobber", {"msg": line.rstrip()}, "{msg}"
+ )
+
+ if what and status is None:
+ # Collect target pairs.
+ target_pairs = []
+ for target in what:
+ path_arg = self._wrap_path_argument(target)
+
+ if directory is not None:
+ make_dir = os.path.join(self.topobjdir, directory)
+ make_target = target
+ else:
+ make_dir, make_target = resolve_target_to_make(
+ self.topobjdir, path_arg.relpath()
+ )
+
+ if make_dir is None and make_target is None:
+ return 1
+
+ if config.is_artifact_build and target.startswith("installers-"):
+ # See https://bugzilla.mozilla.org/show_bug.cgi?id=1387485
+ print(
+ "Localized Builds are not supported with Artifact Builds enabled.\n"
+ "You should disable Artifact Builds (Use --disable-compile-environment "
+ "in your mozconfig instead) then re-build to proceed."
+ )
+ return 1
+
+ # See bug 886162 - we don't want to "accidentally" build
+ # the entire tree (if that's really the intent, it's
+ # unlikely they would have specified a directory.)
+ if not make_dir and not make_target:
+ print(
+ "The specified directory doesn't contain a "
+ "Makefile and the first parent with one is the "
+ "root of the tree. Please specify a directory "
+ "with a Makefile or run |mach build| if you "
+ "want to build the entire tree."
+ )
+ return 1
+
+ target_pairs.append((make_dir, make_target))
+
+ # Build target pairs.
+ for make_dir, make_target in target_pairs:
+ # We don't display build status messages during partial
+ # tree builds because they aren't reliable there. This
+ # could potentially be fixed if the build monitor were more
+ # intelligent about encountering undefined state.
+ no_build_status = "1" if make_dir is not None else ""
+ tgt_env = dict(append_env or {})
+ tgt_env["NO_BUILDSTATUS_MESSAGES"] = no_build_status
+ status = self._run_make(
+ directory=make_dir,
+ target=make_target,
+ line_handler=output.on_line,
+ log=False,
+ print_directory=False,
+ ensure_exit_code=False,
+ num_jobs=jobs,
+ job_size=job_size,
+ silent=not verbose,
+ append_env=tgt_env,
+ keep_going=keep_going,
+ )
+
+ if status != 0:
+ break
+
+ elif status is None:
+ # If the backend doesn't specify a build() method, then just
+ # call client.mk directly.
+ status = self._run_client_mk(
+ line_handler=output.on_line,
+ jobs=jobs,
+ job_size=job_size,
+ verbose=verbose,
+ keep_going=keep_going,
+ append_env=append_env,
+ )
+
+ self.log(
+ logging.WARNING,
+ "warning_summary",
+ {"count": len(monitor.warnings_database)},
+ "{count} compiler warnings present.",
+ )
+
+ # Try to run the active build backend's post-build step, if possible.
+ try:
+ active_backend = config.substs.get("BUILD_BACKENDS", [None])[0]
+ if active_backend:
+ backend_cls = get_backend_class(active_backend)(config)
+ new_status = backend_cls.post_build(
+ self, output, jobs, verbose, status
+ )
+ status = new_status
+ except Exception as ex:
+ self.log(
+ logging.DEBUG,
+ "post_build",
+ {"ex": str(ex)},
+ "Unable to run active build backend's post-build step; "
+ + "failing the build due to exception: {ex}.",
+ )
+ if not status:
+ # If the underlying build provided a failing status, pass
+ # it through; otherwise, fail.
+ status = 1
+
+ record_usage = status == 0
+
+ # On automation, only record usage for plain `mach build`
+ if "MOZ_AUTOMATION" in os.environ and what:
+ record_usage = False
+
+ monitor.finish(record_usage=record_usage)
+
+ if status == 0:
+ usage = monitor.get_resource_usage()
+ if usage:
+ self.mach_context.command_attrs["usage"] = usage
+
+ # Print the collected compiler warnings. This is redundant with
+ # inline output from the compiler itself. However, unlike inline
+ # output, this list is sorted and grouped by file, making it
+ # easier to triage output.
+ #
+ # Only do this if we had a successful build. If the build failed,
+ # there are more important things in the log to look for than
+ # whatever code we warned about.
+ if not status:
+ # Suppress warnings for 3rd party projects in local builds
+ # until we suppress them for real.
+ # TODO remove entries/feature once we stop generating warnings
+ # in these directories.
+ pathToThirdparty = os.path.join(
+ self.topsrcdir, "tools", "rewriting", "ThirdPartyPaths.txt"
+ )
+
+ pathToGenerated = os.path.join(
+ self.topsrcdir, "tools", "rewriting", "Generated.txt"
+ )
+
+ if os.path.exists(pathToThirdparty):
+ with io.open(
+ pathToThirdparty, encoding="utf-8", newline="\n"
+ ) as f, io.open(pathToGenerated, encoding="utf-8", newline="\n") as g:
+ # Normalize the path (no trailing /)
+ LOCAL_SUPPRESS_DIRS = tuple(
+ [line.strip("\n/") for line in f]
+ + [line.strip("\n/") for line in g]
+ )
+ else:
+ # For application based on gecko like thunderbird
+ LOCAL_SUPPRESS_DIRS = ()
+
+ suppressed_by_dir = Counter()
+
+ THIRD_PARTY_CODE = "third-party code"
+ suppressed = set(
+ w.replace("-Wno-error=", "-W")
+ for w in substs.get("WARNINGS_CFLAGS", [])
+ + substs.get("WARNINGS_CXXFLAGS", [])
+ if w.startswith("-Wno-error=")
+ )
+ warnings = []
+ for warning in sorted(monitor.instance_warnings):
+ path = mozpath.normsep(warning["filename"])
+ if path.startswith(self.topsrcdir):
+ path = path[len(self.topsrcdir) + 1 :]
+
+ warning["normpath"] = path
+
+ if "MOZ_AUTOMATION" not in os.environ:
+ if path.startswith(LOCAL_SUPPRESS_DIRS):
+ suppressed_by_dir[THIRD_PARTY_CODE] += 1
+ continue
+
+ if warning["flag"] in suppressed:
+ suppressed_by_dir[os.path.dirname(path)] += 1
+ continue
+
+ warnings.append(warning)
+
+ if THIRD_PARTY_CODE in suppressed_by_dir:
+ suppressed_third_party_code = [
+ (THIRD_PARTY_CODE, suppressed_by_dir.pop(THIRD_PARTY_CODE))
+ ]
+ else:
+ suppressed_third_party_code = []
+ for d, count in suppressed_third_party_code + sorted(
+ suppressed_by_dir.items()
+ ):
+ self.log(
+ logging.WARNING,
+ "suppressed_warning",
+ {"dir": d, "count": count},
+ "(suppressed {count} warnings in {dir})",
+ )
+
+ for warning in warnings:
+ if warning["column"] is not None:
+ self.log(
+ logging.WARNING,
+ "compiler_warning",
+ warning,
+ "warning: {normpath}:{line}:{column} [{flag}] " "{message}",
+ )
+ else:
+ self.log(
+ logging.WARNING,
+ "compiler_warning",
+ warning,
+ "warning: {normpath}:{line} [{flag}] {message}",
+ )
+
+ high_finder, finder_percent = monitor.have_high_finder_usage()
+ if high_finder:
+ print(FINDER_SLOW_MESSAGE % finder_percent)
+
+ if config.substs.get("MOZ_USING_CCACHE"):
+ ccache_end = monitor.ccache_stats(ccache)
+ else:
+ ccache_end = None
+
+ ccache_diff = None
+ if ccache_start and ccache_end:
+ ccache_diff = ccache_end - ccache_start
+ if ccache_diff:
+ self.log(
+ logging.INFO,
+ "ccache",
+ {"msg": ccache_diff.hit_rate_message()},
+ "{msg}",
+ )
+
+ notify_minimum_time = 300
+ try:
+ notify_minimum_time = int(os.environ.get("MACH_NOTIFY_MINTIME", "300"))
+ except ValueError:
+ # Just stick with the default
+ pass
+
+ if monitor.elapsed > notify_minimum_time:
+ # Display a notification when the build completes.
+ self.notify("Build complete" if not status else "Build failed")
+
+ if status:
+ if what and any(
+ [target for target in what if target not in ("faster", "binaries")]
+ ):
+ print(
+ "Hey! Builds initiated with `mach build "
+ "$A_SPECIFIC_TARGET` may not always work, even if the "
+ "code being built is correct. Consider doing a bare "
+ "`mach build` instead."
+ )
+ return status
+
+ if monitor.have_resource_usage:
+ excessive, swap_in, swap_out = monitor.have_excessive_swapping()
+ # if excessive:
+ # print(EXCESSIVE_SWAP_MESSAGE)
+
+ print("To view resource usage of the build, run |mach " "resource-usage|.")
+
+ long_build = monitor.elapsed > 1200
+
+ if long_build:
+ output.on_line(
+ "We know it took a while, but your build finally finished successfully!"
+ )
+ if not using_sccache:
+ output.on_line(
+ "If you are building Firefox often, SCCache can save you a lot "
+ "of time. You can learn more here: "
+ "https://firefox-source-docs.mozilla.org/setup/"
+ "configuring_build_options.html#sccache"
+ )
+ else:
+ output.on_line("Your build was successful!")
+
+ # Only for full builds because incremental builders likely don't
+ # need to be burdened with this.
+ if not what:
+ try:
+ # Fennec doesn't have useful output from just building. We should
+ # arguably make the build action useful for Fennec. Another day...
+ if self.substs["MOZ_BUILD_APP"] != "mobile/android":
+ print("To take your build for a test drive, run: |mach run|")
+ app = self.substs["MOZ_BUILD_APP"]
+ if app in ("browser", "mobile/android"):
+ print(
+ "For more information on what to do now, see "
+ "https://firefox-source-docs.mozilla.org/setup/contributing_code.html" # noqa
+ )
+ except Exception:
+ # Ignore Exceptions in case we can't find config.status (such
+ # as when doing OSX Universal builds)
+ pass
+
+ return status
+
    def configure(
        self,
        metrics,
        options=None,
        buildstatus_messages=False,
        line_handler=None,
        append_env=None,
        virtualenv_topobjdir=None,
    ):
        """Run configure.py against this objdir and return its exit status.

        metrics: telemetry metrics object; stored on ``self`` for later use.
        options: extra command-line arguments forwarded to configure.py.
        buildstatus_messages: when True, emit ``BUILDSTATUS`` lines so a
            build monitor can track the configure tier.
        line_handler: callable invoked with each line of configure output;
            defaults to logging every line at INFO level.
        append_env: extra environment variables for the configure process.
        virtualenv_topobjdir: alternate objdir in which to place the build
            virtualenv (defaults to ``self.topobjdir``).
        """
        # Disable indexing in objdir because it is not necessary and can slow
        # down builds.
        self.metrics = metrics
        mkdir(self.topobjdir, not_indexed=True)
        self._write_mozconfig_json()

        def on_line(line):
            self.log(logging.INFO, "build_output", {"line": line}, "{line}")

        line_handler = line_handler or on_line

        append_env = dict(append_env or {})

        # Back when client.mk was used, `mk_add_options "export ..."` lines
        # from the mozconfig would spill into the configure environment, so
        # add that for backwards compatibility.
        for line in self.mozconfig["make_extra"] or []:
            if line.startswith("export "):
                k, eq, v = line[len("export ") :].partition("=")
                if eq == "=":
                    append_env[k] = v

        virtualenv_topobjdir = virtualenv_topobjdir or self.topobjdir
        build_site = CommandSiteManager.from_environment(
            self.topsrcdir,
            lambda: get_state_dir(specific_to_topsrcdir=True, topsrcdir=self.topsrcdir),
            "build",
            os.path.join(virtualenv_topobjdir, "_virtualenvs"),
        )
        build_site.ensure()

        # configure.py is run with the build virtualenv's Python, not ours.
        command = [build_site.python_path, os.path.join(self.topsrcdir, "configure.py")]
        if options:
            command.extend(options)

        if buildstatus_messages:
            line_handler("BUILDSTATUS TIERS configure")
            line_handler("BUILDSTATUS TIER_START configure")

        env = os.environ.copy()
        env.update(append_env)

        # Stream combined stdout/stderr through line_handler as it is
        # produced rather than buffering the whole run.
        with subprocess.Popen(
            command,
            cwd=self.topobjdir,
            env=env,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            universal_newlines=True,
        ) as process:
            for line in process.stdout:
                line_handler(line.rstrip())
            status = process.wait()
        if buildstatus_messages:
            line_handler("BUILDSTATUS TIER_FINISH configure")
        if status:
            print('*** Fix above errors and then restart with "./mach build"')
        else:
            print("Configure complete!")
            print("Be sure to run |mach build| to pick up any changes")

        return status
+
    def install_tests(self):
        """Install test files."""

        # A pending clobber means the objdir (including _tests) may be
        # stale or inconsistent; refuse to install on top of it.
        if self.is_clobber_needed():
            print(
                INSTALL_TESTS_CLOBBER.format(
                    clobber_file=os.path.join(self.topobjdir, "CLOBBER")
                )
            )
            sys.exit(1)

        install_test_files(mozpath.normpath(self.topsrcdir), self.topobjdir, "_tests")
+
    def _clobber_configure(self):
        """Remove configure intermediates when the CLOBBER file was updated.

        Returns True if artifacts were removed, False when no clobber was
        required.
        """
        # This is an optimistic treatment of the CLOBBER file for when we have
        # some trust in the build system: an update to the CLOBBER file is
        # interpreted to mean that configure will fail during an incremental
        # build, which is handled by removing intermediate configure artifacts
        # and subsections of the objdir related to python and testing before
        # proceeding.
        clobberer = Clobberer(self.topsrcdir, self.topobjdir)
        clobber_output = io.StringIO()
        # allow_auto=False: only ask whether a clobber is required.
        res = clobberer.maybe_do_clobber(os.getcwd(), False, clobber_output)
        required, performed, message = res
        # With auto-clobbering disabled, nothing should have been removed.
        assert not performed
        if not required:
            return False

        def remove_objdir_path(path):
            # Remove a single path under the objdir, logging what happened.
            path = mozpath.join(self.topobjdir, path)
            self.log(
                logging.WARNING,
                "clobber",
                {"path": path},
                "CLOBBER file has been updated, removing {path}.",
            )
            mozfile.remove(path)

        # Remove files we think could cause "configure" clobber bugs.
        for f in ("old-configure.vars", "config.cache", "configure.pkl"):
            remove_objdir_path(f)
            remove_objdir_path(mozpath.join("js", "src", f))

        rm_dirs = [
            # Stale paths in our virtualenv may cause build-backend
            # to fail.
            "_virtualenvs",
            # Some tests may accumulate state in the objdir that may
            # become invalid after srcdir changes.
            "_tests",
        ]

        for d in rm_dirs:
            remove_objdir_path(d)

        # Touch the objdir CLOBBER so this partial clobber is not requested
        # again for the same CLOBBER update.
        os.utime(mozpath.join(self.topobjdir, "CLOBBER"), None)
        return True
+
+ def _write_mozconfig_json(self):
+ mozconfig_json = os.path.join(self.topobjdir, ".mozconfig.json")
+ with FileAvoidWrite(mozconfig_json) as fh:
+ to_write = six.ensure_text(
+ json.dumps(
+ {
+ "topsrcdir": self.topsrcdir,
+ "topobjdir": self.topobjdir,
+ "mozconfig": self.mozconfig,
+ },
+ sort_keys=True,
+ indent=2,
+ )
+ )
+ # json.dumps in python2 inserts some trailing whitespace while
+ # json.dumps in python3 does not, which defeats the FileAvoidWrite
+ # mechanism. Strip the trailing whitespace to avoid rewriting this
+ # file unnecessarily.
+ to_write = "\n".join([line.rstrip() for line in to_write.splitlines()])
+ fh.write(to_write)
+
    def _run_client_mk(
        self,
        target=None,
        line_handler=None,
        jobs=0,
        job_size=0,
        verbose=None,
        keep_going=False,
        append_env=None,
    ):
        """Run client.mk from the source directory and return make's status.

        Before invoking make, materializes the mozconfig into the objdir:
        ``.mozconfig-client-mk`` (all make lines), ``.mozconfig.mk``
        (exported variables and UPLOAD_EXTRA_FILES lines only) and a copy
        of the mozconfig itself as ``.mozconfig``.
        """
        append_env = dict(append_env or {})
        append_env["TOPSRCDIR"] = self.topsrcdir

        append_env["CONFIG_GUESS"] = self.resolve_config_guess()

        mozconfig = self.mozconfig

        # Collect every make-level line the mozconfig contributes.
        mozconfig_make_lines = []
        for arg in mozconfig["make_extra"] or []:
            mozconfig_make_lines.append(arg)

        if mozconfig["make_flags"]:
            mozconfig_make_lines.append(
                "MOZ_MAKE_FLAGS=%s" % " ".join(mozconfig["make_flags"])
            )
        objdir = mozpath.normsep(self.topobjdir)
        mozconfig_make_lines.append("MOZ_OBJDIR=%s" % objdir)
        mozconfig_make_lines.append("OBJDIR=%s" % objdir)

        if mozconfig["path"]:
            mozconfig_make_lines.append(
                "FOUND_MOZCONFIG=%s" % mozpath.normsep(mozconfig["path"])
            )
            mozconfig_make_lines.append("export FOUND_MOZCONFIG")

        # The .mozconfig.mk file only contains exported variables and lines with
        # UPLOAD_EXTRA_FILES.
        mozconfig_filtered_lines = [
            line
            for line in mozconfig_make_lines
            # Bug 1418122 investigate why UPLOAD_EXTRA_FILES is special and
            # remove it.
            if line.startswith("export ") or "UPLOAD_EXTRA_FILES" in line
        ]

        mozconfig_client_mk = os.path.join(self.topobjdir, ".mozconfig-client-mk")
        with FileAvoidWrite(mozconfig_client_mk) as fh:
            fh.write("\n".join(mozconfig_make_lines))

        mozconfig_mk = os.path.join(self.topobjdir, ".mozconfig.mk")
        with FileAvoidWrite(mozconfig_mk) as fh:
            fh.write("\n".join(mozconfig_filtered_lines))

        # Copy the original mozconfig to the objdir.
        mozconfig_objdir = os.path.join(self.topobjdir, ".mozconfig")
        if mozconfig["path"]:
            with open(mozconfig["path"], "r") as ifh:
                with FileAvoidWrite(mozconfig_objdir) as ofh:
                    ofh.write(ifh.read())
        else:
            # No mozconfig: remove any stale copy; "already absent" is fine.
            try:
                os.unlink(mozconfig_objdir)
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        if mozconfig_make_lines:
            self.log(
                logging.WARNING,
                "mozconfig_content",
                {
                    "path": mozconfig["path"],
                    "content": "\n    ".join(mozconfig_make_lines),
                },
                "Adding make options from {path}\n    {content}",
            )

        append_env["OBJDIR"] = mozpath.normsep(self.topobjdir)

        return self._run_make(
            srcdir=True,
            filename="client.mk",
            ensure_exit_code=False,
            print_directory=False,
            target=target,
            line_handler=line_handler,
            log=False,
            num_jobs=jobs,
            job_size=job_size,
            silent=not verbose,
            keep_going=keep_going,
            append_env=append_env,
        )
+
    def _check_clobber(self, mozconfig, env):
        """Run `Clobberer.maybe_do_clobber`, log the result and return a status bool.

        Wraps the clobbering logic in `Clobberer.maybe_do_clobber` to provide logging
        and handling of the `AUTOCLOBBER` mozconfig option.

        Return a bool indicating whether the clobber reached an error state. For example,
        return `True` if the clobber was required but not completed, and return `False` if
        the clobber was not required and not completed.
        """
        # AUTOCLOBBER can be enabled from the environment, from an exported
        # mozconfig env var, or from a raw mozconfig make_extra line.
        auto_clobber = any(
            [
                env.get("AUTOCLOBBER", False),
                (mozconfig["env"] or {}).get("added", {}).get("AUTOCLOBBER", False),
                "AUTOCLOBBER=1" in (mozconfig["make_extra"] or []),
            ]
        )
        from mozbuild.base import BuildEnvironmentNotFoundException

        substs = dict()
        try:
            substs = self.substs
        except BuildEnvironmentNotFoundException:
            # We'll just use an empty substs if there is no config.
            pass
        clobberer = Clobberer(self.topsrcdir, self.topobjdir, substs)
        clobber_output = six.StringIO()
        res = clobberer.maybe_do_clobber(os.getcwd(), auto_clobber, clobber_output)
        # Replay everything the clobberer printed through our logger.
        clobber_output.seek(0)
        for line in clobber_output.readlines():
            self.log(logging.WARNING, "clobber", {"msg": line.rstrip()}, "{msg}")

        clobber_required, clobber_performed, clobber_message = res
        if clobber_required and not clobber_performed:
            # A clobber is needed but was not done: surface the explanation
            # and report the error state.
            for line in clobber_message.splitlines():
                self.log(logging.WARNING, "clobber", {"msg": line.rstrip()}, "{msg}")
            return True

        if clobber_performed and env.get("TINDERBOX_OUTPUT"):
            self.log(
                logging.WARNING,
                "clobber",
                {"msg": "TinderboxPrint: auto clobber"},
                "{msg}",
            )

        return False
diff --git a/python/mozbuild/mozbuild/controller/clobber.py b/python/mozbuild/mozbuild/controller/clobber.py
new file mode 100644
index 0000000000..3deba54d75
--- /dev/null
+++ b/python/mozbuild/mozbuild/controller/clobber.py
@@ -0,0 +1,249 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+r"""This module contains code for managing clobbering of the tree."""
+
+import errno
+import os
+import subprocess
+import sys
+from textwrap import TextWrapper
+
+from mozfile.mozfile import remove as mozfileremove
+
# Message shown when a clobber is required but was not (or could not be)
# performed automatically. Each template line is re-wrapped to the default
# TextWrapper width; {clobber_reason}, {no_reason} and {clobber_file} are
# filled in by Clobberer._message().
CLOBBER_MESSAGE = "".join(
    [
        TextWrapper().fill(line) + "\n"
        for line in """
The CLOBBER file has been updated, indicating that an incremental build since \
your last build will probably not work. A full/clobber build is required.

The reason for the clobber is:

{clobber_reason}

Clobbering can be performed automatically. However, we didn't automatically \
clobber this time because:

{no_reason}

The easiest and fastest way to clobber is to run:

 $ mach clobber

If you know this clobber doesn't apply to you or you're feeling lucky -- \
Well, are ya? -- you can ignore this clobber requirement by running:

 $ touch {clobber_file}
""".splitlines()
    ]
)
+
+
class Clobberer(object):
    """Manages clobbering (deleting) an object directory.

    A clobber is signaled by the source tree's CLOBBER file being newer
    than the copy of it in the objdir.
    """

    def __init__(self, topsrcdir, topobjdir, substs=None):
        """Create a new object to manage clobbering the tree.

        It is bound to a top source directory and to a specific object
        directory.
        """
        assert os.path.isabs(topsrcdir)
        assert os.path.isabs(topobjdir)

        self.topsrcdir = os.path.normpath(topsrcdir)
        self.topobjdir = os.path.normpath(topobjdir)
        self.src_clobber = os.path.join(topsrcdir, "CLOBBER")
        self.obj_clobber = os.path.join(topobjdir, "CLOBBER")
        if substs:
            self.substs = substs
        else:
            self.substs = dict()

        # Try looking for mozilla/CLOBBER, for comm-central
        if not os.path.isfile(self.src_clobber):
            comm_clobber = os.path.join(topsrcdir, "mozilla", "CLOBBER")
            if os.path.isfile(comm_clobber):
                self.src_clobber = comm_clobber

    def clobber_needed(self):
        """Returns a bool indicating whether a tree clobber is required."""

        # No object directory clobber file means we're good.
        if not os.path.exists(self.obj_clobber):
            return False

        # No source directory clobber means we're running from a source package
        # that doesn't use clobbering.
        if not os.path.exists(self.src_clobber):
            return False

        # Object directory clobber older than current is fine.
        if os.path.getmtime(self.src_clobber) <= os.path.getmtime(self.obj_clobber):
            return False

        return True

    def clobber_cause(self):
        """Obtain the cause why a clobber is required.

        This reads the cause from the CLOBBER file.

        This returns a list of lines describing why the clobber was required.
        Each line is stripped of leading and trailing whitespace.
        """
        with open(self.src_clobber, "rt") as fh:
            lines = [l.strip() for l in fh.readlines()]
            # Blank lines and comments are not part of the cause.
            return [l for l in lines if l and not l.startswith("#")]

    def have_winrm(self):
        """Return True if the winrm fast-delete tool is usable."""
        # `winrm -h` should print 'winrm version ...' and exit 1
        try:
            p = subprocess.Popen(
                ["winrm.exe", "-h"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT
            )
            # The pipe is opened in binary mode, so stdout is bytes.
            # Comparing with a str prefix raised TypeError (swallowed by
            # the handler below) and made this method always return False;
            # compare against bytes instead.
            return p.wait() == 1 and p.stdout.read().startswith(b"winrm")
        except Exception:
            return False

    def collect_subdirs(self, root, exclude):
        """Gathers a list of subdirectories excluding specified items."""
        paths = []
        try:
            for p in os.listdir(root):
                if p not in exclude:
                    paths.append(os.path.join(root, p))
        except OSError as e:
            # A missing root simply yields no subdirectories.
            if e.errno != errno.ENOENT:
                raise

        return paths

    def delete_dirs(self, root, paths_to_delete):
        """Deletes the given subdirectories in an optimal way."""
        procs = []
        for p in sorted(paths_to_delete):
            path = os.path.join(root, p)
            if (
                sys.platform.startswith("win")
                and self.have_winrm()
                and os.path.isdir(path)
            ):
                # Deletions are dispatched to winrm in parallel; the
                # processes are reaped below.
                procs.append(subprocess.Popen(["winrm", "-rf", path]))
            else:
                # We use mozfile because it is faster than shutil.rmtree().
                mozfileremove(path)

        for p in procs:
            p.wait()

    def remove_objdir(self, full=True):
        """Remove the object directory.

        ``full`` controls whether to fully delete the objdir. If False,
        some directories (e.g. Visual Studio Project Files) will not be
        deleted.
        """
        # Determine where cargo build artifacts are stored
        RUST_TARGET_VARS = ("RUST_HOST_TARGET", "RUST_TARGET")
        rust_targets = set(
            [self.substs[x] for x in RUST_TARGET_VARS if x in self.substs]
        )
        rust_build_kind = "release"
        if self.substs.get("MOZ_DEBUG_RUST"):
            rust_build_kind = "debug"

        # Top-level files and directories to not clobber by default.
        no_clobber = {".mozbuild", "msvc", "_virtualenvs"}

        # Hold off on clobbering cargo build artifacts
        no_clobber |= rust_targets

        if full:
            paths = [self.topobjdir]
        else:
            paths = self.collect_subdirs(self.topobjdir, no_clobber)

        self.delete_dirs(self.topobjdir, paths)

        # Now handle cargo's build artifacts and skip removing the incremental
        # compilation cache.
        for target in rust_targets:
            cargo_path = os.path.join(self.topobjdir, target, rust_build_kind)
            paths = self.collect_subdirs(
                cargo_path,
                {
                    "incremental",
                },
            )
            self.delete_dirs(cargo_path, paths)

    def maybe_do_clobber(self, cwd, allow_auto=False, fh=sys.stderr):
        """Perform a clobber if it is required. Maybe.

        This is the API the build system invokes to determine if a clobber
        is needed and to automatically perform that clobber if we can.

        This returns a tuple of (bool, bool, str). The elements are:

        - Whether a clobber was/is required.
        - Whether a clobber was performed.
        - The reason why the clobber failed or could not be performed. This
          will be None if no clobber is required or if we clobbered without
          error.
        """
        assert cwd
        cwd = os.path.normpath(cwd)

        if not self.clobber_needed():
            print("Clobber not needed.", file=fh)
            return False, False, None

        # So a clobber is needed. We only perform a clobber if we are
        # allowed to perform an automatic clobber (off by default) and if the
        # current directory is not under the object directory. The latter is
        # because operating systems, filesystems, and shell can throw fits
        # if the current working directory is deleted from under you. While it
        # can work in some scenarios, we take the conservative approach and
        # never try.
        if not allow_auto:
            return (
                True,
                False,
                self._message(
                    "Automatic clobbering is not enabled\n"
                    ' (add "mk_add_options AUTOCLOBBER=1" to your '
                    "mozconfig)."
                ),
            )

        if cwd.startswith(self.topobjdir) and cwd != self.topobjdir:
            return (
                True,
                False,
                self._message(
                    "Cannot clobber while the shell is inside the object directory."
                ),
            )

        print("Automatically clobbering %s" % self.topobjdir, file=fh)
        try:
            # Partial removal: preserve the no_clobber set (see remove_objdir).
            self.remove_objdir(False)
            print("Successfully completed auto clobber.", file=fh)
            return True, True, None
        except IOError as error:
            return (
                True,
                False,
                self._message("Error when automatically clobbering: " + str(error)),
            )

    def _message(self, reason):
        """Render CLOBBER_MESSAGE with the cause and the supplied reason."""
        lines = [" " + line for line in self.clobber_cause()]

        return CLOBBER_MESSAGE.format(
            clobber_reason="\n".join(lines),
            no_reason=" " + reason,
            clobber_file=self.obj_clobber,
        )
diff --git a/python/mozbuild/mozbuild/doctor.py b/python/mozbuild/mozbuild/doctor.py
new file mode 100644
index 0000000000..649b50200d
--- /dev/null
+++ b/python/mozbuild/mozbuild/doctor.py
@@ -0,0 +1,605 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import enum
+import locale
+import os
+import socket
+import subprocess
+import sys
+from pathlib import Path
+from typing import Callable, List, Optional, Union
+
+import attr
+import mozpack.path as mozpath
+import mozversioncontrol
+import psutil
+import requests
+from packaging.version import Version
+
# Minimum recommended logical processors in system.
PROCESSORS_THRESHOLD = 4

# Minimum recommended total system memory, in gigabytes.
MEMORY_THRESHOLD = 7.4

# Minimum recommended free space on each disk, in gigabytes.
FREESPACE_THRESHOLD = 10

# Latest MozillaBuild version.
LATEST_MOZILLABUILD_VERSION = Version("4.0")

# Prompt text offering to disable the Windows "last access time"
# filesystem feature (see the fsutil link inside the message).
DISABLE_LASTACCESS_WIN = """
Disable the last access time feature?
This improves the speed of file and
directory access by deferring Last Access Time modification on disk by up to an
hour. Backup programs that rely on this feature may be affected.
https://technet.microsoft.com/en-us/library/cc785435.aspx
"""

# File extensions of compiled-language sources and build files
# (C/C++, Rust, make). Not referenced in this part of the module.
COMPILED_LANGUAGE_FILE_EXTENSIONS = [
    ".cc",
    ".cxx",
    ".c",
    ".cpp",
    ".h",
    ".hpp",
    ".rs",
    ".rlib",
    ".mk",
]
+
+
def get_mount_point(path: str) -> str:
    """Walk upward from *path* until a mount point (or "/") is reached."""
    while True:
        if path == "/" or os.path.ismount(path):
            return path
        path = mozpath.abspath(mozpath.join(path, os.pardir))
+
+
class CheckStatus(enum.Enum):
    """Outcome levels for a single doctor check."""

    OK = enum.auto()  # Check is okay.
    WARNING = enum.auto()  # We found an issue.
    FATAL = enum.auto()  # We found an issue that will break build/configure/etc.
    SKIPPED = enum.auto()  # The check was skipped.
+
+
@attr.s
class DoctorCheck:
    """Result record produced by the `@check` functions in this module."""

    # Name of the check.
    name = attr.ib()
    # Lines to display on screen.
    display_text = attr.ib()
    # `CheckStatus` for this given check.
    status = attr.ib()
    # Function to be called to fix the issues, if applicable.
    fix = attr.ib(default=None)
+
+
# Registry of all doctor checks, keyed by function name.
CHECKS = {}


def check(func: Callable):
    """Decorator that registers a function as a doctor check.

    The function should return a `DoctorCheck` or be an iterator of
    checks.

    Returns ``func`` so the decorated module-level name still refers to
    the function (the previous implementation implicitly returned None,
    rebinding every decorated name to None).
    """
    CHECKS[func.__name__] = func
    return func
+
+
@check
def dns(**kwargs) -> DoctorCheck:
    """Check DNS is queryable."""
    try:
        socket.getaddrinfo("mozilla.org", 80)
    except socket.gaierror:
        # Resolution failed; nothing network-related will work.
        return DoctorCheck(
            name="dns",
            status=CheckStatus.FATAL,
            display_text=["Could not query DNS for mozilla.org."],
        )

    return DoctorCheck(
        name="dns",
        status=CheckStatus.OK,
        display_text=["DNS query for mozilla.org completed successfully."],
    )
+
+
@check
def internet(**kwargs) -> DoctorCheck:
    """Check the internet is reachable via HTTPS."""
    try:
        resp = requests.get("https://mozilla.org")
        # Raise for 4xx/5xx responses so HTTP errors are treated the same
        # as connection failures below.
        resp.raise_for_status()

        return DoctorCheck(
            name="internet",
            status=CheckStatus.OK,
            display_text=["Internet is reachable."],
        )

    except Exception:
        # Any requests failure (DNS, TLS, timeout, HTTP error) lands here.
        return DoctorCheck(
            name="internet",
            status=CheckStatus.FATAL,
            display_text=["Could not reach a known website via HTTPS."],
        )
+
+
@check
def ssh(**kwargs) -> DoctorCheck:
    """Check the status of `ssh hg.mozilla.org` for common errors."""
    try:
        # We expect this command to return exit code 1 even when we hit
        # the successful code path, since we don't specify a `pash` command.
        proc = subprocess.run(
            ["ssh", "hg.mozilla.org"],
            encoding="utf-8",
            stderr=subprocess.PIPE,
            stdout=subprocess.PIPE,
        )

        # Command output from a successful `pash` run.
        if "has privileges to access Mercurial over" in proc.stdout:
            return DoctorCheck(
                name="ssh",
                status=CheckStatus.OK,
                display_text=["SSH is properly configured for access to hg."],
            )

        if "Permission denied" in proc.stdout:
            # Parse proc.stdout for username, which looks like:
            # `<username>@hg.mozilla.org: Permission denied (reason)`
            login_string = proc.stdout.split()[0]
            username, _host = login_string.split("@hg.mozilla.org")

            # `<username>` should be an email.
            if "@" not in username:
                return DoctorCheck(
                    name="ssh",
                    status=CheckStatus.FATAL,
                    display_text=[
                        "SSH username `{}` is not an email address.".format(username),
                        "hg.mozilla.org logins should be in the form `user@domain.com`.",
                    ],
                )

            # Username looks valid, so this is a plain authorization failure.
            return DoctorCheck(
                name="ssh",
                status=CheckStatus.WARNING,
                display_text=[
                    "SSH username `{}` does not have permission to push to "
                    "hg.mozilla.org.".format(username)
                ],
            )

        if "Mercurial access is currently disabled on your account" in proc.stdout:
            return DoctorCheck(
                name="ssh",
                status=CheckStatus.FATAL,
                display_text=[
                    "You previously had push access to hgmo, but due to inactivity",
                    "your access was revoked. Please file a bug in Bugzilla under",
                    "`Infrastructure & Operations :: Infrastructure: LDAP` to request",
                    "access.",
                ],
            )

        # None of the known messages matched; surface the raw output.
        return DoctorCheck(
            name="ssh",
            status=CheckStatus.WARNING,
            display_text=[
                "Unexpected output from `ssh hg.mozilla.org`:",
                proc.stdout,
            ],
        )

    except subprocess.CalledProcessError:
        # NOTE(review): subprocess.run() only raises CalledProcessError when
        # check=True is passed, so this handler looks unreachable as written;
        # a missing `ssh` binary would raise FileNotFoundError instead.
        # Confirm intended behavior.
        return DoctorCheck(
            name="ssh",
            status=CheckStatus.WARNING,
            display_text=["Could not run `ssh hg.mozilla.org`."],
        )
+
+
@check
def cpu(**kwargs) -> DoctorCheck:
    """Check the host machine has the recommended processing power to develop Firefox."""
    cpu_count = psutil.cpu_count()
    meets_threshold = cpu_count >= PROCESSORS_THRESHOLD
    # The comparison symbol in the message mirrors the outcome.
    symbol = ">=" if meets_threshold else "<"
    desc = "%d logical processors detected, %s%d" % (
        cpu_count,
        symbol,
        PROCESSORS_THRESHOLD,
    )
    status = CheckStatus.OK if meets_threshold else CheckStatus.WARNING

    return DoctorCheck(name="cpu", display_text=[desc], status=status)
+
+
@check
def memory(**kwargs) -> DoctorCheck:
    """Check the host machine has the recommended memory to develop Firefox."""
    memory = psutil.virtual_memory().total
    # Convert to gigabytes.
    memory_GB = memory / 1024 ** 3.0
    # Threshold is 7.4GB — presumably slightly below 8 so nominal "8GB"
    # machines pass; confirm intent before changing.
    if memory_GB < MEMORY_THRESHOLD:
        status = CheckStatus.WARNING
        desc = "%.1fGB of physical memory, <%.1fGB" % (memory_GB, MEMORY_THRESHOLD)
    else:
        status = CheckStatus.OK
        desc = "%.1fGB of physical memory, >%.1fGB" % (memory_GB, MEMORY_THRESHOLD)

    return DoctorCheck(name="memory", display_text=[desc], status=status)
+
+
@check
def storage_freespace(topsrcdir: str, topobjdir: str, **kwargs) -> List[DoctorCheck]:
    """Check the host machine has the recommended disk space to develop Firefox."""
    topsrcdir_mount = get_mount_point(topsrcdir)
    topobjdir_mount = get_mount_point(topobjdir)

    mounts = [
        ("topsrcdir", topsrcdir, topsrcdir_mount),
        ("topobjdir", topobjdir, topobjdir_mount),
    ]

    # False when srcdir and objdir share a mount point; in that case the
    # first loop iteration below is skipped so the shared mount is only
    # checked and reported once.
    mountpoint_line = topsrcdir_mount != topobjdir_mount
    checks = []

    for purpose, path, mount in mounts:
        if not mountpoint_line:
            mountpoint_line = True
            continue

        desc = ["%s = %s" % (purpose, path)]

        try:
            usage = psutil.disk_usage(mount)
            freespace, size = usage.free, usage.total
            # Convert bytes to gigabytes.
            freespace_GB = freespace / 1024 ** 3
            size_GB = size / 1024 ** 3
            if freespace_GB < FREESPACE_THRESHOLD:
                status = CheckStatus.WARNING
                desc.append(
                    "mountpoint = %s\n%dGB of %dGB free, <%dGB"
                    % (mount, freespace_GB, size_GB, FREESPACE_THRESHOLD)
                )
            else:
                status = CheckStatus.OK
                desc.append(
                    "mountpoint = %s\n%dGB of %dGB free, >=%dGB"
                    % (mount, freespace_GB, size_GB, FREESPACE_THRESHOLD)
                )

        except OSError:
            # psutil.disk_usage raises OSError for an invalid or
            # inaccessible path.
            status = CheckStatus.FATAL
            desc.append("path invalid")

        checks.append(
            DoctorCheck(name="%s mount check" % mount, status=status, display_text=desc)
        )

    return checks
+
+
def fix_lastaccess_win():
    """Run `fsutil` to fix lastaccess behaviour."""
    print("Disabling filesystem lastaccess")
    try:
        subprocess.check_output(
            ["fsutil", "behavior", "set", "disablelastaccess", "1"]
        )
    except subprocess.CalledProcessError:
        print("Could not disable filesystem lastaccess.")
    else:
        print("Filesystem lastaccess disabled.")
+
+
@check
def fs_lastaccess(
    topsrcdir: str, topobjdir: str, **kwargs
) -> Union[DoctorCheck, List[DoctorCheck]]:
    """Check for the `lastaccess` behaviour on the filesystem, which can slow
    down filesystem operations."""
    if sys.platform.startswith("win"):
        # See 'fsutil behavior':
        # https://technet.microsoft.com/en-us/library/cc785435.aspx
        try:
            command = ["fsutil", "behavior", "query", "disablelastaccess"]
            fsutil_output = subprocess.check_output(command, encoding="utf-8")
            # Output looks like `DisableLastAccess = <digit>`; take the
            # character just past "= " and parse it as an int.
            disablelastaccess = int(fsutil_output.partition("=")[2][1])
        except subprocess.CalledProcessError:
            return DoctorCheck(
                name="lastaccess",
                status=CheckStatus.WARNING,
                display_text=["unable to check lastaccess behavior"],
            )

        # NOTE(review): value meanings (1/3 = disabled, 0/2 = enabled) are
        # taken to follow Microsoft's fsutil documentation — confirm.
        if disablelastaccess in {1, 3}:
            return DoctorCheck(
                name="lastaccess",
                status=CheckStatus.OK,
                display_text=["lastaccess disabled systemwide"],
            )
        elif disablelastaccess in {0, 2}:
            return DoctorCheck(
                name="lastaccess",
                status=CheckStatus.WARNING,
                display_text=["lastaccess enabled"],
                fix=fix_lastaccess_win,
            )

        # `disablelastaccess` should be a value between 0-3.
        return DoctorCheck(
            name="lastaccess",
            status=CheckStatus.WARNING,
            display_text=["Could not parse `fsutil` for lastaccess behavior."],
        )

    elif any(
        sys.platform.startswith(prefix) for prefix in ["freebsd", "linux", "openbsd"]
    ):
        topsrcdir_mount = get_mount_point(topsrcdir)
        topobjdir_mount = get_mount_point(topobjdir)
        mounts = [
            ("topsrcdir", topsrcdir, topsrcdir_mount),
            ("topobjdir", topobjdir, topobjdir_mount),
        ]

        common_mountpoint = topsrcdir_mount == topobjdir_mount

        mount_checks = []
        for _purpose, _path, mount in mounts:
            mount_checks.append(check_mount_lastaccess(mount))
            # When both directories share a mount, one check suffices.
            if common_mountpoint:
                break

        return mount_checks

    # Return "SKIPPED" if this test is not relevant.
    return DoctorCheck(
        name="lastaccess",
        display_text=["lastaccess not relevant for this platform."],
        status=CheckStatus.SKIPPED,
    )
+
+
def check_mount_lastaccess(mount: str) -> DoctorCheck:
    """Check `lastaccess` behaviour for a Linux mount."""
    partitions = psutil.disk_partitions(all=True)
    # Mount options that control access-time updates.
    atime_opts = {"atime", "noatime", "relatime", "norelatime"}
    option = ""
    fstype = ""
    for partition in partitions:
        if partition.mountpoint == mount:
            mount_opts = set(partition.opts.split(","))
            intersection = list(atime_opts & mount_opts)
            fstype = partition.fstype
            # Only record the option when exactly one atime-related flag
            # is present; otherwise fall through with option == "".
            if len(intersection) == 1:
                option = intersection[0]
            break

    if fstype == "tmpfs":
        # tmpfs is memory-backed, so atime updates are not a disk cost.
        status = CheckStatus.OK
        desc = "%s is a tmpfs so noatime/reltime is not needed" % (mount)
    elif not option:
        status = CheckStatus.WARNING
        if sys.platform.startswith("linux"):
            option = "noatime/relatime"
        else:
            option = "noatime"
        desc = "%s has no explicit %s mount option" % (mount, option)
    elif option == "atime" or option == "norelatime":
        # These options keep per-access mtime updates enabled.
        status = CheckStatus.WARNING
        desc = "%s has %s mount option" % (mount, option)
    elif option == "noatime" or option == "relatime":
        status = CheckStatus.OK
        desc = "%s has %s mount option" % (mount, option)

    return DoctorCheck(
        name="%s mount lastaccess" % mount, status=status, display_text=[desc]
    )
+
+
@check
def mozillabuild(**kwargs) -> DoctorCheck:
    """Check that MozillaBuild is the latest version.

    On Windows, reads the VERSION file from the MozillaBuild installation
    pointed at by the MOZILLABUILD environment variable and compares it
    against LATEST_MOZILLABUILD_VERSION.
    """
    if not sys.platform.startswith("win"):
        return DoctorCheck(
            name="mozillabuild",
            status=CheckStatus.SKIPPED,
            display_text=["Non-Windows platform, MozillaBuild not relevant"],
        )

    MOZILLABUILD = mozpath.normpath(os.environ.get("MOZILLABUILD", ""))
    if not MOZILLABUILD or not os.path.exists(MOZILLABUILD):
        return DoctorCheck(
            name="mozillabuild",
            status=CheckStatus.WARNING,
            display_text=["Not running under MozillaBuild."],
        )

    try:
        with open(mozpath.join(MOZILLABUILD, "VERSION"), "r") as fh:
            # Strip the trailing newline so an empty file is detected and the
            # warning message below does not contain a mid-sentence line break.
            local_version = fh.readline().strip()

        if not local_version:
            return DoctorCheck(
                name="mozillabuild",
                status=CheckStatus.WARNING,
                display_text=["Could not get local MozillaBuild version."],
            )

        if Version(local_version) < LATEST_MOZILLABUILD_VERSION:
            status = CheckStatus.WARNING
            desc = "MozillaBuild %s in use, <%s" % (
                local_version,
                LATEST_MOZILLABUILD_VERSION,
            )

        else:
            status = CheckStatus.OK
            desc = "MozillaBuild %s in use" % local_version

    except (IOError, ValueError):
        # IOError: VERSION file missing/unreadable.
        # ValueError: version string failed to parse.
        status = CheckStatus.FATAL
        desc = "MozillaBuild version not found"

    return DoctorCheck(name="mozillabuild", status=status, display_text=[desc])
+
+
@check
def bad_locale_utf8(**kwargs) -> DoctorCheck:
    """Check to detect the invalid locale `UTF-8` on pre-3.8 Python."""
    if sys.version_info >= (3, 8):
        return DoctorCheck(
            name="utf8 locale",
            status=CheckStatus.SKIPPED,
            display_text=["Python version has fixed utf-8 locale bug."],
        )

    try:
        # This line will attempt to get and parse the locale.
        locale.getdefaultlocale()
    except ValueError:
        # The locale is set to a value Python cannot parse.
        return DoctorCheck(
            name="utf8 locale",
            status=CheckStatus.FATAL,
            display_text=[
                "Your Python is using an invalid value for its locale.",
                "Either update Python to version 3.8+, or set the following variables in ",
                "your environment:",
                " export LC_ALL=en_US.UTF-8",
                " export LANG=en_US.UTF-8",
            ],
        )

    return DoctorCheck(
        name="utf8 locale",
        status=CheckStatus.OK,
        display_text=["Python's locale is set to a valid value."],
    )
+
+
@check
def artifact_build(
    topsrcdir: str, configure_args: Optional[List[str]], **kwargs
) -> DoctorCheck:
    """Check that if Artifact Builds are enabled, that no
    source files that would not be compiled are changed.

    Args:
        topsrcdir: path to the source checkout.
        configure_args: the configure arguments in effect, or None.

    Returns:
        A DoctorCheck; FATAL when compiled-language files have local
        modifications that an artifact build would silently ignore.
    """

    if configure_args is None or "--enable-artifact-builds" not in configure_args:
        return DoctorCheck(
            name="artifact_build",
            status=CheckStatus.SKIPPED,
            display_text=[
                "Artifact Builds are not enabled. No need to proceed checking for changed files."
            ],
        )

    repo = mozversioncontrol.get_repository_object(topsrcdir)
    changed_files = [
        Path(file)
        for file in set(repo.get_outgoing_files()) | set(repo.get_changed_files())
    ]

    # A changed file matters when it is written in a compiled language, or it
    # is a Makefile that is not a Python file (e.g. not "Makefile.py").  The
    # explicit parentheses preserve the original `A or (B and C)` precedence.
    compiled_language_files_changed = ""
    for file in changed_files:
        if file.suffix in COMPILED_LANGUAGE_FILE_EXTENSIONS or (
            file.stem.lower() == "makefile" and file.suffix != ".py"
        ):
            compiled_language_files_changed += ' - "' + str(file) + '"\n'

    if compiled_language_files_changed:
        return DoctorCheck(
            name="artifact_build",
            status=CheckStatus.FATAL,
            display_text=[
                "Artifact Builds are enabled, but the following files from compiled languages "
                f"have been modified: \n{compiled_language_files_changed}\nThese files will "
                "not be compiled, and your changes will not be realized in the build output."
                "\n\nIf you want these changes to be realized, you should re-run './mach "
                "bootstrap' and select a build that does not state \"Artifact Mode\"."
                "\nFor additional information on Artifact Builds see: "
                "https://firefox-source-docs.mozilla.org/contributing/build/"
                "artifact_builds.html"
            ],
        )

    return DoctorCheck(
        name="artifact_build",
        status=CheckStatus.OK,
        display_text=["No Artifact Build conflicts found."],
    )
+
+
def run_doctor(fix: bool = False, verbose: bool = False, **kwargs) -> int:
    """Run the doctor checks.

    If `fix` is `True`, run fixing functions for issues that can be resolved
    automatically.

    By default, only print output from checks that result in a warning or
    fatal issue. `verbose` will cause all output to be printed to the screen.
    """
    found_problem = False
    pending_fixes = []

    for check_callable in CHECKS.values():
        outcome = check_callable(**kwargs)
        # A check may return a single DoctorCheck or a list of them.
        checks = [outcome] if isinstance(outcome, DoctorCheck) else outcome

        for item in checks:
            if item.status == CheckStatus.SKIPPED and not verbose:
                continue

            is_ok = item.status == CheckStatus.OK
            if not is_ok:
                # Once any check is non-OK we must not print the
                # "No issues detected" line.
                found_problem = True

            if verbose or not is_ok:
                print("\n".join(item.display_text))

            if item.fix:
                pending_fixes.append(item.fix)

    if not found_problem:
        print("No issues detected.")
        return 0

    # If we can fix something but the user didn't ask us to, advise
    # them to run with `--fix`.
    if not fix:
        if pending_fixes:
            print(
                "Some of the issues found can be fixed; run "
                "`./mach doctor --fix` to fix them."
            )
        return 1

    # Attempt to run the fix functions; report failure if any fixer raises.
    result = 0
    for fix_func in pending_fixes:
        try:
            fix_func()
        except Exception:
            result = 1
    return result
diff --git a/python/mozbuild/mozbuild/dotproperties.py b/python/mozbuild/mozbuild/dotproperties.py
new file mode 100644
index 0000000000..9b615cc43f
--- /dev/null
+++ b/python/mozbuild/mozbuild/dotproperties.py
@@ -0,0 +1,86 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This file contains utility functions for reading .properties files
+
+import codecs
+import re
+import sys
+
+import six
+
# Pick the native text type: Python 3 has no ``basestring``, so ``str`` is
# used.  DotProperties.update() uses ``str_type`` to distinguish a file
# *name* from a file-like object.
if sys.version_info[0] == 3:
    str_type = str
else:
    str_type = basestring
+
+
class DotProperties:
    r"""A thin representation of a key=value .properties file."""

    def __init__(self, file=None):
        """Optionally seed the property map from *file* (a path or file object)."""
        self._properties = {}
        if file:
            self.update(file)

    def update(self, file):
        """Updates properties from a file name or file-like object.

        Ignores empty lines and comment lines."""

        if isinstance(file, str):
            # We opened the file, so we are responsible for closing it.
            with codecs.open(file, "r", "utf-8") as f:
                self._parse(f)
        else:
            self._parse(file)

    def _parse(self, f):
        # Each significant line is "key = value" with optional whitespace
        # around the separator; '#' starts a comment line.
        for raw_line in f.readlines():
            line = raw_line.strip()
            if not line or line.startswith("#"):
                continue
            key, value = re.split(r"\s*=\s*", line, maxsplit=1)
            self._properties[key] = value

    def get(self, key, default=None):
        """Return the value for *key*, or *default* when it is absent."""
        return self._properties.get(key, default)

    def get_list(self, prefix):
        """Turns {'list.0':'foo', 'list.1':'bar'} into ['foo', 'bar'].

        Returns [] to indicate an empty or missing list."""

        if not prefix.endswith("."):
            prefix = prefix + "."
        indexes = []
        for k in self._properties:
            if not k.startswith(prefix):
                continue
            key = k[len(prefix) :]
            if "." in key:
                # We have something like list.sublist.0; not part of this list.
                continue
            indexes.append(int(key))
        return [self._properties[prefix + str(index)] for index in sorted(indexes)]

    def get_dict(self, prefix, required_keys=()):
        """Turns {'foo.title':'title', ...} into {'title':'title', ...}.

        If ``|required_keys|`` is present, it must be an iterable of required key
        names. If a required key is not present, ValueError is thrown.

        Returns {} to indicate an empty or missing dict."""

        if not prefix.endswith("."):
            prefix = prefix + "."

        D = {
            k[len(prefix) :]: v
            for k, v in self._properties.items()
            if k.startswith(prefix) and "." not in k[len(prefix) :]
        }

        for required_key in required_keys:
            if required_key not in D:
                raise ValueError("Required key %s not present" % required_key)

        return D
diff --git a/python/mozbuild/mozbuild/faster_daemon.py b/python/mozbuild/mozbuild/faster_daemon.py
new file mode 100644
index 0000000000..13fb07a79c
--- /dev/null
+++ b/python/mozbuild/mozbuild/faster_daemon.py
@@ -0,0 +1,328 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Use pywatchman to watch source directories and perform partial
+``mach build faster`` builds.
+"""
+
+import datetime
+import sys
+import time
+
+import mozpack.path as mozpath
+
+# Watchman integration cribbed entirely from
+# https://github.com/facebook/watchman/blob/19aebfebb0b5b0b5174b3914a879370ffc5dac37/python/bin/watchman-wait
+import pywatchman
+from mozpack.copier import FileCopier
+from mozpack.manifests import InstallManifest
+
+import mozbuild.util
+from mozbuild.backend import get_backend_class
+
+
def print_line(prefix, m, now=None):
    """Print message *m* tagged with *prefix* and an ISO-8601 UTC timestamp."""
    timestamp = now if now else datetime.datetime.utcnow()
    print("[%s %sZ] %s" % (prefix, timestamp.isoformat(), m))
+
+
def print_copy_result(elapsed, destdir, result, verbose=True):
    """Print a one-line summary of a copy *result* into *destdir*.

    Note: *verbose* is accepted for interface compatibility but is not
    consulted here.
    """
    summary = (
        "Elapsed: {elapsed:.2f}s; From {dest}: Kept {existing} existing; "
        "Added/updated {updated}; "
        "Removed {rm_files} files and {rm_dirs} directories."
    ).format(
        elapsed=elapsed,
        dest=destdir,
        existing=result.existing_files_count,
        updated=result.updated_files_count,
        rm_files=result.removed_files_count,
        rm_dirs=result.removed_directories_count,
    )

    print_line("watch", summary)
+
+
class FasterBuildException(Exception):
    """Exception raised by the faster-build daemon, carrying the
    lower-level *cause* alongside a human-readable *message*."""

    def __init__(self, message, cause):
        super().__init__(message)
        self.cause = cause
+
+
class FasterBuildChange(object):
    """Value object describing one batch of watched-file changes."""

    def __init__(self):
        # Changed files that do not map to any known build output.
        self.unrecognized = set()
        # Mapping of changed input path -> set of affected output paths,
        # and the reverse mapping.
        self.input_to_outputs = dict()
        self.output_to_inputs = dict()
+
+
class Daemon(object):
    """Watch the source tree with watchman and perform incremental
    "faster" builds as input files change."""

    def __init__(self, config_environment):
        self.config_environment = config_environment
        self._client = None

    @property
    def defines(self):
        """Return the preprocessor defines used to populate install manifests."""
        defines = dict(self.config_environment.acdefines)
        # These additions work around warts in the build system: see
        # http://searchfox.org/mozilla-central/rev/ad093e98f42338effe2e2513e26c3a311dd96422/config/faster/rules.mk#92-93
        defines.update(
            {
                "AB_CD": "en-US",
            }
        )
        return defines

    @mozbuild.util.memoized_property
    def file_copier(self):
        """A FileCopier populated from the unified faster-build manifest."""
        # TODO: invalidate the file copier when the build system
        # itself changes, i.e., the underlying unified manifest
        # changes.
        file_copier = FileCopier()

        unified_manifest = InstallManifest(
            mozpath.join(
                self.config_environment.topobjdir, "faster", "unified_install_dist_bin"
            )
        )

        unified_manifest.populate_registry(file_copier, defines_override=self.defines)

        return file_copier

    def subscribe_to_topsrcdir(self):
        """Subscribe to watchman changes under the source directory."""
        self.subscribe_to_dir("topsrcdir", self.config_environment.topsrcdir)

    def subscribe_to_dir(self, name, dir_to_watch):
        """Create a watchman subscription named `name` for `dir_to_watch`.

        Only regular files are reported; VCS metadata (.hg/.git) is
        excluded.  The subscription starts at the current clock value so
        only future changes are delivered.
        """
        query = {
            "empty_on_fresh_instance": True,
            "expression": [
                "allof",
                ["type", "f"],
                [
                    "not",
                    [
                        "anyof",
                        ["dirname", ".hg"],
                        ["name", ".hg", "wholename"],
                        ["dirname", ".git"],
                        ["name", ".git", "wholename"],
                    ],
                ],
            ],
            "fields": ["name"],
        }
        watch = self.client.query("watch-project", dir_to_watch)
        if "warning" in watch:
            print("WARNING: ", watch["warning"], file=sys.stderr)

        root = watch["watch"]
        if "relative_path" in watch:
            query["relative_root"] = watch["relative_path"]

        # Get the initial clock value so that we only get updates.
        # Wait 30s to allow for slow Windows IO. See
        # https://facebook.github.io/watchman/docs/cmd/clock.html.
        query["since"] = self.client.query("clock", root, {"sync_timeout": 30000})[
            "clock"
        ]

        return self.client.query("subscribe", root, name, query)

    def changed_files(self):
        """Drain accumulated subscription results and return the set of
        absolute, normalized paths that changed."""
        # In theory we can parse just the result variable here, but
        # the client object will accumulate all subscription results
        # over time, so we ask it to remove and return those values.
        files = set()

        data = self.client.getSubscription("topsrcdir")
        if data:
            for dat in data:
                files |= set(
                    [
                        mozpath.normpath(
                            mozpath.join(self.config_environment.topsrcdir, f)
                        )
                        for f in dat.get("files", [])
                    ]
                )

        return files

    def incremental_copy(self, copier, force=False, verbose=True):
        """Copy the files registered in `copier` into the output
        locations, printing elapsed time and file counts."""
        # Just like the 'repackage' target in browser/app/Makefile.in.
        if "cocoa" == self.config_environment.substs["MOZ_WIDGET_TOOLKIT"]:
            bundledir = mozpath.join(
                self.config_environment.topobjdir,
                "dist",
                self.config_environment.substs["MOZ_MACBUNDLE_NAME"],
                "Contents",
                "Resources",
            )
            start = time.monotonic()
            result = copier.copy(
                bundledir,
                skip_if_older=not force,
                remove_unaccounted=False,
                remove_all_directory_symlinks=False,
                remove_empty_directories=False,
            )
            print_copy_result(
                time.monotonic() - start, bundledir, result, verbose=verbose
            )

        destdir = mozpath.join(self.config_environment.topobjdir, "dist", "bin")
        start = time.monotonic()
        result = copier.copy(
            destdir,
            skip_if_older=not force,
            remove_unaccounted=False,
            remove_all_directory_symlinks=False,
            remove_empty_directories=False,
        )
        print_copy_result(time.monotonic() - start, destdir, result, verbose=verbose)

    def input_changes(self, verbose=True):
        """
        Return an iterator of `FasterBuildChange` instances as inputs
        to the faster build system change.
        """

        # TODO: provide the debug diagnostics we want: this print is
        # not immediately before the watch.
        if verbose:
            print_line("watch", "Connecting to watchman")
        # TODO: figure out why a large timeout is required for the
        # client, and a robust strategy for retrying timed out
        # requests.
        self.client = pywatchman.client(timeout=5.0)

        try:
            if verbose:
                print_line("watch", "Checking watchman capabilities")
            # TODO: restrict these capabilities to the minimal set.
            self.client.capabilityCheck(
                required=[
                    "clock-sync-timeout",
                    "cmd-watch-project",
                    "term-dirname",
                    "wildmatch",
                ]
            )

            if verbose:
                print_line(
                    "watch",
                    "Subscribing to {}".format(self.config_environment.topsrcdir),
                )
            self.subscribe_to_topsrcdir()
            if verbose:
                print_line(
                    "watch", "Watching {}".format(self.config_environment.topsrcdir)
                )

            input_to_outputs = self.file_copier.input_to_outputs_tree()
            for input, outputs in input_to_outputs.items():
                if not outputs:
                    raise Exception(
                        "Refusing to watch input ({}) with no outputs".format(input)
                    )

            while True:
                try:
                    self.client.receive()

                    changed = self.changed_files()
                    if not changed:
                        continue

                    result = FasterBuildChange()

                    for change in changed:
                        if change in input_to_outputs:
                            result.input_to_outputs[change] = set(
                                input_to_outputs[change]
                            )
                        else:
                            result.unrecognized.add(change)

                    for input, outputs in result.input_to_outputs.items():
                        for output in outputs:
                            if output not in result.output_to_inputs:
                                result.output_to_inputs[output] = set()
                            result.output_to_inputs[output].add(input)

                    yield result

                except pywatchman.SocketTimeout:
                    # Let's check to see if we're still functional.
                    self.client.query("version")

        except pywatchman.CommandError as e:
            # Abstract away pywatchman errors.  Note that
            # FasterBuildException takes (message, cause) in that order.
            raise FasterBuildException(
                "Command error using pywatchman to watch {}".format(
                    self.config_environment.topsrcdir
                ),
                e,
            )

        except pywatchman.SocketTimeout as e:
            # Abstract away pywatchman errors.
            raise FasterBuildException(
                "Socket timeout using pywatchman to watch {}".format(
                    self.config_environment.topsrcdir
                ),
                e,
            )

        finally:
            self.client.close()

    def output_changes(self, verbose=True):
        """
        Return an iterator of `FasterBuildChange` instances as outputs
        from the faster build system are updated.
        """
        for change in self.input_changes(verbose=verbose):
            now = datetime.datetime.utcnow()

            for unrecognized in sorted(change.unrecognized):
                print_line("watch", "! {}".format(unrecognized), now=now)

            all_outputs = set()
            for input in sorted(change.input_to_outputs):
                outputs = change.input_to_outputs[input]

                print_line("watch", "< {}".format(input), now=now)
                for output in sorted(outputs):
                    print_line("watch", "> {}".format(output), now=now)
                all_outputs |= outputs

            if all_outputs:
                partial_copier = FileCopier()
                for output in all_outputs:
                    partial_copier.add(output, self.file_copier[output])

                self.incremental_copy(partial_copier, force=True, verbose=verbose)
                yield change

    def watch(self, verbose=True):
        """Loop forever, running the active backend's post-build step after
        each incremental copy."""
        # Initialize before the try block so a falsy BUILD_BACKENDS entry
        # (which raises nothing) cannot leave the name unbound below.
        backend = None
        try:
            active_backend = self.config_environment.substs.get(
                "BUILD_BACKENDS", [None]
            )[0]
            if active_backend:
                # Note: this is a backend *instance*, not a class.
                backend = get_backend_class(active_backend)(self.config_environment)
        except Exception:
            backend = None

        for change in self.output_changes(verbose=verbose):
            # Try to run the active build backend's post-build step, if possible.
            if backend:
                backend.post_build(self.config_environment, None, 1, False, 0)
diff --git a/python/mozbuild/mozbuild/frontend/__init__.py b/python/mozbuild/mozbuild/frontend/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/frontend/__init__.py
diff --git a/python/mozbuild/mozbuild/frontend/context.py b/python/mozbuild/mozbuild/frontend/context.py
new file mode 100644
index 0000000000..1e241c5656
--- /dev/null
+++ b/python/mozbuild/mozbuild/frontend/context.py
@@ -0,0 +1,3144 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+######################################################################
+# DO NOT UPDATE THIS FILE WITHOUT SIGN-OFF FROM A BUILD MODULE PEER. #
+######################################################################
+
+r"""This module contains the data structure (context) holding the configuration
+from a moz.build. The data emitted by the frontend derives from those contexts.
+
+It also defines the set of variables and functions available in moz.build.
+If you are looking for the absolute authority on what moz.build files can
+contain, you've come to the right place.
+"""
+
+import itertools
+import operator
+import os
+from collections import Counter, OrderedDict
+from types import FunctionType
+
+import mozpack.path as mozpath
+import six
+
+from mozbuild.util import (
+ HierarchicalStringList,
+ ImmutableStrictOrderingOnAppendList,
+ KeyedDefaultDict,
+ List,
+ ReadOnlyKeyedDefaultDict,
+ StrictOrderingOnAppendList,
+ StrictOrderingOnAppendListWithAction,
+ StrictOrderingOnAppendListWithFlagsFactory,
+ TypedList,
+ TypedNamedTuple,
+ memoize,
+ memoized_property,
+)
+
+from .. import schedules
+from ..testing import read_manifestparser_manifest, read_reftest_manifest
+
+
class ContextDerivedValue(object):
    """Marker base class for values receiving special treatment in a
    Context: they are constructed with the context as an argument.  See
    the Context documentation.
    """

    __slots__ = ()
+
+
class Context(KeyedDefaultDict):
    """Represents a moz.build configuration context.

    Instances of this class are filled by the execution of sandboxes.
    At the core, a Context is a dict, with a defined set of possible keys we'll
    call variables. Each variable is associated with a type.

    When reading a value for a given key, we first try to read the existing
    value. If a value is not found and it is defined in the allowed variables
    set, we return a new instance of the class for that variable. We don't
    assign default instances until they are accessed because this makes
    debugging the end-result much simpler. Instead of a data structure with
    lots of empty/default values, you have a data structure with only the
    values that were read or touched.

    Instances of variables classes are created by invoking ``class_name()``,
    except when class_name derives from ``ContextDerivedValue`` or
    ``SubContext``, in which case ``class_name(instance_of_the_context)`` or
    ``class_name(self)`` is invoked. A value is added to those calls when
    instances are created during assignment (setitem).

    allowed_variables is a dict of the variables that can be set and read in
    this context instance. Keys in this dict are the strings representing keys
    in this context which are valid. Values are tuples of stored type,
    assigned type, default value, a docstring describing the purpose of the
    variable, and a tier indicator (see comment above the VARIABLES declaration
    in this module).

    config is the ConfigEnvironment for this context.
    """

    def __init__(self, allowed_variables={}, config=None, finder=None):
        self._allowed_variables = allowed_variables
        # main_path is the moz.build file this context describes;
        # current_path tracks the file currently being executed (which may
        # be an included file).
        self.main_path = None
        self.current_path = None
        # There aren't going to be enough paths for the performance of scanning
        # a list to be a problem.
        self._all_paths = []
        self.config = config
        self._sandbox = None
        self._finder = finder
        KeyedDefaultDict.__init__(self, self._factory)

    def push_source(self, path):
        """Adds the given path as source of the data from this context and make
        it the current path for the context."""
        assert os.path.isabs(path)
        if not self.main_path:
            self.main_path = path
        else:
            # Callers shouldn't push after main_path has been popped.
            assert self.current_path
        self.current_path = path
        # The same file can be pushed twice, so don't remove any previous
        # occurrence.
        self._all_paths.append(path)

    def pop_source(self):
        """Get back to the previous current path for the context."""
        assert self.main_path
        assert self.current_path
        last = self._all_paths.pop()
        # Keep the popped path in the list of all paths, but before the main
        # path so that it's not popped again.
        self._all_paths.insert(0, last)
        if last == self.main_path:
            self.current_path = None
        else:
            self.current_path = self._all_paths[-1]
        return last

    def add_source(self, path):
        """Adds the given path as source of the data from this context."""
        assert os.path.isabs(path)
        if not self.main_path:
            self.main_path = self.current_path = path
        # Insert at the beginning of the list so that it's always before the
        # main path.
        if path not in self._all_paths:
            self._all_paths.insert(0, path)

    @property
    def error_is_fatal(self):
        """Returns True if the error function should be fatal."""
        return self.config and getattr(self.config, "error_is_fatal", True)

    @property
    def all_paths(self):
        """Returns all paths ever added to the context."""
        return set(self._all_paths)

    @property
    def source_stack(self):
        """Returns the current stack of pushed sources."""
        if not self.current_path:
            return []
        return self._all_paths[self._all_paths.index(self.main_path) :]

    @memoized_property
    def objdir(self):
        # Object directory corresponding to this context, under topobjdir.
        return mozpath.join(self.config.topobjdir, self.relobjdir).rstrip("/")

    @memoize
    def _srcdir(self, path):
        # Absolute source directory for the given moz.build path.
        return mozpath.join(self.config.topsrcdir, self._relsrcdir(path)).rstrip("/")

    @property
    def srcdir(self):
        # Source directory of the file currently being executed.
        return self._srcdir(self.current_path or self.main_path)

    @memoize
    def _relsrcdir(self, path):
        # Source directory for `path`, relative to topsrcdir.
        return mozpath.relpath(mozpath.dirname(path), self.config.topsrcdir)

    @property
    def relsrcdir(self):
        assert self.main_path
        return self._relsrcdir(self.current_path or self.main_path)

    @memoized_property
    def relobjdir(self):
        # The objdir mirrors the srcdir layout, so this is derived from the
        # main moz.build path relative to topsrcdir.
        assert self.main_path
        return mozpath.relpath(mozpath.dirname(self.main_path), self.config.topsrcdir)

    def _factory(self, key):
        """Function called when requesting a missing key."""
        defaults = self._allowed_variables.get(key)
        if not defaults:
            raise KeyError("global_ns", "get_unknown", key)

        # If the default is specifically a lambda (or, rather, any function
        # --but not a class that can be called), then it is actually a rule to
        # generate the default that should be used.
        # NOTE(review): the comment above mentions callable defaults, but the
        # code below only distinguishes ContextDerivedValue subclasses —
        # confirm that defaults are always classes.
        default = defaults[0]
        if issubclass(default, ContextDerivedValue):
            return default(self)
        else:
            return default()

    def _validate(self, key, value, is_template=False):
        """Validates whether the key is allowed and if the value's type
        matches.
        """
        stored_type, input_type, docs = self._allowed_variables.get(
            key, (None, None, None)
        )

        # Precedence note: this reads as
        # `stored_type is None or ((not is_template) and key in TEMPLATE_VARIABLES)`,
        # i.e. template-only variables are rejected outside templates.
        if stored_type is None or not is_template and key in TEMPLATE_VARIABLES:
            raise KeyError("global_ns", "set_unknown", key, value)

        # If the incoming value is not the type we store, we try to convert
        # it to that type. This relies on proper coercion rules existing. This
        # is the responsibility of whoever defined the symbols: a type should
        # not be in the allowed set if the constructor function for the stored
        # type does not accept an instance of that type.
        if not isinstance(value, (stored_type, input_type)):
            raise ValueError("global_ns", "set_type", key, value, input_type)

        return stored_type

    def __setitem__(self, key, value):
        """Validate `value` for `key` and coerce it to the stored type."""
        stored_type = self._validate(key, value)

        if not isinstance(value, stored_type):
            if issubclass(stored_type, ContextDerivedValue):
                value = stored_type(self, value)
            else:
                value = stored_type(value)

        return KeyedDefaultDict.__setitem__(self, key, value)

    def update(self, iterable={}, **kwargs):
        """Like dict.update(), but using the context's setitem.

        This function is transactional: if setitem fails for one of the values,
        the context is not updated at all."""
        if isinstance(iterable, dict):
            iterable = iterable.items()

        update = {}
        for key, value in itertools.chain(iterable, kwargs.items()):
            stored_type = self._validate(key, value)
            # Don't create an instance of stored_type if coercion is needed,
            # until all values are validated.
            update[key] = (value, stored_type)
        for key, (value, stored_type) in update.items():
            if not isinstance(value, stored_type):
                update[key] = stored_type(value)
            else:
                update[key] = value
        KeyedDefaultDict.update(self, update)
+
+
class TemplateContext(Context):
    """A Context used while executing a moz.build template."""

    def __init__(self, template=None, allowed_variables={}, config=None):
        # Remember which template this context belongs to before setting up
        # the underlying Context machinery.
        self.template = template
        super(TemplateContext, self).__init__(allowed_variables, config)

    def _validate(self, key, value):
        """Validate `key`/`value`, permitting template-only variables."""
        return super(TemplateContext, self)._validate(key, value, True)
+
+
class SubContext(Context, ContextDerivedValue):
    """A Context derived from another Context.

    Sub-contexts are intended to be used as context managers.

    Sub-contexts inherit paths and other relevant state from the parent
    context.
    """

    def __init__(self, parent):
        assert isinstance(parent, Context)

        # Subclasses define VARIABLES describing the keys they allow.
        Context.__init__(self, allowed_variables=self.VARIABLES, config=parent.config)

        # Copy state from parent.
        for p in parent.source_stack:
            self.push_source(p)
        # NOTE(review): _sandbox appears to be a weak reference — it is
        # invoked as ``self._sandbox()`` below; confirm against the sandbox
        # implementation.
        self._sandbox = parent._sandbox

    def __enter__(self):
        # The owning sandbox must still be alive so the subcontext can be
        # registered with it.
        if not self._sandbox or self._sandbox() is None:
            raise Exception("a sandbox is required")

        self._sandbox().push_subcontext(self)

    def __exit__(self, exc_type, exc_value, traceback):
        self._sandbox().pop_subcontext(self)
+
+
class InitializedDefines(ContextDerivedValue, OrderedDict):
    """An ordered defines mapping pre-seeded from MOZ_DEBUG_DEFINES."""

    def __init__(self, context, value=None):
        OrderedDict.__init__(self)
        # Seed with the configured debug defines, in configuration order.
        for define in context.config.substs.get("MOZ_DEBUG_DEFINES", ()):
            self[define] = 1
        if value:
            if not isinstance(value, OrderedDict):
                raise ValueError("Can only initialize with another OrderedDict")
            self.update(value)

    def update(self, *other, **kwargs):
        # Since iteration over non-ordered dicts is non-deterministic, this dict
        # will be populated in an unpredictable order unless the argument to
        # update() is also ordered. (It's important that we maintain this
        # invariant so we can be sure that running `./mach build-backend` twice
        # in a row without updating any files in the workspace generates exactly
        # the same output.)
        if kwargs:
            raise ValueError("Cannot call update() with kwargs")
        if not other:
            raise ValueError("No arguments passed to update()")
        if not isinstance(other[0], OrderedDict):
            raise ValueError("Can only call update() with another OrderedDict")
        return super(InitializedDefines, self).update(*other, **kwargs)
+
+
class BaseCompileFlags(ContextDerivedValue, dict):
    """Base mapping of flag-category names to lists of compiler flags,
    driven by the subclass-provided ``flag_variables`` tuples."""

    def __init__(self, context):
        self._context = context

        klass_name = self.__class__.__name__
        # Sanity-check the (name, default, build variables) tuples supplied
        # by the subclass.
        for name, default, build_vars in self.flag_variables:
            if not isinstance(name, six.text_type):
                raise ValueError("Flag %s for %s is not a string" % (name, klass_name))
            if not isinstance(build_vars, tuple):
                raise ValueError(
                    "Build variables `%s` for %s in %s is not a tuple"
                    % (build_vars, name, klass_name)
                )

        self._known_keys = {name for name, _, _ in self.flag_variables}

        # Providing defaults here doesn't play well with multiple templates
        # modifying COMPILE_FLAGS from the same moz.build, because the merge
        # done after the template runs can't tell which values coming from
        # a template were set and which were provided as defaults.
        template_name = getattr(context, "template", None)
        if template_name in (None, "Gyp"):
            defaults = (
                (name, default if default is None else TypedList(six.text_type)(default))
                for name, default, _ in self.flag_variables
            )
            dict.__init__(self, defaults)
        else:
            dict.__init__(self)
+
+
class HostCompileFlags(BaseCompileFlags):
    """Compile flags for host (build-machine) C/C++ compilation."""

    def __init__(self, context):
        self._context = context
        main_src_dir = mozpath.dirname(context.main_path)

        # Tuples of (flag category, default value, build variables the
        # category contributes to); consumed by BaseCompileFlags.
        self.flag_variables = (
            (
                "HOST_CXXFLAGS",
                context.config.substs.get("HOST_CXXFLAGS"),
                ("HOST_CXXFLAGS", "HOST_CXX_LDFLAGS"),
            ),
            (
                "HOST_CFLAGS",
                context.config.substs.get("HOST_CFLAGS"),
                ("HOST_CFLAGS", "HOST_C_LDFLAGS"),
            ),
            (
                "HOST_OPTIMIZE",
                self._optimize_flags(),
                ("HOST_CFLAGS", "HOST_CXXFLAGS", "HOST_C_LDFLAGS", "HOST_CXX_LDFLAGS"),
            ),
            ("RTL", None, ("HOST_CFLAGS", "HOST_C_LDFLAGS")),
            ("HOST_DEFINES", None, ("HOST_CFLAGS", "HOST_CXXFLAGS")),
            ("MOZBUILD_HOST_CFLAGS", [], ("HOST_CFLAGS", "HOST_C_LDFLAGS")),
            ("MOZBUILD_HOST_CXXFLAGS", [], ("HOST_CXXFLAGS", "HOST_CXX_LDFLAGS")),
            (
                "BASE_INCLUDES",
                ["-I%s" % main_src_dir, "-I%s" % context.objdir],
                ("HOST_CFLAGS", "HOST_CXXFLAGS"),
            ),
            ("LOCAL_INCLUDES", None, ("HOST_CFLAGS", "HOST_CXXFLAGS")),
            (
                "EXTRA_INCLUDES",
                ["-I%s/dist/include" % context.config.topobjdir],
                ("HOST_CFLAGS", "HOST_CXXFLAGS"),
            ),
            (
                "WARNINGS_CFLAGS",
                context.config.substs.get("WARNINGS_HOST_CFLAGS"),
                ("HOST_CFLAGS",),
            ),
            (
                "WARNINGS_CXXFLAGS",
                context.config.substs.get("WARNINGS_HOST_CXXFLAGS"),
                ("HOST_CXXFLAGS",),
            ),
        )
        BaseCompileFlags.__init__(self, context)

    def _optimize_flags(self):
        """Return the optimization flags for host compilation: the
        HOST-specific ones when cross-compiling, otherwise the target
        optimize flags when MOZ_OPTIMIZE is set."""
        optimize_flags = []
        if self._context.config.substs.get("CROSS_COMPILE"):
            optimize_flags += self._context.config.substs.get("HOST_OPTIMIZE_FLAGS")
        elif self._context.config.substs.get("MOZ_OPTIMIZE"):
            optimize_flags += self._context.config.substs.get("MOZ_OPTIMIZE_FLAGS")
        return optimize_flags
+
+
class AsmFlags(BaseCompileFlags):
    """Compile flags for assembly sources (ASFLAGS/SFLAGS)."""

    def __init__(self, context):
        self._context = context
        # Tuples of (flag category, default value, build variables the
        # category contributes to); consumed by BaseCompileFlags.
        self.flag_variables = (
            ("DEFINES", None, ("SFLAGS",)),
            ("LIBRARY_DEFINES", None, ("SFLAGS",)),
            ("OS", context.config.substs.get("ASFLAGS"), ("ASFLAGS", "SFLAGS")),
            ("DEBUG", self._debug_flags(), ("ASFLAGS", "SFLAGS")),
            ("LOCAL_INCLUDES", None, ("SFLAGS",)),
            ("MOZBUILD", None, ("ASFLAGS", "SFLAGS")),
        )
        BaseCompileFlags.__init__(self, context)

    def _debug_flags(self):
        """Return debug-info flags for the assembler, when debug info is
        enabled (MOZ_DEBUG or MOZ_DEBUG_SYMBOLS)."""
        debug_flags = []
        if self._context.config.substs.get(
            "MOZ_DEBUG"
        ) or self._context.config.substs.get("MOZ_DEBUG_SYMBOLS"):
            if self._context.get("USE_NASM"):
                # NASM takes its own debug-format flags: CodeView 8 on
                # Windows, DWARF elsewhere except Darwin.
                if self._context.config.substs.get("OS_ARCH") == "WINNT":
                    debug_flags += ["-F", "cv8"]
                elif self._context.config.substs.get("OS_ARCH") != "Darwin":
                    debug_flags += ["-F", "dwarf"]
            elif (
                self._context.config.substs.get("OS_ARCH") == "WINNT"
                and self._context.config.substs.get("CPU_ARCH") == "aarch64"
            ):
                # armasm64 accepts a paucity of options compared to ml/ml64.
                pass
            else:
                debug_flags += self._context.config.substs.get(
                    "MOZ_DEBUG_FLAGS", ""
                ).split()
        return debug_flags
+
+
class LinkFlags(BaseCompileFlags):
    # Computed linker flags, all feeding the LDFLAGS variable.
    def __init__(self, context):
        self._context = context

        # Triples of (category, value, flag variables the value feeds into).
        # A None value is resolved later by the emitter.
        self.flag_variables = (
            ("OS", self._os_ldflags(), ("LDFLAGS",)),
            (
                "MOZ_HARDENING_LDFLAGS",
                context.config.substs.get("MOZ_HARDENING_LDFLAGS"),
                ("LDFLAGS",),
            ),
            ("DEFFILE", None, ("LDFLAGS",)),
            ("MOZBUILD", None, ("LDFLAGS",)),
            (
                "FIX_LINK_PATHS",
                context.config.substs.get("MOZ_FIX_LINK_PATHS"),
                ("LDFLAGS",),
            ),
            (
                "OPTIMIZE",
                (
                    context.config.substs.get("MOZ_OPTIMIZE_LDFLAGS", [])
                    if context.config.substs.get("MOZ_OPTIMIZE")
                    else []
                ),
                ("LDFLAGS",),
            ),
            (
                # CET shadow-stack compatibility flags, nightly-only.
                "CETCOMPAT",
                (
                    context.config.substs.get("MOZ_CETCOMPAT_LDFLAGS")
                    if context.config.substs.get("NIGHTLY_BUILD")
                    else []
                ),
                ("LDFLAGS",),
            ),
        )
        BaseCompileFlags.__init__(self, context)

    def _os_ldflags(self):
        # Baseline OS linker flags, plus debug-info flags when building
        # with debug symbols.  Copy OS_LDFLAGS so the subst isn't mutated.
        flags = self._context.config.substs.get("OS_LDFLAGS", [])[:]

        if self._context.config.substs.get(
            "MOZ_DEBUG"
        ) or self._context.config.substs.get("MOZ_DEBUG_SYMBOLS"):
            flags += self._context.config.substs.get("MOZ_DEBUG_LDFLAGS", [])

        # TODO: This is pretty convoluted, and isn't really a per-context thing,
        # configure would be a better place to aggregate these.
        if all(
            [
                self._context.config.substs.get("OS_ARCH") == "WINNT",
                not self._context.config.substs.get("GNU_CC"),
                not self._context.config.substs.get("MOZ_DEBUG"),
            ]
        ):

            if self._context.config.substs.get("MOZ_OPTIMIZE"):
                # link.exe: strip unreferenced sections and fold identical
                # ones (non-GNU, non-debug Windows builds only).
                flags.append("-OPT:REF,ICF")

        return flags
+
+
class TargetCompileFlags(BaseCompileFlags):
    """Base class that encapsulates some common logic between CompileFlags and
    WasmCompileFlags.
    """

    def _debug_flags(self):
        """Return MOZ_DEBUG_FLAGS split into a list when debug info is on."""
        substs = self._context.config.substs
        if substs.get("MOZ_DEBUG") or substs.get("MOZ_DEBUG_SYMBOLS"):
            return substs.get("MOZ_DEBUG_FLAGS", "").split()
        return []

    def _warnings_as_errors(self):
        """Return the warnings-as-errors flag as a one-item list, or None
        when the build does not enable it."""
        flag = self._context.config.substs.get("WARNINGS_AS_ERRORS")
        return [flag] if flag else None

    def _optimize_flags(self):
        """Return optimization flags, preferring PGO-specific ones."""
        substs = self._context.config.substs
        if not substs.get("MOZ_OPTIMIZE"):
            return []
        flags = substs.get("MOZ_PGO_OPTIMIZE_FLAGS") if substs.get("MOZ_PGO") else None
        # If MOZ_PGO_OPTIMIZE_FLAGS is empty we fall back to
        # MOZ_OPTIMIZE_FLAGS. Presently this occurs on Windows.
        return flags or substs.get("MOZ_OPTIMIZE_FLAGS")

    def __setitem__(self, key, value):
        """Validate COMPILE_FLAGS assignments coming from moz.build."""
        if key not in self._known_keys:
            raise ValueError(
                "Invalid value. `%s` is not a compile flags category." % key
            )
        if key in self and self[key] is None:
            # A stored None marks a category whose value is computed by the
            # emitter; moz.build files may not override it.
            raise ValueError(
                "`%s` may not be set in COMPILE_FLAGS from moz.build, this "
                "value is resolved from the emitter." % key
            )
        is_string_list = isinstance(value, list) and all(
            isinstance(v, six.string_types) for v in value
        )
        if not is_string_list:
            raise ValueError(
                "A list of strings must be provided as a value for a compile "
                "flags category."
            )
        dict.__setitem__(self, key, value)
+
+
class CompileFlags(TargetCompileFlags):
    # Computed compile flags for target C/C++ code, organized as triples of
    # (category, value, flag variables the value feeds into).  A None value
    # is resolved by the emitter and may not be set from moz.build (see
    # TargetCompileFlags.__setitem__).  Ordering of entries matters: it
    # determines the order flags appear on compiler command lines.
    def __init__(self, context):
        main_src_dir = mozpath.dirname(context.main_path)
        self._context = context

        self.flag_variables = (
            ("STL", context.config.substs.get("STL_FLAGS"), ("CXXFLAGS",)),
            (
                "VISIBILITY",
                context.config.substs.get("VISIBILITY_FLAGS"),
                ("CXXFLAGS", "CFLAGS"),
            ),
            (
                "MOZ_HARDENING_CFLAGS",
                context.config.substs.get("MOZ_HARDENING_CFLAGS"),
                ("CXXFLAGS", "CFLAGS", "CXX_LDFLAGS", "C_LDFLAGS"),
            ),
            ("DEFINES", None, ("CXXFLAGS", "CFLAGS")),
            ("LIBRARY_DEFINES", None, ("CXXFLAGS", "CFLAGS")),
            (
                # The current source directory and its objdir.
                "BASE_INCLUDES",
                ["-I%s" % main_src_dir, "-I%s" % context.objdir],
                ("CXXFLAGS", "CFLAGS"),
            ),
            ("LOCAL_INCLUDES", None, ("CXXFLAGS", "CFLAGS")),
            (
                "EXTRA_INCLUDES",
                ["-I%s/dist/include" % context.config.topobjdir],
                ("CXXFLAGS", "CFLAGS"),
            ),
            (
                # Include flags from bundled/system libraries, flattened
                # into a single list.
                "OS_INCLUDES",
                list(
                    itertools.chain(
                        *(
                            context.config.substs.get(v, [])
                            for v in (
                                "NSPR_CFLAGS",
                                "NSS_CFLAGS",
                                "MOZ_JPEG_CFLAGS",
                                "MOZ_PNG_CFLAGS",
                                "MOZ_ZLIB_CFLAGS",
                                "MOZ_PIXMAN_CFLAGS",
                                "MOZ_ICU_CFLAGS",
                            )
                        )
                    )
                ),
                ("CXXFLAGS", "CFLAGS"),
            ),
            ("RTL", None, ("CXXFLAGS", "CFLAGS")),
            (
                "OS_COMPILE_CFLAGS",
                context.config.substs.get("OS_COMPILE_CFLAGS"),
                ("CFLAGS",),
            ),
            (
                "OS_COMPILE_CXXFLAGS",
                context.config.substs.get("OS_COMPILE_CXXFLAGS"),
                ("CXXFLAGS",),
            ),
            (
                "OS_CPPFLAGS",
                context.config.substs.get("OS_CPPFLAGS"),
                ("CXXFLAGS", "CFLAGS", "CXX_LDFLAGS", "C_LDFLAGS"),
            ),
            (
                "OS_CFLAGS",
                context.config.substs.get("OS_CFLAGS"),
                ("CFLAGS", "C_LDFLAGS"),
            ),
            (
                "OS_CXXFLAGS",
                context.config.substs.get("OS_CXXFLAGS"),
                ("CXXFLAGS", "CXX_LDFLAGS"),
            ),
            (
                # Computed by TargetCompileFlags._debug_flags().
                "DEBUG",
                self._debug_flags(),
                ("CFLAGS", "CXXFLAGS", "CXX_LDFLAGS", "C_LDFLAGS"),
            ),
            (
                "CLANG_PLUGIN",
                context.config.substs.get("CLANG_PLUGIN_FLAGS"),
                ("CFLAGS", "CXXFLAGS", "CXX_LDFLAGS", "C_LDFLAGS"),
            ),
            (
                # Computed by TargetCompileFlags._optimize_flags().
                "OPTIMIZE",
                self._optimize_flags(),
                ("CFLAGS", "CXXFLAGS", "CXX_LDFLAGS", "C_LDFLAGS"),
            ),
            (
                "FRAMEPTR",
                context.config.substs.get("MOZ_FRAMEPTR_FLAGS"),
                ("CFLAGS", "CXXFLAGS", "CXX_LDFLAGS", "C_LDFLAGS"),
            ),
            (
                "WARNINGS_AS_ERRORS",
                self._warnings_as_errors(),
                ("CXXFLAGS", "CFLAGS", "CXX_LDFLAGS", "C_LDFLAGS"),
            ),
            (
                "WARNINGS_CFLAGS",
                context.config.substs.get("WARNINGS_CFLAGS"),
                ("CFLAGS",),
            ),
            (
                "WARNINGS_CXXFLAGS",
                context.config.substs.get("WARNINGS_CXXFLAGS"),
                ("CXXFLAGS",),
            ),
            ("MOZBUILD_CFLAGS", None, ("CFLAGS",)),
            ("MOZBUILD_CXXFLAGS", None, ("CXXFLAGS",)),
            (
                "COVERAGE",
                context.config.substs.get("COVERAGE_CFLAGS"),
                ("CXXFLAGS", "CFLAGS"),
            ),
            (
                "PASS_MANAGER",
                context.config.substs.get("MOZ_PASS_MANAGER_FLAGS"),
                ("CXXFLAGS", "CFLAGS"),
            ),
            (
                # Remaps absolute source paths in debug info / __FILE__ for
                # reproducible builds.
                "FILE_PREFIX_MAP",
                context.config.substs.get("MOZ_FILE_PREFIX_MAP_FLAGS"),
                ("CXXFLAGS", "CFLAGS"),
            ),
            (
                # See bug 414641
                "NO_STRICT_ALIASING",
                ["-fno-strict-aliasing"],
                ("CXXFLAGS", "CFLAGS"),
            ),
            (
                # Disable floating-point contraction by default.
                "FP_CONTRACT",
                (
                    # clang-cl only forwards -ffp-contract to the underlying
                    # clang driver when prefixed with -Xclang.
                    ["-Xclang"]
                    if context.config.substs.get("CC_TYPE") == "clang-cl"
                    else []
                )
                + ["-ffp-contract=off"],
                ("CXXFLAGS", "CFLAGS"),
            ),
        )

        TargetCompileFlags.__init__(self, context)
+
+
class WasmFlags(TargetCompileFlags):
    # Computed compile flags for sources built as WebAssembly (sandboxed)
    # libraries, mirroring the CompileFlags layout but feeding the
    # WASM_CFLAGS/WASM_CXXFLAGS variables.
    def __init__(self, context):
        main_src_dir = mozpath.dirname(context.main_path)
        self._context = context

        # Triples of (category, value, flag variables the value feeds into).
        # A None value is resolved by the emitter.
        self.flag_variables = (
            ("LIBRARY_DEFINES", None, ("WASM_CXXFLAGS", "WASM_CFLAGS")),
            (
                "BASE_INCLUDES",
                ["-I%s" % main_src_dir, "-I%s" % context.objdir],
                ("WASM_CXXFLAGS", "WASM_CFLAGS"),
            ),
            ("LOCAL_INCLUDES", None, ("WASM_CXXFLAGS", "WASM_CFLAGS")),
            (
                "EXTRA_INCLUDES",
                ["-I%s/dist/include" % context.config.topobjdir],
                ("WASM_CXXFLAGS", "WASM_CFLAGS"),
            ),
            (
                # Include flags from bundled/system libraries, flattened
                # into a single list.
                "OS_INCLUDES",
                list(
                    itertools.chain(
                        *(
                            context.config.substs.get(v, [])
                            for v in (
                                "NSPR_CFLAGS",
                                "NSS_CFLAGS",
                                "MOZ_JPEG_CFLAGS",
                                "MOZ_PNG_CFLAGS",
                                "MOZ_ZLIB_CFLAGS",
                                "MOZ_PIXMAN_CFLAGS",
                            )
                        )
                    )
                ),
                ("WASM_CXXFLAGS", "WASM_CFLAGS"),
            ),
            ("DEBUG", self._debug_flags(), ("WASM_CFLAGS", "WASM_CXXFLAGS")),
            (
                "CLANG_PLUGIN",
                context.config.substs.get("CLANG_PLUGIN_FLAGS"),
                ("WASM_CFLAGS", "WASM_CXXFLAGS"),
            ),
            ("OPTIMIZE", self._optimize_flags(), ("WASM_CFLAGS", "WASM_CXXFLAGS")),
            (
                "WARNINGS_AS_ERRORS",
                self._warnings_as_errors(),
                ("WASM_CXXFLAGS", "WASM_CFLAGS"),
            ),
            ("MOZBUILD_CFLAGS", None, ("WASM_CFLAGS",)),
            ("MOZBUILD_CXXFLAGS", None, ("WASM_CXXFLAGS",)),
            ("WASM_CFLAGS", context.config.substs.get("WASM_CFLAGS"), ("WASM_CFLAGS",)),
            (
                "WASM_CXXFLAGS",
                context.config.substs.get("WASM_CXXFLAGS"),
                ("WASM_CXXFLAGS",),
            ),
            ("WASM_DEFINES", None, ("WASM_CFLAGS", "WASM_CXXFLAGS")),
            ("MOZBUILD_WASM_CFLAGS", None, ("WASM_CFLAGS",)),
            ("MOZBUILD_WASM_CXXFLAGS", None, ("WASM_CXXFLAGS",)),
            (
                "NEWPM",
                context.config.substs.get("MOZ_NEW_PASS_MANAGER_FLAGS"),
                ("WASM_CFLAGS", "WASM_CXXFLAGS"),
            ),
            (
                "FILE_PREFIX_MAP",
                context.config.substs.get("MOZ_FILE_PREFIX_MAP_FLAGS"),
                ("WASM_CFLAGS", "WASM_CXXFLAGS"),
            ),
            ("STL", context.config.substs.get("STL_FLAGS"), ("WASM_CXXFLAGS",)),
        )

        TargetCompileFlags.__init__(self, context)

    def _debug_flags(self):
        # Overrides TargetCompileFlags._debug_flags: wasm always uses plain
        # -g rather than the toolchain-specific MOZ_DEBUG_FLAGS.
        substs = self._context.config.substs
        if substs.get("MOZ_DEBUG") or substs.get("MOZ_DEBUG_SYMBOLS"):
            return ["-g"]
        return []

    def _optimize_flags(self):
        # Overrides TargetCompileFlags._optimize_flags.
        if not self._context.config.substs.get("MOZ_OPTIMIZE"):
            return []

        # We don't want `MOZ_{PGO_,}OPTIMIZE_FLAGS here because they may contain
        # optimization flags that aren't suitable for wasm (e.g. -freorder-blocks).
        # Just optimize for size in all cases; we may want to make this
        # configurable.
        return ["-Os"]
+
+
class FinalTargetValue(ContextDerivedValue, six.text_type):
    """String-typed default for the final install target.

    When no explicit value is given, builds 'dist/xpi-stage/<XPI_NAME>' or
    'dist/bin', with '/<DIST_SUBDIR>' appended when DIST_SUBDIR is set.
    """

    def __new__(cls, context, value=""):
        if not value:
            pieces = ["dist/"]
            xpi_name = context["XPI_NAME"]
            pieces.append("xpi-stage/" + xpi_name if xpi_name else "bin")
            dist_subdir = context["DIST_SUBDIR"]
            if dist_subdir:
                pieces.append("/" + dist_subdir)
            value = "".join(pieces)
        return six.text_type.__new__(cls, value)
+
+
def Enum(*values):
    """Return a class constraining its instances to *values*.

    Instantiating the returned class with no argument yields the first
    (default) value; instantiating with one of the allowed values returns
    that value; anything else raises ValueError.
    """
    assert len(values)
    default = values[0]

    class EnumClass(object):
        def __new__(cls, value=None):
            if value is None:
                return default
            if value not in values:
                raise ValueError(
                    "Invalid value. Allowed values are: %s"
                    % ", ".join(repr(v) for v in values)
                )
            return value

    return EnumClass
+
+
class PathMeta(type):
    """Meta class for the Path family of classes.

    It handles calling __new__ with the right arguments in cases where a Path
    is instantiated with another instance of Path instead of having received a
    context.

    It also makes Path(context, value) instantiate one of the
    subclasses depending on the value, allowing callers to do
    standard type checking (isinstance(path, ObjDirPath)) instead
    of checking the value itself (path.startswith('!')).
    """

    def __call__(cls, context, value=None):
        if isinstance(context, Path):
            # Called as Path(other_path): reuse the other path's context.
            assert value is None
            value = context
            context = context.context
        else:
            assert isinstance(context, Context)
            if isinstance(value, Path):
                context = value.context
        if not issubclass(cls, (SourcePath, ObjDirPath, AbsolutePath)):
            # Generic Path: dispatch to a concrete subclass based on the
            # value's prefix character.
            for prefix, subclass in (("!", ObjDirPath), ("%", AbsolutePath)):
                if value.startswith(prefix):
                    cls = subclass
                    break
            else:
                cls = SourcePath
        return super(PathMeta, cls).__call__(context, value)
+
+
class Path(six.with_metaclass(PathMeta, ContextDerivedValue, six.text_type)):
    """Stores and resolves a source path relative to a given context

    This class is used as a backing type for some of the sandbox variables.
    It expresses paths relative to a context. Supported paths are:
      - '/topsrcdir/relative/paths'
      - 'srcdir/relative/paths'
      - '!/topobjdir/relative/paths'
      - '!objdir/relative/paths'
      - '%/filesystem/absolute/paths'
    """

    def __new__(cls, context, value=None):
        # str subclass: the textual value is fixed here; subclasses compute
        # and attach self.full_path in their own __new__.
        self = super(Path, cls).__new__(cls, value)
        self.context = context
        self.srcdir = context.srcdir
        return self

    def join(self, *p):
        """ContextDerived equivalent of `mozpath.join(self, *p)`, returning a
        new Path instance.
        """
        return Path(self.context, mozpath.join(self, *p))

    def __cmp__(self, other):
        # We expect this function to never be called to avoid issues in the
        # switch from Python 2 to 3.
        raise AssertionError()

    def _cmp(self, other, op):
        # Paths anchored in different source directories compare via their
        # resolved full paths; everything else compares as plain text.
        if isinstance(other, Path) and self.srcdir != other.srcdir:
            return op(self.full_path, other.full_path)
        return op(six.text_type(self), other)

    def __eq__(self, other):
        return self._cmp(other, operator.eq)

    def __ne__(self, other):
        return self._cmp(other, operator.ne)

    def __lt__(self, other):
        return self._cmp(other, operator.lt)

    def __gt__(self, other):
        return self._cmp(other, operator.gt)

    def __le__(self, other):
        return self._cmp(other, operator.le)

    def __ge__(self, other):
        return self._cmp(other, operator.ge)

    def __repr__(self):
        return "<%s (%s)%s>" % (self.__class__.__name__, self.srcdir, self)

    def __hash__(self):
        # Hash on the resolved path so equal paths from different contexts
        # collide as expected (consistent with _cmp above).
        return hash(self.full_path)

    @memoized_property
    def target_basename(self):
        # Final component of the resolved path; overridden by
        # RenamedSourcePath below.
        return mozpath.basename(self.full_path)
+
+
class SourcePath(Path):
    """Like Path, but limited to paths in the source directory."""

    def __new__(cls, context, value=None):
        if value.startswith("!"):
            raise ValueError(f'Object directory paths are not allowed\nPath: "{value}"')
        if value.startswith("%"):
            raise ValueError(
                f'Filesystem absolute paths are not allowed\nPath: "{value}"'
            )
        self = super(SourcePath, cls).__new__(cls, context, value)

        if value.startswith("/"):
            # Topsrcdir-relative path.  (The previous `path = None` followed
            # by `if not path or not os.path.exists(path)` was dead code:
            # the condition was always true, so the join always ran.)
            path = mozpath.join(context.config.topsrcdir, value[1:])
        else:
            # Plain relative path: resolve against the context's srcdir.
            path = mozpath.join(self.srcdir, value)
        self.full_path = mozpath.normpath(path)
        return self

    @memoized_property
    def translated(self):
        """Returns the corresponding path in the objdir.

        Ideally, we wouldn't need this function, but the fact that both source
        path under topsrcdir and the external source dir end up mixed in the
        objdir (aka pseudo-rework), this is needed.
        """
        return ObjDirPath(self.context, "!%s" % self).full_path
+
+
class RenamedSourcePath(SourcePath):
    """Like SourcePath, but with a different base name when installed.

    The constructor takes a tuple of (source, target_basename).

    This class is not meant to be exposed to moz.build sandboxes as of now,
    and is not supported by the RecursiveMake backend.
    """

    def __new__(cls, context, value):
        assert isinstance(value, tuple)
        source, target_basename = value
        instance = super(RenamedSourcePath, cls).__new__(cls, context, source)
        instance._target_basename = target_basename
        return instance

    @property
    def target_basename(self):
        # Overrides Path.target_basename with the explicit rename target.
        return self._target_basename
+
+
class ObjDirPath(Path):
    """Like Path, but limited to paths in the object directory."""

    def __new__(cls, context, value=None):
        if not value.startswith("!"):
            raise ValueError("Object directory paths must start with ! prefix")
        self = super(ObjDirPath, cls).__new__(cls, context, value)

        if value.startswith("!/"):
            # '!/...' is relative to the top object directory.
            base, relative = context.config.topobjdir, value[2:]
        else:
            # '!...' is relative to the current directory's objdir.
            base, relative = context.objdir, value[1:]
        self.full_path = mozpath.normpath(mozpath.join(base, relative))
        return self
+
+
class AbsolutePath(Path):
    """Like Path, but allows arbitrary paths outside the source and object directories."""

    def __new__(cls, context, value=None):
        if not value.startswith("%"):
            raise ValueError("Absolute paths must start with % prefix")
        raw = value[1:]
        if not os.path.isabs(raw):
            raise ValueError("Path '%s' is not absolute" % raw)
        self = super(AbsolutePath, cls).__new__(cls, context, value)
        self.full_path = mozpath.normpath(raw)
        return self
+
+
@memoize
def ContextDerivedTypedList(klass, base_class=List):
    """Specialized TypedList for use with ContextDerivedValue types."""
    assert issubclass(klass, ContextDerivedValue)

    class _TypedList(ContextDerivedValue, TypedList(klass, base_class)):
        def __init__(self, context, iterable=None, **kwargs):
            # None sentinel instead of a mutable `iterable=[]` default,
            # which would be shared across all instances.
            if iterable is None:
                iterable = []
            self.context = context
            super(_TypedList, self).__init__(iterable, **kwargs)

        def normalize(self, e):
            # Coerce plain values into the typed class, binding the context.
            if not isinstance(e, klass):
                e = klass(self.context, e)
            return e

    return _TypedList
+
+
@memoize
def ContextDerivedTypedListWithItems(type, base_class=List):
    """Specialized TypedList for use with ContextDerivedValue types."""

    class _TypedListWithItems(ContextDerivedTypedList(type, base_class)):
        def __getitem__(self, name):
            # Normalize the key the same way stored items are normalized,
            # so lookup by raw value finds the typed entry.
            key = self.normalize(name)
            return super(_TypedListWithItems, self).__getitem__(key)

    return _TypedListWithItems
+
+
@memoize
def ContextDerivedTypedRecord(*fields):
    """Factory for objects with certain properties and dynamic
    type checks.

    This API is extremely similar to the TypedNamedTuple API,
    except that properties may be mutated. This supports syntax like:

    .. code-block:: python

        VARIABLE_NAME.property += [
            'item1',
            'item2',
        ]
    """

    class _TypedRecord(ContextDerivedValue):
        __slots__ = tuple(name for name, _ in fields)

        def __init__(self, context):
            # Instantiate every field with its declared type;
            # context-derived types receive the context as argument.
            for fname, ftype in self._fields.items():
                args = (context,) if issubclass(ftype, ContextDerivedValue) else ()
                setattr(self, fname, ftype(*args))

        def __setattr__(self, name, value):
            # Coerce assigned values into the declared field type.
            if name in self._fields and not isinstance(value, self._fields[name]):
                value = self._fields[name](value)
            object.__setattr__(self, name, value)

    _TypedRecord._fields = dict(fields)
    return _TypedRecord
+
+
class Schedules(object):
    """Similar to a ContextDerivedTypedRecord, but with different behavior
    for the properties:

    * VAR.inclusive can only be appended to (+=), and can only contain values
      from mozbuild.schedules.INCLUSIVE_COMPONENTS

    * VAR.exclusive can only be assigned to (no +=), and can only contain
      values from mozbuild.schedules.ALL_COMPONENTS
    """

    __slots__ = ("_exclusive", "_inclusive")

    def __init__(self, inclusive=None, exclusive=None):
        if inclusive is None:
            # Typed list restricted to the known inclusive component names.
            self._inclusive = TypedList(Enum(*schedules.INCLUSIVE_COMPONENTS))()
        else:
            self._inclusive = inclusive
        if exclusive is None:
            # Defaults to all exclusive components; the immutable list type
            # prevents in-place mutation so only the setter below applies.
            self._exclusive = ImmutableStrictOrderingOnAppendList(
                schedules.EXCLUSIVE_COMPONENTS
            )
        else:
            self._exclusive = exclusive

    # inclusive is mutable but cannot be assigned to (+= only)
    @property
    def inclusive(self):
        return self._inclusive

    @inclusive.setter
    def inclusive(self, value):
        # `x.inclusive += [...]` mutates the list in place and re-assigns
        # the same object, so an identity check distinguishes += from a
        # plain (disallowed) assignment.
        if value is not self._inclusive:
            raise AttributeError("Cannot assign to this value - use += instead")
        unexpected = [v for v in value if v not in schedules.INCLUSIVE_COMPONENTS]
        if unexpected:
            raise Exception(
                "unexpected inclusive component(s) " + ", ".join(unexpected)
            )

    # exclusive is immutable but can be set (= only)
    @property
    def exclusive(self):
        return self._exclusive

    @exclusive.setter
    def exclusive(self, value):
        if not isinstance(value, (tuple, list)):
            raise Exception("expected a tuple or list")
        unexpected = [v for v in value if v not in schedules.ALL_COMPONENTS]
        if unexpected:
            raise Exception(
                "unexpected exclusive component(s) " + ", ".join(unexpected)
            )
        self._exclusive = ImmutableStrictOrderingOnAppendList(sorted(value))

    # components provides a synthetic summary of all components
    @property
    def components(self):
        return list(sorted(set(self._inclusive) | set(self._exclusive)))

    # The `Files` context uses | to combine SCHEDULES from multiple levels; at this
    # point the immutability is no longer needed so we use plain lists
    def __or__(self, other):
        inclusive = self._inclusive + other._inclusive
        if other._exclusive == self._exclusive:
            exclusive = self._exclusive
        elif self._exclusive == schedules.EXCLUSIVE_COMPONENTS:
            # A side still at the default defers to the side that was set.
            exclusive = other._exclusive
        elif other._exclusive == schedules.EXCLUSIVE_COMPONENTS:
            exclusive = self._exclusive
        else:
            # in a case where two SCHEDULES.exclusive set different values, take
            # the later one; this acts the way we expect assignment to work.
            exclusive = other._exclusive
        return Schedules(inclusive=inclusive, exclusive=exclusive)
+
+
@memoize
def ContextDerivedTypedHierarchicalStringList(type):
    """Specialized HierarchicalStringList for use with ContextDerivedValue
    types."""

    class _TypedListWithItems(ContextDerivedValue, HierarchicalStringList):
        __slots__ = ("_strings", "_children", "_context")

        def __init__(self, context):
            self._strings = ContextDerivedTypedList(type, StrictOrderingOnAppendList)(
                context
            )
            self._children = {}
            self._context = context

        def _get_exportvariable(self, name):
            # Return the named sub-list, creating it on demand.  A falsy
            # (empty) existing child is deliberately replaced by a fresh
            # one, matching the base class behavior.
            existing = self._children.get(name)
            if existing:
                return existing
            fresh = _TypedListWithItems(self._context)
            self._children[name] = fresh
            return fresh

    return _TypedListWithItems
+
+
def OrderedPathListWithAction(action):
    """Returns a class which behaves as a StrictOrderingOnAppendList, but
    invokes the given callable with each input and a context as it is
    read, storing a tuple including the result and the original item.

    This is used to extend moz.build reading to make more data available in
    filesystem-reading mode.
    """

    class _OrderedListWithAction(
        ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendListWithAction)
    ):
        def __init__(self, context, *args):
            # Wrap the supplied action so each stored entry becomes the
            # pair (original item, action result).
            def _wrapped(item):
                return item, action(context, item)

            super(_OrderedListWithAction, self).__init__(
                context, action=_wrapped, *args
            )

    return _OrderedListWithAction
+
+
# Manifest path lists that run the corresponding reader over each entry as it
# is appended (see OrderedPathListWithAction above).
ManifestparserManifestList = OrderedPathListWithAction(read_manifestparser_manifest)
ReftestManifestList = OrderedPathListWithAction(read_reftest_manifest)

# (product, component) pair identifying a Bugzilla component; used by the
# Files sub-context's BUG_COMPONENT variable.
BugzillaComponent = TypedNamedTuple(
    "BugzillaComponent", [("product", six.text_type), ("component", six.text_type)]
)
SchedulingComponents = ContextDerivedTypedRecord(
    ("inclusive", TypedList(six.text_type, StrictOrderingOnAppendList)),
    ("exclusive", TypedList(six.text_type, StrictOrderingOnAppendList)),
)

# List type backing GENERATED_FILES: each entry carries per-item flags
# (script, inputs, force, flags).
GeneratedFilesList = StrictOrderingOnAppendListWithFlagsFactory(
    {"script": six.text_type, "inputs": list, "force": bool, "flags": list}
)
+
+
class Files(SubContext):
    """Metadata attached to files.

    It is common to want to annotate files with metadata, such as which
    Bugzilla component tracks issues with certain files. This sub-context is
    where we stick that metadata.

    The argument to this sub-context is a file matching pattern that is applied
    against the host file's directory. If the pattern matches a file whose info
    is currently being sought, the metadata attached to this instance will be
    applied to that file.

    Patterns are collections of filename characters with ``/`` used as the
    directory separator (UNIX-style paths) and ``*`` and ``**`` used to denote
    wildcard matching.

    Patterns without the ``*`` character are literal matches and will match at
    most one entity.

    Patterns with ``*`` or ``**`` are wildcard matches. ``*`` matches files
    at least within a single directory. ``**`` matches files across several
    directories.

    ``foo.html``
       Will match only the ``foo.html`` file in the current directory.
    ``*.jsm``
       Will match all ``.jsm`` files in the current directory.
    ``**/*.cpp``
       Will match all ``.cpp`` files in this and all child directories.
    ``foo/*.css``
       Will match all ``.css`` files in the ``foo/`` directory.
    ``bar/*``
       Will match all files in the ``bar/`` directory and all of its
       children directories.
    ``bar/**``
       This is equivalent to ``bar/*`` above.
    ``bar/**/foo``
       Will match all ``foo`` files in the ``bar/`` directory and all of its
       children directories.

    The difference in behavior between ``*`` and ``**`` is only evident if
    a pattern follows the ``*`` or ``**``. A pattern ending with ``*`` is
    greedy. ``**`` is needed when you need an additional pattern after the
    wildcard. e.g. ``**/foo``.
    """

    VARIABLES = {
        "BUG_COMPONENT": (
            BugzillaComponent,
            tuple,
            """The bug component that tracks changes to these files.

            Values are a 2-tuple of unicode describing the Bugzilla product and
            component. e.g. ``('Firefox Build System', 'General')``.
            """,
        ),
        "FINAL": (
            bool,
            bool,
            """Mark variable assignments as finalized.

            During normal processing, values from newer Files contexts
            overwrite previously set values. Last write wins. This behavior is
            not always desired. ``FINAL`` provides a mechanism to prevent
            further updates to a variable.

            When ``FINAL`` is set, the value of all variables defined in this
            context are marked as frozen and all subsequent writes to them
            are ignored during metadata reading.

            See :ref:`mozbuild_files_metadata_finalizing` for more info.
            """,
        ),
        "SCHEDULES": (
            Schedules,
            list,
            """Maps source files to the CI tasks that should be scheduled when
            they change.  The tasks are grouped by named components, and those
            names appear again in the taskgraph configuration
            `($topsrcdir/taskgraph/).

            Some components are "inclusive", meaning that changes to most files
            do not schedule them, aside from those described in a Files
            subcontext.  For example, py-lint tasks need not be scheduled for
            most changes, but should be scheduled when any Python file changes.
            Such components are named by appending to `SCHEDULES.inclusive`:

            with Files('**.py'):
                SCHEDULES.inclusive += ['py-lint']

            Other components are 'exclusive', meaning that changes to most
            files schedule them, but some files affect only one or two
            components.  For example, most files schedule builds and tests of
            Firefox for Android, OS X, Windows, and Linux, but files under
            `mobile/android/` affect Android builds and tests exclusively, so
            builds for other operating systems are not needed.  Test suites
            provide another example: most files schedule reftests, but changes
            to reftest scripts need only schedule reftests and no other suites.

            Exclusive components are named by setting `SCHEDULES.exclusive`:

            with Files('mobile/android/**'):
                SCHEDULES.exclusive = ['android']
            """,
        ),
    }

    def __init__(self, parent, *patterns):
        super(Files, self).__init__(parent)
        # File-matching patterns this metadata applies to.
        self.patterns = patterns
        # Names of variables frozen by a FINAL assignment (see __iadd__).
        self.finalized = set()

    def __iadd__(self, other):
        """Merge another Files instance into this one.

        Later values win, except that finalized variables are left alone
        and SCHEDULES values are combined rather than overwritten.
        """
        assert isinstance(other, Files)

        for k, v in other.items():
            # SCHEDULES are merged via Schedules.__or__ instead of
            # last-write-wins.
            if k == "SCHEDULES" and "SCHEDULES" in self:
                self["SCHEDULES"] = self["SCHEDULES"] | v
                continue

            # Ignore updates to finalized flags.
            if k in self.finalized:
                continue

            # Only finalize variables defined in this instance.
            if k == "FINAL":
                self.finalized |= set(other) - {"FINAL"}
                continue

            self[k] = v

        return self

    def asdict(self):
        """Return this instance as a dict with built-in data structures.

        Call this to obtain an object suitable for serializing.
        """
        d = {}
        if "BUG_COMPONENT" in self:
            bc = self["BUG_COMPONENT"]
            d["bug_component"] = (bc.product, bc.component)

        return d

    @staticmethod
    def aggregate(files):
        """Given a mapping of path to Files, obtain aggregate results.

        Consumers may want to extract useful information from a collection of
        Files describing paths. e.g. given the files info data for N paths,
        recommend a single bug component based on the most frequent one. This
        function provides logic for deriving aggregate knowledge from a
        collection of path File metadata.

        Note: the intent of this function is to operate on the result of
        :py:func:`mozbuild.frontend.reader.BuildReader.files_info`. The
        :py:func:`mozbuild.frontend.context.Files` instances passed in are
        thus the "collapsed" (``__iadd__``ed) results of all ``Files`` from all
        moz.build files relevant to a specific path, not individual ``Files``
        instances from a single moz.build file.
        """
        d = {}

        bug_components = Counter()

        for f in files.values():
            bug_component = f.get("BUG_COMPONENT")
            if bug_component:
                bug_components[bug_component] += 1

        d["bug_component_counts"] = []
        for c, count in bug_components.most_common():
            component = (c.product, c.component)
            d["bug_component_counts"].append((c, count))

            if "recommended_bug_component" not in d:
                # most_common() yields in descending order, so the first
                # entry is the leading candidate.
                d["recommended_bug_component"] = component
                recommended_count = count
            elif count == recommended_count:
                # Don't recommend a component if it doesn't have a clear lead.
                d["recommended_bug_component"] = None

        # In case no bug components.
        d.setdefault("recommended_bug_component", None)

        return d
+
+
# This defines functions that create sub-contexts.
#
# Values are classes that are SubContexts. The class name will be turned into
# a function that when called emits an instance of that class.
#
# Arbitrary arguments can be passed to the class constructor. The first
# argument is always the parent context. It is up to each class to perform
# argument validation.
SUBCONTEXTS = [Files]

# Sanity-check each registered sub-context class at import time.
for cls in SUBCONTEXTS:
    if not issubclass(cls, SubContext):
        raise ValueError("SUBCONTEXTS entry not a SubContext class: %s" % cls)

    if not hasattr(cls, "VARIABLES"):
        raise ValueError("SUBCONTEXTS entry does not have VARIABLES: %s" % cls)

# Re-bind as a name -> class mapping for lookup by the moz.build reader.
SUBCONTEXTS = {cls.__name__: cls for cls in SUBCONTEXTS}
+
+
+# This defines the set of mutable global variables.
+#
+# Each variable is a tuple of:
+#
+# (storage_type, input_types, docs)
+
+VARIABLES = {
+ "SOURCES": (
+ ContextDerivedTypedListWithItems(
+ Path,
+ StrictOrderingOnAppendListWithFlagsFactory({"no_pgo": bool, "flags": List}),
+ ),
+ list,
+ """Source code files.
+
+ This variable contains a list of source code files to compile.
+ Accepts assembler, C, C++, Objective C/C++.
+ """,
+ ),
+ "FILES_PER_UNIFIED_FILE": (
+ int,
+ int,
+ """The number of source files to compile into each unified source file.
+
+ """,
+ ),
+ "IS_RUST_LIBRARY": (
+ bool,
+ bool,
+ """Whether the current library defined by this moz.build is built by Rust.
+
+ The library defined by this moz.build should have a build definition in
+ a Cargo.toml file that exists in this moz.build's directory.
+ """,
+ ),
+ "IS_GKRUST": (
+ bool,
+ bool,
+ """Whether the current library defined by this moz.build is gkrust.
+
+ Indicates whether the current library contains rust for libxul.
+ """,
+ ),
+ "RUST_LIBRARY_FEATURES": (
+ List,
+ list,
+ """Cargo features to activate for this library.
+
+ This variable should not be used directly; you should be using the
+ RustLibrary template instead.
+ """,
+ ),
+ "HOST_RUST_LIBRARY_FEATURES": (
+ List,
+ list,
+ """Cargo features to activate for this host library.
+
+ This variable should not be used directly; you should be using the
+ HostRustLibrary template instead.
+ """,
+ ),
+ "RUST_TESTS": (
+ TypedList(six.text_type),
+ list,
+ """Names of Rust tests to build and run via `cargo test`.
+ """,
+ ),
+ "RUST_TEST_FEATURES": (
+ TypedList(six.text_type),
+ list,
+ """Cargo features to activate for RUST_TESTS.
+ """,
+ ),
+ "UNIFIED_SOURCES": (
+ ContextDerivedTypedList(Path, StrictOrderingOnAppendList),
+ list,
+ """Source code files that can be compiled together.
+
+ This variable contains a list of source code files to compile,
+ that can be concatenated all together and built as a single source
+ file. This can help make the build faster and reduce the debug info
+ size.
+ """,
+ ),
+ "GENERATED_FILES": (
+ GeneratedFilesList,
+ list,
+ """Generic generated files.
+
+ Unless you have a reason not to, use the GeneratedFile template rather
+ than referencing GENERATED_FILES directly. The GeneratedFile template
+ has all the same arguments as the attributes listed below (``script``,
+ ``inputs``, ``flags``, ``force``), plus an additional ``entry_point``
+ argument to specify a particular function to run in the given script.
+
+ This variable contains a list of files for the build system to
+ generate at export time. The generation method may be declared
+ with optional ``script``, ``inputs``, ``flags``, and ``force``
+ attributes on individual entries.
+ If the optional ``script`` attribute is not present on an entry, it
+ is assumed that rules for generating the file are present in
+ the associated Makefile.in.
+
+ Example::
+
+ GENERATED_FILES += ['bar.c', 'baz.c', 'foo.c']
+ bar = GENERATED_FILES['bar.c']
+ bar.script = 'generate.py'
+ bar.inputs = ['datafile-for-bar']
+ foo = GENERATED_FILES['foo.c']
+ foo.script = 'generate.py'
+ foo.inputs = ['datafile-for-foo']
+
+ This definition will generate bar.c by calling the main method of
+ generate.py with an open (for writing) file object for bar.c, and
+ the string ``datafile-for-bar``. In a similar fashion, the main
+ method of generate.py will also be called with an open
+ (for writing) file object for foo.c and the string
+ ``datafile-for-foo``. Please note that only string arguments are
+ supported for passing to scripts, and that all arguments provided
+ to the script should be filenames relative to the directory in which
+ the moz.build file is located.
+
+ To enable using the same script for generating multiple files with
+ slightly different non-filename parameters, alternative entry points
+ into ``script`` can be specified::
+
+ GENERATED_FILES += ['bar.c']
+ bar = GENERATED_FILES['bar.c']
+ bar.script = 'generate.py:make_bar'
+
+ The chosen script entry point may optionally return a set of strings,
+ indicating extra files the output depends on.
+
+ When the ``flags`` attribute is present, the given list of flags is
+ passed as extra arguments following the inputs.
+
+ When the ``force`` attribute is present, the file is generated every
+ build, regardless of whether it is stale. This is special to the
+ RecursiveMake backend and intended for special situations only (e.g.,
+ localization). Please consult a build peer (on the #build channel at
+ https://chat.mozilla.org) before using ``force``.
+ """,
+ ),
+ "DEFINES": (
+ InitializedDefines,
+ dict,
+ """Dictionary of compiler defines to declare.
+
+ These are passed in to the compiler as ``-Dkey='value'`` for string
+ values, ``-Dkey=value`` for numeric values, or ``-Dkey`` if the
+ value is True. Note that for string values, the outer-level of
+ single-quotes will be consumed by the shell. If you want to have
+ a string-literal in the program, the value needs to have
+ double-quotes.
+
+ Example::
+
+ DEFINES['NS_NO_XPCOM'] = True
+ DEFINES['MOZ_EXTENSIONS_DB_SCHEMA'] = 15
+ DEFINES['DLL_SUFFIX'] = '".so"'
+
+ This will result in the compiler flags ``-DNS_NO_XPCOM``,
+ ``-DMOZ_EXTENSIONS_DB_SCHEMA=15``, and ``-DDLL_SUFFIX='".so"'``,
+ respectively.
+
+ Note that these entries are not necessarily passed to the assembler.
+ Whether they are depends on the type of assembly file. As an
+ alternative, you may add a ``-DKEY=value`` entry to ``ASFLAGS``.
+ """,
+ ),
+ "DELAYLOAD_DLLS": (
+ List,
+ list,
+ """Delay-loaded DLLs.
+
+ This variable contains a list of DLL files which the module being linked
+ should load lazily. This only has an effect when building with MSVC.
+ """,
+ ),
+ "DIRS": (
+ ContextDerivedTypedList(SourcePath),
+ list,
+ """Child directories to descend into looking for build frontend files.
+
+ This works similarly to the ``DIRS`` variable in make files. Each str
+ value in the list is the name of a child directory. When this file is
+ done parsing, the build reader will descend into each listed directory
+ and read the frontend file there. If there is no frontend file, an error
+ is raised.
+
+ Values are relative paths. They can be multiple directory levels
+ above or below. Use ``..`` for parent directories and ``/`` for path
+ delimiters.
+ """,
+ ),
+ "FINAL_TARGET_FILES": (
+ ContextDerivedTypedHierarchicalStringList(Path),
+ list,
+ """List of files to be installed into the application directory.
+
+ ``FINAL_TARGET_FILES`` will copy (or symlink, if the platform supports it)
+ the contents of its files to the directory specified by
+ ``FINAL_TARGET`` (typically ``dist/bin``). Files that are destined for a
+ subdirectory can be specified by accessing a field, or as a dict access.
+ For example, to export ``foo.png`` to the top-level directory and
+ ``bar.svg`` to the directory ``images/do-not-use``, append to
+ ``FINAL_TARGET_FILES`` like so::
+
+ FINAL_TARGET_FILES += ['foo.png']
+ FINAL_TARGET_FILES.images['do-not-use'] += ['bar.svg']
+ """,
+ ),
+ "FINAL_TARGET_PP_FILES": (
+ ContextDerivedTypedHierarchicalStringList(Path),
+ list,
+ """Like ``FINAL_TARGET_FILES``, with preprocessing.
+ """,
+ ),
+ "LOCALIZED_FILES": (
+ ContextDerivedTypedHierarchicalStringList(Path),
+ list,
+ """List of locale-dependent files to be installed into the application
+ directory.
+
+ This functions similarly to ``FINAL_TARGET_FILES``, but the files are
+ sourced from the locale directory and will vary per localization.
+ For an en-US build, this is functionally equivalent to
+ ``FINAL_TARGET_FILES``. For a build with ``--enable-ui-locale``,
+ the file will be taken from ``$LOCALE_SRCDIR``, with the leading
+ ``en-US`` removed. For a l10n repack of an en-US build, the file
+ will be taken from the first location where it exists from:
+ * the merged locale directory if it exists
+ * ``$LOCALE_SRCDIR`` with the leading ``en-US`` removed
+ * the in-tree en-US location
+
+ Source directory paths specified here must include a leading ``en-US``.
+ Wildcards are allowed, and will be expanded at the time of locale packaging to match
+ files in the locale directory.
+
+ Object directory paths are allowed here only if the path matches an entry in
+ ``LOCALIZED_GENERATED_FILES``.
+
+ Files that are missing from a locale will typically have the en-US
+ version used, but for wildcard expansions only files from the
+ locale directory will be used, even if that means no files will
+ be copied.
+
+ Example::
+
+ LOCALIZED_FILES.foo += [
+ 'en-US/foo.js',
+ 'en-US/things/*.ini',
+ ]
+
+ If this was placed in ``toolkit/locales/moz.build``, it would copy
+ ``toolkit/locales/en-US/foo.js`` and
+ ``toolkit/locales/en-US/things/*.ini`` to ``$(DIST)/bin/foo`` in an
+ en-US build, and in a build of a different locale (or a repack),
+ it would copy ``$(LOCALE_SRCDIR)/toolkit/foo.js`` and
+ ``$(LOCALE_SRCDIR)/toolkit/things/*.ini``.
+ """,
+ ),
+ "LOCALIZED_PP_FILES": (
+ ContextDerivedTypedHierarchicalStringList(Path),
+ list,
+ """Like ``LOCALIZED_FILES``, with preprocessing.
+
+ Note that the ``AB_CD`` define is available and expands to the current
+ locale being packaged, as with preprocessed entries in jar manifests.
+ """,
+ ),
+ "LOCALIZED_GENERATED_FILES": (
+ GeneratedFilesList,
+ list,
+ """Like ``GENERATED_FILES``, but for files whose content varies based on the locale in use.
+
+ For simple cases of text substitution, prefer ``LOCALIZED_PP_FILES``.
+
+ Refer to the documentation of ``GENERATED_FILES``; for the most part things work the same.
+ The two major differences are:
+ 1. The function in the Python script will be passed an additional keyword argument `locale`
+ which provides the locale in use, i.e. ``en-US``.
+ 2. The ``inputs`` list may contain paths to files that will be taken from the locale
+ source directory (see ``LOCALIZED_FILES`` for a discussion of the specifics). Paths
+ in ``inputs`` starting with ``en-US/`` or containing ``locales/en-US/`` are considered
+ localized files.
+
+ To place the generated output file in a specific location, list its objdir path in
+ ``LOCALIZED_FILES``.
+
+ In addition, ``LOCALIZED_GENERATED_FILES`` can use the special substitutions ``{AB_CD}``
+ and ``{AB_rCD}`` in their output paths. ``{AB_CD}`` expands to the current locale during
+ multi-locale builds and single-locale repacks and ``{AB_rCD}`` expands to an
+ Android-specific encoding of the current locale. Both expand to the empty string when the
+ current locale is ``en-US``.
+ """,
+ ),
+ "OBJDIR_FILES": (
+ ContextDerivedTypedHierarchicalStringList(Path),
+ list,
+ """List of files to be installed anywhere in the objdir. Use sparingly.
+
+ ``OBJDIR_FILES`` is similar to FINAL_TARGET_FILES, but it allows copying
+ anywhere in the object directory. This is intended for various one-off
+ cases, not for general use. If you wish to add entries to OBJDIR_FILES,
+ please consult a build peer (on the #build channel at https://chat.mozilla.org).
+ """,
+ ),
+ "OBJDIR_PP_FILES": (
+ ContextDerivedTypedHierarchicalStringList(Path),
+ list,
+ """Like ``OBJDIR_FILES``, with preprocessing. Use sparingly.
+ """,
+ ),
+ "FINAL_LIBRARY": (
+ six.text_type,
+ six.text_type,
+ """Library in which the objects of the current directory will be linked.
+
+ This variable contains the name of a library, defined elsewhere with
+ ``LIBRARY_NAME``, in which the objects of the current directory will be
+ linked.
+ """,
+ ),
+ "CPP_UNIT_TESTS": (
+ StrictOrderingOnAppendList,
+ list,
+ """Compile a list of C++ unit test names.
+
+ Each name in this variable corresponds to an executable built from the
+ corresponding source file with the same base name.
+
+ If the configuration token ``BIN_SUFFIX`` is set, its value will be
+ automatically appended to each name. If a name already ends with
+ ``BIN_SUFFIX``, the name will remain unchanged.
+ """,
+ ),
+ "FORCE_SHARED_LIB": (
+ bool,
+ bool,
+ """Whether the library in this directory is a shared library.
+ """,
+ ),
+ "FORCE_STATIC_LIB": (
+ bool,
+ bool,
+ """Whether the library in this directory is a static library.
+ """,
+ ),
+ "USE_STATIC_LIBS": (
+ bool,
+ bool,
+ """Whether the code in this directory is built against the static
+ runtime library.
+
+ This variable only has an effect when building with MSVC.
+ """,
+ ),
+ "HOST_SOURCES": (
+ ContextDerivedTypedList(Path, StrictOrderingOnAppendList),
+ list,
+ """Source code files to compile with the host compiler.
+
+ This variable contains a list of source code files to compile
+ with the host compiler.
+ """,
+ ),
+ "WASM_SOURCES": (
+ ContextDerivedTypedList(Path, StrictOrderingOnAppendList),
+ list,
+ """Source code files to compile with the wasm compiler.
+ """,
+ ),
+ "HOST_LIBRARY_NAME": (
+ six.text_type,
+ six.text_type,
+ """Name of target library generated when cross compiling.
+ """,
+ ),
+ "LIBRARY_DEFINES": (
+ OrderedDict,
+ dict,
+ """Dictionary of compiler defines to declare for the entire library.
+
+ This variable works like DEFINES, except that declarations apply to all
+ libraries that link into this library via FINAL_LIBRARY.
+ """,
+ ),
+ "LIBRARY_NAME": (
+ six.text_type,
+ six.text_type,
+ """The code name of the library generated for a directory.
+
+ By default STATIC_LIBRARY_NAME and SHARED_LIBRARY_NAME take this name.
+ In ``example/components/moz.build``,::
+
+ LIBRARY_NAME = 'xpcomsample'
+
+ would generate ``example/components/libxpcomsample.so`` on Linux, or
+ ``example/components/xpcomsample.lib`` on Windows.
+ """,
+ ),
+ "SHARED_LIBRARY_NAME": (
+ six.text_type,
+ six.text_type,
+ """The name of the shared library generated for a directory, if it needs to
+ differ from the library code name.
+
+ Implies FORCE_SHARED_LIB.
+ """,
+ ),
+ "SANDBOXED_WASM_LIBRARY_NAME": (
+ six.text_type,
+ six.text_type,
+ """The name of the static sandboxed wasm library generated for a directory.
+ """,
+ ),
+ "SHARED_LIBRARY_OUTPUT_CATEGORY": (
+ six.text_type,
+ six.text_type,
+ """The output category for this context's shared library. If set this will
+ correspond to the build command that will build this shared library, and
+ the library will not be built as part of the default build.
+ """,
+ ),
+ "RUST_LIBRARY_OUTPUT_CATEGORY": (
+ six.text_type,
+ six.text_type,
+ """The output category for this context's rust library. If set this will
+ correspond to the build command that will build this rust library, and
+ the library will not be built as part of the default build.
+ """,
+ ),
+ "IS_FRAMEWORK": (
+ bool,
+ bool,
+ """Whether the library to build should be built as a framework on OSX.
+
+ This implies the name of the library won't be prefixed nor suffixed.
+ Implies FORCE_SHARED_LIB.
+ """,
+ ),
+ "STATIC_LIBRARY_NAME": (
+ six.text_type,
+ six.text_type,
+ """The name of the static library generated for a directory, if it needs to
+ differ from the library code name.
+
+ Implies FORCE_STATIC_LIB.
+ """,
+ ),
+ "USE_LIBS": (
+ StrictOrderingOnAppendList,
+ list,
+ """List of libraries to link to programs and libraries.
+ """,
+ ),
+ "HOST_USE_LIBS": (
+ StrictOrderingOnAppendList,
+ list,
+ """List of libraries to link to host programs and libraries.
+ """,
+ ),
+ "HOST_OS_LIBS": (
+ List,
+ list,
+ """List of system libraries for host programs and libraries.
+ """,
+ ),
+ "LOCAL_INCLUDES": (
+ ContextDerivedTypedList(Path, StrictOrderingOnAppendList),
+ list,
+ """Additional directories to be searched for include files by the compiler.
+ """,
+ ),
+ "NO_PGO": (
+ bool,
+ bool,
+ """Whether profile-guided optimization is disabled in this directory.
+ """,
+ ),
+ "OS_LIBS": (
+ List,
+ list,
+ """System link libraries.
+
+ This variable contains a list of system libraries to link against.
+ """,
+ ),
+ "RCFILE": (
+ Path,
+ six.text_type,
+ """The program .rc file.
+
+ This variable can only be used on Windows.
+ """,
+ ),
+ "RCINCLUDE": (
+ Path,
+ six.text_type,
+ """The resource script file to be included in the default .res file.
+
+ This variable can only be used on Windows.
+ """,
+ ),
+ "DEFFILE": (
+ Path,
+ six.text_type,
+ """The program .def (module definition) file.
+
+ This variable can only be used on Windows.
+ """,
+ ),
+ "SYMBOLS_FILE": (
+ Path,
+ six.text_type,
+ """A file containing a list of symbols to export from a shared library.
+
+ The given file contains a list of symbols to be exported, and is
+ preprocessed.
+ A special marker "@DATA@" must be added after a symbol name if it
+ points to data instead of code, so that the Windows linker can treat
+ them correctly.
+ """,
+ ),
+ "SIMPLE_PROGRAMS": (
+ StrictOrderingOnAppendList,
+ list,
+ """Compile a list of executable names.
+
+ Each name in this variable corresponds to an executable built from the
+ corresponding source file with the same base name.
+
+ If the configuration token ``BIN_SUFFIX`` is set, its value will be
+ automatically appended to each name. If a name already ends with
+ ``BIN_SUFFIX``, the name will remain unchanged.
+ """,
+ ),
+ "SONAME": (
+ six.text_type,
+ six.text_type,
+ """The soname of the shared object currently being linked
+
+ soname is the "logical name" of a shared object, often used to provide
+ version backwards compatibility. This variable makes sense only for
+ shared objects, and is supported only on some unix platforms.
+ """,
+ ),
+ "HOST_SIMPLE_PROGRAMS": (
+ StrictOrderingOnAppendList,
+ list,
+ """Compile a list of host executable names.
+
+ Each name in this variable corresponds to a host executable built
+ from the corresponding source file with the same base name.
+
+ If the configuration token ``HOST_BIN_SUFFIX`` is set, its value will
+ be automatically appended to each name. If a name already ends with
+ ``HOST_BIN_SUFFIX``, the name will remain unchanged.
+ """,
+ ),
+ "RUST_PROGRAMS": (
+ StrictOrderingOnAppendList,
+ list,
+ """Compile a list of Rust executable names.
+
+ Each name in this variable corresponds to an executable built from
+ the Cargo.toml in the same directory.
+ """,
+ ),
+ "HOST_RUST_PROGRAMS": (
+ StrictOrderingOnAppendList,
+ list,
+ """Compile a list of Rust host executable names.
+
+ Each name in this variable corresponds to an executable built from
+ the Cargo.toml in the same directory.
+ """,
+ ),
+ "CONFIGURE_SUBST_FILES": (
+ ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList),
+ list,
+ """Output files that will be generated using configure-like substitution.
+
+ This is a substitute for ``AC_OUTPUT`` in autoconf. For each path in this
+ list, we will search for a file in the srcdir having the name
+ ``{path}.in``. The contents of this file will be read and variable
+ patterns like ``@foo@`` will be substituted with the values of the
+ ``AC_SUBST`` variables declared during configure.
+ """,
+ ),
+ "CONFIGURE_DEFINE_FILES": (
+ ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList),
+ list,
+ """Output files generated from configure/config.status.
+
+ This is a substitute for ``AC_CONFIG_HEADER`` in autoconf. This is very
+ similar to ``CONFIGURE_SUBST_FILES`` except the generation logic takes
+ into account the values of ``AC_DEFINE`` instead of ``AC_SUBST``.
+ """,
+ ),
+ "EXPORTS": (
+ ContextDerivedTypedHierarchicalStringList(Path),
+ list,
+ """List of files to be exported, and in which subdirectories.
+
+ ``EXPORTS`` is generally used to list the include files to be exported to
+ ``dist/include``, but it can be used for other files as well. This variable
+ behaves as a list when appending filenames for export in the top-level
+ directory. Files can also be appended to a field to indicate which
+ subdirectory they should be exported to. For example, to export
+ ``foo.h`` to the top-level directory, and ``bar.h`` to ``mozilla/dom/``,
+ append to ``EXPORTS`` like so::
+
+ EXPORTS += ['foo.h']
+ EXPORTS.mozilla.dom += ['bar.h']
+
+ Entries in ``EXPORTS`` are paths, so objdir paths may be used, but
+ any files listed from the objdir must also be listed in
+ ``GENERATED_FILES``.
+ """,
+ ),
+ "PROGRAM": (
+ six.text_type,
+ six.text_type,
+ """Compiled executable name.
+
+ If the configuration token ``BIN_SUFFIX`` is set, its value will be
+ automatically appended to ``PROGRAM``. If ``PROGRAM`` already ends with
+ ``BIN_SUFFIX``, ``PROGRAM`` will remain unchanged.
+ """,
+ ),
+ "HOST_PROGRAM": (
+ six.text_type,
+ six.text_type,
+ """Compiled host executable name.
+
+ If the configuration token ``HOST_BIN_SUFFIX`` is set, its value will be
+ automatically appended to ``HOST_PROGRAM``. If ``HOST_PROGRAM`` already
+ ends with ``HOST_BIN_SUFFIX``, ``HOST_PROGRAM`` will remain unchanged.
+ """,
+ ),
+ "DIST_INSTALL": (
+ Enum(None, False, True),
+ bool,
+ """Whether to install certain files into the dist directory.
+
+ By default, some files types are installed in the dist directory, and
+ some aren't. Set this variable to True to force the installation of
+ some files that wouldn't be installed by default. Set this variable to
+ False to force to not install some files that would be installed by
+ default.
+
+ This is confusing for historical reasons, but eventually, the behavior
+ will be made explicit.
+ """,
+ ),
+ "JAR_MANIFESTS": (
+ ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList),
+ list,
+ """JAR manifest files that should be processed as part of the build.
+
+ JAR manifests are files in the tree that define how to package files
+ into JARs and how chrome registration is performed. For more info,
+ see :ref:`jar_manifests`.
+ """,
+ ),
+ # IDL Generation.
+ "XPIDL_SOURCES": (
+ ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList),
+ list,
+ """XPCOM Interface Definition Files (xpidl).
+
+ This is a list of files that define XPCOM interface definitions.
+ Entries must be files that exist. Entries are almost certainly ``.idl``
+ files.
+ """,
+ ),
+ "XPIDL_MODULE": (
+ six.text_type,
+ six.text_type,
+ """XPCOM Interface Definition Module Name.
+
+ This is the name of the ``.xpt`` file that is created by linking
+ ``XPIDL_SOURCES`` together. If unspecified, it defaults to be the same
+ as ``MODULE``.
+ """,
+ ),
+ "XPCOM_MANIFESTS": (
+ ContextDerivedTypedList(SourcePath, StrictOrderingOnAppendList),
+ list,
+ """XPCOM Component Manifest Files.
+
+ This is a list of files that define XPCOM components to be added
+ to the component registry.
+ """,
+ ),
+ "PREPROCESSED_IPDL_SOURCES": (
+ StrictOrderingOnAppendList,
+ list,
+ """Preprocessed IPDL source files.
+
+ These files will be preprocessed, then parsed and converted to
+ ``.cpp`` files.
+ """,
+ ),
+ "IPDL_SOURCES": (
+ StrictOrderingOnAppendList,
+ list,
+ """IPDL source files.
+
+ These are ``.ipdl`` files that will be parsed and converted to
+ ``.cpp`` files.
+ """,
+ ),
+ "WEBIDL_FILES": (
+ StrictOrderingOnAppendList,
+ list,
+ """WebIDL source files.
+
+ These will be parsed and converted to ``.cpp`` and ``.h`` files.
+ """,
+ ),
+ "GENERATED_EVENTS_WEBIDL_FILES": (
+ StrictOrderingOnAppendList,
+ list,
+ """WebIDL source files for generated events.
+
+ These will be parsed and converted to ``.cpp`` and ``.h`` files.
+ """,
+ ),
+ "TEST_WEBIDL_FILES": (
+ StrictOrderingOnAppendList,
+ list,
+ """Test WebIDL source files.
+
+ These will be parsed and converted to ``.cpp`` and ``.h`` files
+ if tests are enabled.
+ """,
+ ),
+ "GENERATED_WEBIDL_FILES": (
+ StrictOrderingOnAppendList,
+ list,
+ """Generated WebIDL source files.
+
+ These will be generated from some other files.
+ """,
+ ),
+ "PREPROCESSED_TEST_WEBIDL_FILES": (
+ StrictOrderingOnAppendList,
+ list,
+ """Preprocessed test WebIDL source files.
+
+ These will be preprocessed, then parsed and converted to .cpp
+ and ``.h`` files if tests are enabled.
+ """,
+ ),
+ "PREPROCESSED_WEBIDL_FILES": (
+ StrictOrderingOnAppendList,
+ list,
+ """Preprocessed WebIDL source files.
+
+ These will be preprocessed before being parsed and converted.
+ """,
+ ),
+ "WEBIDL_EXAMPLE_INTERFACES": (
+ StrictOrderingOnAppendList,
+ list,
+ """Names of example WebIDL interfaces to build as part of the build.
+
+ Names in this list correspond to WebIDL interface names defined in
+ WebIDL files included in the build from one of the \*WEBIDL_FILES
+ variables.
+ """,
+ ),
+ # Test declaration.
+ "A11Y_MANIFESTS": (
+ ManifestparserManifestList,
+ list,
+ """List of manifest files defining a11y tests.
+ """,
+ ),
+ "BROWSER_CHROME_MANIFESTS": (
+ ManifestparserManifestList,
+ list,
+ """List of manifest files defining browser chrome tests.
+ """,
+ ),
+ "ANDROID_INSTRUMENTATION_MANIFESTS": (
+ ManifestparserManifestList,
+ list,
+ """List of manifest files defining Android instrumentation tests.
+ """,
+ ),
+ "FIREFOX_UI_FUNCTIONAL_MANIFESTS": (
+ ManifestparserManifestList,
+ list,
+ """List of manifest files defining firefox-ui-functional tests.
+ """,
+ ),
+ "MARIONETTE_LAYOUT_MANIFESTS": (
+ ManifestparserManifestList,
+ list,
+ """List of manifest files defining marionette-layout tests.
+ """,
+ ),
+ "MARIONETTE_UNIT_MANIFESTS": (
+ ManifestparserManifestList,
+ list,
+ """List of manifest files defining marionette-unit tests.
+ """,
+ ),
+ "METRO_CHROME_MANIFESTS": (
+ ManifestparserManifestList,
+ list,
+ """List of manifest files defining metro browser chrome tests.
+ """,
+ ),
+ "MOCHITEST_CHROME_MANIFESTS": (
+ ManifestparserManifestList,
+ list,
+ """List of manifest files defining mochitest chrome tests.
+ """,
+ ),
+ "MOCHITEST_MANIFESTS": (
+ ManifestparserManifestList,
+ list,
+ """List of manifest files defining mochitest tests.
+ """,
+ ),
+ "REFTEST_MANIFESTS": (
+ ReftestManifestList,
+ list,
+ """List of manifest files defining reftests.
+
+ These are commonly named reftest.list.
+ """,
+ ),
+ "CRASHTEST_MANIFESTS": (
+ ReftestManifestList,
+ list,
+ """List of manifest files defining crashtests.
+
+ These are commonly named crashtests.list.
+ """,
+ ),
+ "XPCSHELL_TESTS_MANIFESTS": (
+ ManifestparserManifestList,
+ list,
+ """List of manifest files defining xpcshell tests.
+ """,
+ ),
+ "PYTHON_UNITTEST_MANIFESTS": (
+ ManifestparserManifestList,
+ list,
+ """List of manifest files defining python unit tests.
+ """,
+ ),
+ "PERFTESTS_MANIFESTS": (
+ ManifestparserManifestList,
+ list,
+ """List of manifest files defining MozPerftest performance tests.
+ """,
+ ),
+ "CRAMTEST_MANIFESTS": (
+ ManifestparserManifestList,
+ list,
+ """List of manifest files defining cram unit tests.
+ """,
+ ),
+ "TELEMETRY_TESTS_CLIENT_MANIFESTS": (
+ ManifestparserManifestList,
+ list,
+ """List of manifest files defining telemetry client tests.
+ """,
+ ),
+ # The following variables are used to control the target of installed files.
+ "XPI_NAME": (
+ six.text_type,
+ six.text_type,
+ """The name of an extension XPI to generate.
+
+ When this variable is present, the results of this directory will end up
+ being packaged into an extension instead of the main dist/bin results.
+ """,
+ ),
+ "DIST_SUBDIR": (
+ six.text_type,
+ six.text_type,
+ """The name of an alternate directory to install files to.
+
+ When this variable is present, the results of this directory will end up
+ being placed in the $(DIST_SUBDIR) subdirectory of where it would
+ otherwise be placed.
+ """,
+ ),
+ "FINAL_TARGET": (
+ FinalTargetValue,
+ six.text_type,
+ """The name of the directory to install targets to.
+
+ The directory is relative to the top of the object directory. The
+ default value is dependent on the values of XPI_NAME and DIST_SUBDIR. If
+ neither are present, the result is dist/bin. If XPI_NAME is present, the
+ result is dist/xpi-stage/$(XPI_NAME). If DIST_SUBDIR is present, then
+ the $(DIST_SUBDIR) directory of the otherwise default value is used.
+ """,
+ ),
+ "USE_EXTENSION_MANIFEST": (
+ bool,
+ bool,
+ """Controls the name of the manifest for JAR files.
+
+ By default, the name of the manifest is ${JAR_MANIFEST}.manifest.
+ Setting this variable to ``True`` changes the name of the manifest to
+ chrome.manifest.
+ """,
+ ),
+ "GYP_DIRS": (
+ StrictOrderingOnAppendListWithFlagsFactory(
+ {
+ "variables": dict,
+ "input": six.text_type,
+ "sandbox_vars": dict,
+ "no_chromium": bool,
+ "no_unified": bool,
+ "non_unified_sources": StrictOrderingOnAppendList,
+ "action_overrides": dict,
+ }
+ ),
+ list,
+ """Defines a list of object directories handled by gyp configurations.
+
+ Elements of this list give the relative object directory. For each
+ element of the list, GYP_DIRS may be accessed as a dictionary
+ (GYP_DIRS[foo]). The object this returns has attributes that need to be
+ set to further specify gyp processing:
+ - input, gives the path to the root gyp configuration file for that
+ object directory.
+ - variables, a dictionary containing variables and values to pass
+ to the gyp processor.
+ - sandbox_vars, a dictionary containing variables and values to
+ pass to the mozbuild processor on top of those derived from gyp
+ configuration.
+ - no_chromium, a boolean which if set to True disables some
+ special handling that emulates gyp_chromium.
+ - no_unified, a boolean which if set to True disables source
+ file unification entirely.
+ - non_unified_sources, a list containing sources files, relative to
+ the current moz.build, that should be excluded from source file
+ unification.
+ - action_overrides, a dict of action_name to values of the `script`
+ attribute to use for GENERATED_FILES for the specified action.
+
+ Typical use looks like:
+ GYP_DIRS += ['foo', 'bar']
+ GYP_DIRS['foo'].input = 'foo/foo.gyp'
+ GYP_DIRS['foo'].variables = {
+ 'foo': 'bar',
+ (...)
+ }
+ (...)
+ """,
+ ),
+ "SPHINX_TREES": (
+ dict,
+ dict,
+ """Describes what the Sphinx documentation tree will look like.
+
+ Keys are relative directories inside the final Sphinx documentation
+ tree to install files into. Values are directories (relative to this
+ file) whose content to copy into the Sphinx documentation tree.
+ """,
+ ),
+ "SPHINX_PYTHON_PACKAGE_DIRS": (
+ StrictOrderingOnAppendList,
+ list,
+ """Directories containing Python packages that Sphinx documents.
+ """,
+ ),
+ "COMPILE_FLAGS": (
+ CompileFlags,
+ dict,
+ """Recipe for compile flags for this context. Not to be manipulated
+ directly.
+ """,
+ ),
+ "LINK_FLAGS": (
+ LinkFlags,
+ dict,
+ """Recipe for linker flags for this context. Not to be manipulated
+ directly.
+ """,
+ ),
+ "WASM_FLAGS": (
+ WasmFlags,
+ dict,
+ """Recipe for wasm flags for this context. Not to be
+ manipulated directly.
+ """,
+ ),
+ "ASM_FLAGS": (
+ AsmFlags,
+ dict,
+ """Recipe for assembler flags for this context. Not to be
+ manipulated directly.
+ """,
+ ),
+ "CFLAGS": (
+ List,
+ list,
+ """Flags passed to the C compiler for all of the C source files
+ declared in this directory.
+
+ Note that the ordering of flags matters here, these flags will be
+ added to the compiler's command line in the same order as they
+ appear in the moz.build file.
+ """,
+ ),
+ "CXXFLAGS": (
+ List,
+ list,
+ """Flags passed to the C++ compiler for all of the C++ source files
+ declared in this directory.
+
+ Note that the ordering of flags matters here; these flags will be
+ added to the compiler's command line in the same order as they
+ appear in the moz.build file.
+ """,
+ ),
+ "HOST_COMPILE_FLAGS": (
+ HostCompileFlags,
+ dict,
+ """Recipe for host compile flags for this context. Not to be manipulated
+ directly.
+ """,
+ ),
+ "HOST_DEFINES": (
+ InitializedDefines,
+ dict,
+ """Dictionary of compiler defines to declare for host compilation.
+ See ``DEFINES`` for specifics.
+ """,
+ ),
+ "WASM_CFLAGS": (
+ List,
+ list,
+ """Flags passed to the C-to-wasm compiler for all of the C
+ source files declared in this directory.
+
+ Note that the ordering of flags matters here, these flags will be
+ added to the compiler's command line in the same order as they
+ appear in the moz.build file.
+ """,
+ ),
+ "WASM_CXXFLAGS": (
+ List,
+ list,
+ """Flags passed to the C++-to-wasm compiler for all of the
+ C++ source files declared in this directory.
+
+ Note that the ordering of flags matters here; these flags will be
+ added to the compiler's command line in the same order as they
+ appear in the moz.build file.
+ """,
+ ),
+ "WASM_DEFINES": (
+ InitializedDefines,
+ dict,
+ """Dictionary of compiler defines to declare for wasm compilation.
+ See ``DEFINES`` for specifics.
+ """,
+ ),
+ "CMFLAGS": (
+ List,
+ list,
+ """Flags passed to the Objective-C compiler for all of the Objective-C
+ source files declared in this directory.
+
+ Note that the ordering of flags matters here; these flags will be
+ added to the compiler's command line in the same order as they
+ appear in the moz.build file.
+ """,
+ ),
+ "CMMFLAGS": (
+ List,
+ list,
+ """Flags passed to the Objective-C++ compiler for all of the
+ Objective-C++ source files declared in this directory.
+
+ Note that the ordering of flags matters here; these flags will be
+ added to the compiler's command line in the same order as they
+ appear in the moz.build file.
+ """,
+ ),
+ "ASFLAGS": (
+ List,
+ list,
+ """Flags passed to the assembler for all of the assembly source files
+ declared in this directory.
+
+ Note that the ordering of flags matters here; these flags will be
+ added to the assembler's command line in the same order as they
+ appear in the moz.build file.
+ """,
+ ),
+ "HOST_CFLAGS": (
+ List,
+ list,
+ """Flags passed to the host C compiler for all of the C source files
+ declared in this directory.
+
+ Note that the ordering of flags matters here, these flags will be
+ added to the compiler's command line in the same order as they
+ appear in the moz.build file.
+ """,
+ ),
+ "HOST_CXXFLAGS": (
+ List,
+ list,
+ """Flags passed to the host C++ compiler for all of the C++ source files
+ declared in this directory.
+
+ Note that the ordering of flags matters here; these flags will be
+ added to the compiler's command line in the same order as they
+ appear in the moz.build file.
+ """,
+ ),
+ "LDFLAGS": (
+ List,
+ list,
+ """Flags passed to the linker when linking all of the libraries and
+ executables declared in this directory.
+
+ Note that the ordering of flags matters here; these flags will be
+ added to the linker's command line in the same order as they
+ appear in the moz.build file.
+ """,
+ ),
+ "EXTRA_DSO_LDOPTS": (
+ List,
+ list,
+ """Flags passed to the linker when linking a shared library.
+
+ Note that the ordering of flags matters here, these flags will be
+ added to the linker's command line in the same order as they
+ appear in the moz.build file.
+ """,
+ ),
+ "WIN32_EXE_LDFLAGS": (
+ List,
+ list,
+ """Flags passed to the linker when linking a Windows .exe executable
+ declared in this directory.
+
+ Note that the ordering of flags matters here, these flags will be
+ added to the linker's command line in the same order as they
+ appear in the moz.build file.
+
+ This variable only has an effect on Windows.
+ """,
+ ),
+ "TEST_HARNESS_FILES": (
+ ContextDerivedTypedHierarchicalStringList(Path),
+ list,
+ """List of files to be installed for test harnesses.
+
+ ``TEST_HARNESS_FILES`` can be used to install files to any directory
+ under $objdir/_tests. Files can be appended to a field to indicate
+ which subdirectory they should be exported to. For example,
+ to export ``foo.py`` to ``_tests/foo``, append to
+ ``TEST_HARNESS_FILES`` like so::
+ TEST_HARNESS_FILES.foo += ['foo.py']
+
+ Files from topsrcdir and the objdir can also be installed by prefixing
+ the path(s) with a '/' character and a '!' character, respectively::
+ TEST_HARNESS_FILES.path += ['/build/bar.py', '!quux.py']
+ """,
+ ),
+ "NO_EXPAND_LIBS": (
+ bool,
+ bool,
+ """Forces to build a real static library, and no corresponding fake
+ library.
+ """,
+ ),
+ "USE_NASM": (
+ bool,
+ bool,
+ """Use the nasm assembler to assemble assembly files from SOURCES.
+
+ By default, the build will use the toolchain assembler, $(AS), to
+ assemble source files in assembly language (.s or .asm files). Setting
+ this value to ``True`` will cause it to use nasm instead.
+
+ If nasm is not available on this system, or does not support the
+ current target architecture, an error will be raised.
+ """,
+ ),
+ "USE_INTEGRATED_CLANGCL_AS": (
+ bool,
+ bool,
+ """Use the integrated clang-cl assembler to assemble assembly files from SOURCES.
+
+ This allows using clang-cl to assemble assembly files which is useful
+ on platforms like aarch64 where the alternative is to have to run a
+ pre-processor to generate files with suitable syntax.
+ """,
+ ),
+}
+
# Sanity check: no entry in VARIABLES may use the built-in ``list`` as its
# storage type; the sandbox requires the richer ``List`` wrapper.
for name, (storage_type, input_types, docs) in VARIABLES.items():
    if storage_type != list:
        continue
    raise RuntimeError('%s has a "list" storage type. Use "List" instead.' % name)
+
# Variables that may only be assigned from within a template function,
# never directly from a moz.build file (kept sorted for readability).
TEMPLATE_VARIABLES = {
    "CPP_UNIT_TESTS",
    "FORCE_SHARED_LIB",
    "HOST_LIBRARY_NAME",
    "HOST_PROGRAM",
    "HOST_SIMPLE_PROGRAMS",
    "IS_FRAMEWORK",
    "IS_GKRUST",
    "LIBRARY_NAME",
    "PROGRAM",
    "SIMPLE_PROGRAMS",
}
+
# Append a template-only note to each template variable's documentation,
# verifying that every TEMPLATE_VARIABLES entry actually exists in VARIABLES.
for name in TEMPLATE_VARIABLES:
    if name not in VARIABLES:
        raise RuntimeError("%s is in TEMPLATE_VARIABLES but not in VARIABLES." % name)
    storage_type, input_types, docs = VARIABLES[name]
    docs += "This variable is only available in templates.\n"
    VARIABLES[name] = (storage_type, input_types, docs)
+
+
+# The set of functions exposed to the sandbox.
+#
+# Each entry is a tuple of:
+#
+# (function returning the corresponding function from a given sandbox,
+# (argument types), docs)
+#
+# The first element is an attribute on Sandbox that should be a function type.
+#
FUNCTIONS = {
    # include(path): evaluate another moz.build file in the current context.
    "include": (
        lambda self: self._include,
        (SourcePath,),
        """Include another mozbuild file in the context of this one.

        This is similar to a ``#include`` in C languages. The filename passed to
        the function will be read and its contents will be evaluated within the
        context of the calling file.

        If a relative path is given, it is evaluated as relative to the file
        currently being processed. If there is a chain of multiple include(),
        the relative path computation is from the most recent/active file.

        If an absolute path is given, it is evaluated from ``TOPSRCDIR``. In
        other words, ``include('/foo')`` references the path
        ``TOPSRCDIR + '/foo'``.

        Example usage
        ^^^^^^^^^^^^^

        Include ``sibling.build`` from the current directory.::

            include('sibling.build')

        Include ``foo.build`` from a path within the top source directory::

            include('/elsewhere/foo.build')
        """,
    ),
    # export(name): propagate a variable's final value to all child dirs.
    "export": (
        lambda self: self._export,
        (str,),
        """Make the specified variable available to all child directories.

        The variable specified by the argument string is added to the
        environment of all directories specified in the DIRS and TEST_DIRS
        variables. If those directories themselves have child directories,
        the variable will be exported to all of them.

        The value used for the variable is the final value at the end of the
        moz.build file, so it is possible (but not recommended style) to place
        the export before the definition of the variable.

        This function is limited to the upper-case variables that have special
        meaning in moz.build files.

        NOTE: Please consult with a build peer (on the #build channel at
        https://chat.mozilla.org) before adding a new use of this function.

        Example usage
        ^^^^^^^^^^^^^

        To make all children directories install as the given extension::

            XPI_NAME = 'cool-extension'
            export('XPI_NAME')
        """,
    ),
    # warning(message): print a non-fatal diagnostic during evaluation.
    "warning": (
        lambda self: self._warning,
        (str,),
        """Issue a warning.

        Warnings are string messages that are printed during execution.

        Warnings are ignored during execution.
        """,
    ),
    # error(message): abort moz.build processing with a fatal message.
    "error": (
        lambda self: self._error,
        (str,),
        """Issue a fatal error.

        If this function is called, processing is aborted immediately.
        """,
    ),
    # template: decorator registering a function as a moz.build template.
    "template": (
        lambda self: self._template_decorator,
        (FunctionType,),
        """Decorator for template declarations.

        Templates are a special kind of functions that can be declared in
        mozbuild files. Uppercase variables assigned in the function scope
        are considered to be the result of the template.

        Contrary to traditional python functions:
        - return values from template functions are ignored,
        - template functions don't have access to the global scope.

        Example template
        ^^^^^^^^^^^^^^^^

        The following ``Program`` template sets two variables ``PROGRAM`` and
        ``USE_LIBS``. ``PROGRAM`` is set to the argument given on the template
        invocation, and ``USE_LIBS`` to contain "mozglue"::

            @template
            def Program(name):
                PROGRAM = name
                USE_LIBS += ['mozglue']

        Template invocation
        ^^^^^^^^^^^^^^^^^^^

        A template is invoked in the form of a function call::

            Program('myprog')

        The result of the template, being all the uppercase variable it sets
        is mixed to the existing set of variables defined in the mozbuild file
        invoking the template::

            FINAL_TARGET = 'dist/other'
            USE_LIBS += ['mylib']
            Program('myprog')
            USE_LIBS += ['otherlib']

        The above mozbuild results in the following variables set:

        - ``FINAL_TARGET`` is 'dist/other'
        - ``USE_LIBS`` is ['mylib', 'mozglue', 'otherlib']
        - ``PROGRAM`` is 'myprog'

        """,
    ),
}
+
+
+TestDirsPlaceHolder = List()
+
+
+# Special variables. These complement VARIABLES.
+#
+# Each entry is a tuple of:
+#
+# (function returning the corresponding value from a given context, type, docs)
+#
SPECIAL_VARIABLES = {
    "TOPSRCDIR": (
        lambda context: context.config.topsrcdir,
        str,
        """Constant defining the top source directory.

        The top source directory is the parent directory containing the source
        code and all build files. It is typically the root directory of a
        cloned repository.
        """,
    ),
    "TOPOBJDIR": (
        lambda context: context.config.topobjdir,
        str,
        """Constant defining the top object directory.

        The top object directory is the parent directory which will contain
        the output of the build. This is commonly referred to as "the object
        directory."
        """,
    ),
    "RELATIVEDIR": (
        lambda context: context.relsrcdir,
        str,
        """Constant defining the relative path of this file.

        The relative path is from ``TOPSRCDIR``. This is defined as relative
        to the main file being executed, regardless of whether additional
        files have been included using ``include()``.
        """,
    ),
    "SRCDIR": (
        lambda context: context.srcdir,
        str,
        """Constant defining the source directory of this file.

        This is the path inside ``TOPSRCDIR`` where this file is located. It
        is the same as ``TOPSRCDIR + RELATIVEDIR``.
        """,
    ),
    "OBJDIR": (
        lambda context: context.objdir,
        str,
        """The path to the object directory for this file.

        It is the same as ``TOPOBJDIR + RELATIVEDIR``.
        """,
    ),
    "CONFIG": (
        lambda context: ReadOnlyKeyedDefaultDict(
            lambda key: context.config.substs.get(key)
        ),
        dict,
        """Dictionary containing the current configuration variables.

        All the variables defined by the configuration system are available
        through this object. e.g. ``ENABLE_TESTS``, ``CFLAGS``, etc.

        Values in this container are read-only. Attempts at changing values
        will result in a run-time error.

        Access to an unknown variable will return None.
        """,
    ),
    "EXTRA_COMPONENTS": (
        lambda context: context["FINAL_TARGET_FILES"].components._strings,
        list,
        """Additional component files to distribute.

        This variable contains a list of files to copy into
        ``$(FINAL_TARGET)/components/``.
        """,
    ),
    "EXTRA_PP_COMPONENTS": (
        lambda context: context["FINAL_TARGET_PP_FILES"].components._strings,
        list,
        """Javascript XPCOM files.

        This variable contains a list of files to preprocess. Generated
        files will be installed in the ``/components`` directory of the distribution.
        """,
    ),
    "JS_PREFERENCE_FILES": (
        lambda context: context["FINAL_TARGET_FILES"].defaults.pref._strings,
        list,
        """Exported JavaScript files.

        A list of files copied into the dist directory for packaging and installation.
        Path will be defined for gre or application prefs dir based on what is building.
        """,
    ),
    "JS_PREFERENCE_PP_FILES": (
        lambda context: context["FINAL_TARGET_PP_FILES"].defaults.pref._strings,
        list,
        """Like JS_PREFERENCE_FILES, preprocessed.
        """,
    ),
    "RESOURCE_FILES": (
        lambda context: context["FINAL_TARGET_FILES"].res,
        list,
        """List of resources to be exported, and in which subdirectories.

        ``RESOURCE_FILES`` is used to list the resource files to be exported to
        ``dist/bin/res``, but it can be used for other files as well. This variable
        behaves as a list when appending filenames for resources in the top-level
        directory. Files can also be appended to a field to indicate which
        subdirectory they should be exported to. For example, to export
        ``foo.res`` to the top-level directory, and ``bar.res`` to ``fonts/``,
        append to ``RESOURCE_FILES`` like so::

            RESOURCE_FILES += ['foo.res']
            RESOURCE_FILES.fonts += ['bar.res']
        """,
    ),
    "CONTENT_ACCESSIBLE_FILES": (
        lambda context: context["FINAL_TARGET_FILES"].contentaccessible,
        list,
        """List of files which can be accessed by web content through resource:// URIs.

        ``CONTENT_ACCESSIBLE_FILES`` is used to list the files to be exported
        to ``dist/bin/contentaccessible``. Files can also be appended to a
        field to indicate which subdirectory they should be exported to.
        """,
    ),
    "EXTRA_JS_MODULES": (
        lambda context: context["FINAL_TARGET_FILES"].modules,
        list,
        """Additional JavaScript files to distribute.

        This variable contains a list of files to copy into
        ``$(FINAL_TARGET)/modules``.
        """,
    ),
    "EXTRA_PP_JS_MODULES": (
        lambda context: context["FINAL_TARGET_PP_FILES"].modules,
        list,
        """Additional JavaScript files to distribute.

        This variable contains a list of files to copy into
        ``$(FINAL_TARGET)/modules``, after preprocessing.
        """,
    ),
    "TESTING_JS_MODULES": (
        lambda context: context["TEST_HARNESS_FILES"].modules,
        list,
        """JavaScript modules to install in the test-only destination.

        Some JavaScript modules (JSMs) are test-only and not distributed
        with Firefox. This variable defines them.

        To install modules in a subdirectory, use properties of this
        variable to control the final destination. e.g.

        ``TESTING_JS_MODULES.foo += ['module.jsm']``.
        """,
    ),
    "TEST_DIRS": (
        lambda context: context["DIRS"]
        if context.config.substs.get("ENABLE_TESTS")
        else TestDirsPlaceHolder,
        list,
        """Like DIRS but only for directories that contain test-only code.

        If tests are not enabled, this variable will be ignored.

        This variable may go away once the transition away from Makefiles is
        complete.
        """,
    ),
}
+
# Deprecation hints: user-facing messages explaining what to write instead
# of a deprecated (or template-only) variable assignment.
DEPRECATION_HINTS = {
    "ASM_FLAGS": """
        Please use

            ASFLAGS

        instead of manipulating ASM_FLAGS directly.
        """,
    "CPP_UNIT_TESTS": """
        Please use

            CppUnitTests(['foo', 'bar'])

        instead of

            CPP_UNIT_TESTS += ['foo', 'bar']
        """,
    "DISABLE_STL_WRAPPING": """
        Please use

            DisableStlWrapping()

        instead of

            DISABLE_STL_WRAPPING = True
        """,
    "HOST_PROGRAM": """
        Please use

            HostProgram('foo')

        instead of

            HOST_PROGRAM = 'foo'
        """,
    "HOST_LIBRARY_NAME": """
        Please use

            HostLibrary('foo')

        instead of

            HOST_LIBRARY_NAME = 'foo'
        """,
    "HOST_SIMPLE_PROGRAMS": """
        Please use

            HostSimplePrograms(['foo', 'bar'])

        instead of

            HOST_SIMPLE_PROGRAMS += ['foo', 'bar']
        """,
    "LIBRARY_NAME": """
        Please use

            Library('foo')

        instead of

            LIBRARY_NAME = 'foo'
        """,
    "NO_VISIBILITY_FLAGS": """
        Please use

            NoVisibilityFlags()

        instead of

            NO_VISIBILITY_FLAGS = True
        """,
    "PROGRAM": """
        Please use

            Program('foo')

        instead of

            PROGRAM = 'foo'
        """,
    "SIMPLE_PROGRAMS": """
        Please use

            SimplePrograms(['foo', 'bar'])

        instead of

            SIMPLE_PROGRAMS += ['foo', 'bar']
        """,
    "ALLOW_COMPILER_WARNINGS": """
        Please use

            AllowCompilerWarnings()

        instead of

            ALLOW_COMPILER_WARNINGS = True
        """,
    "FORCE_SHARED_LIB": """
        Please use

            SharedLibrary('foo')

        instead of

            Library('foo') [ or LIBRARY_NAME = 'foo' ]
            FORCE_SHARED_LIB = True
        """,
    "IS_FRAMEWORK": """
        Please use

            Framework('foo')

        instead of

            Library('foo') [ or LIBRARY_NAME = 'foo' ]
            IS_FRAMEWORK = True
        """,
    "IS_GKRUST": """
        Please use

            RustLibrary('gkrust', ... is_gkrust=True)

        instead of

            RustLibrary('gkrust') [ or LIBRARY_NAME = 'gkrust' ]
            IS_GKRUST = True
        """,
    "TOOL_DIRS": "Please use the DIRS variable instead.",
    "TEST_TOOL_DIRS": "Please use the TEST_DIRS variable instead.",
    "PARALLEL_DIRS": "Please use the DIRS variable instead.",
    "NO_DIST_INSTALL": """
        Please use

            DIST_INSTALL = False

        instead of

            NO_DIST_INSTALL = True
        """,
    "GENERATED_SOURCES": """
        Please use

            SOURCES += [ '!foo.cpp' ]

        instead of

            GENERATED_SOURCES += [ 'foo.cpp' ]
        """,
    "GENERATED_INCLUDES": """
        Please use

            LOCAL_INCLUDES += [ '!foo' ]

        instead of

            GENERATED_INCLUDES += [ 'foo' ]
        """,
    "DIST_FILES": """
        Please use

            FINAL_TARGET_PP_FILES += [ 'foo' ]

        instead of

            DIST_FILES += [ 'foo' ]
        """,
}
+
# Every template-only variable needs a deprecation hint, so that a direct
# assignment produces a useful message.
for name in TEMPLATE_VARIABLES:
    if name in DEPRECATION_HINTS:
        continue
    raise RuntimeError("Missing deprecation hint for %s" % name)
diff --git a/python/mozbuild/mozbuild/frontend/data.py b/python/mozbuild/mozbuild/frontend/data.py
new file mode 100644
index 0000000000..84a47f90cf
--- /dev/null
+++ b/python/mozbuild/mozbuild/frontend/data.py
@@ -0,0 +1,1369 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+r"""Data structures representing Mozilla's source tree.
+
+The frontend files are parsed into static data structures. These data
+structures are defined in this module.
+
+All data structures of interest are children of the TreeMetadata class.
+
+Logic for populating these data structures is not defined in this class.
+Instead, what we have here are dumb container classes. The emitter module
+contains the code for converting executed mozbuild files into these data
+structures.
+"""
+
+from collections import OrderedDict, defaultdict
+
+import mozpack.path as mozpath
+import six
+from mozpack.chrome.manifest import ManifestEntry
+
+from mozbuild.frontend.context import ObjDirPath, SourcePath
+
+from ..testing import all_test_flavors
+from ..util import group_unified_files
+from .context import FinalTargetValue
+
+
class TreeMetadata(object):
    """Common ancestor for every object captured from the build frontend."""

    __slots__ = ()

    def to_dict(self):
        """Return the attributes named in ``DICT_ATTRS`` (declared by
        subclasses) as a dict keyed by their lower-cased names."""
        out = {}
        for attr in self.DICT_ATTRS:
            out[attr.lower()] = getattr(self, attr)
        return out
+
+
class ContextDerived(TreeMetadata):
    """Build object derived from a single Context instance.

    Holds the directory and configuration state common to all context
    derived classes.  This class is rarely instantiated directly; it exists
    to be subclassed.
    """

    __slots__ = (
        "context_main_path",
        "context_all_paths",
        "topsrcdir",
        "topobjdir",
        "relsrcdir",
        "srcdir",
        "objdir",
        "config",
        "_context",
    )

    def __init__(self, context):
        TreeMetadata.__init__(self)

        # Remember which files were evaluated to fill this context.
        self.context_main_path = context.main_path
        self.context_all_paths = context.all_paths

        # Snapshot the directory layout for this context.
        self.topsrcdir = context.config.topsrcdir
        self.topobjdir = context.config.topobjdir
        self.relsrcdir = context.relsrcdir
        self.srcdir = context.srcdir
        self.objdir = context.objdir

        self.config = context.config
        self._context = context

    @property
    def install_target(self):
        """The FINAL_TARGET in effect for this context."""
        return self._context["FINAL_TARGET"]

    @property
    def installed(self):
        """Whether DIST_INSTALL was left enabled for this context."""
        return self._context["DIST_INSTALL"] is not False

    @property
    def defines(self):
        """A Defines object for this context's DEFINES, or None when empty."""
        context_defines = self._context["DEFINES"]
        if not context_defines:
            return None
        return Defines(self._context, context_defines)

    @property
    def relobjdir(self):
        """This context's object directory relative to the top objdir."""
        return mozpath.relpath(self.objdir, self.topobjdir)
+
+
class HostMixin(object):
    """Mixin redirecting ``defines`` to HOST_DEFINES for host build objects."""

    @property
    def defines(self):
        host_defines = self._context["HOST_DEFINES"]
        if not host_defines:
            return None
        return HostDefines(self._context, host_defines)
+
+
class DirectoryTraversal(ContextDerived):
    """Describes how directory traversal for building should work.

    Only the recursive make backend should care about this object; it exists
    purely to keep that backend working while the transition to mozbuild
    frontend files completes and a better build backend takes over.

    Fields in this class correspond to similarly named variables in the
    frontend files.
    """

    __slots__ = ("dirs",)

    def __init__(self, context):
        ContextDerived.__init__(self, context)

        # Populated by the emitter with this context's DIRS.
        self.dirs = []
+
+
class BaseConfigSubstitution(ContextDerived):
    """Base class for files autogenerated as part of config.status."""

    __slots__ = ("input_path", "output_path", "relpath")

    def __init__(self, context):
        ContextDerived.__init__(self, context)

        # All three paths are filled in later by the emitter.
        for attr in ("input_path", "output_path", "relpath"):
            setattr(self, attr, None)
+
+
class ConfigFileSubstitution(BaseConfigSubstitution):
    """A config file produced by substituting values into a template."""
+
+
class VariablePassthru(ContextDerived):
    """A dict of variables to pass through to backend.mk unaltered.

    The purpose of this object is to facilitate rapid transitioning of
    variables from Makefile.in to moz.build. In the ideal world, this class
    does not exist and every variable has a richer class representing it.
    As long as we rely on this class, we lose the ability to have flexibility
    in our build backends since we will continue to be tied to our rules.mk.
    """

    # Declared as a tuple for clarity; the previous bare-string form
    # ("variables") declared the same single slot but is easy to misread
    # as a slot per character.
    __slots__ = ("variables",)

    def __init__(self, context):
        ContextDerived.__init__(self, context)
        # Maps variable names to values the backend writes out verbatim.
        self.variables = {}
+
+
class ComputedFlags(ContextDerived):
    """Aggregate flags for consumption by various backends."""

    __slots__ = ("flags",)

    def __init__(self, context, reader_flags):
        ContextDerived.__init__(self, context)
        self.flags = reader_flags

    def resolve_flags(self, key, value):
        # Go through dict.__setitem__ directly: CompileFlags performs checks
        # in its own __setitem__ that would reject this assignment.
        dict.__setitem__(self.flags, key, value)

    def get_flags(self):
        """Return (destination variable, flags) pairs, sorted by variable."""
        collected = defaultdict(list)
        for flag_key, _, dest_vars in self.flags.flag_variables:
            flag_value = self.flags.get(flag_key)
            if not flag_value:
                continue
            for dest_var in dest_vars:
                collected[dest_var].extend(flag_value)
        return sorted(collected.items())
+
+
class XPIDLModule(ContextDerived):
    """Describes an XPIDL module to be compiled."""

    __slots__ = ("name", "idl_files")

    def __init__(self, context, name, idl_files):
        ContextDerived.__init__(self, context)

        # Every IDL input must already be resolved to a SourcePath.
        assert all(isinstance(idl, SourcePath) for idl in idl_files)
        # name: the XPIDL_MODULE name; idl_files: the module's IDL sources.
        self.name = name
        self.idl_files = idl_files
+
+
class BaseDefines(ContextDerived):
    """Context derived container object for DEFINES/HOST_DEFINES,
    which are OrderedDicts.
    """

    __slots__ = "defines"

    def __init__(self, context, defines):
        ContextDerived.__init__(self, context)
        self.defines = defines

    def get_defines(self):
        """Yield preprocessor arguments: -DNAME, -UNAME, or -DNAME=VALUE."""
        for define, value in six.iteritems(self.defines):
            if value is True:
                yield "-D%s" % define
            elif value is False:
                yield "-U%s" % define
            else:
                yield "-D%s=%s" % (define, value)

    def update(self, more_defines):
        """Merge *more_defines* (a Defines object or plain mapping) into ours."""
        source = (
            more_defines.defines
            if isinstance(more_defines, Defines)
            else more_defines
        )
        self.defines.update(source)
+
+
class Defines(BaseDefines):
    """Container for target DEFINES."""

    pass
+
+
class HostDefines(BaseDefines):
    """Container for HOST_DEFINES used when building host tools."""

    pass
+
+
class WasmDefines(BaseDefines):
    """Container for defines used when building sandboxed wasm libraries."""

    pass
+
+
class WebIDLCollection(ContextDerived):
    """Collects WebIDL info referenced during the build.

    NOTE: return types are significant and intentionally differ by method:
    ``all_*_sources`` return sets, ``all_*_basenames``/``*_stems`` return
    lists, and the ``*_bindinggen_stems``/``all_regular_cpp_basenames``
    helpers are generators.
    """

    def __init__(self, context):
        ContextDerived.__init__(self, context)
        # Sets of WebIDL source paths, partitioned by how they are produced
        # and whether they are test-only.
        self.sources = set()
        self.generated_sources = set()
        self.generated_events_sources = set()
        self.preprocessed_sources = set()
        self.test_sources = set()
        self.preprocessed_test_sources = set()
        self.example_interfaces = set()

    def all_regular_sources(self):
        """Set of all non-test WebIDL sources."""
        return (
            self.sources
            | self.generated_sources
            | self.generated_events_sources
            | self.preprocessed_sources
        )

    def all_regular_basenames(self):
        return [mozpath.basename(source) for source in self.all_regular_sources()]

    def all_regular_stems(self):
        return [mozpath.splitext(b)[0] for b in self.all_regular_basenames()]

    def all_regular_bindinggen_stems(self):
        """Yield the stems of the binding files the code generator emits."""
        for stem in self.all_regular_stems():
            yield "%sBinding" % stem

        # Event sources generate a file named after the plain stem too.
        for source in self.generated_events_sources:
            yield mozpath.splitext(mozpath.basename(source))[0]

    def all_regular_cpp_basenames(self):
        for stem in self.all_regular_bindinggen_stems():
            yield "%s.cpp" % stem

    def all_test_sources(self):
        return self.test_sources | self.preprocessed_test_sources

    def all_test_basenames(self):
        return [mozpath.basename(source) for source in self.all_test_sources()]

    def all_test_stems(self):
        return [mozpath.splitext(b)[0] for b in self.all_test_basenames()]

    def all_test_cpp_basenames(self):
        return sorted("%sBinding.cpp" % s for s in self.all_test_stems())

    def all_static_sources(self):
        """Sources checked into the tree (not produced by preprocessing)."""
        return self.sources | self.generated_events_sources | self.test_sources

    def all_non_static_sources(self):
        return self.generated_sources | self.all_preprocessed_sources()

    def all_non_static_basenames(self):
        return [mozpath.basename(s) for s in self.all_non_static_sources()]

    def all_preprocessed_sources(self):
        return self.preprocessed_sources | self.preprocessed_test_sources

    def all_sources(self):
        return set(self.all_regular_sources()) | set(self.all_test_sources())

    def all_basenames(self):
        return [mozpath.basename(source) for source in self.all_sources()]

    def all_stems(self):
        return [mozpath.splitext(b)[0] for b in self.all_basenames()]

    def generated_events_basenames(self):
        return [mozpath.basename(s) for s in self.generated_events_sources]

    def generated_events_stems(self):
        return [mozpath.splitext(b)[0] for b in self.generated_events_basenames()]

    @property
    def unified_source_mapping(self):
        # Bindings are compiled in unified mode to speed up compilation and
        # to reduce linker memory size. Note that test bindings are separated
        # from regular ones so tests bindings aren't shipped.
        return list(
            group_unified_files(
                sorted(self.all_regular_cpp_basenames()),
                unified_prefix="UnifiedBindings",
                unified_suffix="cpp",
                files_per_unified_file=32,
            )
        )

    def all_source_files(self):
        """Sorted list of global define files plus the unified sources."""
        # Imported here to avoid a module-level dependency cycle.
        from mozwebidlcodegen import WebIDLCodegenManager

        return sorted(list(WebIDLCodegenManager.GLOBAL_DEFINE_FILES)) + sorted(
            set(p for p, _ in self.unified_source_mapping)
        )
+
+
class IPDLCollection(ContextDerived):
    """Collects IPDL files during the build."""

    def __init__(self, context):
        ContextDerived.__init__(self, context)
        self.sources = set()
        self.preprocessed_sources = set()

    def all_sources(self):
        """Union of plain and preprocessed IPDL sources."""
        return self.sources | self.preprocessed_sources

    def all_regular_sources(self):
        return self.sources

    def all_preprocessed_sources(self):
        return self.preprocessed_sources

    def all_source_files(self):
        # Source files generated by IPDL are built as generated
        # UnifiedSources from the context which included the IPDL file, not
        # from the context that builds this collection, so report nothing.
        return []
+
+
class XPCOMComponentManifests(ContextDerived):
    """Collects XPCOM manifest files during the build."""

    def __init__(self, context):
        ContextDerived.__init__(self, context)
        # Manifest paths registered by the emitter.
        self.manifests = set()

    def all_sources(self):
        return self.manifests

    def all_source_files(self):
        # Manifests yield no compiled source files.
        return []
+
+
class LinkageWrongKindError(Exception):
    """Raised when attempting to link objects of mismatched KIND."""
+
+
class Linkable(ContextDerived):
    """Generic context derived container object for programs and libraries"""

    __slots__ = (
        "cxx_link",
        "lib_defines",
        "linked_libraries",
        "linked_system_libs",
        "sources",
    )

    def __init__(self, context):
        ContextDerived.__init__(self, context)
        # Becomes True once anything requiring the C++ linker is linked in.
        self.cxx_link = False
        self.linked_libraries = []
        self.linked_system_libs = []
        # Defines applied when linking this object.
        self.lib_defines = Defines(context, OrderedDict())
        # Maps a file suffix (e.g. ".cpp") to the list of sources using it.
        self.sources = defaultdict(list)

    def link_library(self, obj):
        """Record that this linkable links against library *obj*.

        Raises LinkageWrongKindError when the library's KIND does not match
        this object's KIND.
        """
        assert isinstance(obj, BaseLibrary)
        if obj.KIND != self.KIND:
            raise LinkageWrongKindError("%s != %s" % (obj.KIND, self.KIND))
        self.linked_libraries.append(obj)
        # A static C++ library forces a C++ link here; a shared library
        # performs its own C++ link instead.
        if obj.cxx_link and not isinstance(obj, SharedLibrary):
            self.cxx_link = True
        obj.refs.append(self)

    def link_system_library(self, lib):
        """Record a system library dependency, decorating *lib* as the
        toolchain expects (-lfoo for gcc/clang, import prefix/suffix
        otherwise)."""
        # The '$' check is here as a special temporary rule, allowing the
        # inherited use of make variables, most notably in TK_LIBS.
        if not lib.startswith("$") and not lib.startswith("-"):
            type_var = "HOST_CC_TYPE" if self.KIND == "host" else "CC_TYPE"
            compiler_type = self.config.substs.get(type_var)
            if compiler_type in ("gcc", "clang"):
                lib = "-l%s" % lib
            elif self.KIND == "host":
                lib = "%s%s%s" % (
                    self.config.host_import_prefix,
                    lib,
                    self.config.host_import_suffix,
                )
            else:
                lib = "%s%s%s" % (
                    self.config.import_prefix,
                    lib,
                    self.config.import_suffix,
                )
        self.linked_system_libs.append(lib)

    def source_files(self):
        """Return all sources, grouped by suffix in a fixed order."""
        all_sources = []
        # This is ordered for reproducibility and consistency with
        # config/rules.mk
        for suffix in (".c", ".S", ".cpp", ".m", ".mm", ".s"):
            all_sources += self.sources.get(suffix, [])
        return all_sources

    def _get_objs(self, sources):
        """Map source paths to the object files they compile to.

        Host objects get a "host_" prefix so they never collide with target
        objects built from the same source.
        """
        obj_prefix = ""
        if self.KIND == "host":
            obj_prefix = "host_"

        return [
            mozpath.join(
                self.objdir,
                "%s%s.%s"
                % (
                    obj_prefix,
                    mozpath.splitext(mozpath.basename(f))[0],
                    self._obj_suffix(),
                ),
            )
            for f in sources
        ]

    def _obj_suffix(self):
        """Can be overridden by a subclass for custom behavior."""
        return self.config.substs.get("OBJ_SUFFIX", "")

    @property
    def objs(self):
        # Object files corresponding to this linkable's sources.
        return self._get_objs(self.source_files())
+
+
class BaseProgram(Linkable):
    """Context derived container object for programs, which is a unicode
    string.

    This class handles automatically appending a binary suffix to the program
    name.
    If the suffix is not defined, the program name is unchanged.
    Otherwise, if the program name ends with the given suffix, it is unchanged
    Otherwise, the suffix is appended to the program name.
    """

    # "is_unit_test" is assigned in __init__ and needs a declared slot:
    # every visible class in the MRO (Linkable, ContextDerived, TreeMetadata)
    # declares __slots__, so instances have no __dict__ to fall back on.
    __slots__ = ("program", "is_unit_test")

    DICT_ATTRS = {"install_target", "KIND", "program", "relobjdir"}

    def __init__(self, context, program, is_unit_test=False):
        Linkable.__init__(self, context)

        # Append the platform binary suffix (e.g. ".exe") unless the name
        # already carries it.
        bin_suffix = context.config.substs.get(self.SUFFIX_VAR, "")
        if not program.endswith(bin_suffix):
            program += bin_suffix
        self.program = program
        self.is_unit_test = is_unit_test

    @property
    def output_path(self):
        """Objdir path of the built binary (inside the install target when
        this program is installed)."""
        if self.installed:
            return ObjDirPath(
                self._context, "!/" + mozpath.join(self.install_target, self.program)
            )
        else:
            return ObjDirPath(self._context, "!" + self.program)

    def __repr__(self):
        return "<%s: %s/%s>" % (type(self).__name__, self.relobjdir, self.program)

    @property
    def name(self):
        return self.program
+
+
class Program(BaseProgram):
    """Context derived container object for PROGRAM"""

    # Target programs use the target binary suffix and target toolchain.
    SUFFIX_VAR = "BIN_SUFFIX"
    KIND = "target"
+
+
class HostProgram(HostMixin, BaseProgram):
    """Context derived container object for HOST_PROGRAM"""

    # Host programs use the build host's binary suffix and toolchain.
    SUFFIX_VAR = "HOST_BIN_SUFFIX"
    KIND = "host"

    @property
    def install_target(self):
        # Host binaries always install to dist/host/bin regardless of the
        # context's FINAL_TARGET.
        return "dist/host/bin"
+
+
class SimpleProgram(BaseProgram):
    """Context derived container object for each program in SIMPLE_PROGRAMS"""

    SUFFIX_VAR = "BIN_SUFFIX"
    KIND = "target"

    def source_files(self):
        """Return the single source whose stem matches this program's name,
        or an empty list when none does."""
        target_stem = mozpath.splitext(self.program)[0]
        candidates = [
            f
            for file_list in self.sources.values()
            for f in file_list
            if mozpath.basename(mozpath.splitext(f)[0]) == target_stem
        ]
        return candidates[:1]
+
+
class HostSimpleProgram(HostMixin, BaseProgram):
    """Context derived container object for each program in
    HOST_SIMPLE_PROGRAMS"""

    SUFFIX_VAR = "HOST_BIN_SUFFIX"
    KIND = "host"

    def source_files(self):
        """Return the single source whose host-prefixed stem matches this
        program's name, or an empty list when none does."""
        target_stem = mozpath.splitext(self.program)[0]
        candidates = [
            f
            for file_list in self.sources.values()
            for f in file_list
            if "host_%s" % mozpath.basename(mozpath.splitext(f)[0]) == target_stem
        ]
        return candidates[:1]
+
+
def cargo_output_directory(context, target_var):
    """Return the directory where cargo places its build artifacts.

    cargo nests its output under the target directory (named by the
    ``target_var`` substitution) and a kind subdirectory that depends on
    whether this is a debug or release Rust build.
    """
    build_kind = "debug" if context.config.substs.get("MOZ_DEBUG_RUST") else "release"
    return mozpath.join(context.config.substs[target_var], build_kind)
+
+
# Rust programs aren't really Linkable, since Cargo handles all the details
# of linking things.
class BaseRustProgram(ContextDerived):
    __slots__ = (
        "name",
        "cargo_file",
        "location",
        "SUFFIX_VAR",
        "KIND",
        "TARGET_SUBST_VAR",
    )

    def __init__(self, context, name, cargo_file):
        ContextDerived.__init__(self, context)
        self.name = name
        self.cargo_file = cargo_file
        # Without a compile environment the cargo-related config keys are
        # absent, so leave the cargo-derived attributes unset; the actual
        # build never reads them in that case.
        if not context.config.substs.get("COMPILE_ENVIRONMENT"):
            return
        out_dir = cargo_output_directory(context, self.TARGET_SUBST_VAR)
        binary_name = "%s%s" % (name, context.config.substs.get(self.SUFFIX_VAR, ""))
        self.location = mozpath.join(out_dir, binary_name)
+
+
class RustProgram(BaseRustProgram):
    # Rust program built for the target architecture.
    SUFFIX_VAR = "BIN_SUFFIX"
    KIND = "target"
    TARGET_SUBST_VAR = "RUST_TARGET"
+
+
class HostRustProgram(BaseRustProgram):
    # Rust program built for the build host.
    SUFFIX_VAR = "HOST_BIN_SUFFIX"
    KIND = "host"
    TARGET_SUBST_VAR = "RUST_HOST_TARGET"
+
+
class RustTests(ContextDerived):
    """Context derived container object for Rust test crates."""

    __slots__ = ("names", "features", "output_category")

    def __init__(self, context, names, features):
        ContextDerived.__init__(self, context)
        self.names = names
        self.features = features
        # Rust tests get their own output category in the build.
        self.output_category = "rusttests"
+
+
class BaseLibrary(Linkable):
    """Generic context derived container object for libraries."""

    __slots__ = ("basename", "lib_name", "import_name", "refs")

    def __init__(self, context, basename):
        Linkable.__init__(self, context)

        self.basename = basename
        if basename:
            # Decorate with the platform library prefix/suffix (e.g. lib/.a).
            self.lib_name = "%s%s%s" % (
                context.config.lib_prefix,
                basename,
                context.config.lib_suffix,
            )
        else:
            self.lib_name = basename
        self.import_name = self.lib_name

        # Linkables that link against this library.
        self.refs = []

    def __repr__(self):
        return "<%s: %s/%s>" % (type(self).__name__, self.relobjdir, self.lib_name)

    @property
    def name(self):
        return self.lib_name
+
+
class Library(BaseLibrary):
    """Context derived container object for a library"""

    KIND = "target"
    __slots__ = ()

    def __init__(self, context, basename, real_name=None):
        # The on-disk name (real_name) may differ from the moz.build name.
        BaseLibrary.__init__(self, context, real_name if real_name else basename)
        self.basename = basename
+
+
class StaticLibrary(Library):
    """Context derived container object for a static library"""

    __slots__ = ("link_into", "no_expand_lib")

    def __init__(
        self, context, basename, real_name=None, link_into=None, no_expand_lib=False
    ):
        Library.__init__(self, context, basename, real_name)
        # Name of the FINAL_LIBRARY this static library links into, if any.
        self.link_into = link_into
        # When True the archive is kept as a real static library rather than
        # being expanded into its member objects (see SandboxedWasmLibrary
        # and RustLibrary, which force this on).
        self.no_expand_lib = no_expand_lib
+
+
class SandboxedWasmLibrary(Library):
    """Context derived container object for a static sandboxed wasm library"""

    # This is a real static library; make it known to the build system.
    no_expand_lib = True
    KIND = "wasm"

    def __init__(self, context, basename, real_name=None):
        Library.__init__(self, context, basename, real_name)

        substs = context.config.substs
        # Wasm libraries are not going to compile unless we have a compiler
        # for them.
        assert substs["WASM_CC"] and substs["WASM_CXX"]

        # Name these with the DLL prefix/suffix rather than the static
        # library decoration applied by BaseLibrary.
        self.lib_name = "".join(
            (
                context.config.dll_prefix,
                real_name or basename,
                context.config.dll_suffix,
            )
        )

    def _obj_suffix(self):
        """Can be overridden by a base class for custom behavior."""
        return self.config.substs.get("WASM_OBJ_SUFFIX", "")
+
+
class BaseRustLibrary(object):
    # NOTE: deliberately named ``slots`` and ``init`` rather than
    # ``__slots__``/``__init__``: this is a mixin whose concrete subclasses
    # opt in via ``__slots__ = BaseRustLibrary.slots`` and an explicit
    # ``BaseRustLibrary.init(self, ...)`` call (see RustLibrary and
    # HostRustLibrary).
    slots = (
        "cargo_file",
        "crate_type",
        "dependencies",
        "deps_path",
        "features",
        "output_category",
        "is_gkrust",
    )

    def init(
        self,
        context,
        basename,
        cargo_file,
        crate_type,
        dependencies,
        features,
        is_gkrust,
    ):
        """Populate rust-library state shared by target and host variants."""
        self.is_gkrust = is_gkrust
        self.cargo_file = cargo_file
        self.crate_type = crate_type
        # Only static rust libraries are supported.
        assert self.crate_type == "staticlib"
        # Cargo replaces '-' in package names defined in Cargo.toml with
        # underscores in actual filenames, so mirror that in the on-disk
        # name; the basename itself must stay unchanged because much of the
        # build system keys off it.
        normalized = basename.replace("-", "_")
        self.lib_name = "".join(
            (context.config.lib_prefix, normalized, context.config.lib_suffix)
        )
        self.dependencies = dependencies
        self.features = features
        self.output_category = context.get("RUST_LIBRARY_OUTPUT_CATEGORY")
        # Without a compile environment the cargo-related config keys below
        # are unavailable; the unset attributes are never read in that case.
        if context.config.substs.get("COMPILE_ENVIRONMENT"):
            build_dir = mozpath.join(
                context.config.topobjdir,
                cargo_output_directory(context, self.TARGET_SUBST_VAR),
            )
            self.import_name = mozpath.join(build_dir, self.lib_name)
            self.deps_path = mozpath.join(build_dir, "deps")
+
+
class RustLibrary(StaticLibrary, BaseRustLibrary):
    """Context derived container object for a rust static library"""

    KIND = "target"
    TARGET_SUBST_VAR = "RUST_TARGET"
    FEATURES_VAR = "RUST_LIBRARY_FEATURES"
    LIB_FILE_VAR = "RUST_LIBRARY_FILE"
    __slots__ = BaseRustLibrary.slots

    def __init__(
        self,
        context,
        basename,
        cargo_file,
        crate_type,
        dependencies,
        features,
        is_gkrust=False,
        link_into=None,
    ):
        # A rust library is a real static library; make it known to the
        # build system by forcing no_expand_lib.
        StaticLibrary.__init__(
            self, context, basename, link_into=link_into, no_expand_lib=True
        )
        # Fill in the rust-specific state from the mixin.
        BaseRustLibrary.init(
            self,
            context,
            basename,
            cargo_file,
            crate_type,
            dependencies,
            features,
            is_gkrust,
        )
+
+
class SharedLibrary(Library):
    """Context derived container object for a shared library"""

    __slots__ = (
        "soname",
        "variant",
        "symbols_file",
        "output_category",
        "symbols_link_arg",
    )

    # Attributes exposed when this object is serialized to a dict.
    DICT_ATTRS = {
        "basename",
        "import_name",
        "install_target",
        "lib_name",
        "relobjdir",
        "soname",
    }

    # Variant identifiers; FRAMEWORK is currently the only valid variant.
    FRAMEWORK = 1
    MAX_VARIANT = 2

    def __init__(
        self,
        context,
        basename,
        real_name=None,
        soname=None,
        variant=None,
        symbols_file=False,
    ):
        # range(1, MAX_VARIANT) admits only FRAMEWORK today; bump
        # MAX_VARIANT when adding variants.
        assert variant in range(1, self.MAX_VARIANT) or variant is None
        Library.__init__(self, context, basename, real_name)
        self.variant = variant
        self.lib_name = real_name or basename
        self.output_category = context.get("SHARED_LIBRARY_OUTPUT_CATEGORY")
        assert self.lib_name

        if variant == self.FRAMEWORK:
            # Frameworks are imported by their bare name.
            self.import_name = self.lib_name
        else:
            # Regular shared libraries get platform import/dll decoration.
            self.import_name = "%s%s%s" % (
                context.config.import_prefix,
                self.lib_name,
                context.config.import_suffix,
            )
            self.lib_name = "%s%s%s" % (
                context.config.dll_prefix,
                self.lib_name,
                context.config.dll_suffix,
            )
        if soname:
            self.soname = "%s%s%s" % (
                context.config.dll_prefix,
                soname,
                context.config.dll_suffix,
            )
        else:
            self.soname = self.lib_name

        # symbols_file is tri-state: False (no symbols file), True (default
        # name derived from the library name), or an explicit filename.
        if symbols_file is False:
            # No symbols file.
            self.symbols_file = None
        elif symbols_file is True:
            # Symbols file with default name.
            if context.config.substs["OS_TARGET"] == "WINNT":
                self.symbols_file = "%s.def" % self.lib_name
            else:
                self.symbols_file = "%s.symbols" % self.lib_name
        else:
            # Explicitly provided name.
            self.symbols_file = symbols_file

        if self.symbols_file:
            # Pick the linker flag syntax matching the target platform.
            os_target = context.config.substs["OS_TARGET"]
            if os_target == "Darwin":
                self.symbols_link_arg = (
                    "-Wl,-exported_symbols_list," + self.symbols_file
                )
            elif os_target == "SunOS":
                self.symbols_link_arg = (
                    "-z gnu-version-script-compat -Wl,--version-script,"
                    + self.symbols_file
                )
            elif os_target == "WINNT":
                if context.config.substs.get("GNU_CC"):
                    self.symbols_link_arg = self.symbols_file
                else:
                    self.symbols_link_arg = "-DEF:" + self.symbols_file
            elif context.config.substs.get("GCC_USE_GNU_LD"):
                self.symbols_link_arg = "-Wl,--version-script," + self.symbols_file
+
+
class HostSharedLibrary(HostMixin, Library):
    """Context derived container object for a host shared library.

    This class supports less things than SharedLibrary does for target shared
    libraries. Currently has enough build system support to build the clang
    plugin."""

    KIND = "host"

    def __init__(self, context, basename):
        Library.__init__(self, context, basename)
        # Re-decorate with the host DLL prefix/suffix instead of the target
        # static-library decoration applied by BaseLibrary.
        self.lib_name = "".join(
            (
                context.config.host_dll_prefix,
                self.basename,
                context.config.host_dll_suffix,
            )
        )
+
+
class ExternalLibrary(object):
    """Empty mixin for libraries built by an external build system.

    Serves purely as a marker type; see ExternalStaticLibrary and
    ExternalSharedLibrary.
    """
+
+
class ExternalStaticLibrary(StaticLibrary, ExternalLibrary):
    """Context derived container for static libraries built by an external
    build system.

    Behaves as a StaticLibrary; the ExternalLibrary mixin only marks the
    origin of the build.
    """
+
+
class ExternalSharedLibrary(SharedLibrary, ExternalLibrary):
    """Context derived container for shared libraries built by an external
    build system.

    Behaves as a SharedLibrary; the ExternalLibrary mixin only marks the
    origin of the build.
    """
+
+
class HostLibrary(HostMixin, BaseLibrary):
    """Context derived container object for a host library"""

    KIND = "host"
    # Class-level counterpart of StaticLibrary's no_expand_lib slot;
    # HostRustLibrary overrides this to True.
    no_expand_lib = False
+
+
class HostRustLibrary(HostLibrary, BaseRustLibrary):
    """Context derived container object for a host rust library"""

    KIND = "host"
    TARGET_SUBST_VAR = "RUST_HOST_TARGET"
    FEATURES_VAR = "HOST_RUST_LIBRARY_FEATURES"
    LIB_FILE_VAR = "HOST_RUST_LIBRARY_FILE"
    __slots__ = BaseRustLibrary.slots
    # Host rust libraries are real static libraries; see
    # HostLibrary.no_expand_lib.
    no_expand_lib = True

    def __init__(
        self,
        context,
        basename,
        cargo_file,
        crate_type,
        dependencies,
        features,
        is_gkrust,
    ):
        HostLibrary.__init__(self, context, basename)
        # Fill in the rust-specific state from the mixin.
        BaseRustLibrary.init(
            self,
            context,
            basename,
            cargo_file,
            crate_type,
            dependencies,
            features,
            is_gkrust,
        )
+
+
class TestManifest(ContextDerived):
    """Represents a manifest file containing information about tests."""

    __slots__ = (
        # The flavor (type) of this test manifest.
        "flavor",
        # Maps source filename to a 2-tuple of (destpath, is_test_file),
        # where destpath is relative to the tests root directory and
        # is_test_file distinguishes actual tests from support files.
        "installs",
        # Pattern-matching installs to perform; entries are
        # (base, pattern, dest) tuples.
        "pattern_installs",
        # Install prefix for all files of this manifest flavor inside the
        # unified test package directory.
        "install_prefix",
        # Files provided by an external mechanism.
        "external_installs",
        # Files needed by multiple test directories; their installation is
        # resolved when tests run.
        "deferred_installs",
        # Full path of this manifest file.
        "path",
        # Directory containing this manifest.
        "directory",
        # The parsed manifestparser.TestManifest instance.
        "manifest",
        # Test metadata; one dict per test.
        "tests",
        # Path of the parsed manifest relative to the srcdir.
        "manifest_relpath",
        # Path of the parsed manifest relative to the objdir.
        "manifest_obj_relpath",
        # Relative paths to all source files of this manifest.
        "source_relpaths",
        # When this manifest duplicates another one, the other manifest's
        # manifestparser.TestManifest.
        "dupe_manifest",
    )

    def __init__(
        self,
        context,
        path,
        manifest,
        flavor=None,
        install_prefix=None,
        relpath=None,
        sources=(),
        dupe_manifest=False,
    ):
        ContextDerived.__init__(self, context)

        assert flavor in all_test_flavors()

        self.flavor = flavor
        self.path = path
        self.directory = mozpath.dirname(path)
        self.manifest = manifest
        self.install_prefix = install_prefix
        self.manifest_relpath = self.manifest_obj_relpath = relpath
        self.source_relpaths = sources
        self.dupe_manifest = dupe_manifest

        # Containers filled in incrementally by the emitter.
        self.installs = {}
        self.pattern_installs = []
        self.tests = []
        self.external_installs = set()
        self.deferred_installs = set()
+
+
class LocalInclude(ContextDerived):
    """Describes an individual local include path."""

    __slots__ = ("path",)

    def __init__(self, context, path):
        ContextDerived.__init__(self, context)

        # The include path as given in moz.build.
        self.path = path
+
+
class PerSourceFlag(ContextDerived):
    """Describes compiler flags specified for individual source files."""

    __slots__ = ("file_name", "flags")

    def __init__(self, context, file_name, flags):
        ContextDerived.__init__(self, context)

        # The source file the flags apply to.
        self.file_name = file_name
        # The flags for that file.
        self.flags = flags
+
+
class JARManifest(ContextDerived):
    """Describes an individual JAR manifest file and how to process it.

    This class isn't very useful for optimizing backends yet because we don't
    capture defines. We can't capture defines safely until all of them are
    defined in moz.build and not Makefile.in files.
    """

    __slots__ = ("path",)

    def __init__(self, context, path):
        ContextDerived.__init__(self, context)

        # Path to the jar.mn manifest file.
        self.path = path
+
+
class BaseSources(ContextDerived):
    """Base class for files to be compiled during the build."""

    __slots__ = ("files", "static_files", "generated_files", "canonical_suffix")

    def __init__(self, context, static_files, generated_files, canonical_suffix):
        ContextDerived.__init__(self, context)

        # Sort so output is stable and mtimes aren't bumped needlessly, and
        # always list generated files strictly after static ones so the
        # combined order does not depend on a particular build environment's
        # objdir path relative to topsrcdir.
        self.static_files = sorted(static_files)
        self.generated_files = sorted(generated_files)
        self.files = [*self.static_files, *self.generated_files]
        self.canonical_suffix = canonical_suffix
+
+
class Sources(BaseSources):
    """Represents files to be compiled during the build."""

    def __init__(self, context, static_files, generated_files, canonical_suffix):
        # Pure delegation; all behavior lives in BaseSources.
        super().__init__(context, static_files, generated_files, canonical_suffix)
+
+
class PgoGenerateOnlySources(BaseSources):
    """Represents files to be compiled during the build.

    These files are only used during the PGO generation phase."""

    def __init__(self, context, files):
        # These are always static C++ sources; no generated files.
        super().__init__(context, files, [], ".cpp")
+
+
class HostSources(HostMixin, BaseSources):
    """Represents files to be compiled for the host during the build."""

    # Note: BaseSources.__init__ is invoked explicitly (not via super())
    # because HostMixin precedes BaseSources in the MRO.
    def __init__(self, context, static_files, generated_files, canonical_suffix):
        BaseSources.__init__(
            self, context, static_files, generated_files, canonical_suffix
        )
+
+
class WasmSources(BaseSources):
    """Represents files to be compiled with the wasm compiler during the build."""

    def __init__(self, context, static_files, generated_files, canonical_suffix):
        # Pure delegation; all behavior lives in BaseSources.
        super().__init__(context, static_files, generated_files, canonical_suffix)
+
+
class UnifiedSources(BaseSources):
    """Represents files to be compiled in a unified fashion during the build."""

    __slots__ = ("have_unified_mapping", "unified_source_mapping")

    def __init__(self, context, static_files, generated_files, canonical_suffix):
        BaseSources.__init__(
            self, context, static_files, generated_files, canonical_suffix
        )

        # With unified builds disabled, every "unified" file holds exactly
        # one source, which effectively disables unification below.
        unified_build = context.config.substs.get("ENABLE_UNIFIED_BUILD", False)
        files_per_unified_file = (
            context.get("FILES_PER_UNIFIED_FILE", 16) if unified_build else 1
        )

        self.have_unified_mapping = files_per_unified_file > 1

        if self.have_unified_mapping:
            # On Windows, path names have a maximum length of 255 characters,
            # so avoid creating extremely long path names.
            unified_prefix = context.relsrcdir
            if len(unified_prefix) > 20:
                # Keep the trailing 20 characters, then drop any partial
                # leading path component left by the cut.
                unified_prefix = unified_prefix[-20:].split("/", 1)[-1]
            unified_prefix = unified_prefix.replace("/", "_")

            # canonical_suffix includes the leading dot; strip it.
            suffix = self.canonical_suffix[1:]
            unified_prefix = "Unified_%s_%s" % (suffix, unified_prefix)
            self.unified_source_mapping = list(
                group_unified_files(
                    # NOTE: self.files is already (partially) sorted, and we
                    # intentionally do not re-sort it here to avoid a dependency
                    # on the build environment's objdir path.
                    self.files,
                    unified_prefix=unified_prefix,
                    unified_suffix=suffix,
                    files_per_unified_file=files_per_unified_file,
                )
            )
+
+
class InstallationTarget(ContextDerived):
    """Describes the rules that affect where files get installed to."""

    __slots__ = ("xpiname", "subdir", "target", "enabled")

    def __init__(self, context):
        ContextDerived.__init__(self, context)

        self.xpiname = context.get("XPI_NAME", "")
        self.subdir = context.get("DIST_SUBDIR", "")
        self.target = context["FINAL_TARGET"]
        # Only an explicit DIST_INSTALL = False disables installation;
        # unset (None) and True both enable it.
        self.enabled = context["DIST_INSTALL"] is not False

    def is_custom(self):
        """Returns whether or not the target is not derived from the default
        given xpiname and subdir."""

        # NOTE(review): this returns True when the target *equals* the value
        # derived from xpiname/subdir, which reads as the opposite of the
        # docstring's "not derived from the default". Confirm the intended
        # polarity against callers before changing either side.
        return (
            FinalTargetValue(dict(XPI_NAME=self.xpiname, DIST_SUBDIR=self.subdir))
            == self.target
        )
+
+
class FinalTargetFiles(ContextDerived):
    """Sandbox container object for FINAL_TARGET_FILES, which is a
    HierarchicalStringList.

    We need an object derived from ContextDerived for use in the backend, so
    this object fills that role. It just has a reference to the underlying
    HierarchicalStringList, which is created when parsing FINAL_TARGET_FILES.
    """

    # A bare string is valid __slots__: it declares the single slot "files".
    __slots__ = "files"

    def __init__(self, sandbox, files):
        ContextDerived.__init__(self, sandbox)
        self.files = files
+
+
class FinalTargetPreprocessedFiles(ContextDerived):
    """Sandbox container object for FINAL_TARGET_PP_FILES, which is a
    HierarchicalStringList.

    We need an object derived from ContextDerived for use in the backend, so
    this object fills that role. It just has a reference to the underlying
    HierarchicalStringList, which is created when parsing
    FINAL_TARGET_PP_FILES.
    """

    # A bare string is valid __slots__: it declares the single slot "files".
    __slots__ = "files"

    def __init__(self, sandbox, files):
        ContextDerived.__init__(self, sandbox)
        self.files = files
+
+
class LocalizedFiles(FinalTargetFiles):
    """Sandbox container object for LOCALIZED_FILES, which is a
    HierarchicalStringList.

    Behaves exactly like FinalTargetFiles; the distinct type lets consumers
    tell localized files apart.
    """

    pass
+
+
class LocalizedPreprocessedFiles(FinalTargetPreprocessedFiles):
    """Sandbox container object for LOCALIZED_PP_FILES, which is a
    HierarchicalStringList.

    Behaves exactly like FinalTargetPreprocessedFiles; the distinct type
    lets consumers tell localized files apart.
    """

    pass
+
+
class ObjdirFiles(FinalTargetFiles):
    """Sandbox container object for OBJDIR_FILES, which is a
    HierarchicalStringList.
    """

    @property
    def install_target(self):
        # Override: objdir files carry no install target prefix.
        return ""
+
+
class ObjdirPreprocessedFiles(FinalTargetPreprocessedFiles):
    """Sandbox container object for OBJDIR_PP_FILES, which is a
    HierarchicalStringList.
    """

    @property
    def install_target(self):
        # Override: objdir files carry no install target prefix.
        return ""
+
+
class TestHarnessFiles(FinalTargetFiles):
    """Sandbox container object for TEST_HARNESS_FILES,
    which is a HierarchicalStringList.
    """

    @property
    def install_target(self):
        # Test harness files always install under _tests.
        return "_tests"
+
+
class Exports(FinalTargetFiles):
    """Context derived container object for EXPORTS, which is a
    HierarchicalStringList.

    We need an object derived from ContextDerived for use in the backend, so
    this object fills that role. It just has a reference to the underlying
    HierarchicalStringList, which is created when parsing EXPORTS.
    """

    @property
    def install_target(self):
        # Exported headers always install under dist/include.
        return "dist/include"
+
+
class GeneratedFile(ContextDerived):
    """Represents a generated file.

    Tracks the generating script/method, its inputs/outputs/flags, and at
    which build phase the outputs must exist (before export, before
    compile, during compile), derived from the output file suffixes.
    """

    __slots__ = (
        "script",
        "method",
        "outputs",
        "inputs",
        "flags",
        "required_before_export",
        "required_before_compile",
        "required_during_compile",
        "localized",
        "force",
        "py2",
    )

    def __init__(
        self,
        context,
        script,
        method,
        outputs,
        inputs,
        flags=(),
        localized=False,
        force=False,
        py2=False,
        required_during_compile=None,
    ):
        ContextDerived.__init__(self, context)
        self.script = script
        self.method = method
        # Normalize outputs to a tuple so consumers can rely on iteration.
        self.outputs = outputs if isinstance(outputs, tuple) else (outputs,)
        self.inputs = inputs
        self.flags = flags
        self.localized = localized
        self.force = force
        self.py2 = py2

        if self.config.substs.get("MOZ_WIDGET_TOOLKIT") == "android":
            # In GeckoView builds we process Jinja files during pre-export
            self.required_before_export = [
                f for f in self.inputs if f.endswith(".jinja")
            ]
        else:
            # Non-list False (not an empty list) when nothing is required.
            self.required_before_export = False

        # Output suffixes whose files must exist before compilation starts.
        suffixes = [
            ".h",
            ".py",
            ".rs",
            # We need to compile Java to generate JNI wrappers for native code
            # compilation to consume.
            "android_apks",
            ".profdata",
            ".webidl",
        ]

        try:
            lib_suffix = context.config.substs["LIB_SUFFIX"]
            suffixes.append("." + lib_suffix)
        except KeyError:
            # Tests may not define LIB_SUFFIX
            pass

        suffixes = tuple(suffixes)

        self.required_before_compile = [
            f
            for f in self.outputs
            if f.endswith(suffixes) or "stl_wrappers/" in f or "xpidl.stub" in f
        ]

        if required_during_compile is None:
            # Default: anything that is itself compiled or included while
            # compiling must be present during the compile phase.
            self.required_during_compile = [
                f
                for f in self.outputs
                if f.endswith(
                    (".asm", ".c", ".cpp", ".inc", ".m", ".mm", ".def", "symverscript")
                )
            ]
        else:
            self.required_during_compile = required_during_compile
+
+
class ChromeManifestEntry(ContextDerived):
    """Represents a chrome.manifest entry."""

    __slots__ = ("path", "entry")

    def __init__(self, context, manifest_path, entry):
        ContextDerived.__init__(self, context)
        assert isinstance(entry, ManifestEntry)
        # Install path of the manifest file itself.
        self.path = mozpath.join(self.install_target, manifest_path)
        # Rebase the entry so it is relative to the directory containing the
        # manifest, then move its base directory under the install target.
        rebased = entry.rebase(mozpath.dirname(manifest_path))
        self.entry = rebased.move(mozpath.dirname(self.path))
diff --git a/python/mozbuild/mozbuild/frontend/emitter.py b/python/mozbuild/mozbuild/frontend/emitter.py
new file mode 100644
index 0000000000..8d62072421
--- /dev/null
+++ b/python/mozbuild/mozbuild/frontend/emitter.py
@@ -0,0 +1,1892 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import logging
+import os
+import sys
+import time
+import traceback
+from collections import OrderedDict, defaultdict
+
+import mozinfo
+import mozpack.path as mozpath
+import six
+import toml
+from mach.mixin.logging import LoggingMixin
+from mozpack.chrome.manifest import Manifest
+
+from mozbuild.base import ExecutionSummary
+from mozbuild.util import OrderedDefaultDict, memoize
+
+from ..testing import REFTEST_FLAVORS, TEST_MANIFESTS, SupportFilesConverter
+from .context import Context, ObjDirPath, Path, SourcePath, SubContext
+from .data import (
+ BaseRustProgram,
+ ChromeManifestEntry,
+ ComputedFlags,
+ ConfigFileSubstitution,
+ Defines,
+ DirectoryTraversal,
+ Exports,
+ ExternalSharedLibrary,
+ ExternalStaticLibrary,
+ FinalTargetFiles,
+ FinalTargetPreprocessedFiles,
+ GeneratedFile,
+ HostDefines,
+ HostLibrary,
+ HostProgram,
+ HostRustLibrary,
+ HostRustProgram,
+ HostSharedLibrary,
+ HostSimpleProgram,
+ HostSources,
+ InstallationTarget,
+ IPDLCollection,
+ JARManifest,
+ Library,
+ Linkable,
+ LocalInclude,
+ LocalizedFiles,
+ LocalizedPreprocessedFiles,
+ ObjdirFiles,
+ ObjdirPreprocessedFiles,
+ PerSourceFlag,
+ Program,
+ RustLibrary,
+ RustProgram,
+ RustTests,
+ SandboxedWasmLibrary,
+ SharedLibrary,
+ SimpleProgram,
+ Sources,
+ StaticLibrary,
+ TestHarnessFiles,
+ TestManifest,
+ UnifiedSources,
+ VariablePassthru,
+ WasmDefines,
+ WasmSources,
+ WebIDLCollection,
+ XPCOMComponentManifests,
+ XPIDLModule,
+)
+from .reader import SandboxValidationError
+
+
+class TreeMetadataEmitter(LoggingMixin):
+ """Converts the executed mozbuild files into data structures.
+
+ This is a bridge between reader.py and data.py. It takes what was read by
+ reader.BuildReader and converts it into the classes defined in the data
+ module.
+ """
+
+ def __init__(self, config):
+ self.populate_logger()
+
+ self.config = config
+
+ mozinfo.find_and_update_from_json(config.topobjdir)
+
+ self.info = dict(mozinfo.info)
+
+ self._libs = OrderedDefaultDict(list)
+ self._binaries = OrderedDict()
+ self._compile_dirs = set()
+ self._host_compile_dirs = set()
+ self._wasm_compile_dirs = set()
+ self._asm_compile_dirs = set()
+ self._compile_flags = dict()
+ self._compile_as_flags = dict()
+ self._linkage = []
+ self._static_linking_shared = set()
+ self._crate_verified_local = set()
+ self._crate_directories = dict()
+ self._idls = defaultdict(set)
+
+ # Keep track of external paths (third party build systems), starting
+ # from what we run a subconfigure in. We'll eliminate some directories
+ # as we traverse them with moz.build (e.g. js/src).
+ subconfigures = os.path.join(self.config.topobjdir, "subconfigures")
+ paths = []
+ if os.path.exists(subconfigures):
+ paths = open(subconfigures).read().splitlines()
+ self._external_paths = set(mozpath.normsep(d) for d in paths)
+
+ self._emitter_time = 0.0
+ self._object_count = 0
+ self._test_files_converter = SupportFilesConverter()
+
+ def summary(self):
+ return ExecutionSummary(
+ "Processed into {object_count:d} build config descriptors in "
+ "{execution_time:.2f}s",
+ execution_time=self._emitter_time,
+ object_count=self._object_count,
+ )
+
    def emit(self, output, emitfn=None):
        """Convert the BuildReader output into data structures.

        The return value from BuildReader.read_topsrcdir() (a generator) is
        typically fed into this function.
        """
        contexts = {}
        emitfn = emitfn or self.emit_from_context

        def emit_objs(objs):
            # Count every yielded object so summary() can report it.
            for o in objs:
                self._object_count += 1
                yield o

        for out in output:
            # Nothing in sub-contexts is currently of interest to us. Filter
            # them all out.
            if isinstance(out, SubContext):
                continue

            if isinstance(out, Context):
                # Keep all contexts around, we will need them later.
                contexts[os.path.normcase(out.objdir)] = out

                start = time.monotonic()
                # We need to expand the generator for the timings to work.
                objs = list(emitfn(out))
                self._emitter_time += time.monotonic() - start

                for o in emit_objs(objs):
                    yield o

            else:
                raise Exception("Unhandled output type: %s" % type(out))

        # Don't emit Linkable objects when COMPILE_ENVIRONMENT is not set
        if self.config.substs.get("COMPILE_ENVIRONMENT"):
            start = time.monotonic()
            objs = list(self._emit_libs_derived(contexts))
            self._emitter_time += time.monotonic() - start

            for o in emit_objs(objs):
                yield o
+
    def _emit_libs_derived(self, contexts):
        """Yield objects derived from all libraries once every context is read.

        Aggregates IDL sources into their collections, resolves
        FINAL_LIBRARY and USE_LIBS linkage, validates rust library and
        static/shared constraints, propagates LIBRARY_DEFINES, and finally
        yields the libraries, flags and binaries.
        """

        # First aggregate idl sources.
        webidl_attrs = [
            ("GENERATED_EVENTS_WEBIDL_FILES", lambda c: c.generated_events_sources),
            ("GENERATED_WEBIDL_FILES", lambda c: c.generated_sources),
            ("PREPROCESSED_TEST_WEBIDL_FILES", lambda c: c.preprocessed_test_sources),
            ("PREPROCESSED_WEBIDL_FILES", lambda c: c.preprocessed_sources),
            ("TEST_WEBIDL_FILES", lambda c: c.test_sources),
            ("WEBIDL_FILES", lambda c: c.sources),
            ("WEBIDL_EXAMPLE_INTERFACES", lambda c: c.example_interfaces),
        ]
        ipdl_attrs = [
            ("IPDL_SOURCES", lambda c: c.sources),
            ("PREPROCESSED_IPDL_SOURCES", lambda c: c.preprocessed_sources),
        ]
        xpcom_attrs = [("XPCOM_MANIFESTS", lambda c: c.manifests)]

        idl_sources = {}
        for root, cls, attrs in (
            (self.config.substs.get("WEBIDL_ROOT"), WebIDLCollection, webidl_attrs),
            (self.config.substs.get("IPDL_ROOT"), IPDLCollection, ipdl_attrs),
            (
                self.config.substs.get("XPCOM_ROOT"),
                XPCOMComponentManifests,
                xpcom_attrs,
            ),
        ):
            if root:
                collection = cls(contexts[os.path.normcase(root)])
                for var, src_getter in attrs:
                    src_getter(collection).update(self._idls[var])

                idl_sources[root] = collection.all_source_files()
                if isinstance(collection, WebIDLCollection):
                    # Test webidl sources are added here as a somewhat special
                    # case.
                    idl_sources[mozpath.join(root, "test")] = [
                        s for s in collection.all_test_cpp_basenames()
                    ]

                yield collection

        # Next do FINAL_LIBRARY linkage.
        for lib in (l for libs in self._libs.values() for l in libs):
            if not isinstance(lib, (StaticLibrary, RustLibrary)) or not lib.link_into:
                continue
            if lib.link_into not in self._libs:
                raise SandboxValidationError(
                    'FINAL_LIBRARY ("%s") does not match any LIBRARY_NAME'
                    % lib.link_into,
                    contexts[os.path.normcase(lib.objdir)],
                )
            candidates = self._libs[lib.link_into]

            # When there are multiple candidates, but all are in the same
            # directory and have a different type, we want all of them to
            # have the library linked. The typical usecase is when building
            # both a static and a shared library in a directory, and having
            # that as a FINAL_LIBRARY.
            if (
                len(set(type(l) for l in candidates)) == len(candidates)
                and len(set(l.objdir for l in candidates)) == 1
            ):
                for c in candidates:
                    c.link_library(lib)
            else:
                raise SandboxValidationError(
                    'FINAL_LIBRARY ("%s") matches a LIBRARY_NAME defined in '
                    "multiple places:\n    %s"
                    % (lib.link_into, "\n    ".join(l.objdir for l in candidates)),
                    contexts[os.path.normcase(lib.objdir)],
                )

        # ...and USE_LIBS linkage.
        for context, obj, variable in self._linkage:
            self._link_libraries(context, obj, variable, idl_sources)

        # Walk transitively through static-library references.
        def recurse_refs(lib):
            for o in lib.refs:
                yield o
                if isinstance(o, StaticLibrary):
                    for q in recurse_refs(o):
                        yield q

        # Check that all static libraries refering shared libraries in
        # USE_LIBS are linked into a shared library or program.
        for lib in self._static_linking_shared:
            if all(isinstance(o, StaticLibrary) for o in recurse_refs(lib)):
                shared_libs = sorted(
                    l.basename
                    for l in lib.linked_libraries
                    if isinstance(l, SharedLibrary)
                )
                raise SandboxValidationError(
                    'The static "%s" library is not used in a shared library '
                    "or a program, but USE_LIBS contains the following shared "
                    "library names:\n    %s\n\nMaybe you can remove the "
                    'static "%s" library?'
                    % (lib.basename, "\n    ".join(shared_libs), lib.basename),
                    contexts[os.path.normcase(lib.objdir)],
                )

        # Collect rust libraries reachable through static-ish libraries;
        # memoized because the same subtrees are visited repeatedly.
        @memoize
        def rust_libraries(obj):
            libs = []
            for o in obj.linked_libraries:
                if isinstance(o, (HostRustLibrary, RustLibrary)):
                    libs.append(o)
                elif isinstance(o, (HostLibrary, StaticLibrary, SandboxedWasmLibrary)):
                    libs.extend(rust_libraries(o))
            return libs

        # At most one rust library may end up in any linked object.
        def check_rust_libraries(obj):
            rust_libs = set(rust_libraries(obj))
            if len(rust_libs) <= 1:
                return
            if isinstance(obj, (Library, HostLibrary)):
                what = '"%s" library' % obj.basename
            else:
                what = '"%s" program' % obj.name
            raise SandboxValidationError(
                "Cannot link the following Rust libraries into the %s:\n"
                "%s\nOnly one is allowed."
                % (
                    what,
                    "\n".join(
                        "  - %s" % r.basename
                        for r in sorted(rust_libs, key=lambda r: r.basename)
                    ),
                ),
                contexts[os.path.normcase(obj.objdir)],
            )

        # Propagate LIBRARY_DEFINES to all child libraries recursively.
        def propagate_defines(outerlib, defines):
            outerlib.lib_defines.update(defines)
            for lib in outerlib.linked_libraries:
                # Propagate defines only along FINAL_LIBRARY paths, not USE_LIBS
                # paths.
                if (
                    isinstance(lib, StaticLibrary)
                    and lib.link_into == outerlib.basename
                ):
                    propagate_defines(lib, defines)

        for lib in (l for libs in self._libs.values() for l in libs):
            if isinstance(lib, Library):
                propagate_defines(lib, lib.lib_defines)
            check_rust_libraries(lib)
            yield lib

        # Resolve the propagated defines into the per-objdir compile flags.
        for lib in (l for libs in self._libs.values() for l in libs):
            lib_defines = list(lib.lib_defines.get_defines())
            if lib_defines:
                objdir_flags = self._compile_flags[lib.objdir]
                objdir_flags.resolve_flags("LIBRARY_DEFINES", lib_defines)

                objdir_flags = self._compile_as_flags.get(lib.objdir)
                if objdir_flags:
                    objdir_flags.resolve_flags("LIBRARY_DEFINES", lib_defines)

        for flags_obj in self._compile_flags.values():
            yield flags_obj

        for flags_obj in self._compile_as_flags.values():
            yield flags_obj

        for obj in self._binaries.values():
            if isinstance(obj, Linkable):
                check_rust_libraries(obj)
            yield obj
+
    # Maps a Linkable KIND to the moz.build variable naming that kind of
    # library.
    LIBRARY_NAME_VAR = {
        "host": "HOST_LIBRARY_NAME",
        "target": "LIBRARY_NAME",
        "wasm": "SANDBOXED_WASM_LIBRARY_NAME",
    }

    # Maps a Linkable KIND to the substs key carrying its OS/architecture.
    ARCH_VAR = {"host": "HOST_OS_ARCH", "target": "OS_TARGET"}

    # Per-KIND name of the stdc++ compatibility library.
    STDCXXCOMPAT_NAME = {"host": "host_stdc++compat", "target": "stdc++compat"}
+
    def _link_libraries(self, context, obj, variable, extra_sources):
        """Add linkage declarations to a given object.

        ``variable`` is the moz.build list (USE_LIBS or HOST_USE_LIBS) to
        read library references from; ``extra_sources`` maps objdirs to
        generated .cpp files to prepend to the object's sources.
        """
        assert isinstance(obj, Linkable)

        if context.objdir in extra_sources:
            # All "extra sources" are .cpp for the moment, and happen to come
            # first in order.
            obj.sources[".cpp"] = extra_sources[context.objdir] + obj.sources[".cpp"]

        for path in context.get(variable, []):
            self._link_library(context, obj, variable, path)

        # Link system libraries from OS_LIBS/HOST_OS_LIBS.
        # (USE_LIBS -> OS_LIBS, HOST_USE_LIBS -> HOST_OS_LIBS.)
        for lib in context.get(variable.replace("USE", "OS"), []):
            obj.link_system_library(lib)

        # We have to wait for all the self._link_library calls above to have
        # happened for obj.cxx_link to be final.
        # FIXME: Theoretically, HostSharedLibrary shouldn't be here (bug
        # 1474022).
        if (
            not isinstance(
                obj, (StaticLibrary, HostLibrary, HostSharedLibrary, BaseRustProgram)
            )
            and obj.cxx_link
        ):
            if (
                context.config.substs.get("MOZ_STDCXX_COMPAT")
                and context.config.substs.get(self.ARCH_VAR.get(obj.KIND)) == "Linux"
            ):
                # C++-linked Linux binaries pick up the stdc++ compat shim.
                self._link_library(
                    context, obj, variable, self.STDCXXCOMPAT_NAME[obj.KIND]
                )
            if obj.KIND == "target":
                for lib in context.config.substs.get("STLPORT_LIBS", []):
                    obj.link_system_library(lib)
+
def _link_library(self, context, obj, variable, path):
    """Resolve one ``variable`` entry ``path`` and link it into ``obj``.

    ``path`` may carry a "static:" prefix (honored for target-kind
    objects only) to force the static flavor of a library, and may carry
    a directory component: "/"-rooted directories are taken relative to
    the topobjdir, others relative to the current objdir.

    Raises SandboxValidationError when the entry matches no library,
    matches ambiguously, or no static flavor exists when one was forced.
    """
    # "static:" only has meaning for target libraries; strip the 7-char
    # prefix before splitting the path.
    force_static = path.startswith("static:") and obj.KIND == "target"
    if force_static:
        path = path[7:]
    name = mozpath.basename(path)
    dir = mozpath.dirname(path)
    # Only libraries of the same kind (target vs. host) are candidates.
    candidates = [l for l in self._libs[name] if l.KIND == obj.KIND]
    if dir:
        # Normalize the directory to a topobjdir-relative form so it can
        # be compared against each candidate's relobjdir.
        if dir.startswith("/"):
            dir = mozpath.normpath(mozpath.join(obj.topobjdir, dir[1:]))
        else:
            dir = mozpath.normpath(mozpath.join(obj.objdir, dir))
        dir = mozpath.relpath(dir, obj.topobjdir)
        candidates = [l for l in candidates if l.relobjdir == dir]
        if not candidates:
            # If the given directory is under one of the external
            # (third party) paths, use a fake library reference to
            # there.
            for d in self._external_paths:
                if dir.startswith("%s/" % d):
                    candidates = [
                        self._get_external_library(dir, name, force_static)
                    ]
                    break

        if not candidates:
            raise SandboxValidationError(
                '%s contains "%s", but there is no "%s" %s in %s.'
                % (variable, path, name, self.LIBRARY_NAME_VAR[obj.KIND], dir),
                context,
            )

    if len(candidates) > 1:
        # If there's more than one remaining candidate, it could be
        # that there are instances for the same library, in static and
        # shared form.
        libs = {}
        for l in candidates:
            key = mozpath.join(l.relobjdir, l.basename)
            if force_static:
                # Keep only static flavors when "static:" was requested.
                if isinstance(l, StaticLibrary):
                    libs[key] = l
            else:
                # Otherwise prefer the shared flavor when both flavors
                # live at the same location.
                if key in libs and isinstance(l, SharedLibrary):
                    libs[key] = l
                if key not in libs:
                    libs[key] = l
        candidates = list(libs.values())
        if force_static and not candidates:
            if dir:
                raise SandboxValidationError(
                    '%s contains "static:%s", but there is no static '
                    '"%s" %s in %s.'
                    % (variable, path, name, self.LIBRARY_NAME_VAR[obj.KIND], dir),
                    context,
                )
            raise SandboxValidationError(
                '%s contains "static:%s", but there is no static "%s" '
                "%s in the tree"
                % (variable, name, name, self.LIBRARY_NAME_VAR[obj.KIND]),
                context,
            )

    if not candidates:
        raise SandboxValidationError(
            '%s contains "%s", which does not match any %s in the tree.'
            % (variable, path, self.LIBRARY_NAME_VAR[obj.KIND]),
            context,
        )

    elif len(candidates) > 1:
        paths = (mozpath.join(l.relsrcdir, "moz.build") for l in candidates)
        raise SandboxValidationError(
            '%s contains "%s", which matches a %s defined in multiple '
            "places:\n %s"
            % (
                variable,
                path,
                self.LIBRARY_NAME_VAR[obj.KIND],
                "\n ".join(paths),
            ),
            context,
        )

    elif force_static and not isinstance(candidates[0], StaticLibrary):
        raise SandboxValidationError(
            '%s contains "static:%s", but there is only a shared "%s" '
            "in %s. You may want to add FORCE_STATIC_LIB=True in "
            '%s/moz.build, or remove "static:".'
            % (
                variable,
                path,
                name,
                candidates[0].relobjdir,
                candidates[0].relobjdir,
            ),
            context,
        )

    elif isinstance(obj, StaticLibrary) and isinstance(
        candidates[0], SharedLibrary
    ):
        # Record that a static library links a shared one; checked later
        # elsewhere in the emitter.
        self._static_linking_shared.add(obj)
    obj.link_library(candidates[0])
+
@memoize
def _get_external_library(self, dir, name, force_static):
    """Build a fake library object for a third-party (external) path.

    Returns an ExternalStaticLibrary or ExternalSharedLibrary whose
    context is more or less truthful about where the external library
    lives under the source tree. Memoized, so repeated references to the
    same (dir, name, force_static) yield the same object.
    """
    fake_context = Context(config=self.config)
    fake_context.add_source(mozpath.join(self.config.topsrcdir, dir, "dummy"))
    lib_cls = ExternalStaticLibrary if force_static else ExternalSharedLibrary
    return lib_cls(fake_context, name)
+
def _parse_cargo_file(self, context):
    """Parse the Cargo.toml file in context and return a Python object
    representation of it. Raise a SandboxValidationError if the Cargo.toml
    file does not exist. Return a tuple of (config, cargo_file)."""
    cargo_file = mozpath.join(context.srcdir, "Cargo.toml")
    if os.path.exists(cargo_file):
        with open(cargo_file, "r") as fh:
            return toml.load(fh), cargo_file
    raise SandboxValidationError(
        "No Cargo.toml file found in %s" % cargo_file, context
    )
+
def _verify_deps(
    self, context, crate_dir, crate_name, dependencies, description="Dependency"
):
    """Verify that a crate's dependencies all specify local paths."""

    def complain(fmt, dep_name):
        # All failure modes share the same (description, dep, crate)
        # message shape; centralize the raise.
        raise SandboxValidationError(
            fmt % (description, dep_name, crate_name), context
        )

    for dep_name, spec in six.iteritems(dependencies):
        # A simple version number.
        if isinstance(spec, (six.binary_type, six.text_type)):
            complain("%s %s of crate %s does not list a path", dep_name)

        dep_path = spec.get("path", None)
        if not dep_path:
            complain("%s %s of crate %s does not list a path", dep_name)

        # Try to catch the case where somebody listed a
        # local path for development.
        if os.path.isabs(dep_path):
            complain("%s %s of crate %s has a non-relative path", dep_name)

        resolved = mozpath.join(context.config.topsrcdir, crate_dir, dep_path)
        if not os.path.exists(resolved):
            complain("%s %s of crate %s refers to a non-existent path", dep_name)
+
def _rust_library(
    self, context, libname, static_args, is_gkrust=False, cls=RustLibrary
):
    """Validate a Rust library's Cargo.toml against its moz.build
    declaration and return a ``cls`` instance for it.

    Checks that the package name matches ``libname``, that the [lib]
    section declares a "staticlib" crate-type, and that declared
    features contain no duplicates.
    """

    def invalid(message):
        # All validation failures raise against the same context.
        raise SandboxValidationError(message, context)

    # We need to note any Rust library for linking purposes.
    config, cargo_file = self._parse_cargo_file(context)
    crate_name = config["package"]["name"]

    if crate_name != libname:
        invalid(
            "library %s does not match Cargo.toml-defined package %s"
            % (libname, crate_name)
        )

    # Check that the [lib.crate-type] field is correct
    lib_section = config.get("lib", None)
    if not lib_section:
        invalid("Cargo.toml for %s has no [lib] section" % libname)

    crate_type = lib_section.get("crate-type", None)
    if not crate_type:
        invalid("Can't determine a crate-type for %s from Cargo.toml" % libname)

    # Only the first declared crate-type is considered, and it must be a
    # static library.
    crate_type = crate_type[0]
    if crate_type != "staticlib":
        invalid("crate-type %s is not permitted for %s" % (crate_type, libname))

    dependencies = set(six.iterkeys(config.get("dependencies", {})))

    features = context.get(cls.FEATURES_VAR, [])
    if len(set(features)) != len(features):
        invalid(
            "features for %s should not contain duplicates: %s"
            % (libname, features)
        )

    return cls(
        context,
        libname,
        cargo_file,
        crate_type,
        dependencies,
        features,
        is_gkrust,
        **static_args,
    )
+
def _handle_linkables(self, context, passthru, generated_files):
    """Emit all linkable-related objects for a context.

    Generator yielding ContextDerived objects (Sources/UnifiedSources/
    GeneratedFile/PerSourceFlag, ...) for the programs, libraries and
    source files declared by ``context``. Registers binaries and
    libraries in self._binaries/self._libs and queues linkage resolution
    in self._linkage. ``generated_files`` is mutated: uninstalled shared
    library names are added to it.

    Raises SandboxValidationError for any inconsistent combination of
    moz.build variables.
    """
    linkables = []
    host_linkables = []
    wasm_linkables = []

    def add_program(prog, var):
        # HOST_* variables contribute to host_linkables, the rest to
        # target linkables.
        if var.startswith("HOST_"):
            host_linkables.append(prog)
        else:
            linkables.append(prog)

    def check_unique_binary(program, kind):
        if program in self._binaries:
            raise SandboxValidationError(
                'Cannot use "%s" as %s name, '
                "because it is already used in %s"
                % (program, kind, self._binaries[program].relsrcdir),
                context,
            )

    # PROGRAM/HOST_PROGRAM: at most one each per context.
    for kind, cls in [("PROGRAM", Program), ("HOST_PROGRAM", HostProgram)]:
        program = context.get(kind)
        if program:
            check_unique_binary(program, kind)
            self._binaries[program] = cls(context, program)
            self._linkage.append(
                (
                    context,
                    self._binaries[program],
                    kind.replace("PROGRAM", "USE_LIBS"),
                )
            )
            add_program(self._binaries[program], kind)

    all_rust_programs = []
    for kind, cls in [
        ("RUST_PROGRAMS", RustProgram),
        ("HOST_RUST_PROGRAMS", HostRustProgram),
    ]:
        programs = context[kind]
        if not programs:
            continue

        all_rust_programs.append((programs, kind, cls))

    # Verify Rust program definitions.
    if all_rust_programs:
        config, cargo_file = self._parse_cargo_file(context)
        bin_section = config.get("bin", None)
        if not bin_section:
            raise SandboxValidationError(
                "Cargo.toml in %s has no [bin] section" % context.srcdir, context
            )

        defined_binaries = {b["name"] for b in bin_section}

        for programs, kind, cls in all_rust_programs:
            for program in programs:
                if program not in defined_binaries:
                    raise SandboxValidationError(
                        "Cannot find Cargo.toml definition for %s" % program,
                        context,
                    )

                check_unique_binary(program, kind)
                self._binaries[program] = cls(context, program, cargo_file)
                add_program(self._binaries[program], kind)

    # SIMPLE_PROGRAMS/CPP_UNIT_TESTS/HOST_SIMPLE_PROGRAMS: one binary
    # per listed source name.
    for kind, cls in [
        ("SIMPLE_PROGRAMS", SimpleProgram),
        ("CPP_UNIT_TESTS", SimpleProgram),
        ("HOST_SIMPLE_PROGRAMS", HostSimpleProgram),
    ]:
        for program in context[kind]:
            if program in self._binaries:
                raise SandboxValidationError(
                    'Cannot use "%s" in %s, '
                    "because it is already used in %s"
                    % (program, kind, self._binaries[program].relsrcdir),
                    context,
                )
            self._binaries[program] = cls(
                context, program, is_unit_test=kind == "CPP_UNIT_TESTS"
            )
            self._linkage.append(
                (
                    context,
                    self._binaries[program],
                    "HOST_USE_LIBS"
                    if kind == "HOST_SIMPLE_PROGRAMS"
                    else "USE_LIBS",
                )
            )
            add_program(self._binaries[program], kind)

    host_libname = context.get("HOST_LIBRARY_NAME")
    libname = context.get("LIBRARY_NAME")

    if host_libname:
        if host_libname == libname:
            raise SandboxValidationError(
                "LIBRARY_NAME and HOST_LIBRARY_NAME must have a different value",
                context,
            )

        is_rust_library = context.get("IS_RUST_LIBRARY")
        if is_rust_library:
            lib = self._rust_library(context, host_libname, {}, cls=HostRustLibrary)
        elif context.get("FORCE_SHARED_LIB"):
            lib = HostSharedLibrary(context, host_libname)
        else:
            lib = HostLibrary(context, host_libname)
        self._libs[host_libname].append(lib)
        self._linkage.append((context, lib, "HOST_USE_LIBS"))
        host_linkables.append(lib)

    final_lib = context.get("FINAL_LIBRARY")
    if not libname and final_lib:
        # If no LIBRARY_NAME is given, create one.
        libname = context.relsrcdir.replace("/", "_")

    static_lib = context.get("FORCE_STATIC_LIB")
    shared_lib = context.get("FORCE_SHARED_LIB")

    static_name = context.get("STATIC_LIBRARY_NAME")
    shared_name = context.get("SHARED_LIBRARY_NAME")

    is_framework = context.get("IS_FRAMEWORK")

    soname = context.get("SONAME")

    lib_defines = context.get("LIBRARY_DEFINES")

    wasm_lib = context.get("SANDBOXED_WASM_LIBRARY_NAME")

    shared_args = {}
    static_args = {}

    # FINAL_LIBRARY implies a static library linked into the named final
    # library; it excludes the FORCE_* and framework variables.
    if final_lib:
        if static_lib:
            raise SandboxValidationError(
                "FINAL_LIBRARY implies FORCE_STATIC_LIB. "
                "Please remove the latter.",
                context,
            )
        if shared_lib:
            raise SandboxValidationError(
                "FINAL_LIBRARY conflicts with FORCE_SHARED_LIB. "
                "Please remove one.",
                context,
            )
        if is_framework:
            raise SandboxValidationError(
                "FINAL_LIBRARY conflicts with IS_FRAMEWORK. " "Please remove one.",
                context,
            )
        static_args["link_into"] = final_lib
        static_lib = True

    if libname:
        if is_framework:
            if soname:
                raise SandboxValidationError(
                    "IS_FRAMEWORK conflicts with SONAME. " "Please remove one.",
                    context,
                )
            shared_lib = True
            shared_args["variant"] = SharedLibrary.FRAMEWORK

        # Default to a static library when nothing forces a flavor.
        if not static_lib and not shared_lib:
            static_lib = True

        if static_name:
            if not static_lib:
                raise SandboxValidationError(
                    "STATIC_LIBRARY_NAME requires FORCE_STATIC_LIB", context
                )
            static_args["real_name"] = static_name

        if shared_name:
            if not shared_lib:
                raise SandboxValidationError(
                    "SHARED_LIBRARY_NAME requires FORCE_SHARED_LIB", context
                )
            shared_args["real_name"] = shared_name

        if soname:
            if not shared_lib:
                raise SandboxValidationError(
                    "SONAME requires FORCE_SHARED_LIB", context
                )
            shared_args["soname"] = soname

        if context.get("NO_EXPAND_LIBS"):
            if not static_lib:
                raise SandboxValidationError(
                    "NO_EXPAND_LIBS can only be set for static libraries.", context
                )
            static_args["no_expand_lib"] = True

        # When both flavors are built, their names must not collide with
        # each other or with LIBRARY_NAME.
        if shared_lib and static_lib:
            if not static_name and not shared_name:
                raise SandboxValidationError(
                    "Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, "
                    "but neither STATIC_LIBRARY_NAME or "
                    "SHARED_LIBRARY_NAME is set. At least one is required.",
                    context,
                )
            if static_name and not shared_name and static_name == libname:
                raise SandboxValidationError(
                    "Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, "
                    "but STATIC_LIBRARY_NAME is the same as LIBRARY_NAME, "
                    "and SHARED_LIBRARY_NAME is unset. Please either "
                    "change STATIC_LIBRARY_NAME or LIBRARY_NAME, or set "
                    "SHARED_LIBRARY_NAME.",
                    context,
                )
            if shared_name and not static_name and shared_name == libname:
                raise SandboxValidationError(
                    "Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, "
                    "but SHARED_LIBRARY_NAME is the same as LIBRARY_NAME, "
                    "and STATIC_LIBRARY_NAME is unset. Please either "
                    "change SHARED_LIBRARY_NAME or LIBRARY_NAME, or set "
                    "STATIC_LIBRARY_NAME.",
                    context,
                )
            if shared_name and static_name and shared_name == static_name:
                raise SandboxValidationError(
                    "Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, "
                    "but SHARED_LIBRARY_NAME is the same as "
                    "STATIC_LIBRARY_NAME. Please change one of them.",
                    context,
                )

        symbols_file = context.get("SYMBOLS_FILE")
        if symbols_file:
            if not shared_lib:
                raise SandboxValidationError(
                    "SYMBOLS_FILE can only be used with a SHARED_LIBRARY.", context
                )
            if context.get("DEFFILE"):
                raise SandboxValidationError(
                    "SYMBOLS_FILE cannot be used along DEFFILE.", context
                )
            if isinstance(symbols_file, SourcePath):
                if not os.path.exists(symbols_file.full_path):
                    raise SandboxValidationError(
                        "Path specified in SYMBOLS_FILE does not exist: %s "
                        "(resolved to %s)" % (symbols_file, symbols_file.full_path),
                        context,
                    )
                shared_args["symbols_file"] = True
            else:
                if symbols_file.target_basename not in generated_files:
                    raise SandboxValidationError(
                        (
                            "Objdir file specified in SYMBOLS_FILE not in "
                            + "GENERATED_FILES: %s"
                        )
                        % (symbols_file,),
                        context,
                    )
                shared_args["symbols_file"] = symbols_file.target_basename

        if shared_lib:
            lib = SharedLibrary(context, libname, **shared_args)
            self._libs[libname].append(lib)
            self._linkage.append((context, lib, "USE_LIBS"))
            linkables.append(lib)
            if not lib.installed:
                generated_files.add(lib.lib_name)
            # A source-path SYMBOLS_FILE is preprocessed into the real
            # symbols file by a generated-file step.
            if symbols_file and isinstance(symbols_file, SourcePath):
                script = mozpath.join(
                    mozpath.dirname(mozpath.dirname(__file__)),
                    "action",
                    "generate_symbols_file.py",
                )
                defines = ()
                if lib.defines:
                    defines = lib.defines.get_defines()
                yield GeneratedFile(
                    context,
                    script,
                    "generate_symbols_file",
                    lib.symbols_file,
                    [symbols_file],
                    defines,
                    required_during_compile=[lib.symbols_file],
                )
        if static_lib:
            is_rust_library = context.get("IS_RUST_LIBRARY")
            if is_rust_library:
                lib = self._rust_library(
                    context,
                    libname,
                    static_args,
                    is_gkrust=bool(context.get("IS_GKRUST")),
                )
            else:
                lib = StaticLibrary(context, libname, **static_args)
            self._libs[libname].append(lib)
            self._linkage.append((context, lib, "USE_LIBS"))
            linkables.append(lib)

        if lib_defines:
            if not libname:
                raise SandboxValidationError(
                    "LIBRARY_DEFINES needs a " "LIBRARY_NAME to take effect",
                    context,
                )
            # Applies to the last library created above (static flavor
            # when both flavors exist).
            lib.lib_defines.update(lib_defines)

    if wasm_lib:
        if wasm_lib == libname:
            raise SandboxValidationError(
                "SANDBOXED_WASM_LIBRARY_NAME and LIBRARY_NAME must have a "
                "different value.",
                context,
            )
        if wasm_lib == host_libname:
            raise SandboxValidationError(
                "SANDBOXED_WASM_LIBRARY_NAME and HOST_LIBRARY_NAME must "
                "have a different value.",
                context,
            )
        if wasm_lib == shared_name:
            raise SandboxValidationError(
                "SANDBOXED_WASM_LIBRARY_NAME and SHARED_NAME must have a "
                "different value.",
                context,
            )
        if wasm_lib == static_name:
            raise SandboxValidationError(
                "SANDBOXED_WASM_LIBRARY_NAME and STATIC_NAME must have a "
                "different value.",
                context,
            )
        lib = SandboxedWasmLibrary(context, wasm_lib)
        # NOTE(review): the wasm library is registered under libname, not
        # wasm_lib — this mirrors the original code; confirm intent.
        self._libs[libname].append(lib)
        wasm_linkables.append(lib)
        self._wasm_compile_dirs.add(context.objdir)

    # Object-name collisions between SOURCES and UNIFIED_SOURCES would
    # produce the same .o twice; reject them up front.
    seen = {}
    for symbol in ("SOURCES", "UNIFIED_SOURCES"):
        for src in context.get(symbol, []):
            basename = os.path.splitext(os.path.basename(src))[0]
            if basename in seen:
                other_src, where = seen[basename]
                extra = ""
                if "UNIFIED_SOURCES" in (symbol, where):
                    extra = " in non-unified builds"
                raise SandboxValidationError(
                    f"{src} from {symbol} would have the same object name "
                    f"as {other_src} from {where}{extra}.",
                    context,
                )
            seen[basename] = (src, symbol)

    # Only emit sources if we have linkables defined in the same context.
    # Note the linkables are not emitted in this function, but much later,
    # after aggregation (because of e.g. USE_LIBS processing).
    if not (linkables or host_linkables or wasm_linkables):
        return

    # TODO: objdirs with only host things in them shouldn't need target
    # flags, but there's at least one Makefile.in (in
    # build/unix/elfhack) that relies on the value of LDFLAGS being
    # passed to one-off rules.
    self._compile_dirs.add(context.objdir)

    if host_linkables or any(
        isinstance(l, (RustLibrary, RustProgram)) for l in linkables
    ):
        self._host_compile_dirs.add(context.objdir)

    # Split each *_SOURCES variable into static (in-tree) and generated
    # (objdir) files, collecting per-file flags along the way.
    sources = defaultdict(list)
    gen_sources = defaultdict(list)
    all_flags = {}
    for symbol in ("SOURCES", "HOST_SOURCES", "UNIFIED_SOURCES", "WASM_SOURCES"):
        srcs = sources[symbol]
        gen_srcs = gen_sources[symbol]
        context_srcs = context.get(symbol, [])
        seen_sources = set()
        for f in context_srcs:
            if f in seen_sources:
                raise SandboxValidationError(
                    "Source file should only "
                    "be added to %s once: %s" % (symbol, f),
                    context,
                )
            seen_sources.add(f)
            full_path = f.full_path
            if isinstance(f, SourcePath):
                srcs.append(full_path)
            else:
                assert isinstance(f, Path)
                gen_srcs.append(full_path)
            if symbol == "SOURCES":
                context_flags = context_srcs[f]
                if context_flags:
                    all_flags[full_path] = context_flags

            if isinstance(f, SourcePath) and not os.path.exists(full_path):
                raise SandboxValidationError(
                    "File listed in %s does not "
                    "exist: '%s'" % (symbol, full_path),
                    context,
                )

    # Process the .cpp files generated by IPDL as generated sources within
    # the context which declared the IPDL_SOURCES attribute.
    ipdl_root = self.config.substs.get("IPDL_ROOT")
    for symbol in ("IPDL_SOURCES", "PREPROCESSED_IPDL_SOURCES"):
        context_srcs = context.get(symbol, [])
        for f in context_srcs:
            root, ext = mozpath.splitext(mozpath.basename(f))

            suffix_map = {
                ".ipdlh": [".cpp"],
                ".ipdl": [".cpp", "Child.cpp", "Parent.cpp"],
            }
            if ext not in suffix_map:
                # Fix: pass context like every other validation error in
                # this file, so the error reports its moz.build location.
                raise SandboxValidationError(
                    "Unexpected extension for IPDL source %s" % ext, context
                )

            gen_sources["UNIFIED_SOURCES"].extend(
                mozpath.join(ipdl_root, root + suffix) for suffix in suffix_map[ext]
            )

    no_pgo = context.get("NO_PGO")
    no_pgo_sources = [f for f, flags in six.iteritems(all_flags) if flags.no_pgo]
    if no_pgo:
        if no_pgo_sources:
            raise SandboxValidationError(
                "NO_PGO and SOURCES[...].no_pgo " "cannot be set at the same time",
                context,
            )
        passthru.variables["NO_PROFILE_GUIDED_OPTIMIZE"] = no_pgo
    if no_pgo_sources:
        passthru.variables["NO_PROFILE_GUIDED_OPTIMIZE"] = no_pgo_sources

    # A map from "canonical suffixes" for a particular source file
    # language to the range of suffixes associated with that language.
    #
    # We deliberately don't list the canonical suffix in the suffix list
    # in the definition; we'll add it in programmatically after defining
    # things.
    suffix_map = {
        ".s": set([".asm"]),
        ".c": set(),
        ".m": set(),
        ".mm": set(),
        ".cpp": set([".cc", ".cxx"]),
        ".S": set(),
    }

    # The inverse of the above, mapping suffixes to their canonical suffix.
    canonicalized_suffix_map = {}
    for suffix, alternatives in six.iteritems(suffix_map):
        alternatives.add(suffix)
        for a in alternatives:
            canonicalized_suffix_map[a] = suffix

    # A map from moz.build variables to the canonical suffixes of file
    # kinds that can be listed therein.
    all_suffixes = list(suffix_map.keys())
    varmap = dict(
        SOURCES=(Sources, all_suffixes),
        HOST_SOURCES=(HostSources, [".c", ".mm", ".cpp"]),
        UNIFIED_SOURCES=(UnifiedSources, [".c", ".mm", ".m", ".cpp"]),
    )
    # Only include a WasmSources context if there are any WASM_SOURCES.
    # (This is going to matter later because we inject an extra .c file to
    # compile with the wasm compiler if, and only if, there are any WASM
    # sources.)
    if sources["WASM_SOURCES"] or gen_sources["WASM_SOURCES"]:
        varmap["WASM_SOURCES"] = (WasmSources, [".c", ".cpp"])
    # Track whether there are any C++ source files.
    # Technically this won't do the right thing for SIMPLE_PROGRAMS in
    # a directory with mixed C and C++ source, but it's not that important.
    cxx_sources = defaultdict(bool)

    # Source files to track for linkables associated with this context.
    ctxt_sources = defaultdict(lambda: defaultdict(list))

    for variable, (klass, suffixes) in varmap.items():
        # Group static and generated files by their canonical suffixes, and
        # ensure we haven't been given filetypes that we don't recognize.
        by_canonical_suffix = defaultdict(lambda: {"static": [], "generated": []})
        for srcs, key in (
            (sources[variable], "static"),
            (gen_sources[variable], "generated"),
        ):
            for f in srcs:
                canonical_suffix = canonicalized_suffix_map.get(
                    mozpath.splitext(f)[1]
                )
                if canonical_suffix not in suffixes:
                    raise SandboxValidationError(
                        "%s has an unknown file type." % f, context
                    )
                by_canonical_suffix[canonical_suffix][key].append(f)

        # Yield an object for each canonical suffix, grouping generated and
        # static sources together to allow them to be unified together.
        for canonical_suffix in sorted(by_canonical_suffix.keys()):
            if canonical_suffix in (".cpp", ".mm"):
                cxx_sources[variable] = True
            elif canonical_suffix in (".s", ".S"):
                self._asm_compile_dirs.add(context.objdir)
            src_group = by_canonical_suffix[canonical_suffix]
            obj = klass(
                context,
                src_group["static"],
                src_group["generated"],
                canonical_suffix,
            )
            srcs = list(obj.files)
            if isinstance(obj, UnifiedSources) and obj.have_unified_mapping:
                # Unified builds compile the unified files, not the
                # originals; track those instead.
                srcs = sorted(dict(obj.unified_source_mapping).keys())
            ctxt_sources[variable][canonical_suffix] += srcs
            yield obj

    if ctxt_sources:
        for linkable in linkables:
            for target_var in ("SOURCES", "UNIFIED_SOURCES"):
                for suffix, srcs in ctxt_sources[target_var].items():
                    linkable.sources[suffix] += srcs
        for host_linkable in host_linkables:
            for suffix, srcs in ctxt_sources["HOST_SOURCES"].items():
                host_linkable.sources[suffix] += srcs
        for wasm_linkable in wasm_linkables:
            for suffix, srcs in ctxt_sources["WASM_SOURCES"].items():
                wasm_linkable.sources[suffix] += srcs

    for f, flags in sorted(six.iteritems(all_flags)):
        if flags.flags:
            # (Removed an unused `ext = mozpath.splitext(f)[1]` local here.)
            yield PerSourceFlag(context, f, flags.flags)

    # If there are any C++ sources, set all the linkables defined here
    # to require the C++ linker.
    for vars, linkable_items in (
        (("SOURCES", "UNIFIED_SOURCES"), linkables),
        (("HOST_SOURCES",), host_linkables),
    ):
        for var in vars:
            if cxx_sources[var]:
                for l in linkable_items:
                    l.cxx_link = True
                break
+
+ def emit_from_context(self, context):
+ """Convert a Context to tree metadata objects.
+
+ This is a generator of mozbuild.frontend.data.ContextDerived instances.
+ """
+
+ # We only want to emit an InstallationTarget if one of the consulted
+ # variables is defined. Later on, we look up FINAL_TARGET, which has
+ # the side-effect of populating it. So, we need to do this lookup
+ # early.
+ if any(k in context for k in ("FINAL_TARGET", "XPI_NAME", "DIST_SUBDIR")):
+ yield InstallationTarget(context)
+
+ # We always emit a directory traversal descriptor. This is needed by
+ # the recursive make backend.
+ for o in self._emit_directory_traversal_from_context(context):
+ yield o
+
+ for obj in self._process_xpidl(context):
+ yield obj
+
+ computed_flags = ComputedFlags(context, context["COMPILE_FLAGS"])
+ computed_link_flags = ComputedFlags(context, context["LINK_FLAGS"])
+ computed_host_flags = ComputedFlags(context, context["HOST_COMPILE_FLAGS"])
+ computed_as_flags = ComputedFlags(context, context["ASM_FLAGS"])
+ computed_wasm_flags = ComputedFlags(context, context["WASM_FLAGS"])
+
+ # Proxy some variables as-is until we have richer classes to represent
+ # them. We should aim to keep this set small because it violates the
+ # desired abstraction of the build definition away from makefiles.
+ passthru = VariablePassthru(context)
+ varlist = [
+ "EXTRA_DSO_LDOPTS",
+ "RCFILE",
+ "RCINCLUDE",
+ "WIN32_EXE_LDFLAGS",
+ "USE_EXTENSION_MANIFEST",
+ ]
+ for v in varlist:
+ if v in context and context[v]:
+ passthru.variables[v] = context[v]
+
+ if (
+ context.config.substs.get("OS_TARGET") == "WINNT"
+ and context["DELAYLOAD_DLLS"]
+ ):
+ if context.config.substs.get("CC_TYPE") != "clang":
+ context["LDFLAGS"].extend(
+ [("-DELAYLOAD:%s" % dll) for dll in context["DELAYLOAD_DLLS"]]
+ )
+ else:
+ context["LDFLAGS"].extend(
+ [
+ ("-Wl,-Xlink=-DELAYLOAD:%s" % dll)
+ for dll in context["DELAYLOAD_DLLS"]
+ ]
+ )
+ context["OS_LIBS"].append("delayimp")
+
+ for v in ["CMFLAGS", "CMMFLAGS"]:
+ if v in context and context[v]:
+ passthru.variables["MOZBUILD_" + v] = context[v]
+
+ for v in ["CXXFLAGS", "CFLAGS"]:
+ if v in context and context[v]:
+ computed_flags.resolve_flags("MOZBUILD_%s" % v, context[v])
+
+ for v in ["WASM_CFLAGS", "WASM_CXXFLAGS"]:
+ if v in context and context[v]:
+ computed_wasm_flags.resolve_flags("MOZBUILD_%s" % v, context[v])
+
+ for v in ["HOST_CXXFLAGS", "HOST_CFLAGS"]:
+ if v in context and context[v]:
+ computed_host_flags.resolve_flags("MOZBUILD_%s" % v, context[v])
+
+ if "LDFLAGS" in context and context["LDFLAGS"]:
+ computed_link_flags.resolve_flags("MOZBUILD", context["LDFLAGS"])
+
+ deffile = context.get("DEFFILE")
+ if deffile and context.config.substs.get("OS_TARGET") == "WINNT":
+ if isinstance(deffile, SourcePath):
+ if not os.path.exists(deffile.full_path):
+ raise SandboxValidationError(
+ "Path specified in DEFFILE does not exist: %s "
+ "(resolved to %s)" % (deffile, deffile.full_path),
+ context,
+ )
+ path = mozpath.relpath(deffile.full_path, context.objdir)
+ else:
+ path = deffile.target_basename
+
+ if context.config.substs.get("GNU_CC"):
+ computed_link_flags.resolve_flags("DEFFILE", [path])
+ else:
+ computed_link_flags.resolve_flags("DEFFILE", ["-DEF:" + path])
+
+ dist_install = context["DIST_INSTALL"]
+ if dist_install is True:
+ passthru.variables["DIST_INSTALL"] = True
+ elif dist_install is False:
+ passthru.variables["NO_DIST_INSTALL"] = True
+
+ # Ideally, this should be done in templates, but this is difficult at
+ # the moment because USE_STATIC_LIBS can be set after a template
+ # returns. Eventually, with context-based templates, it will be
+ # possible.
+ if context.config.substs.get(
+ "OS_ARCH"
+ ) == "WINNT" and not context.config.substs.get("GNU_CC"):
+ use_static_lib = context.get(
+ "USE_STATIC_LIBS"
+ ) and not context.config.substs.get("MOZ_ASAN")
+ rtl_flag = "-MT" if use_static_lib else "-MD"
+ if context.config.substs.get("MOZ_DEBUG") and not context.config.substs.get(
+ "MOZ_NO_DEBUG_RTL"
+ ):
+ rtl_flag += "d"
+ computed_flags.resolve_flags("RTL", [rtl_flag])
+ if not context.config.substs.get("CROSS_COMPILE"):
+ computed_host_flags.resolve_flags("RTL", [rtl_flag])
+
+ generated_files = set()
+ localized_generated_files = set()
+ for obj in self._process_generated_files(context):
+ for f in obj.outputs:
+ generated_files.add(f)
+ if obj.localized:
+ localized_generated_files.add(f)
+ yield obj
+
+ for path in context["CONFIGURE_SUBST_FILES"]:
+ sub = self._create_substitution(ConfigFileSubstitution, context, path)
+ generated_files.add(str(sub.relpath))
+ yield sub
+
+ for defines_var, cls, backend_flags in (
+ ("DEFINES", Defines, (computed_flags, computed_as_flags)),
+ ("HOST_DEFINES", HostDefines, (computed_host_flags,)),
+ ("WASM_DEFINES", WasmDefines, (computed_wasm_flags,)),
+ ):
+ defines = context.get(defines_var)
+ if defines:
+ defines_obj = cls(context, defines)
+ if isinstance(defines_obj, Defines):
+ # DEFINES have consumers outside the compile command line,
+ # HOST_DEFINES do not.
+ yield defines_obj
+ else:
+ # If we don't have explicitly set defines we need to make sure
+ # initialized values if present end up in computed flags.
+ defines_obj = cls(context, context[defines_var])
+
+ defines_from_obj = list(defines_obj.get_defines())
+ if defines_from_obj:
+ for flags in backend_flags:
+ flags.resolve_flags(defines_var, defines_from_obj)
+
+ idl_vars = (
+ "GENERATED_EVENTS_WEBIDL_FILES",
+ "GENERATED_WEBIDL_FILES",
+ "PREPROCESSED_TEST_WEBIDL_FILES",
+ "PREPROCESSED_WEBIDL_FILES",
+ "TEST_WEBIDL_FILES",
+ "WEBIDL_FILES",
+ "IPDL_SOURCES",
+ "PREPROCESSED_IPDL_SOURCES",
+ "XPCOM_MANIFESTS",
+ )
+ for context_var in idl_vars:
+ for name in context.get(context_var, []):
+ self._idls[context_var].add(mozpath.join(context.srcdir, name))
+ # WEBIDL_EXAMPLE_INTERFACES do not correspond to files.
+ for name in context.get("WEBIDL_EXAMPLE_INTERFACES", []):
+ self._idls["WEBIDL_EXAMPLE_INTERFACES"].add(name)
+
+ local_includes = []
+ for local_include in context.get("LOCAL_INCLUDES", []):
+ full_path = local_include.full_path
+ if not isinstance(local_include, ObjDirPath):
+ if not os.path.exists(full_path):
+ raise SandboxValidationError(
+ "Path specified in LOCAL_INCLUDES does not exist: %s (resolved to %s)"
+ % (local_include, full_path),
+ context,
+ )
+ if not os.path.isdir(full_path):
+ raise SandboxValidationError(
+ "Path specified in LOCAL_INCLUDES "
+ "is a filename, but a directory is required: %s "
+ "(resolved to %s)" % (local_include, full_path),
+ context,
+ )
+ if (
+ full_path == context.config.topsrcdir
+ or full_path == context.config.topobjdir
+ ):
+ raise SandboxValidationError(
+ "Path specified in LOCAL_INCLUDES "
+ "(%s) resolves to the topsrcdir or topobjdir (%s), which is "
+ "not allowed" % (local_include, full_path),
+ context,
+ )
+ include_obj = LocalInclude(context, local_include)
+ local_includes.append(include_obj.path.full_path)
+ yield include_obj
+
+ computed_flags.resolve_flags(
+ "LOCAL_INCLUDES", ["-I%s" % p for p in local_includes]
+ )
+ computed_as_flags.resolve_flags(
+ "LOCAL_INCLUDES", ["-I%s" % p for p in local_includes]
+ )
+ computed_host_flags.resolve_flags(
+ "LOCAL_INCLUDES", ["-I%s" % p for p in local_includes]
+ )
+ computed_wasm_flags.resolve_flags(
+ "LOCAL_INCLUDES", ["-I%s" % p for p in local_includes]
+ )
+
+ for obj in self._handle_linkables(context, passthru, generated_files):
+ yield obj
+
+ generated_files.update(
+ [
+ "%s%s" % (k, self.config.substs.get("BIN_SUFFIX", ""))
+ for k in self._binaries.keys()
+ ]
+ )
+
+ components = []
+ for var, cls in (
+ ("EXPORTS", Exports),
+ ("FINAL_TARGET_FILES", FinalTargetFiles),
+ ("FINAL_TARGET_PP_FILES", FinalTargetPreprocessedFiles),
+ ("LOCALIZED_FILES", LocalizedFiles),
+ ("LOCALIZED_PP_FILES", LocalizedPreprocessedFiles),
+ ("OBJDIR_FILES", ObjdirFiles),
+ ("OBJDIR_PP_FILES", ObjdirPreprocessedFiles),
+ ("TEST_HARNESS_FILES", TestHarnessFiles),
+ ):
+ all_files = context.get(var)
+ if not all_files:
+ continue
+ if dist_install is False and var != "TEST_HARNESS_FILES":
+ raise SandboxValidationError(
+ "%s cannot be used with DIST_INSTALL = False" % var, context
+ )
+ has_prefs = False
+ has_resources = False
+ for base, files in all_files.walk():
+ if var == "TEST_HARNESS_FILES" and not base:
+ raise SandboxValidationError(
+ "Cannot install files to the root of TEST_HARNESS_FILES",
+ context,
+ )
+ if base == "components":
+ components.extend(files)
+ if base == "defaults/pref":
+ has_prefs = True
+ if mozpath.split(base)[0] == "res":
+ has_resources = True
+ for f in files:
+ if (
+ var
+ in (
+ "FINAL_TARGET_PP_FILES",
+ "OBJDIR_PP_FILES",
+ "LOCALIZED_PP_FILES",
+ )
+ and not isinstance(f, SourcePath)
+ ):
+ raise SandboxValidationError(
+ ("Only source directory paths allowed in " + "%s: %s")
+ % (var, f),
+ context,
+ )
+ if var.startswith("LOCALIZED_"):
+ if isinstance(f, SourcePath):
+ if f.startswith("en-US/"):
+ pass
+ elif "locales/en-US/" in f:
+ pass
+ else:
+ raise SandboxValidationError(
+ "%s paths must start with `en-US/` or "
+ "contain `locales/en-US/`: %s" % (var, f),
+ context,
+ )
+
+ if not isinstance(f, ObjDirPath):
+ path = f.full_path
+ if "*" not in path and not os.path.exists(path):
+ raise SandboxValidationError(
+ "File listed in %s does not exist: %s" % (var, path),
+ context,
+ )
+ else:
+ # TODO: Bug 1254682 - The '/' check is to allow
+ # installing files generated from other directories,
+ # which is done occasionally for tests. However, it
+ # means we don't fail early if the file isn't actually
+ # created by the other moz.build file.
+ if f.target_basename not in generated_files and "/" not in f:
+ raise SandboxValidationError(
+ (
+ "Objdir file listed in %s not in "
+ + "GENERATED_FILES: %s"
+ )
+ % (var, f),
+ context,
+ )
+
+ if var.startswith("LOCALIZED_"):
+ # Further require that LOCALIZED_FILES are from
+ # LOCALIZED_GENERATED_FILES.
+ if f.target_basename not in localized_generated_files:
+ raise SandboxValidationError(
+ (
+ "Objdir file listed in %s not in "
+ + "LOCALIZED_GENERATED_FILES: %s"
+ )
+ % (var, f),
+ context,
+ )
+ else:
+ # Additionally, don't allow LOCALIZED_GENERATED_FILES to be used
+ # in anything *but* LOCALIZED_FILES.
+ if f.target_basename in localized_generated_files:
+ raise SandboxValidationError(
+ (
+ "Outputs of LOCALIZED_GENERATED_FILES cannot "
+ "be used in %s: %s"
+ )
+ % (var, f),
+ context,
+ )
+
+ # Addons (when XPI_NAME is defined) and Applications (when
+ # DIST_SUBDIR is defined) use a different preferences directory
+ # (default/preferences) from the one the GRE uses (defaults/pref).
+ # Hence, we move the files from the latter to the former in that
+ # case.
+ if has_prefs and (context.get("XPI_NAME") or context.get("DIST_SUBDIR")):
+ all_files.defaults.preferences += all_files.defaults.pref
+ del all_files.defaults._children["pref"]
+
+ if has_resources and (
+ context.get("DIST_SUBDIR") or context.get("XPI_NAME")
+ ):
+ raise SandboxValidationError(
+ "RESOURCES_FILES cannot be used with DIST_SUBDIR or " "XPI_NAME.",
+ context,
+ )
+
+ yield cls(context, all_files)
+
+ for c in components:
+ if c.endswith(".manifest"):
+ yield ChromeManifestEntry(
+ context,
+ "chrome.manifest",
+ Manifest("components", mozpath.basename(c)),
+ )
+
+ rust_tests = context.get("RUST_TESTS", [])
+ if rust_tests:
+ # TODO: more sophisticated checking of the declared name vs.
+ # contents of the Cargo.toml file.
+ features = context.get("RUST_TEST_FEATURES", [])
+
+ yield RustTests(context, rust_tests, features)
+
+ for obj in self._process_test_manifests(context):
+ yield obj
+
+ for obj in self._process_jar_manifests(context):
+ yield obj
+
+ computed_as_flags.resolve_flags("MOZBUILD", context.get("ASFLAGS"))
+
+ if context.get("USE_NASM") is True:
+ nasm = context.config.substs.get("NASM")
+ if not nasm:
+ raise SandboxValidationError("nasm is not available", context)
+ passthru.variables["AS"] = nasm
+ passthru.variables["AS_DASH_C_FLAG"] = ""
+ passthru.variables["ASOUTOPTION"] = "-o "
+ computed_as_flags.resolve_flags(
+ "OS", context.config.substs.get("NASM_ASFLAGS", [])
+ )
+
+ if context.get("USE_INTEGRATED_CLANGCL_AS") is True:
+ if context.config.substs.get("CC_TYPE") != "clang-cl":
+ raise SandboxValidationError("clang-cl is not available", context)
+ passthru.variables["AS"] = context.config.substs.get("CC")
+ passthru.variables["AS_DASH_C_FLAG"] = "-c"
+ passthru.variables["ASOUTOPTION"] = "-o "
+
+ if passthru.variables:
+ yield passthru
+
+ if context.objdir in self._compile_dirs:
+ self._compile_flags[context.objdir] = computed_flags
+ yield computed_link_flags
+
+ if context.objdir in self._asm_compile_dirs:
+ self._compile_as_flags[context.objdir] = computed_as_flags
+
+ if context.objdir in self._host_compile_dirs:
+ yield computed_host_flags
+
+ if context.objdir in self._wasm_compile_dirs:
+ yield computed_wasm_flags
+
+ def _create_substitution(self, cls, context, path):
+ sub = cls(context)
+ sub.input_path = "%s.in" % path.full_path
+ sub.output_path = path.translated
+ sub.relpath = path
+
+ return sub
+
+ def _process_xpidl(self, context):
+ # XPIDL source files get processed and turned into .h and .xpt files.
+ # If there are multiple XPIDL files in a directory, they get linked
+ # together into a final .xpt, which has the name defined by
+ # XPIDL_MODULE.
+ xpidl_module = context["XPIDL_MODULE"]
+
+ if not xpidl_module:
+ if context["XPIDL_SOURCES"]:
+ raise SandboxValidationError(
+ "XPIDL_MODULE must be defined if " "XPIDL_SOURCES is defined.",
+ context,
+ )
+ return
+
+ if not context["XPIDL_SOURCES"]:
+ raise SandboxValidationError(
+ "XPIDL_MODULE cannot be defined " "unless there are XPIDL_SOURCES",
+ context,
+ )
+
+ if context["DIST_INSTALL"] is False:
+ self.log(
+ logging.WARN,
+ "mozbuild_warning",
+ dict(path=context.main_path),
+ "{path}: DIST_INSTALL = False has no effect on XPIDL_SOURCES.",
+ )
+
+ for idl in context["XPIDL_SOURCES"]:
+ if not os.path.exists(idl.full_path):
+ raise SandboxValidationError(
+ "File %s from XPIDL_SOURCES " "does not exist" % idl.full_path,
+ context,
+ )
+
+ yield XPIDLModule(context, xpidl_module, context["XPIDL_SOURCES"])
+
    def _process_generated_files(self, context):
        """Emit GeneratedFile objects for this context.

        Handles three variables: CONFIGURE_DEFINE_FILES (each produced by
        the shared process_define_files.py action from a ``.in`` twin),
        plus GENERATED_FILES and LOCALIZED_GENERATED_FILES, which share
        validation logic; the latter is emitted with ``localized=True``.

        Raises SandboxValidationError when a generating script or a
        source-path input does not exist, or the script is not a .py file.
        """
        # CONFIGURE_DEFINE_FILES: `foo` is generated from `foo.in` by the
        # common process_define_files.py action script.
        for path in context["CONFIGURE_DEFINE_FILES"]:
            script = mozpath.join(
                mozpath.dirname(mozpath.dirname(__file__)),
                "action",
                "process_define_files.py",
            )
            yield GeneratedFile(
                context,
                script,
                "process_define_file",
                six.text_type(path),
                [Path(context, path + ".in")],
            )

        generated_files = context.get("GENERATED_FILES") or []
        localized_generated_files = context.get("LOCALIZED_GENERATED_FILES") or []
        if not (generated_files or localized_generated_files):
            return

        # Both variables get identical validation; only the `localized`
        # flag on the emitted object differs.
        for (localized, gen) in (
            (False, generated_files),
            (True, localized_generated_files),
        ):
            for f in gen:
                flags = gen[f]
                outputs = f
                inputs = []
                if flags.script:
                    # Entry point defaults to main() unless the script spec
                    # names one after the .py extension.
                    method = "main"
                    script = SourcePath(context, flags.script).full_path

                    # Deal with cases like "C:\\path\\to\\script.py:function".
                    if ".py:" in script:
                        script, method = script.rsplit(".py:", 1)
                        script += ".py"

                    if not os.path.exists(script):
                        raise SandboxValidationError(
                            "Script for generating %s does not exist: %s" % (f, script),
                            context,
                        )
                    if os.path.splitext(script)[1] != ".py":
                        raise SandboxValidationError(
                            "Script for generating %s does not end in .py: %s"
                            % (f, script),
                            context,
                        )
                else:
                    script = None
                    method = None

                for i in flags.inputs:
                    p = Path(context, i)
                    # Only source paths can be existence-checked here; objdir
                    # inputs are produced later during the build.
                    if isinstance(p, SourcePath) and not os.path.exists(p.full_path):
                        raise SandboxValidationError(
                            "Input for generating %s does not exist: %s"
                            % (f, p.full_path),
                            context,
                        )
                    inputs.append(p)

                yield GeneratedFile(
                    context,
                    script,
                    method,
                    outputs,
                    inputs,
                    flags.flags,
                    localized=localized,
                    force=flags.force,
                )
+
+ def _process_test_manifests(self, context):
+ for prefix, info in TEST_MANIFESTS.items():
+ for path, manifest in context.get("%s_MANIFESTS" % prefix, []):
+ for obj in self._process_test_manifest(context, info, path, manifest):
+ yield obj
+
+ for flavor in REFTEST_FLAVORS:
+ for path, manifest in context.get("%s_MANIFESTS" % flavor.upper(), []):
+ for obj in self._process_reftest_manifest(
+ context, flavor, path, manifest
+ ):
+ yield obj
+
    def _process_test_manifest(self, context, info, manifest_path, mpmanifest):
        """Emit a TestManifest object for one manifestparser manifest.

        *info* is the suite's TEST_MANIFESTS entry, unpacked as
        (flavor, install_root, install_subdir, package_tests).

        Any exception raised while processing is re-raised as a
        SandboxValidationError that embeds the formatted traceback.
        """
        flavor, install_root, install_subdir, package_tests = info

        path = manifest_path.full_path
        manifest_dir = mozpath.dirname(path)
        manifest_reldir = mozpath.dirname(
            mozpath.relpath(path, context.config.topsrcdir)
        )
        manifest_sources = [
            mozpath.relpath(pth, context.config.topsrcdir)
            for pth in mpmanifest.source_files
        ]
        install_prefix = mozpath.join(install_root, install_subdir)

        try:
            if not mpmanifest.tests:
                raise SandboxValidationError("Empty test manifest: %s" % path, context)

            defaults = mpmanifest.manifest_defaults[os.path.normpath(path)]
            obj = TestManifest(
                context,
                path,
                mpmanifest,
                flavor=flavor,
                install_prefix=install_prefix,
                relpath=mozpath.join(manifest_reldir, mozpath.basename(path)),
                sources=manifest_sources,
                dupe_manifest="dupe-manifest" in defaults,
            )

            filtered = mpmanifest.tests

            # Every listed test file must exist on disk.
            missing = [t["name"] for t in filtered if not os.path.exists(t["path"])]
            if missing:
                raise SandboxValidationError(
                    "Test manifest (%s) lists "
                    "test that does not exist: %s" % (path, ", ".join(missing)),
                    context,
                )

            out_dir = mozpath.join(install_prefix, manifest_reldir)

            # Closure that folds one test's (or one set of defaults')
            # support-files into the install data on `obj`.
            def process_support_files(test):
                install_info = self._test_files_converter.convert_support_files(
                    test, install_root, manifest_dir, out_dir
                )

                obj.pattern_installs.extend(install_info.pattern_installs)
                for source, dest in install_info.installs:
                    obj.installs[source] = (dest, False)
                obj.external_installs |= install_info.external_installs
                for install_path in install_info.deferred_installs:
                    # A deferred install with no wildcard, no backing file in
                    # the srcdir, and no external install is unsatisfiable.
                    if all(
                        [
                            "*" not in install_path,
                            not os.path.isfile(
                                mozpath.join(context.config.topsrcdir, install_path[2:])
                            ),
                            install_path not in install_info.external_installs,
                        ]
                    ):
                        raise SandboxValidationError(
                            "Error processing test "
                            "manifest %s: entry in support-files not present "
                            "in the srcdir: %s" % (path, install_path),
                            context,
                        )

                obj.deferred_installs |= install_info.deferred_installs

            for test in filtered:
                obj.tests.append(test)

                # Some test files are compiled and should not be copied into the
                # test package. They function as identifiers rather than files.
                if package_tests:
                    manifest_relpath = mozpath.relpath(
                        test["path"], mozpath.dirname(test["manifest"])
                    )
                    obj.installs[mozpath.normpath(test["path"])] = (
                        (mozpath.join(out_dir, manifest_relpath)),
                        True,
                    )

                process_support_files(test)

            # NOTE(review): this loop rebinds `path`, so error messages
            # raised below report the last defaults key rather than the
            # manifest path computed above — confirm this is intended.
            for path, m_defaults in mpmanifest.manifest_defaults.items():
                process_support_files(m_defaults)

            # We also copy manifests into the output directory,
            # including manifests from [include:foo] directives.
            for mpath in mpmanifest.manifests():
                mpath = mozpath.normpath(mpath)
                out_path = mozpath.join(out_dir, mozpath.basename(mpath))
                obj.installs[mpath] = (out_path, False)

            # Some manifests reference files that are auto generated as
            # part of the build or shouldn't be installed for some
            # reason. Here, we prune those files from the install set.
            # FUTURE we should be able to detect autogenerated files from
            # other build metadata. Once we do that, we can get rid of this.
            for f in defaults.get("generated-files", "").split():
                # We re-raise otherwise the stack trace isn't informative.
                try:
                    del obj.installs[mozpath.join(manifest_dir, f)]
                except KeyError:
                    raise SandboxValidationError(
                        "Error processing test "
                        "manifest %s: entry in generated-files not present "
                        "elsewhere in manifest: %s" % (path, f),
                        context,
                    )

            yield obj
        # AssertionError is already an Exception subclass, so the tuple is
        # redundant; every failure here is wrapped with the traceback text.
        except (AssertionError, Exception):
            raise SandboxValidationError(
                "Error processing test "
                "manifest file %s: %s"
                % (path, "\n".join(traceback.format_exception(*sys.exc_info()))),
                context,
            )
+
+ def _process_reftest_manifest(self, context, flavor, manifest_path, manifest):
+ manifest_full_path = manifest_path.full_path
+ manifest_reldir = mozpath.dirname(
+ mozpath.relpath(manifest_full_path, context.config.topsrcdir)
+ )
+
+ # reftest manifests don't come from manifest parser. But they are
+ # similar enough that we can use the same emitted objects. Note
+ # that we don't perform any installs for reftests.
+ obj = TestManifest(
+ context,
+ manifest_full_path,
+ manifest,
+ flavor=flavor,
+ install_prefix="%s/" % flavor,
+ relpath=mozpath.join(manifest_reldir, mozpath.basename(manifest_path)),
+ )
+ obj.tests = list(sorted(manifest.tests, key=lambda t: t["path"]))
+
+ yield obj
+
+ def _process_jar_manifests(self, context):
+ jar_manifests = context.get("JAR_MANIFESTS", [])
+ if len(jar_manifests) > 1:
+ raise SandboxValidationError(
+ "While JAR_MANIFESTS is a list, "
+ "it is currently limited to one value.",
+ context,
+ )
+
+ for path in jar_manifests:
+ yield JARManifest(context, path)
+
+ # Temporary test to look for jar.mn files that creep in without using
+ # the new declaration. Before, we didn't require jar.mn files to
+ # declared anywhere (they were discovered). This will detect people
+ # relying on the old behavior.
+ if os.path.exists(os.path.join(context.srcdir, "jar.mn")):
+ if "jar.mn" not in jar_manifests:
+ raise SandboxValidationError(
+ "A jar.mn exists but it "
+ "is not referenced in the moz.build file. "
+ "Please define JAR_MANIFESTS.",
+ context,
+ )
+
+ def _emit_directory_traversal_from_context(self, context):
+ o = DirectoryTraversal(context)
+ o.dirs = context.get("DIRS", [])
+
+ # Some paths have a subconfigure, yet also have a moz.build. Those
+ # shouldn't end up in self._external_paths.
+ if o.objdir:
+ self._external_paths -= {o.relobjdir}
+
+ yield o
diff --git a/python/mozbuild/mozbuild/frontend/gyp_reader.py b/python/mozbuild/mozbuild/frontend/gyp_reader.py
new file mode 100644
index 0000000000..cd69dfddce
--- /dev/null
+++ b/python/mozbuild/mozbuild/frontend/gyp_reader.py
@@ -0,0 +1,497 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+import time
+
+import gyp
+import gyp.msvs_emulation
+import mozpack.path as mozpath
+import six
+from mozpack.files import FileFinder
+
+from mozbuild import shellutil
+from mozbuild.util import expand_variables
+
+from .context import VARIABLES, ObjDirPath, SourcePath, TemplateContext
+from .sandbox import alphabetical_sorted
+
# Define this module as gyp.generator.mozbuild so that gyp can use it
# as a generator under the name "mozbuild".
sys.modules["gyp.generator.mozbuild"] = sys.modules[__name__]

# build/gyp_chromium does this:
# script_dir = os.path.dirname(os.path.realpath(__file__))
# chrome_src = os.path.abspath(os.path.join(script_dir, os.pardir))
# sys.path.insert(0, os.path.join(chrome_src, 'tools', 'gyp', 'pylib'))
# We're not importing gyp_chromium, but we want both script_dir and
# chrome_src for the default includes, so go backwards from the pylib
# directory, which is the parent directory of gyp module.
chrome_src = mozpath.abspath(
    mozpath.join(mozpath.dirname(gyp.__file__), "../../../../..")
)
# The chromium-style build/ directory under chrome_src; used below to
# locate common.gypi for non-no_chromium configurations.
script_dir = mozpath.join(chrome_src, "build")
+
+
# Default variables gyp uses when evaluating gyp files.
#
# The directory-like variables must be non-empty (some gyp steps fail if
# they are empty(!)), so each expands to a $-reference to itself.  The
# remaining variables are unused by this generator and stay empty.
_DIR_VARIABLES = (
    "INTERMEDIATE_DIR",
    "SHARED_INTERMEDIATE_DIR",
    "PRODUCT_DIR",
    "LIB_DIR",
    "SHARED_LIB_DIR",
)
_UNUSED_VARIABLES = (
    "RULE_INPUT_PATH",
    "RULE_INPUT_ROOT",
    "RULE_INPUT_NAME",
    "RULE_INPUT_DIRNAME",
    "RULE_INPUT_EXT",
    "EXECUTABLE_PREFIX",
    "EXECUTABLE_SUFFIX",
    "STATIC_LIB_PREFIX",
    "STATIC_LIB_SUFFIX",
    "SHARED_LIB_PREFIX",
    "SHARED_LIB_SUFFIX",
    "LINKER_SUPPORTS_ICF",
)
generator_default_variables = {name: "$" + name for name in _DIR_VARIABLES}
generator_default_variables.update(dict.fromkeys(_UNUSED_VARIABLES, ""))
+
+
class GypContext(TemplateContext):
    """Specialized Context for use with data extracted from Gyp.

    ``config`` is the ConfigEnvironment for this context.
    ``relobjdir`` is the object directory that will be used for this
    context, relative to the topobjdir defined in the ConfigEnvironment.
    """

    def __init__(self, config, relobjdir):
        self._relobjdir = relobjdir
        super().__init__(
            template="Gyp", allowed_variables=VARIABLES, config=config
        )
+
+
def handle_actions(actions, context, action_overrides):
    """Translate gyp ``actions`` into GENERATED_FILES entries on *context*.

    Every action must appear in *action_overrides* (mapping action name to
    the mozbuild script that implements it) and must produce exactly one
    output underneath <(INTERMEDIATE_DIR).
    """
    prefix = "$INTERMEDIATE_DIR/"
    for action in actions:
        action_name = action["action_name"]
        if action_name not in action_overrides:
            raise RuntimeError(
                "GYP action %s not listed in action_overrides" % action_name
            )
        outputs = action["outputs"]
        if len(outputs) > 1:
            raise NotImplementedError(
                "GYP actions with more than one output not supported: %s"
                % action_name
            )
        output = outputs[0]
        if not output.startswith(prefix):
            raise NotImplementedError(
                "GYP actions outputting to somewhere other than "
                "<(INTERMEDIATE_DIR) not supported: %s" % output
            )
        relative_output = output[len(prefix) :]
        context["GENERATED_FILES"] += [relative_output]
        generated = context["GENERATED_FILES"][relative_output]
        generated.script = action_overrides[action_name]
        generated.inputs = action["inputs"]
+
+
def handle_copies(copies, context):
    """Translate gyp ``copies`` into EXPORTS entries on *context*.

    Only destinations under <(PRODUCT_DIR)/dist are supported; the path
    below ``dist/`` selects the (possibly nested) EXPORTS namespace the
    files are appended to.
    """
    dist_prefix = "$PRODUCT_DIR/dist/"
    for copy in copies:
        dest = copy["destination"]
        if not dest.startswith(dist_prefix):
            raise NotImplementedError(
                "GYP copies to somewhere other than <(PRODUCT_DIR)/dist not supported: %s"
                % dest
            )
        # Walk EXPORTS attribute-by-attribute down the destination path.
        node = context["EXPORTS"]
        for component in dest[len(dist_prefix) :].split("/"):
            node = getattr(node, component)
        node += sorted(copy["files"], key=lambda x: x.lower())
+
+
def process_gyp_result(
    gyp_result,
    gyp_dir_attrs,
    path,
    config,
    output,
    non_unified_sources,
    action_overrides,
):
    """Yield a GypContext per gyp target found in *gyp_result*.

    gyp_result is the (flat_list, targets, data) tuple from load_gyp.
    path is the root gyp file; output is the base objdir for the emitted
    contexts.  Unsupported target types raise NotImplementedError.
    """
    flat_list, targets, data = gyp_result
    no_chromium = gyp_dir_attrs.no_chromium
    no_unified = gyp_dir_attrs.no_unified

    # Process all targets from the given gyp files and its dependencies.
    # The path given to AllTargets needs to use os.sep, while the frontend code
    # gives us paths normalized with forward slash separator.
    for target in sorted(
        gyp.common.AllTargets(flat_list, targets, path.replace("/", os.sep))
    ):
        build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(target)

        # Each target is given its own objdir. The base of that objdir
        # is derived from the relative path from the root gyp file path
        # to the current build_file, placed under the given output
        # directory. Since several targets can be in a given build_file,
        # separate them in subdirectories using the build_file basename
        # and the target_name.
        reldir = mozpath.relpath(mozpath.dirname(build_file), mozpath.dirname(path))
        subdir = "%s_%s" % (
            mozpath.splitext(mozpath.basename(build_file))[0],
            target_name,
        )
        # Emit a context for each target.
        context = GypContext(
            config,
            mozpath.relpath(mozpath.join(output, reldir, subdir), config.topobjdir),
        )
        context.add_source(mozpath.abspath(build_file))
        # The list of included files returned by gyp are relative to build_file
        for f in data[build_file]["included_files"]:
            context.add_source(
                mozpath.abspath(mozpath.join(mozpath.dirname(build_file), f))
            )

        spec = targets[target]

        # Derive which gyp configuration to use based on MOZ_DEBUG.
        c = "Debug" if config.substs.get("MOZ_DEBUG") else "Release"
        if c not in spec["configurations"]:
            raise RuntimeError(
                "Missing %s gyp configuration for target %s "
                "in %s" % (c, target_name, build_file)
            )
        target_conf = spec["configurations"][c]

        if "actions" in spec:
            handle_actions(spec["actions"], context, action_overrides)
        if "copies" in spec:
            handle_copies(spec["copies"], context)

        use_libs = []
        libs = []

        # Recursively collect static/shared library dependencies into
        # use_libs (deduplicated).
        def add_deps(s):
            for t in s.get("dependencies", []) + s.get("dependencies_original", []):
                ty = targets[t]["type"]
                if ty in ("static_library", "shared_library"):
                    l = targets[t]["target_name"]
                    if l not in use_libs:
                        use_libs.append(l)
                # Manually expand out transitive dependencies--
                # gyp won't do this for static libs or none targets.
                if ty in ("static_library", "none"):
                    add_deps(targets[t])
            # NOTE(review): this extends with the *outer* spec's libraries
            # (not s's) on every recursive call; the loop below filters
            # duplicates, so this appears harmless — confirm intended.
            libs.extend(spec.get("libraries", []))

        # XXX: this sucks, but webrtc breaks with this right now because
        # it builds a library called 'gtest' and we just get lucky
        # that it isn't in USE_LIBS by that name anywhere.
        if no_chromium:
            add_deps(spec)

        os_libs = []
        for l in libs:
            if l.startswith("-"):
                if l.startswith("-l"):
                    # Remove "-l" for consumption in OS_LIBS. Other flags
                    # are passed through unchanged.
                    l = l[2:]
                if l not in os_libs:
                    os_libs.append(l)
            elif l.endswith(".lib"):
                l = l[:-4]
                if l not in os_libs:
                    os_libs.append(l)
            elif l:
                # For library names passed in from moz.build.
                l = os.path.basename(l)
                if l not in use_libs:
                    use_libs.append(l)

        if spec["type"] == "none":
            # A 'none' target with no actions/copies emits nothing.
            if not ("actions" in spec or "copies" in spec):
                continue
        elif spec["type"] in ("static_library", "shared_library", "executable"):
            # Remove leading 'lib' from the target_name if any, and use as
            # library name.
            name = six.ensure_text(spec["target_name"])
            if spec["type"] in ("static_library", "shared_library"):
                if name.startswith("lib"):
                    name = name[3:]
                context["LIBRARY_NAME"] = name
            else:
                context["PROGRAM"] = name
            if spec["type"] == "shared_library":
                context["FORCE_SHARED_LIB"] = True
            elif (
                spec["type"] == "static_library"
                and spec.get("variables", {}).get("no_expand_libs", "0") == "1"
            ):
                # PSM links a NSS static library, but our folded libnss
                # doesn't actually export everything that all of the
                # objects within would need, so that one library
                # should be built as a real static library.
                context["NO_EXPAND_LIBS"] = True
            if use_libs:
                context["USE_LIBS"] = sorted(use_libs, key=lambda s: s.lower())
            if os_libs:
                context["OS_LIBS"] = os_libs
            # gyp files contain headers and asm sources in sources lists.
            sources = []
            unified_sources = []
            extensions = set()
            use_defines_in_asflags = False
            for f in spec.get("sources", []):
                ext = mozpath.splitext(f)[-1]
                extensions.add(ext)
                if f.startswith("$INTERMEDIATE_DIR/"):
                    s = ObjDirPath(context, f.replace("$INTERMEDIATE_DIR/", "!"))
                else:
                    s = SourcePath(context, f)
                if ext == ".h":
                    continue
                if ext == ".def":
                    context["SYMBOLS_FILE"] = s
                elif ext != ".S" and not no_unified and s not in non_unified_sources:
                    unified_sources.append(s)
                else:
                    sources.append(s)
                # The Mozilla build system doesn't use DEFINES for building
                # ASFILES.
                if ext == ".s":
                    use_defines_in_asflags = True

            # The context expects alphabetical order when adding sources
            context["SOURCES"] = alphabetical_sorted(sources)
            context["UNIFIED_SOURCES"] = alphabetical_sorted(unified_sources)

            defines = target_conf.get("defines", [])
            if config.substs["CC_TYPE"] == "clang-cl" and no_chromium:
                msvs_settings = gyp.msvs_emulation.MsvsSettings(spec, {})
                # Hack: MsvsSettings._TargetConfig tries to compare a str to an int,
                # so convert manually.
                msvs_settings.vs_version.short_name = int(
                    msvs_settings.vs_version.short_name
                )
                defines.extend(msvs_settings.GetComputedDefines(c))
            for define in defines:
                if "=" in define:
                    name, value = define.split("=", 1)
                    context["DEFINES"][name] = value
                else:
                    context["DEFINES"][define] = True

            product_dir_dist = "$PRODUCT_DIR/dist/"
            for include in target_conf.get("include_dirs", []):
                if include.startswith(product_dir_dist):
                    # special-case includes of <(PRODUCT_DIR)/dist/ to match
                    # handle_copies above. This is used for NSS' exports.
                    include = "!/dist/include/" + include[len(product_dir_dist) :]
                elif include.startswith(config.topobjdir):
                    # NSPR_INCLUDE_DIR gets passed into the NSS build this way.
                    include = "!/" + mozpath.relpath(include, config.topobjdir)
                else:
                    # moz.build expects all LOCAL_INCLUDES to exist, so ensure they do.
                    #
                    # NB: gyp files sometimes have actual absolute paths (e.g.
                    # /usr/include32) and sometimes paths that moz.build considers
                    # absolute, i.e. starting from topsrcdir. There's no good way
                    # to tell them apart here, and the actual absolute paths are
                    # likely bogus. In any event, actual absolute paths will be
                    # filtered out by trying to find them in topsrcdir.
                    #
                    # We do allow !- and %-prefixed paths, assuming they come
                    # from moz.build and will be handled the same way as if they
                    # were given to LOCAL_INCLUDES in moz.build.
                    if include.startswith("/"):
                        resolved = mozpath.abspath(
                            mozpath.join(config.topsrcdir, include[1:])
                        )
                    elif not include.startswith(("!", "%")):
                        resolved = mozpath.abspath(
                            mozpath.join(mozpath.dirname(build_file), include)
                        )
                    # `resolved` is only bound when the include is not !- or
                    # %-prefixed; the same condition guards its use here.
                    if not include.startswith(("!", "%")) and not os.path.exists(
                        resolved
                    ):
                        continue
                context["LOCAL_INCLUDES"] += [include]

            context["ASFLAGS"] = target_conf.get("asflags_mozilla", [])
            if use_defines_in_asflags and defines:
                context["ASFLAGS"] += ["-D" + d for d in defines]
            if config.substs["OS_TARGET"] == "SunOS":
                context["LDFLAGS"] = target_conf.get("ldflags", [])
            flags = target_conf.get("cflags_mozilla", [])
            if flags:
                suffix_map = {
                    ".c": "CFLAGS",
                    ".cpp": "CXXFLAGS",
                    ".cc": "CXXFLAGS",
                    ".m": "CMFLAGS",
                    ".mm": "CMMFLAGS",
                }
                variables = (suffix_map[e] for e in extensions if e in suffix_map)
                for var in variables:
                    for f in flags:
                        # We may be getting make variable references out of the
                        # gyp data, and we don't want those in emitted data, so
                        # substitute them with their actual value.
                        f = expand_variables(f, config.substs).split()
                        if not f:
                            continue
                        # the result may be a string or a list.
                        if isinstance(f, six.string_types):
                            context[var].append(f)
                        else:
                            context[var].extend(f)
        else:
            # Ignore other types because we don't have
            # anything using them, and we're not testing them. They can be
            # added when that becomes necessary.
            raise NotImplementedError("Unsupported gyp target type: %s" % spec["type"])

        if not no_chromium:
            # Add some features to all contexts. Put here in case LOCAL_INCLUDES
            # order matters.
            context["LOCAL_INCLUDES"] += [
                "!/ipc/ipdl/_ipdlheaders",
                "/ipc/chromium/src",
            ]
            # These get set via VC project file settings for normal GYP builds.
            if config.substs["OS_TARGET"] == "WINNT":
                context["DEFINES"]["UNICODE"] = True
                context["DEFINES"]["_UNICODE"] = True
        context["COMPILE_FLAGS"]["OS_INCLUDES"] = []

        for key, value in gyp_dir_attrs.sandbox_vars.items():
            if context.get(key) and isinstance(context[key], list):
                # If we have a key from sandbox_vars that's also been
                # populated here we use the value from sandbox_vars as our
                # basis rather than overriding outright.
                context[key] = value + context[key]
            elif context.get(key) and isinstance(context[key], dict):
                context[key].update(value)
            else:
                context[key] = value

        yield context
+
+
def load_gyp(*args):
    """Run gyp.Load and return only (flat_list, targets, data).

    The leading generator module returned by gyp.Load is dropped because
    module objects aren't pickle-able and we never use it anyway.
    """
    _generator, flat_list, targets, data = gyp.Load(*args)
    return (flat_list, targets, data)
+
+
class GypProcessor(object):
    """Reads a gyp configuration in the background using the given executor and
    emits GypContexts for the backend to process.

    config is a ConfigEnvironment, path is the path to a root gyp configuration
    file, and output is the base path under which the objdir for the various
    gyp dependencies will be. gyp_dir_attrs are attributes set for the dir
    from moz.build.
    """

    def __init__(
        self,
        config,
        gyp_dir_attrs,
        path,
        output,
        executor,
        action_overrides,
        non_unified_sources,
    ):
        self._path = path
        self._config = config
        self._output = output
        self._non_unified_sources = non_unified_sources
        self._gyp_dir_attrs = gyp_dir_attrs
        self._action_overrides = action_overrides
        # Wall-time spent blocked waiting on the background gyp load;
        # accumulated in the `results` property.
        self.execution_time = 0.0
        # Cache of emitted contexts, filled after `results` is fully consumed.
        self._results = []

        # gyp expects plain str instead of unicode. The frontend code gives us
        # unicode strings, so convert them.
        if config.substs["CC_TYPE"] == "clang-cl":
            # This isn't actually used anywhere in this generator, but it's needed
            # to override the registry detection of VC++ in gyp.
            os.environ.update(
                {
                    "GYP_MSVS_OVERRIDE_PATH": "fake_path",
                    "GYP_MSVS_VERSION": config.substs["MSVS_VERSION"],
                }
            )

        params = {
            "parallel": False,
            "generator_flags": {},
            "build_files": [path],
            "root_targets": None,
        }
        # The NSS gyp configuration uses CC and CFLAGS to determine the
        # floating-point ABI on arm.
        os.environ.update(
            CC=config.substs["CC"],
            CFLAGS=shellutil.quote(*config.substs["CC_BASE_FLAGS"]),
        )

        if gyp_dir_attrs.no_chromium:
            includes = []
            depth = mozpath.dirname(path)
        else:
            depth = chrome_src
            # Files that gyp_chromium always includes
            includes = [mozpath.join(script_dir, "gyp_includes", "common.gypi")]
            finder = FileFinder(chrome_src)
            includes.extend(
                mozpath.join(chrome_src, name)
                for name, _ in finder.find("*/supplement.gypi")
            )

        str_vars = dict(gyp_dir_attrs.variables)
        str_vars["python"] = sys.executable
        # Kick off the (slow) gyp load in the background; `results` blocks
        # on this future when first consumed.
        self._gyp_loader_future = executor.submit(
            load_gyp, [path], "mozbuild", str_vars, includes, depth, params
        )

    @property
    def results(self):
        # Generator property: yields cached contexts if a previous consumer
        # ran to completion, otherwise processes the gyp load result and
        # caches the contexts once fully consumed.
        if self._results:
            for res in self._results:
                yield res
        else:
            # We report our execution time as the time spent blocked in a call
            # to `result`, which is the only case a gyp processor will
            # contribute significantly to total wall time.
            t0 = time.monotonic()
            flat_list, targets, data = self._gyp_loader_future.result()
            self.execution_time += time.monotonic() - t0
            results = []
            for res in process_gyp_result(
                (flat_list, targets, data),
                self._gyp_dir_attrs,
                self._path,
                self._config,
                self._output,
                self._non_unified_sources,
                self._action_overrides,
            ):
                results.append(res)
                yield res
            # Only reached when the generator is exhausted; partial
            # consumption leaves the cache empty.
            self._results = results
diff --git a/python/mozbuild/mozbuild/frontend/mach_commands.py b/python/mozbuild/mozbuild/frontend/mach_commands.py
new file mode 100644
index 0000000000..6d379977df
--- /dev/null
+++ b/python/mozbuild/mozbuild/frontend/mach_commands.py
@@ -0,0 +1,338 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+import os
+import sys
+from collections import defaultdict
+
+import mozpack.path as mozpath
+from mach.decorators import Command, CommandArgument, SubCommand
+
+TOPSRCDIR = os.path.abspath(os.path.join(__file__, "../../../../../"))
+
+
+class InvalidPathException(Exception):
+ """Represents an error due to an invalid path."""
+
+
+@Command(
+ "mozbuild-reference",
+ category="build-dev",
+ description="View reference documentation on mozbuild files.",
+ virtualenv_name="docs",
+)
+@CommandArgument(
+ "symbol",
+ default=None,
+ nargs="*",
+ help="Symbol to view help on. If not specified, all will be shown.",
+)
+@CommandArgument(
+ "--name-only",
+ "-n",
+ default=False,
+ action="store_true",
+ help="Print symbol names only.",
+)
+def reference(command_context, symbol, name_only=False):
+ import mozbuild.frontend.context as m
+ from mozbuild.sphinx import (
+ format_module,
+ function_reference,
+ special_reference,
+ variable_reference,
+ )
+
+ if name_only:
+ for s in sorted(m.VARIABLES.keys()):
+ print(s)
+
+ for s in sorted(m.FUNCTIONS.keys()):
+ print(s)
+
+ for s in sorted(m.SPECIAL_VARIABLES.keys()):
+ print(s)
+
+ return 0
+
+ if len(symbol):
+ for s in symbol:
+ if s in m.VARIABLES:
+ for line in variable_reference(s, *m.VARIABLES[s]):
+ print(line)
+ continue
+ elif s in m.FUNCTIONS:
+ for line in function_reference(s, *m.FUNCTIONS[s]):
+ print(line)
+ continue
+ elif s in m.SPECIAL_VARIABLES:
+ for line in special_reference(s, *m.SPECIAL_VARIABLES[s]):
+ print(line)
+ continue
+
+ print("Could not find symbol: %s" % s)
+ return 1
+
+ return 0
+
+ for line in format_module(m):
+ print(line)
+
+ return 0
+
+
+@Command(
+ "file-info", category="build-dev", description="Query for metadata about files."
+)
+def file_info(command_context):
+ """Show files metadata derived from moz.build files.
+
+ moz.build files contain "Files" sub-contexts for declaring metadata
+ against file patterns. This command suite is used to query that data.
+ """
+
+
+@SubCommand(
+ "file-info",
+ "bugzilla-component",
+ "Show Bugzilla component info for files listed.",
+)
+@CommandArgument("-r", "--rev", help="Version control revision to look up info from")
+@CommandArgument(
+ "--format",
+ choices={"json", "plain"},
+ default="plain",
+ help="Output format",
+ dest="fmt",
+)
+@CommandArgument("paths", nargs="+", help="Paths whose data to query")
+def file_info_bugzilla(command_context, paths, rev=None, fmt=None):
+ """Show Bugzilla component for a set of files.
+
+ Given a requested set of files (which can be specified using
+ wildcards), print the Bugzilla component for each file.
+ """
+ components = defaultdict(set)
+ try:
+ for p, m in _get_files_info(command_context, paths, rev=rev).items():
+ components[m.get("BUG_COMPONENT")].add(p)
+ except InvalidPathException as e:
+ print(e)
+ return 1
+
+ if fmt == "json":
+ data = {}
+ for component, files in components.items():
+ if not component:
+ continue
+ for f in files:
+ data[f] = [component.product, component.component]
+
+ json.dump(data, sys.stdout, sort_keys=True, indent=2)
+ return
+ elif fmt == "plain":
+ comp_to_file = sorted(
+ (
+ "UNKNOWN"
+ if component is None
+ else "%s :: %s" % (component.product, component.component),
+ sorted(files),
+ )
+ for component, files in components.items()
+ )
+ for component, files in comp_to_file:
+ print(component)
+ for f in files:
+ print(" %s" % f)
+ else:
+ print("unhandled output format: %s" % fmt)
+ return 1
+
+
+@SubCommand(
+ "file-info", "missing-bugzilla", "Show files missing Bugzilla component info"
+)
+@CommandArgument("-r", "--rev", help="Version control revision to look up info from")
+@CommandArgument(
+ "--format",
+ choices={"json", "plain"},
+ dest="fmt",
+ default="plain",
+ help="Output format",
+)
+@CommandArgument("paths", nargs="+", help="Paths whose data to query")
+def file_info_missing_bugzilla(command_context, paths, rev=None, fmt=None):
+ missing = set()
+
+ try:
+ for p, m in _get_files_info(command_context, paths, rev=rev).items():
+ if "BUG_COMPONENT" not in m:
+ missing.add(p)
+ except InvalidPathException as e:
+ print(e)
+ return 1
+
+ if fmt == "json":
+ json.dump({"missing": sorted(missing)}, sys.stdout, indent=2)
+ return
+ elif fmt == "plain":
+ for f in sorted(missing):
+ print(f)
+ else:
+ print("unhandled output format: %s" % fmt)
+ return 1
+
+
+@SubCommand(
+ "file-info",
+ "bugzilla-automation",
+ "Perform Bugzilla metadata analysis as required for automation",
+)
+@CommandArgument("out_dir", help="Where to write files")
+def bugzilla_automation(command_context, out_dir):
+ """Analyze and validate Bugzilla metadata as required by automation.
+
+ This will write out JSON and gzipped JSON files for Bugzilla metadata.
+
+ The exit code will be non-0 if Bugzilla metadata fails validation.
+ """
+ import gzip
+
+ missing_component = set()
+ seen_components = set()
+ component_by_path = {}
+
+ # TODO operate in VCS space. This requires teaching the VCS reader
+ # to understand wildcards and/or for the relative path issue in the
+ # VCS finder to be worked out.
+ for p, m in sorted(_get_files_info(command_context, ["**"]).items()):
+ if "BUG_COMPONENT" not in m:
+ missing_component.add(p)
+ print(
+ "FileToBugzillaMappingError: Missing Bugzilla component: "
+ "%s - Set the BUG_COMPONENT in the moz.build file to fix "
+ "the issue." % p
+ )
+ continue
+
+ c = m["BUG_COMPONENT"]
+ seen_components.add(c)
+ component_by_path[p] = [c.product, c.component]
+
+ print("Examined %d files" % len(component_by_path))
+
+    # We also have a normalized version of the file-to-components mapping
+ # that requires far less storage space by eliminating redundant strings.
+ indexed_components = {
+ i: [c.product, c.component] for i, c in enumerate(sorted(seen_components))
+ }
+ components_index = {tuple(v): k for k, v in indexed_components.items()}
+ normalized_component = {"components": indexed_components, "paths": {}}
+
+ for p, c in component_by_path.items():
+ d = normalized_component["paths"]
+ while "/" in p:
+ base, p = p.split("/", 1)
+ d = d.setdefault(base, {})
+
+ d[p] = components_index[tuple(c)]
+
+ if not os.path.exists(out_dir):
+ os.makedirs(out_dir)
+
+ components_json = os.path.join(out_dir, "components.json")
+ print("Writing %s" % components_json)
+ with open(components_json, "w") as fh:
+ json.dump(component_by_path, fh, sort_keys=True, indent=2)
+
+ missing_json = os.path.join(out_dir, "missing.json")
+ print("Writing %s" % missing_json)
+ with open(missing_json, "w") as fh:
+ json.dump({"missing": sorted(missing_component)}, fh, indent=2)
+
+ indexed_components_json = os.path.join(out_dir, "components-normalized.json")
+ print("Writing %s" % indexed_components_json)
+ with open(indexed_components_json, "w") as fh:
+ # Don't indent so file is as small as possible.
+ json.dump(normalized_component, fh, sort_keys=True)
+
+ # Write compressed versions of JSON files.
+ for p in (components_json, indexed_components_json, missing_json):
+ gzip_path = "%s.gz" % p
+ print("Writing %s" % gzip_path)
+ with open(p, "rb") as ifh, gzip.open(gzip_path, "wb") as ofh:
+ while True:
+ data = ifh.read(32768)
+ if not data:
+ break
+ ofh.write(data)
+
+ # Causes CI task to fail if files are missing Bugzilla annotation.
+ if missing_component:
+ return 1
+
+
+def _get_files_info(command_context, paths, rev=None):
+ reader = command_context.mozbuild_reader(config_mode="empty", vcs_revision=rev)
+
+ # Normalize to relative from topsrcdir.
+ relpaths = []
+ for p in paths:
+ a = mozpath.abspath(p)
+ if not mozpath.basedir(a, [command_context.topsrcdir]):
+ raise InvalidPathException("path is outside topsrcdir: %s" % p)
+
+ relpaths.append(mozpath.relpath(a, command_context.topsrcdir))
+
+ # Expand wildcards.
+ # One variable is for ordering. The other for membership tests.
+ # (Membership testing on a list can be slow.)
+ allpaths = []
+ all_paths_set = set()
+ for p in relpaths:
+ if "*" not in p:
+ if p not in all_paths_set:
+ if not os.path.exists(mozpath.join(command_context.topsrcdir, p)):
+ print("(%s does not exist; ignoring)" % p, file=sys.stderr)
+ continue
+
+ all_paths_set.add(p)
+ allpaths.append(p)
+ continue
+
+ if rev:
+ raise InvalidPathException("cannot use wildcard in version control mode")
+
+ # finder is rooted at / for now.
+ # TODO bug 1171069 tracks changing to relative.
+ search = mozpath.join(command_context.topsrcdir, p)[1:]
+ for path, f in reader.finder.find(search):
+ path = path[len(command_context.topsrcdir) :]
+ if path not in all_paths_set:
+ all_paths_set.add(path)
+ allpaths.append(path)
+
+ return reader.files_info(allpaths)
+
+
+@SubCommand(
+ "file-info", "schedules", "Show the combined SCHEDULES for the files listed."
+)
+@CommandArgument("paths", nargs="+", help="Paths whose data to query")
+def file_info_schedules(command_context, paths):
+ """Show what is scheduled by the given files.
+
+ Given a requested set of files (which can be specified using
+ wildcards), print the total set of scheduled components.
+ """
+ from mozbuild.frontend.reader import BuildReader, EmptyConfig
+
+ config = EmptyConfig(TOPSRCDIR)
+ reader = BuildReader(config)
+ schedules = set()
+ for p, m in reader.files_info(paths).items():
+ schedules |= set(m["SCHEDULES"].components)
+
+ print(", ".join(schedules))
diff --git a/python/mozbuild/mozbuild/frontend/reader.py b/python/mozbuild/mozbuild/frontend/reader.py
new file mode 100644
index 0000000000..9d624b37ec
--- /dev/null
+++ b/python/mozbuild/mozbuild/frontend/reader.py
@@ -0,0 +1,1432 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This file contains code for reading metadata from the build system into
+# data structures.
+
+r"""Read build frontend files into data structures.
+
+In terms of code architecture, the main interface is BuildReader. BuildReader
+starts with a root mozbuild file. It creates a new execution environment for
+this file, which is represented by the Sandbox class. The Sandbox class is used
+to fill a Context, representing the output of an individual mozbuild file.
+
+The BuildReader contains basic logic for traversing a tree of mozbuild files.
+It does this by examining specific variables populated during execution.
+"""
+
+import ast
+import inspect
+import logging
+import os
+import sys
+import textwrap
+import time
+import traceback
+import types
+from collections import OrderedDict, defaultdict
+from concurrent.futures.process import ProcessPoolExecutor
+from io import StringIO
+from itertools import chain
+from multiprocessing import cpu_count
+
+import mozpack.path as mozpath
+import six
+from mozpack.files import FileFinder
+from six import string_types
+
+from mozbuild.backend.configenvironment import ConfigEnvironment
+from mozbuild.base import ExecutionSummary
+from mozbuild.util import (
+ EmptyValue,
+ HierarchicalStringList,
+ ReadOnlyDefaultDict,
+ memoize,
+)
+
+from .context import (
+ DEPRECATION_HINTS,
+ FUNCTIONS,
+ SPECIAL_VARIABLES,
+ SUBCONTEXTS,
+ VARIABLES,
+ Context,
+ ContextDerivedValue,
+ Files,
+ SourcePath,
+ SubContext,
+ TemplateContext,
+)
+from .sandbox import (
+ Sandbox,
+ SandboxError,
+ SandboxExecutionError,
+ SandboxLoadError,
+ default_finder,
+)
+
+if six.PY2:
+ type_type = types.TypeType
+else:
+ type_type = type
+
+
+def log(logger, level, action, params, formatter):
+ logger.log(level, formatter, extra={"action": action, "params": params})
+
+
+class EmptyConfig(object):
+ """A config object that is empty.
+
+ This config object is suitable for using with a BuildReader on a vanilla
+ checkout, without any existing configuration. The config is simply
+ bootstrapped from a top source directory path.
+ """
+
+ class PopulateOnGetDict(ReadOnlyDefaultDict):
+ """A variation on ReadOnlyDefaultDict that populates during .get().
+
+ This variation is needed because CONFIG uses .get() to access members.
+ Without it, None (instead of our EmptyValue types) would be returned.
+ """
+
+ def get(self, key, default=None):
+ return self[key]
+
+ default_substs = {
+ # These 2 variables are used semi-frequently and it isn't worth
+ # changing all the instances.
+ "MOZ_APP_NAME": "empty",
+ "MOZ_CHILD_PROCESS_NAME": "empty",
+ # Needed to prevent js/src's config.status from loading.
+ "JS_STANDALONE": "1",
+ }
+
+ def __init__(self, topsrcdir, substs=None):
+ self.topsrcdir = topsrcdir
+ self.topobjdir = ""
+
+ self.substs = self.PopulateOnGetDict(EmptyValue, substs or self.default_substs)
+ self.defines = self.substs
+ self.error_is_fatal = False
+
+
+def is_read_allowed(path, config):
+ """Whether we are allowed to load a mozbuild file at the specified path.
+
+ This is used as cheap security to ensure the build is isolated to known
+ source directories.
+
+ We are allowed to read from the main source directory and any defined
+ external source directories. The latter is to allow 3rd party applications
+ to hook into our build system.
+ """
+ assert os.path.isabs(path)
+ assert os.path.isabs(config.topsrcdir)
+
+ path = mozpath.normpath(path)
+ topsrcdir = mozpath.normpath(config.topsrcdir)
+
+ if mozpath.basedir(path, [topsrcdir]):
+ return True
+
+ return False
+
+
+class SandboxCalledError(SandboxError):
+ """Represents an error resulting from calling the error() function."""
+
+ def __init__(self, file_stack, message):
+ SandboxError.__init__(self, file_stack)
+ self.message = message
+
+
+class MozbuildSandbox(Sandbox):
+ """Implementation of a Sandbox tailored for mozbuild files.
+
+ We expose a few useful functions and expose the set of variables defining
+ Mozilla's build system.
+
+ context is a Context instance.
+
+ metadata is a dict of metadata that can be used during the sandbox
+ evaluation.
+ """
+
+ def __init__(self, context, metadata={}, finder=default_finder):
+ assert isinstance(context, Context)
+
+ Sandbox.__init__(self, context, finder=finder)
+
+ self._log = logging.getLogger(__name__)
+
+ self.metadata = dict(metadata)
+ exports = self.metadata.get("exports", {})
+ self.exports = set(exports.keys())
+ context.update(exports)
+ self.templates = self.metadata.setdefault("templates", {})
+ self.special_variables = self.metadata.setdefault(
+ "special_variables", SPECIAL_VARIABLES
+ )
+ self.functions = self.metadata.setdefault("functions", FUNCTIONS)
+ self.subcontext_types = self.metadata.setdefault("subcontexts", SUBCONTEXTS)
+
+ def __getitem__(self, key):
+ if key in self.special_variables:
+ return self.special_variables[key][0](self._context)
+ if key in self.functions:
+ return self._create_function(self.functions[key])
+ if key in self.subcontext_types:
+ return self._create_subcontext(self.subcontext_types[key])
+ if key in self.templates:
+ return self._create_template_wrapper(self.templates[key])
+ return Sandbox.__getitem__(self, key)
+
+ def __contains__(self, key):
+ if any(
+ key in d
+ for d in (
+ self.special_variables,
+ self.functions,
+ self.subcontext_types,
+ self.templates,
+ )
+ ):
+ return True
+
+ return Sandbox.__contains__(self, key)
+
+ def __setitem__(self, key, value):
+ if key in self.special_variables and value is self[key]:
+ return
+ if (
+ key in self.special_variables
+ or key in self.functions
+ or key in self.subcontext_types
+ ):
+ raise KeyError('Cannot set "%s" because it is a reserved keyword' % key)
+ if key in self.exports:
+ self._context[key] = value
+ self.exports.remove(key)
+ return
+ Sandbox.__setitem__(self, key, value)
+
+ def exec_file(self, path):
+ """Override exec_file to normalize paths and restrict file loading.
+
+ Paths will be rejected if they do not fall under topsrcdir or one of
+ the external roots.
+ """
+
+ # realpath() is needed for true security. But, this isn't for security
+ # protection, so it is omitted.
+ if not is_read_allowed(path, self._context.config):
+ raise SandboxLoadError(
+ self._context.source_stack, sys.exc_info()[2], illegal_path=path
+ )
+
+ Sandbox.exec_file(self, path)
+
+ def _export(self, varname):
+ """Export the variable to all subdirectories of the current path."""
+
+ exports = self.metadata.setdefault("exports", dict())
+ if varname in exports:
+ raise Exception("Variable has already been exported: %s" % varname)
+
+ try:
+ # Doing a regular self._context[varname] causes a set as a side
+ # effect. By calling the dict method instead, we don't have any
+ # side effects.
+ exports[varname] = dict.__getitem__(self._context, varname)
+ except KeyError:
+ self.last_name_error = KeyError("global_ns", "get_unknown", varname)
+ raise self.last_name_error
+
+ def recompute_exports(self):
+ """Recompute the variables to export to subdirectories with the current
+ values in the subdirectory."""
+
+ if "exports" in self.metadata:
+ for key in self.metadata["exports"]:
+ self.metadata["exports"][key] = self[key]
+
+ def _include(self, path):
+ """Include and exec another file within the context of this one."""
+
+ # path is a SourcePath
+ self.exec_file(path.full_path)
+
+ def _warning(self, message):
+ # FUTURE consider capturing warnings in a variable instead of printing.
+ print("WARNING: %s" % message, file=sys.stderr)
+
+ def _error(self, message):
+ if self._context.error_is_fatal:
+ raise SandboxCalledError(self._context.source_stack, message)
+ else:
+ self._warning(message)
+
+ def _template_decorator(self, func):
+ """Registers a template function."""
+
+ if not inspect.isfunction(func):
+ raise Exception(
+ "`template` is a function decorator. You must "
+ "use it as `@template` preceding a function declaration."
+ )
+
+ name = func.__name__
+
+ if name in self.templates:
+ raise KeyError(
+ 'A template named "%s" was already declared in %s.'
+ % (name, self.templates[name].path)
+ )
+
+ if name.islower() or name.isupper() or name[0].islower():
+ raise NameError("Template function names must be CamelCase.")
+
+ self.templates[name] = TemplateFunction(func, self)
+
+ @memoize
+ def _create_subcontext(self, cls):
+ """Return a function object that creates SubContext instances."""
+
+ def fn(*args, **kwargs):
+ return cls(self._context, *args, **kwargs)
+
+ return fn
+
+ @memoize
+ def _create_function(self, function_def):
+ """Returns a function object for use within the sandbox for the given
+ function definition.
+
+ The wrapper function does type coercion on the function arguments
+ """
+ func, args_def, doc = function_def
+
+ def function(*args):
+ def coerce(arg, type):
+ if not isinstance(arg, type):
+ if issubclass(type, ContextDerivedValue):
+ arg = type(self._context, arg)
+ else:
+ arg = type(arg)
+ return arg
+
+ args = [coerce(arg, type) for arg, type in zip(args, args_def)]
+ return func(self)(*args)
+
+ return function
+
+ @memoize
+ def _create_template_wrapper(self, template):
+ """Returns a function object for use within the sandbox for the given
+        TemplateFunction instance.
+
+ When a moz.build file contains a reference to a template call, the
+ sandbox needs a function to execute. This is what this method returns.
+ That function creates a new sandbox for execution of the template.
+ After the template is executed, the data from its execution is merged
+ with the context of the calling sandbox.
+ """
+
+ def template_wrapper(*args, **kwargs):
+ context = TemplateContext(
+ template=template.name,
+ allowed_variables=self._context._allowed_variables,
+ config=self._context.config,
+ )
+ context.add_source(self._context.current_path)
+ for p in self._context.all_paths:
+ context.add_source(p)
+
+ sandbox = MozbuildSandbox(
+ context,
+ metadata={
+ # We should arguably set these defaults to something else.
+ # Templates, for example, should arguably come from the state
+ # of the sandbox from when the template was declared, not when
+ # it was instantiated. Bug 1137319.
+ "functions": self.metadata.get("functions", {}),
+ "special_variables": self.metadata.get("special_variables", {}),
+ "subcontexts": self.metadata.get("subcontexts", {}),
+ "templates": self.metadata.get("templates", {}),
+ },
+ finder=self._finder,
+ )
+
+ template.exec_in_sandbox(sandbox, *args, **kwargs)
+
+ # This is gross, but allows the merge to happen. Eventually, the
+            # merging will go away and template contexts will be emitted independently.
+ klass = self._context.__class__
+ self._context.__class__ = TemplateContext
+ # The sandbox will do all the necessary checks for these merges.
+ for key, value in context.items():
+ if isinstance(value, dict):
+ self[key].update(value)
+ elif isinstance(value, (list, HierarchicalStringList)):
+ self[key] += value
+ else:
+ self[key] = value
+ self._context.__class__ = klass
+
+ for p in context.all_paths:
+ self._context.add_source(p)
+
+ return template_wrapper
+
+
+class TemplateFunction(object):
+ def __init__(self, func, sandbox):
+ self.path = func.__code__.co_filename
+ self.name = func.__name__
+
+ code = func.__code__
+ firstlineno = code.co_firstlineno
+ lines = sandbox._current_source.splitlines(True)
+ if lines:
+ # Older versions of python 2.7 had a buggy inspect.getblock() that
+ # would ignore the last line if it didn't terminate with a newline.
+ if not lines[-1].endswith("\n"):
+ lines[-1] += "\n"
+ lines = inspect.getblock(lines[firstlineno - 1 :])
+
+ # The code lines we get out of inspect.getsourcelines look like
+ # @template
+ # def Template(*args, **kwargs):
+ # VAR = 'value'
+ # ...
+ func_ast = ast.parse("".join(lines), self.path)
+ # Remove decorators
+ func_ast.body[0].decorator_list = []
+ # Adjust line numbers accordingly
+ ast.increment_lineno(func_ast, firstlineno - 1)
+
+ # When using a custom dictionary for function globals/locals, Cpython
+ # actually never calls __getitem__ and __setitem__, so we need to
+ # modify the AST so that accesses to globals are properly directed
+ # to a dict. AST wants binary_type for this in Py2 and text_type for
+ # this in Py3, so cast to str.
+ self._global_name = str("_data")
+ # In case '_data' is a name used for a variable in the function code,
+ # prepend more underscores until we find an unused name.
+ while (
+ self._global_name in code.co_names or self._global_name in code.co_varnames
+ ):
+ self._global_name += str("_")
+ func_ast = self.RewriteName(sandbox, self._global_name).visit(func_ast)
+
+ # Execute the rewritten code. That code now looks like:
+ # def Template(*args, **kwargs):
+ # _data['VAR'] = 'value'
+ # ...
+ # The result of executing this code is the creation of a 'Template'
+ # function object in the global namespace.
+ glob = {"__builtins__": sandbox._builtins}
+ func = types.FunctionType(
+ compile(func_ast, self.path, "exec"),
+ glob,
+ self.name,
+ func.__defaults__,
+ func.__closure__,
+ )
+ func()
+
+ self._func = glob[self.name]
+
+ def exec_in_sandbox(self, sandbox, *args, **kwargs):
+ """Executes the template function in the given sandbox."""
+ # Create a new function object associated with the execution sandbox
+ glob = {self._global_name: sandbox, "__builtins__": sandbox._builtins}
+ func = types.FunctionType(
+ self._func.__code__,
+ glob,
+ self.name,
+ self._func.__defaults__,
+ self._func.__closure__,
+ )
+ sandbox.exec_function(func, args, kwargs, self.path, becomes_current_path=False)
+
+ class RewriteName(ast.NodeTransformer):
+ """AST Node Transformer to rewrite variable accesses to go through
+ a dict.
+ """
+
+ def __init__(self, sandbox, global_name):
+ self._sandbox = sandbox
+ self._global_name = global_name
+
+ def visit_Str(self, node):
+ node.s = six.ensure_text(node.s)
+ return node
+
+ def visit_Name(self, node):
+ # Modify uppercase variable references and names known to the
+ # sandbox as if they were retrieved from a dict instead.
+ if not node.id.isupper() and node.id not in self._sandbox:
+ return node
+
+ def c(new_node):
+ return ast.copy_location(new_node, node)
+
+ return c(
+ ast.Subscript(
+ value=c(ast.Name(id=self._global_name, ctx=ast.Load())),
+ slice=c(ast.Index(value=c(ast.Str(s=node.id)))),
+ ctx=node.ctx,
+ )
+ )
+
+
+class SandboxValidationError(Exception):
+ """Represents an error encountered when validating sandbox results."""
+
+ def __init__(self, message, context):
+ Exception.__init__(self, message)
+ self.context = context
+
+ def __str__(self):
+ s = StringIO()
+
+ delim = "=" * 30
+ s.write("\n%s\nFATAL ERROR PROCESSING MOZBUILD FILE\n%s\n\n" % (delim, delim))
+
+ s.write("The error occurred while processing the following file or ")
+ s.write("one of the files it includes:\n")
+ s.write("\n")
+ s.write(" %s/moz.build\n" % self.context.srcdir)
+ s.write("\n")
+
+ s.write("The error occurred when validating the result of ")
+ s.write("the execution. The reported error is:\n")
+ s.write("\n")
+ s.write(
+ "".join(
+ " %s\n" % l
+ for l in super(SandboxValidationError, self).__str__().splitlines()
+ )
+ )
+ s.write("\n")
+
+ return s.getvalue()
+
+
+class BuildReaderError(Exception):
+ """Represents errors encountered during BuildReader execution.
+
+ The main purpose of this class is to facilitate user-actionable error
+ messages. Execution errors should say:
+
+ - Why they failed
+ - Where they failed
+ - What can be done to prevent the error
+
+ A lot of the code in this class should arguably be inside sandbox.py.
+ However, extraction is somewhat difficult given the additions
+ MozbuildSandbox has over Sandbox (e.g. the concept of included files -
+ which affect error messages, of course).
+ """
+
+ def __init__(
+ self,
+ file_stack,
+ trace,
+ sandbox_exec_error=None,
+ sandbox_load_error=None,
+ validation_error=None,
+ other_error=None,
+ sandbox_called_error=None,
+ ):
+
+ self.file_stack = file_stack
+ self.trace = trace
+ self.sandbox_called_error = sandbox_called_error
+ self.sandbox_exec = sandbox_exec_error
+ self.sandbox_load = sandbox_load_error
+ self.validation_error = validation_error
+ self.other = other_error
+
+ @property
+ def main_file(self):
+ return self.file_stack[-1]
+
+ @property
+ def actual_file(self):
+ # We report the file that called out to the file that couldn't load.
+ if self.sandbox_load is not None:
+ if len(self.sandbox_load.file_stack) > 1:
+ return self.sandbox_load.file_stack[-2]
+
+ if len(self.file_stack) > 1:
+ return self.file_stack[-2]
+
+ if self.sandbox_error is not None and len(self.sandbox_error.file_stack):
+ return self.sandbox_error.file_stack[-1]
+
+ return self.file_stack[-1]
+
+ @property
+ def sandbox_error(self):
+ return self.sandbox_exec or self.sandbox_load or self.sandbox_called_error
+
+ def __str__(self):
+ s = StringIO()
+
+ delim = "=" * 30
+ s.write("\n%s\nFATAL ERROR PROCESSING MOZBUILD FILE\n%s\n\n" % (delim, delim))
+
+ s.write("The error occurred while processing the following file:\n")
+ s.write("\n")
+ s.write(" %s\n" % self.actual_file)
+ s.write("\n")
+
+ if self.actual_file != self.main_file and not self.sandbox_load:
+ s.write("This file was included as part of processing:\n")
+ s.write("\n")
+ s.write(" %s\n" % self.main_file)
+ s.write("\n")
+
+ if self.sandbox_error is not None:
+ self._print_sandbox_error(s)
+ elif self.validation_error is not None:
+ s.write("The error occurred when validating the result of ")
+ s.write("the execution. The reported error is:\n")
+ s.write("\n")
+ s.write(
+ "".join(
+ " %s\n" % l
+ for l in six.text_type(self.validation_error).splitlines()
+ )
+ )
+ s.write("\n")
+ else:
+ s.write("The error appears to be part of the %s " % __name__)
+ s.write("Python module itself! It is possible you have stumbled ")
+ s.write("across a legitimate bug.\n")
+ s.write("\n")
+
+ for l in traceback.format_exception(
+ type(self.other), self.other, self.trace
+ ):
+ s.write(six.ensure_text(l))
+
+ return s.getvalue()
+
+ def _print_sandbox_error(self, s):
+ # Try to find the frame of the executed code.
+ script_frame = None
+
+ # We don't currently capture the trace for SandboxCalledError.
+ # Therefore, we don't get line numbers from the moz.build file.
+ # FUTURE capture this.
+ trace = getattr(self.sandbox_error, "trace", None)
+ frames = []
+ if trace:
+ frames = traceback.extract_tb(trace)
+ for frame in frames:
+ if frame[0] == self.actual_file:
+ script_frame = frame
+
+ # Reset if we enter a new execution context. This prevents errors
+            # in this module from being attributed to a script.
+ elif frame[0] == __file__ and frame[2] == "exec_function":
+ script_frame = None
+
+ if script_frame is not None:
+ s.write("The error was triggered on line %d " % script_frame[1])
+ s.write("of this file:\n")
+ s.write("\n")
+ s.write(" %s\n" % script_frame[3])
+ s.write("\n")
+
+ if self.sandbox_called_error is not None:
+ self._print_sandbox_called_error(s)
+ return
+
+ if self.sandbox_load is not None:
+ self._print_sandbox_load_error(s)
+ return
+
+ self._print_sandbox_exec_error(s)
+
+ def _print_sandbox_called_error(self, s):
+ assert self.sandbox_called_error is not None
+
+ s.write("A moz.build file called the error() function.\n")
+ s.write("\n")
+ s.write("The error it encountered is:\n")
+ s.write("\n")
+ s.write(" %s\n" % self.sandbox_called_error.message)
+ s.write("\n")
+ s.write("Correct the error condition and try again.\n")
+
+ def _print_sandbox_load_error(self, s):
+ assert self.sandbox_load is not None
+
+ if self.sandbox_load.illegal_path is not None:
+ s.write("The underlying problem is an illegal file access. ")
+ s.write("This is likely due to trying to access a file ")
+ s.write("outside of the top source directory.\n")
+ s.write("\n")
+ s.write("The path whose access was denied is:\n")
+ s.write("\n")
+ s.write(" %s\n" % self.sandbox_load.illegal_path)
+ s.write("\n")
+ s.write("Modify the script to not access this file and ")
+ s.write("try again.\n")
+ return
+
+ if self.sandbox_load.read_error is not None:
+ if not os.path.exists(self.sandbox_load.read_error):
+ s.write("The underlying problem is we referenced a path ")
+ s.write("that does not exist. That path is:\n")
+ s.write("\n")
+ s.write(" %s\n" % self.sandbox_load.read_error)
+ s.write("\n")
+ s.write("Either create the file if it needs to exist or ")
+ s.write("do not reference it.\n")
+ else:
+ s.write("The underlying problem is a referenced path could ")
+ s.write("not be read. The trouble path is:\n")
+ s.write("\n")
+ s.write(" %s\n" % self.sandbox_load.read_error)
+ s.write("\n")
+ s.write("It is possible the path is not correct. Is it ")
+ s.write("pointing to a directory? It could also be a file ")
+ s.write("permissions issue. Ensure that the file is ")
+ s.write("readable.\n")
+
+ return
+
+ # This module is buggy if you see this.
+ raise AssertionError("SandboxLoadError with unhandled properties!")
+
+ def _print_sandbox_exec_error(self, s):
+ assert self.sandbox_exec is not None
+
+ inner = self.sandbox_exec.exc_value
+
+ if isinstance(inner, SyntaxError):
+ s.write("The underlying problem is a Python syntax error ")
+ s.write("on line %d:\n" % inner.lineno)
+ s.write("\n")
+ s.write(" %s\n" % inner.text)
+ if inner.offset:
+ s.write((" " * (inner.offset + 4)) + "^\n")
+ s.write("\n")
+ s.write("Fix the syntax error and try again.\n")
+ return
+
+ if isinstance(inner, KeyError):
+ self._print_keyerror(inner, s)
+ elif isinstance(inner, ValueError):
+ self._print_valueerror(inner, s)
+ else:
+ self._print_exception(inner, s)
+
    def _print_keyerror(self, inner, s):
        """Write a friendly report for a sandbox-raised KeyError to *s*.

        Sandbox KeyErrors use machine-parseable args:
        ``(namespace, operation, variable)`` where ``namespace`` is
        "global_ns" or "local_ns". KeyErrors that don't match that shape
        are treated as ordinary script errors.
        """
        if not inner.args or inner.args[0] not in ("global_ns", "local_ns"):
            # Not one of ours; fall back to the generic printer.
            self._print_exception(inner, s)
            return

        if inner.args[0] == "global_ns":
            import difflib

            verb = None
            if inner.args[1] == "get_unknown":
                verb = "read"
            elif inner.args[1] == "set_unknown":
                verb = "write"
            elif inner.args[1] == "reassign":
                s.write("The underlying problem is an attempt to reassign ")
                s.write("a reserved UPPERCASE variable.\n")
                s.write("\n")
                s.write("The reassigned variable causing the error is:\n")
                s.write("\n")
                s.write(" %s\n" % inner.args[2])
                s.write("\n")
                s.write('Maybe you meant "+=" instead of "="?\n')
                return
            else:
                raise AssertionError("Unhandled global_ns: %s" % inner.args[1])

            s.write("The underlying problem is an attempt to %s " % verb)
            s.write("a reserved UPPERCASE variable that does not exist.\n")
            s.write("\n")
            s.write("The variable %s causing the error is:\n" % verb)
            s.write("\n")
            s.write(" %s\n" % inner.args[2])
            s.write("\n")
            # Suggest likely intended variables (at most 2 fuzzy matches).
            close_matches = difflib.get_close_matches(
                inner.args[2], VARIABLES.keys(), 2
            )
            if close_matches:
                s.write("Maybe you meant %s?\n" % " or ".join(close_matches))
                s.write("\n")

            # Removed/renamed variables get a targeted migration hint
            # instead of the generic variable list.
            if inner.args[2] in DEPRECATION_HINTS:
                s.write(
                    "%s\n" % textwrap.dedent(DEPRECATION_HINTS[inner.args[2]]).strip()
                )
                return

            s.write("Please change the file to not use this variable.\n")
            s.write("\n")
            s.write("For reference, the set of valid variables is:\n")
            s.write("\n")
            s.write(", ".join(sorted(VARIABLES.keys())) + "\n")
            return

        # local_ns: an undefined lowercase (local) variable was referenced.
        s.write("The underlying problem is a reference to an undefined ")
        s.write("local variable:\n")
        s.write("\n")
        s.write(" %s\n" % inner.args[2])
        s.write("\n")
        s.write("Please change the file to not reference undefined ")
        s.write("variables and try again.\n")
+
    def _print_valueerror(self, inner, s):
        """Write a friendly report for a sandbox-raised ValueError to *s*.

        Sandbox ValueErrors use machine-parseable args:
        ``(namespace, "set_type", variable, value, expected_type_or_types)``.
        Non-conforming ValueErrors fall back to the generic printer.
        """
        if not inner.args or inner.args[0] not in ("global_ns", "local_ns"):
            self._print_exception(inner, s)
            return

        assert inner.args[1] == "set_type"

        s.write("The underlying problem is an attempt to write an illegal ")
        s.write("value to a special variable.\n")
        s.write("\n")
        s.write("The variable whose value was rejected is:\n")
        s.write("\n")
        s.write(" %s" % inner.args[2])
        s.write("\n")
        s.write("The value being written to it was of the following type:\n")
        s.write("\n")
        s.write(" %s\n" % type(inner.args[3]).__name__)
        s.write("\n")
        s.write("This variable expects the following type(s):\n")
        s.write("\n")
        # args[4] is either a single type or an iterable of allowed types.
        if type(inner.args[4]) == type_type:
            s.write(" %s\n" % inner.args[4].__name__)
        else:
            for t in inner.args[4]:
                s.write(" %s\n" % t.__name__)
        s.write("\n")
        s.write("Change the file to write a value of the appropriate type ")
        s.write("and try again.\n")
+
+ def _print_exception(self, e, s):
+ s.write("An error was encountered as part of executing the file ")
+ s.write("itself. The error appears to be the fault of the script.\n")
+ s.write("\n")
+ s.write("The error as reported by Python is:\n")
+ s.write("\n")
+ s.write(" %s\n" % traceback.format_exception_only(type(e), e))
+
+
+class BuildReader(object):
+ """Read a tree of mozbuild files into data structures.
+
+ This is where the build system starts. You give it a tree configuration
+ (the output of configuration) and it executes the moz.build files and
+ collects the data they define.
+
+ The reader can optionally call a callable after each sandbox is evaluated
+ but before its evaluated content is processed. This gives callers the
+ opportunity to modify contexts before side-effects occur from their
+ content. This callback receives the ``Context`` containing the result of
+ each sandbox evaluation. Its return value is ignored.
+ """
+
    def __init__(self, config, finder=default_finder):
        """Initialize a reader for *config* (the output of configuration).

        ``finder`` abstracts filesystem access; tests may substitute an
        in-memory implementation.
        """
        self.config = config

        self._log = logging.getLogger(__name__)
        # Paths already executed; used to skip duplicate reads.
        self._read_files = set()
        # Stack of moz.build paths currently being executed (for errors).
        self._execution_stack = []
        self.finder = finder

        # Finder patterns to ignore when searching for moz.build files.
        ignores = {
            # Ignore fake moz.build files used for testing moz.build.
            "python/mozbuild/mozbuild/test",
            "testing/mozbase/moztest/tests/data",
            # Ignore object directories.
            "obj*",
        }

        self._relevant_mozbuild_finder = FileFinder(
            self.config.topsrcdir, ignore=ignores
        )

        # Also ignore any other directories that could be objdirs, they don't
        # necessarily start with the string 'obj'.
        for path, f in self._relevant_mozbuild_finder.find("*/config.status"):
            self._relevant_mozbuild_finder.ignore.add(os.path.dirname(path))

        max_workers = cpu_count()
        if sys.platform.startswith("win"):
            # In python 3, on Windows, ProcessPoolExecutor uses
            # _winapi.WaitForMultipleObjects, which doesn't work on large
            # number of objects. It also has some automatic capping to avoid
            # _winapi.WaitForMultipleObjects being unhappy as a consequence,
            # but that capping is actually insufficient in python 3.7 and 3.8
            # (as well as inexistent in older versions). So we cap ourselves
            # to 60, see https://bugs.python.org/issue26903#msg365886.
            max_workers = min(max_workers, 60)
        self._gyp_worker_pool = ProcessPoolExecutor(max_workers=max_workers)
        self._gyp_processors = []
        # Aggregate timing/counter state reported by summary()/gyp_summary().
        self._execution_time = 0.0
        self._file_count = 0
        self._gyp_execution_time = 0.0
        self._gyp_file_count = 0
+
    def summary(self):
        """Return an ExecutionSummary describing the moz.build reading phase."""
        return ExecutionSummary(
            "Finished reading {file_count:d} moz.build files in "
            "{execution_time:.2f}s",
            file_count=self._file_count,
            execution_time=self._execution_time,
        )
+
    def gyp_summary(self):
        """Return an ExecutionSummary describing parallel gyp file reading."""
        return ExecutionSummary(
            "Read {file_count:d} gyp files in parallel contributing "
            "{execution_time:.2f}s to total wall time",
            file_count=self._gyp_file_count,
            execution_time=self._gyp_execution_time,
        )
+
    def read_topsrcdir(self):
        """Read the tree of linked moz.build files.

        This starts with the tree's top-most moz.build file and descends into
        all linked moz.build files until all relevant files have been evaluated.

        This is a generator of Context instances. As each moz.build file is
        read, a new Context is created and emitted.
        """
        path = mozpath.join(self.config.topsrcdir, "moz.build")
        for r in self.read_mozbuild(path, self.config):
            yield r
        # Gyp processing runs in a worker pool; collect its contexts only
        # after the regular moz.build traversal has completed.
        all_gyp_paths = set()
        for g in self._gyp_processors:
            for gyp_context in g.results:
                all_gyp_paths |= gyp_context.all_paths
                yield gyp_context
            self._gyp_execution_time += g.execution_time
        self._gyp_file_count += len(all_gyp_paths)
        self._gyp_worker_pool.shutdown()
+
+ def all_mozbuild_paths(self):
+ """Iterator over all available moz.build files.
+
+ This method has little to do with the reader. It should arguably belong
+ elsewhere.
+ """
+ # In the future, we may traverse moz.build files by looking
+ # for DIRS references in the AST, even if a directory is added behind
+ # a conditional. For now, just walk the filesystem.
+ for path, f in self._relevant_mozbuild_finder.find("**/moz.build"):
+ yield path
+
    def find_variables_from_ast(self, variables, path=None):
        """Finds all assignments to the specified variables by parsing
        moz.build abstract syntax trees.

        This function only supports two cases, as detailed below.

        1) A dict. Keys and values should both be strings, e.g:

            VARIABLE['foo'] = 'bar'

        This is an `Assign` node with a `Subscript` target. The `Subscript`'s
        value is a `Name` node with id "VARIABLE". The slice of this target is
        an `Index` node and its value is a `Str` with value "foo".

        2) A simple list. Values should be strings, e.g: The target of the
        assignment should be a Name node. Values should be a List node,
        whose elements are Str nodes. e.g:

            VARIABLE += ['foo']

        This is an `AugAssign` node with a `Name` target with id "VARIABLE".
        The value is a `List` node containing one `Str` element whose value is
        "foo".

        With a little work, this function could support other types of
        assignment. But if we end up writing a lot of AST code, it might be
        best to import a high-level AST manipulation library into the tree.

        Args:
            variables (list): A list of variable assignments to capture.
            path (str): A path relative to the source dir. If specified, only
                `moz.build` files relevant to this path will be parsed.
                Otherwise all `moz.build` files are parsed.

        Returns:
            A generator that generates tuples of the form `(<moz.build path>,
            <variable name>, <key>, <value>)`. The `key` will only be
            defined if the variable is an object, otherwise it is `None`.
        """

        # Accept a single variable name as a convenience.
        if isinstance(variables, string_types):
            variables = [variables]

        def assigned_variable(node):
            # Return (variable name, subscript key) for an assignment node,
            # or (None, None) when the node is not one we track.
            # This is not correct, but we don't care yet.
            if hasattr(node, "targets"):
                # Nothing in moz.build does multi-assignment (yet). So error if
                # we see it.
                assert len(node.targets) == 1

                target = node.targets[0]
            else:
                target = node.target

            if isinstance(target, ast.Subscript):
                if not isinstance(target.value, ast.Name):
                    return None, None
                name = target.value.id
            elif isinstance(target, ast.Name):
                name = target.id
            else:
                return None, None

            if name not in variables:
                return None, None

            key = None
            if isinstance(target, ast.Subscript):
                # We need to branch to deal with python version differences.
                if isinstance(target.slice, ast.Constant):
                    # Python >= 3.9
                    assert isinstance(target.slice.value, str)
                    key = target.slice.value
                else:
                    # Others
                    assert isinstance(target.slice, ast.Index)
                    assert isinstance(target.slice.value, ast.Str)
                    key = target.slice.value.s

            return name, key

        def assigned_values(node):
            # Yield the string value(s) on the right-hand side of the
            # assignment (a single string or a list of strings).
            value = node.value
            if isinstance(value, ast.List):
                for v in value.elts:
                    assert isinstance(v, ast.Str)
                    yield v.s
            else:
                assert isinstance(value, ast.Str)
                yield value.s

        # Accumulator shared with the visitor below; reset per file.
        assignments = []

        class Visitor(ast.NodeVisitor):
            def helper(self, node):
                name, key = assigned_variable(node)
                if not name:
                    return

                for v in assigned_values(node):
                    assignments.append((name, key, v))

            def visit_Assign(self, node):
                self.helper(node)

            def visit_AugAssign(self, node):
                self.helper(node)

        if path:
            mozbuild_paths = chain(*self._find_relevant_mozbuilds([path]).values())
        else:
            mozbuild_paths = self.all_mozbuild_paths()

        for p in mozbuild_paths:
            # Clear in place so the visitor's closed-over list is reused.
            assignments[:] = []
            full = os.path.join(self.config.topsrcdir, p)

            with open(full, "rb") as fh:
                source = fh.read()

            tree = ast.parse(source, full)
            Visitor().visit(tree)

            for name, key, value in assignments:
                yield p, name, key, value
+
+ def read_mozbuild(self, path, config, descend=True, metadata={}):
+ """Read and process a mozbuild file, descending into children.
+
+ This starts with a single mozbuild file, executes it, and descends into
+ other referenced files per our traversal logic.
+
+ The traversal logic is to iterate over the ``*DIRS`` variables, treating
+ each element as a relative directory path. For each encountered
+ directory, we will open the moz.build file located in that
+ directory in a new Sandbox and process it.
+
+ If descend is True (the default), we will descend into child
+ directories and files per variable values.
+
+ Arbitrary metadata in the form of a dict can be passed into this
+ function. This feature is intended to facilitate the build reader
+ injecting state and annotations into moz.build files that is
+ independent of the sandbox's execution context.
+
+ Traversal is performed depth first (for no particular reason).
+ """
+ self._execution_stack.append(path)
+ try:
+ for s in self._read_mozbuild(
+ path, config, descend=descend, metadata=metadata
+ ):
+ yield s
+
+ except BuildReaderError as bre:
+ raise bre
+
+ except SandboxCalledError as sce:
+ raise BuildReaderError(
+ list(self._execution_stack), sys.exc_info()[2], sandbox_called_error=sce
+ )
+
+ except SandboxExecutionError as se:
+ raise BuildReaderError(
+ list(self._execution_stack), sys.exc_info()[2], sandbox_exec_error=se
+ )
+
+ except SandboxLoadError as sle:
+ raise BuildReaderError(
+ list(self._execution_stack), sys.exc_info()[2], sandbox_load_error=sle
+ )
+
+ except SandboxValidationError as ve:
+ raise BuildReaderError(
+ list(self._execution_stack), sys.exc_info()[2], validation_error=ve
+ )
+
+ except Exception as e:
+ raise BuildReaderError(
+ list(self._execution_stack), sys.exc_info()[2], other_error=e
+ )
+
    def _read_mozbuild(self, path, config, descend, metadata):
        """Execute a single moz.build file and recurse into referenced dirs.

        Generator yielding the file's main Context, any sub-contexts created
        during execution, and (when ``descend`` is true) all contexts from
        child directories. Errors are raised as sandbox exceptions and
        wrapped by read_mozbuild().
        """
        path = mozpath.normpath(path)
        log(
            self._log,
            logging.DEBUG,
            "read_mozbuild",
            {"path": path},
            "Reading file: {path}".format(path=path),
        )

        # A file may be reachable via several DIRS chains; execute it once.
        if path in self._read_files:
            log(
                self._log,
                logging.WARNING,
                "read_already",
                {"path": path},
                "File already read. Skipping: {path}".format(path=path),
            )
            return

        self._read_files.add(path)

        time_start = time.monotonic()

        topobjdir = config.topobjdir

        relpath = mozpath.relpath(path, config.topsrcdir)
        reldir = mozpath.dirname(relpath)

        # js/src has its own configure when not building standalone; swap in
        # the config from its own config.status for that subtree.
        if mozpath.dirname(relpath) == "js/src" and not config.substs.get(
            "JS_STANDALONE"
        ):
            config = ConfigEnvironment.from_config_status(
                mozpath.join(topobjdir, reldir, "config.status")
            )
            config.topobjdir = topobjdir

        context = Context(VARIABLES, config, self.finder)
        sandbox = MozbuildSandbox(context, metadata=metadata, finder=self.finder)
        sandbox.exec_file(path)
        self._execution_time += time.monotonic() - time_start
        self._file_count += len(context.all_paths)

        # Yield main context before doing any processing. This gives immediate
        # consumers an opportunity to change state before our remaining
        # processing is performed.
        yield context

        # We need the list of directories pre-gyp processing for later.
        dirs = list(context.get("DIRS", []))

        curdir = mozpath.dirname(path)

        for target_dir in context.get("GYP_DIRS", []):
            gyp_dir = context["GYP_DIRS"][target_dir]
            for v in ("input", "variables"):
                if not getattr(gyp_dir, v):
                    raise SandboxValidationError(
                        "Missing value for " 'GYP_DIRS["%s"].%s' % (target_dir, v),
                        context,
                    )

            # The make backend assumes contexts for sub-directories are
            # emitted after their parent, so accumulate the gyp contexts.
            # We could emit the parent context before processing gyp
            # configuration, but we need to add the gyp objdirs to that context
            # first.
            from .gyp_reader import GypProcessor

            non_unified_sources = set()
            for s in gyp_dir.non_unified_sources:
                source = SourcePath(context, s)
                if not self.finder.get(source.full_path):
                    raise SandboxValidationError("Cannot find %s." % source, context)
                non_unified_sources.add(source)
            action_overrides = {}
            for action, script in six.iteritems(gyp_dir.action_overrides):
                action_overrides[action] = SourcePath(context, script)

            # Work is submitted to the shared process pool; results are
            # collected later by read_topsrcdir().
            gyp_processor = GypProcessor(
                context.config,
                gyp_dir,
                mozpath.join(curdir, gyp_dir.input),
                mozpath.join(context.objdir, target_dir),
                self._gyp_worker_pool,
                action_overrides,
                non_unified_sources,
            )
            self._gyp_processors.append(gyp_processor)

        for subcontext in sandbox.subcontexts:
            yield subcontext

        # Traverse into referenced files.

        # It's very tempting to use a set here. Unfortunately, the recursive
        # make backend needs order preserved. Once we autogenerate all backend
        # files, we should be able to convert this to a set.
        recurse_info = OrderedDict()
        for d in dirs:
            if d in recurse_info:
                raise SandboxValidationError(
                    "Directory (%s) registered multiple times"
                    % (mozpath.relpath(d.full_path, context.srcdir)),
                    context,
                )

            recurse_info[d] = {}
            for key in sandbox.metadata:
                if key == "exports":
                    sandbox.recompute_exports()

                # Copy so children cannot mutate this sandbox's metadata.
                recurse_info[d][key] = dict(sandbox.metadata[key])

        for path, child_metadata in recurse_info.items():
            child_path = path.join("moz.build").full_path

            # Ensure we don't break out of the topsrcdir. We don't do realpath
            # because it isn't necessary. If there are symlinks in the srcdir,
            # that's not our problem. We're not a hosted application: we don't
            # need to worry about security too much.
            if not is_read_allowed(child_path, context.config):
                raise SandboxValidationError(
                    "Attempting to process file outside of allowed paths: %s"
                    % child_path,
                    context,
                )

            if not descend:
                continue

            for res in self.read_mozbuild(
                child_path, context.config, metadata=child_metadata
            ):
                yield res

        self._execution_stack.pop()
+
    def _find_relevant_mozbuilds(self, paths):
        """Given a set of filesystem paths, find all relevant moz.build files.

        We assume that a moz.build file in the directory ancestry of a given path
        is relevant to that path. Let's say we have the following files on disk::

            moz.build
            foo/moz.build
            foo/baz/moz.build
            foo/baz/file1
            other/moz.build
            other/file2

        If ``foo/baz/file1`` is passed in, the relevant moz.build files are
        ``moz.build``, ``foo/moz.build``, and ``foo/baz/moz.build``. For
        ``other/file2``, the relevant moz.build files are ``moz.build`` and
        ``other/moz.build``.

        Returns a dict of input paths to a list of relevant moz.build files.
        The root moz.build file is first and the leaf-most moz.build is last.
        """
        root = self.config.topsrcdir
        result = {}

        # Memoized so shared ancestor directories are only stat'd once
        # across all input paths.
        @memoize
        def exists(path):
            return self._relevant_mozbuild_finder.get(path) is not None

        def itermozbuild(path):
            # Yield candidate moz.build paths from the root down to *path*.
            subpath = ""
            yield "moz.build"
            for part in mozpath.split(path):
                subpath = mozpath.join(subpath, part)
                yield mozpath.join(subpath, "moz.build")

        for path in sorted(paths):
            path = mozpath.normpath(path)
            if os.path.isabs(path):
                if not mozpath.basedir(path, [root]):
                    raise Exception("Path outside topsrcdir: %s" % path)
                path = mozpath.relpath(path, root)

            result[path] = [p for p in itermozbuild(path) if exists(p)]

        return result
+
    def read_relevant_mozbuilds(self, paths):
        """Read and process moz.build files relevant for a set of paths.

        For an iterable of relative-to-root filesystem paths ``paths``,
        find all moz.build files that may apply to them based on filesystem
        hierarchy and read those moz.build files.

        The return value is a 2-tuple. The first item is a dict mapping each
        input filesystem path to a list of Context instances that are relevant
        to that path. The second item is a list of all Context instances. Each
        Context instance is in both data structures.
        """
        relevants = self._find_relevant_mozbuilds(paths)

        topsrcdir = self.config.topsrcdir

        # Source moz.build file to directories to traverse.
        dirs = defaultdict(set)
        # Relevant path to absolute paths of relevant contexts.
        path_mozbuilds = {}

        # There is room to improve this code (and the code in
        # _find_relevant_mozbuilds) to better handle multiple files in the same
        # directory. Bug 1136966 tracks.
        for path, mbpaths in relevants.items():
            path_mozbuilds[path] = [mozpath.join(topsrcdir, p) for p in mbpaths]

            # Record, for each ancestor moz.build, which child directory it
            # should descend into to reach the next relevant moz.build.
            for i, mbpath in enumerate(mbpaths[0:-1]):
                source_dir = mozpath.dirname(mbpath)
                target_dir = mozpath.dirname(mbpaths[i + 1])

                d = mozpath.normpath(mozpath.join(topsrcdir, mbpath))
                dirs[d].add(mozpath.relpath(target_dir, source_dir))

        # Exporting doesn't work reliably in tree traversal mode. Override
        # the function to no-op.
        functions = dict(FUNCTIONS)

        def export(sandbox):
            return lambda varname: None

        functions["export"] = tuple([export] + list(FUNCTIONS["export"][1:]))

        metadata = {
            "functions": functions,
        }

        contexts = defaultdict(list)
        all_contexts = []
        for context in self.read_mozbuild(
            mozpath.join(topsrcdir, "moz.build"), self.config, metadata=metadata
        ):
            # Explicitly set directory traversal variables to override default
            # traversal rules.
            if not isinstance(context, SubContext):
                for v in ("DIRS", "GYP_DIRS"):
                    context[v][:] = []

                context["DIRS"] = sorted(dirs[context.main_path])

            contexts[context.main_path].append(context)
            all_contexts.append(context)

        result = {}
        for path, paths in path_mozbuilds.items():
            # Concatenate the context lists of all relevant moz.build files,
            # root first, leaf last.
            result[path] = six.moves.reduce(
                lambda x, y: x + y, (contexts[p] for p in paths), []
            )

        return result, all_contexts
+
    def files_info(self, paths):
        """Obtain aggregate data from Files for a set of files.

        Given a set of input paths, determine which moz.build files may
        define metadata for them, evaluate those moz.build files, and
        apply file metadata rules defined within to determine metadata
        values for each file requested.

        Essentially, for each input path:

        1. Determine the set of moz.build files relevant to that file by
           looking for moz.build files in ancestor directories.
        2. Evaluate moz.build files starting with the most distant.
        3. Iterate over Files sub-contexts.
        4. If the file pattern matches the file we're seeking info on,
           apply attribute updates.
        5. Return the most recent value of attributes.

        Returns a dict mapping each input path to its aggregated Files
        instance.
        """
        paths, _ = self.read_relevant_mozbuilds(paths)

        r = {}

        # Only do wildcard matching if the '*' character is present.
        # Otherwise, mozpath.match will match directories, which we've
        # arbitrarily chosen to not allow.
        def path_matches_pattern(relpath, pattern):
            if pattern == relpath:
                return True

            return "*" in pattern and mozpath.match(relpath, pattern)

        for path, ctxs in paths.items():
            # Should be normalized by read_relevant_mozbuilds.
            assert "\\" not in path

            # Start from an empty Files and fold in each matching context,
            # root-most first, so leaf-most values win.
            flags = Files(Context())

            for ctx in ctxs:
                if not isinstance(ctx, Files):
                    continue

                # read_relevant_mozbuilds() normalizes paths and ensures that
                # the contexts have paths in the ancestry of the path. When
                # iterating over tens of thousands of paths, mozpath.relpath()
                # can be very expensive. So, given our assumptions about paths,
                # we implement an optimized version.
                ctx_rel_dir = ctx.relsrcdir
                if ctx_rel_dir:
                    assert path.startswith(ctx_rel_dir)
                    relpath = path[len(ctx_rel_dir) + 1 :]
                else:
                    relpath = path

                if any(path_matches_pattern(relpath, p) for p in ctx.patterns):
                    flags += ctx

            r[path] = flags

        return r
diff --git a/python/mozbuild/mozbuild/frontend/sandbox.py b/python/mozbuild/mozbuild/frontend/sandbox.py
new file mode 100644
index 0000000000..088e817cb0
--- /dev/null
+++ b/python/mozbuild/mozbuild/frontend/sandbox.py
@@ -0,0 +1,313 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+r"""Python sandbox implementation for build files.
+
+This module contains classes for Python sandboxes that execute in a
+highly-controlled environment.
+
+The main class is `Sandbox`. This provides an execution environment for Python
+code and is used to fill a Context instance for the takeaway information from
+the execution.
+
+Code in this module takes a different approach to exception handling compared
+to what you'd see elsewhere in Python. Arguments to built-in exceptions like
+KeyError are machine parseable. This machine-friendly data is used to present
+user-friendly error messages in the case of errors.
+"""
+
+import os
+import sys
+import weakref
+
+import six
+from mozpack.files import FileFinder
+
+from mozbuild.util import ReadOnlyDict, exec_
+
+from .context import Context
+
# Default filesystem-backed finder rooted at "/"; used when a Sandbox is
# constructed without an explicit finder.
default_finder = FileFinder("/")
+
+
def alphabetical_sorted(iterable, key=lambda x: x.lower(), reverse=False):
    """Case-insensitive ``sorted()`` replacement exposed to the sandbox.

    Orders alphabetically (ignoring case) by default; ``key`` and
    ``reverse`` behave exactly like the built-in ``sorted`` arguments.
    """
    ordered = sorted(iterable, key=key, reverse=reverse)
    return ordered
+
+
class SandboxError(Exception):
    """Base class for all sandbox-related errors.

    Carries the stack of files that were being executed when the error
    occurred.
    """

    def __init__(self, file_stack):
        # NOTE: Exception.__new__ already records the constructor args, so
        # we intentionally don't call super().__init__() here.
        self.file_stack = file_stack
+
+
class SandboxExecutionError(SandboxError):
    """Represents errors encountered during execution of a Sandbox.

    This is a simple container exception. Its purpose is to capture state
    so something else can report on it.
    """

    def __init__(self, file_stack, exc_type, exc_value, trace):
        super().__init__(file_stack)

        # Original exception class, instance, and traceback from the
        # sandboxed execution.
        self.exc_type = exc_type
        self.exc_value = exc_value
        self.trace = trace
+
+
class SandboxLoadError(SandboxError):
    """Represents errors encountered when loading a file for execution.

    This exception represents errors in a Sandbox that occurred as part of
    loading a file. The error could have occurred in the course of executing
    a file. If so, the file_stack will be non-empty and the file that caused
    the load will be on top of the stack.
    """

    def __init__(self, file_stack, trace, illegal_path=None, read_error=None):
        super().__init__(file_stack)

        self.trace = trace
        # Path that violated read restrictions, if any.
        self.illegal_path = illegal_path
        # Path that could not be read, if any.
        self.read_error = read_error
+
+
class Sandbox(dict):
    """Represents a sandbox for executing Python code.

    This class provides a sandbox for execution of a single mozbuild frontend
    file. The results of that execution is stored in the Context instance given
    as the ``context`` argument.

    Sandbox is effectively a glorified wrapper around compile() + exec(). You
    point it at some Python code and it executes it. The main difference from
    executing Python code like normal is that the executed code is very limited
    in what it can do: the sandbox only exposes a very limited set of Python
    functionality. Only specific types and functions are available. This
    prevents executed code from doing things like import modules, open files,
    etc.

    Sandbox instances act as global namespace for the sandboxed execution
    itself. They shall not be used to access the results of the execution.
    Those results are available in the given Context instance after execution.

    The Sandbox itself is responsible for enforcing rules such as forbidding
    reassignment of variables.

    Implementation note: Sandbox derives from dict because exec() insists that
    what it is given for namespaces is a dict.
    """

    # The default set of builtins.
    BUILTINS = ReadOnlyDict(
        {
            # Only real Python built-ins should go here.
            "None": None,
            "False": False,
            "True": True,
            "sorted": alphabetical_sorted,
            "int": int,
            "set": set,
            "tuple": tuple,
        }
    )

    def __init__(self, context, finder=default_finder):
        """Initialize a Sandbox ready for execution."""
        self._builtins = self.BUILTINS
        dict.__setitem__(self, "__builtins__", self._builtins)

        assert isinstance(self._builtins, ReadOnlyDict)
        assert isinstance(context, Context)

        # Contexts are modeled as a stack because multiple context managers
        # may be active.
        self._active_contexts = [context]

        # Seen sub-contexts. Will be populated with other Context instances
        # that were related to execution of this instance.
        self.subcontexts = []

        # We need to record this because it gets swallowed as part of
        # evaluation.
        self._last_name_error = None

        # Current literal source being executed.
        self._current_source = None

        self._finder = finder

    @property
    def _context(self):
        # The innermost (currently active) context.
        return self._active_contexts[-1]

    def exec_file(self, path):
        """Execute code at a path in the sandbox.

        The path must be absolute.

        Raises SandboxLoadError if the file cannot be read.
        """
        assert os.path.isabs(path)

        try:
            source = six.ensure_text(self._finder.get(path).read())
        except Exception:
            raise SandboxLoadError(
                self._context.source_stack, sys.exc_info()[2], read_error=path
            )

        self.exec_source(source, path)

    def exec_source(self, source, path=""):
        """Execute Python code within a string.

        The passed string should contain Python code to be executed. The string
        will be compiled and executed.

        You should almost always go through exec_file() because exec_source()
        does not perform extra path normalization. This can cause relative
        paths to behave weirdly.
        """

        def execute():
            # compile() inherits the __future__ from the module by default. We
            # do want Unicode literals.
            code = compile(source, path, "exec")
            # We use ourself as the global namespace for the execution. There
            # is no need for a separate local namespace as moz.build execution
            # is flat, namespace-wise.
            old_source = self._current_source
            self._current_source = source
            try:
                exec_(code, self)
            finally:
                self._current_source = old_source

        self.exec_function(execute, path=path)

    def exec_function(
        self, func, args=(), kwargs={}, path="", becomes_current_path=True
    ):
        """Execute function with the given arguments in the sandbox.

        Exceptions raised by *func* are normalized into SandboxExecutionError
        carrying the source stack at the time of failure.
        """
        if path and becomes_current_path:
            self._context.push_source(path)

        # Point the context back at this sandbox for the duration of the
        # call; restored (along with the source) in the finally below.
        old_sandbox = self._context._sandbox
        self._context._sandbox = weakref.ref(self)

        # We don't have to worry about bytecode generation here because we are
        # too low-level for that. However, we could add bytecode generation via
        # the marshall module if parsing performance were ever an issue.

        old_source = self._current_source
        self._current_source = None
        try:
            func(*args, **kwargs)
        except SandboxError as e:
            raise e
        except NameError as e:
            # A NameError is raised when a variable could not be found.
            # The original KeyError has been dropped by the interpreter.
            # However, we should have it cached in our instance!

            # Unless a script is doing something wonky like catching NameError
            # itself (that would be silly), if there is an exception on the
            # global namespace, that's our error.
            actual = e

            if self._last_name_error is not None:
                actual = self._last_name_error
            source_stack = self._context.source_stack
            if not becomes_current_path:
                # Add current file to the stack because it wasn't added before
                # sandbox execution.
                source_stack.append(path)
            raise SandboxExecutionError(
                source_stack, type(actual), actual, sys.exc_info()[2]
            )

        except Exception:
            # Need to copy the stack otherwise we get a reference and that is
            # mutated during the finally.
            exc = sys.exc_info()
            source_stack = self._context.source_stack
            if not becomes_current_path:
                # Add current file to the stack because it wasn't added before
                # sandbox execution.
                source_stack.append(path)
            raise SandboxExecutionError(source_stack, exc[0], exc[1], exc[2])
        finally:
            self._current_source = old_source
            self._context._sandbox = old_sandbox
            if path and becomes_current_path:
                self._context.pop_source()

    def push_subcontext(self, context):
        """Push a SubContext onto the execution stack.

        When called, the active context will be set to the specified context,
        meaning all variable accesses will go through it. We also record this
        SubContext as having been executed as part of this sandbox.
        """
        self._active_contexts.append(context)
        if context not in self.subcontexts:
            self.subcontexts.append(context)

    def pop_subcontext(self, context):
        """Pop a SubContext off the execution stack.

        SubContexts must be pushed and popped in opposite order. This is
        validated as part of the function call to ensure proper consumer API
        use.
        """
        popped = self._active_contexts.pop()
        assert popped == context

    def __getitem__(self, key):
        # UPPERCASE names live in the active Context; everything else is a
        # regular dict entry on the sandbox itself.
        if key.isupper():
            try:
                return self._context[key]
            except Exception as e:
                # Cache the error: exec() converts it to a NameError and
                # exec_function() recovers the original from this attribute.
                self._last_name_error = e
                raise

        return dict.__getitem__(self, key)

    def __setitem__(self, key, value):
        if key in self._builtins or key == "__builtins__":
            raise KeyError("Cannot reassign builtins")

        if key.isupper():
            # Forbid assigning over a previously set value. Interestingly, when
            # doing FOO += ['bar'], python actually does something like:
            #   foo = namespace.__getitem__('FOO')
            #   foo.__iadd__(['bar'])
            #   namespace.__setitem__('FOO', foo)
            # This means __setitem__ is called with the value that is already
            # in the dict, when doing +=, which is permitted.
            if key in self._context and self._context[key] is not value:
                raise KeyError("global_ns", "reassign", key)

            if (
                key not in self._context
                and isinstance(value, (list, dict))
                and not value
            ):
                raise KeyError("Variable %s assigned an empty value." % key)

            self._context[key] = value
        else:
            dict.__setitem__(self, key, value)

    def get(self, key, default=None):
        # Intentionally unsupported: results must be read from the Context,
        # not from the sandbox namespace.
        raise NotImplementedError("Not supported")

    def __iter__(self):
        # Intentionally unsupported; see get().
        raise NotImplementedError("Not supported")

    def __contains__(self, key):
        # Mirror __getitem__'s routing: UPPERCASE -> Context, else dict.
        if key.isupper():
            return key in self._context
        return dict.__contains__(self, key)
diff --git a/python/mozbuild/mozbuild/gen_test_backend.py b/python/mozbuild/mozbuild/gen_test_backend.py
new file mode 100644
index 0000000000..ce499fe90a
--- /dev/null
+++ b/python/mozbuild/mozbuild/gen_test_backend.py
@@ -0,0 +1,53 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+
+import mozpack.path as mozpath
+
+from mozbuild.backend.test_manifest import TestManifestBackend
+from mozbuild.base import BuildEnvironmentNotFoundException, MozbuildObject
+from mozbuild.frontend.emitter import TreeMetadataEmitter
+from mozbuild.frontend.reader import BuildReader, EmptyConfig
+
+
def gen_test_backend():
    """Generate the TestManifest build backend.

    Uses the real build configuration when one exists; otherwise creates a
    stub objdir/config.status and an EmptyConfig so test metadata can still
    be generated (possibly incompletely).
    """
    build_obj = MozbuildObject.from_environment()
    try:
        config = build_obj.config_environment
    except BuildEnvironmentNotFoundException:
        # Create a stub config.status file, since the TestManifest backend needs
        # to be re-created if configure runs. If the file doesn't exist,
        # mozbuild continually thinks the TestManifest backend is out of date
        # and tries to regenerate it.

        if not os.path.isdir(build_obj.topobjdir):
            os.makedirs(build_obj.topobjdir)

        config_status = mozpath.join(build_obj.topobjdir, "config.status")
        open(config_status, "w").close()

        print("No build detected, test metadata may be incomplete.")

        # If 'JS_STANDALONE' is set, tests that don't require an objdir won't
        # be picked up due to bug 1345209.
        # Copy before mutating: default_substs is shared class-level state,
        # and deleting from it in place would affect every other user of
        # EmptyConfig in this process.
        substs = dict(EmptyConfig.default_substs)
        substs.pop("JS_STANDALONE", None)

        config = EmptyConfig(build_obj.topsrcdir, substs)
        config.topobjdir = build_obj.topobjdir

    reader = BuildReader(config)
    emitter = TreeMetadataEmitter(config)
    backend = TestManifestBackend(config)

    context = reader.read_topsrcdir()
    data = emitter.emit(context, emitfn=emitter._process_test_manifests)
    backend.consume(data)
+
+
+if __name__ == "__main__":
+ sys.exit(gen_test_backend())
diff --git a/python/mozbuild/mozbuild/generated_sources.py b/python/mozbuild/mozbuild/generated_sources.py
new file mode 100644
index 0000000000..e22e71e5f6
--- /dev/null
+++ b/python/mozbuild/mozbuild/generated_sources.py
@@ -0,0 +1,75 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import hashlib
+import json
+import os
+
+import mozpack.path as mozpath
+from mozpack.files import FileFinder
+
+GENERATED_SOURCE_EXTS = (".rs", ".c", ".h", ".cc", ".cpp")
+
+
def sha512_digest(data):
    """Return the SHA-512 digest of ``data`` as a lowercase hex string."""
    hasher = hashlib.sha512()
    hasher.update(data)
    return hasher.hexdigest()
+
+
def get_filename_with_digest(name, contents):
    """
    Return the filename that will be used to store the generated file
    in the S3 bucket, consisting of the SHA-512 digest of `contents`
    joined with the relative path `name`.
    """
    # Key layout: <sha512-hex>/<relative-name>.
    return mozpath.join(sha512_digest(contents), name)
+
+
def get_generated_sources():
    """
    Yield tuples of `(objdir-rel-path, file)` for generated source files
    in this objdir, where `file` is either an absolute path to the file or
    a `mozpack.File` instance.
    """
    import buildconfig

    # First, get the list of generated sources produced by the build backend.
    gen_sources = os.path.join(buildconfig.topobjdir, "generated-sources.json")
    # Use a distinct name for the file handle so it is not shadowed by the
    # loop variable below (the original reused `f` for both).
    with open(gen_sources, "r") as fh:
        data = json.load(fh)
    for src in data["sources"]:
        # Exclude symverscript
        if mozpath.basename(src) != "symverscript":
            yield src, mozpath.join(buildconfig.topobjdir, src)
    # Next, return all the files in $objdir/ipc/ipdl/_ipdlheaders.
    base = "ipc/ipdl/_ipdlheaders"
    finder = FileFinder(mozpath.join(buildconfig.topobjdir, base))
    for p, f in finder.find("**/*.h"):
        yield mozpath.join(base, p), f
    # Next, return any source files that were generated into the Rust
    # object directory.
    rust_build_kind = "debug" if buildconfig.substs.get("MOZ_DEBUG_RUST") else "release"
    base = mozpath.join(buildconfig.substs["RUST_TARGET"], rust_build_kind, "build")
    finder = FileFinder(mozpath.join(buildconfig.topobjdir, base))
    for p, f in finder:
        if p.endswith(GENERATED_SOURCE_EXTS):
            yield mozpath.join(base, p), f
+
+
def get_s3_region_and_bucket():
    """
    Return a tuple of (region, bucket) giving the AWS region and S3
    bucket to which generated sources should be uploaded.
    """
    # The bucket is keyed off the SCM level (default "1"); an unknown level
    # raises KeyError, exactly like the original mapping lookup.
    buckets = {
        "1": "gecko-generated-sources-l1",
        "2": "gecko-generated-sources-l2",
        "3": "gecko-generated-sources",
    }
    level = os.environ.get("MOZ_SCM_LEVEL", "1")
    return ("us-west-2", buckets[level])
diff --git a/python/mozbuild/mozbuild/gn_processor.py b/python/mozbuild/mozbuild/gn_processor.py
new file mode 100644
index 0000000000..b6c51ee010
--- /dev/null
+++ b/python/mozbuild/mozbuild/gn_processor.py
@@ -0,0 +1,788 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import json
+import os
+import subprocess
+import sys
+import tempfile
+from collections import defaultdict, deque
+from copy import deepcopy
+from pathlib import Path
+from shutil import which
+
+import mozpack.path as mozpath
+import six
+
+from mozbuild.bootstrap import bootstrap_toolchain
+from mozbuild.frontend.sandbox import alphabetical_sorted
+from mozbuild.util import mkdir
+
+license_header = """# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+
+generated_header = """
+ ### This moz.build was AUTOMATICALLY GENERATED from a GN config, ###
+ ### DO NOT edit it by hand. ###
+"""
+
+
class MozbuildWriter(object):
    """Writes moz.build syntax to a file handle.

    Tracks the current indentation level plus just enough state to know
    whether to emit Library() or SharedLibrary() at finalize() time.
    """

    def __init__(self, fh):
        self._fh = fh
        self.indent = ""
        self._indent_increment = 4

        # We need to correlate a small amount of state here to figure out
        # which library template to use ("Library()" or "SharedLibrary()")
        self._library_name = None
        self._shared_library = None

    def mb_serialize(self, v):
        """Render a python value as moz.build source text."""
        if isinstance(v, list):
            if len(v) <= 1:
                return repr(v)
            # Pretty print a list
            raw = json.dumps(v, indent=self._indent_increment)
            # Add the indent of the current indentation level
            return raw.replace("\n", "\n" + self.indent)
        if isinstance(v, bool):
            return repr(v)
        return '"%s"' % v

    def finalize(self):
        """Emit the deferred Library()/SharedLibrary() template, if any."""
        if self._library_name:
            self.write("\n")
            if self._shared_library:
                self.write_ln(
                    "SharedLibrary(%s)" % self.mb_serialize(self._library_name)
                )
            else:
                self.write_ln("Library(%s)" % self.mb_serialize(self._library_name))

    def write(self, content):
        self._fh.write(content)

    def write_ln(self, line):
        # Write a single line at the current indentation level.
        self.write(self.indent)
        self.write(line)
        self.write("\n")

    def write_attrs(self, context_attrs):
        """Write all attributes sorted by name, dispatching on value type."""
        for k in sorted(context_attrs.keys()):
            v = context_attrs[k]
            if isinstance(v, (list, set)):
                self.write_mozbuild_list(k, v)
            elif isinstance(v, dict):
                self.write_mozbuild_dict(k, v)
            else:
                self.write_mozbuild_value(k, v)

    def write_mozbuild_list(self, key, value):
        # Emit `KEY += [ ... ]` with alphabetically sorted elements, one per
        # line. Empty lists are skipped entirely.
        if value:
            self.write("\n")
            self.write(self.indent + key)
            self.write(" += [\n    " + self.indent)
            self.write(
                (",\n    " + self.indent).join(
                    alphabetical_sorted(self.mb_serialize(v) for v in value)
                )
            )
            self.write("\n")
            self.write_ln("]")

    def write_mozbuild_value(self, key, value):
        # LIBRARY_NAME / FORCE_SHARED_LIB are not written immediately; they
        # are recorded and emitted as a template by finalize().
        if value:
            if key == "LIBRARY_NAME":
                self._library_name = value
            elif key == "FORCE_SHARED_LIB":
                self._shared_library = True
            else:
                self.write("\n")
                self.write_ln("%s = %s" % (key, self.mb_serialize(value)))
                self.write("\n")

    def write_mozbuild_dict(self, key, value):
        # Templates we need to use instead of certain values.
        replacements = (
            (
                ("COMPILE_FLAGS", '"WARNINGS_AS_ERRORS"', "[]"),
                "AllowCompilerWarnings()",
            ),
        )
        if value:
            self.write("\n")
            if key == "GeneratedFile":
                # GeneratedFile is emitted as a template call rather than a
                # dict assignment; outputs become positional arguments.
                self.write_ln("GeneratedFile(")
                self.indent += " " * self._indent_increment
                for o in value["outputs"]:
                    self.write_ln("%s," % (self.mb_serialize(o)))
                for k, v in sorted(value.items()):
                    if k == "outputs":
                        continue
                    self.write_ln("%s=%s," % (k, self.mb_serialize(v)))
                self.indent = self.indent[self._indent_increment :]
                self.write_ln(")")
                return
            for k in sorted(value.keys()):
                v = value[k]
                subst_vals = key, self.mb_serialize(k), self.mb_serialize(v)
                wrote_ln = False
                for flags, tmpl in replacements:
                    if subst_vals == flags:
                        self.write_ln(tmpl)
                        wrote_ln = True

                if not wrote_ln:
                    self.write_ln("%s[%s] = %s" % subst_vals)

    def write_condition(self, values):
        # Emit `if CONFIG[...] == ...:` joining all values with `and`, then
        # increase the indent; terminate_condition() undoes the indent.
        def mk_condition(k, v):
            if not v:
                return 'not CONFIG["%s"]' % k
            return 'CONFIG["%s"] == %s' % (k, self.mb_serialize(v))

        self.write("\n")
        self.write("if ")
        self.write(
            " and ".join(mk_condition(k, values[k]) for k in sorted(values.keys()))
        )
        self.write(":\n")
        self.indent += " " * self._indent_increment

    def terminate_condition(self):
        assert len(self.indent) >= self._indent_increment
        self.indent = self.indent[self._indent_increment :]
+
+
def find_deps(all_targets, target):
    """Return the set of target names transitively reachable from ``target``
    (inclusive) by following each target's "deps" list."""
    seen = {target}
    pending = deque([target])
    while pending:
        current = pending.popleft()
        for dep in all_targets[current]["deps"]:
            # Mark on enqueue so each target is visited exactly once.
            if dep not in seen:
                seen.add(dep)
                pending.append(dep)
    return seen
+
+
def filter_gn_config(path, gn_result, sandbox_vars, input_vars, gn_target):
    """Reduce raw `gn gen --ide=json` output to what the moz.build writer
    needs.

    Keeps only targets reachable from ``gn_target`` and only the spec
    attributes consumed downstream; returns a dict with "targets",
    "sandbox_vars" and "mozbuild_args" keys.
    """
    gen_path = path / "gen"
    # Translates the raw output of gn into just what we'll need to generate a
    # mozbuild configuration.
    gn_out = {"targets": {}, "sandbox_vars": sandbox_vars}

    # GN cpu/os names mapped to their mozbuild CPU_ARCH/OS_TARGET values.
    cpus = {
        "arm64": "aarch64",
        "x64": "x86_64",
        "mipsel": "mips32",
        "mips64el": "mips64",
    }
    oses = {
        "android": "Android",
        "linux": "Linux",
        "mac": "Darwin",
        "openbsd": "OpenBSD",
        "win": "WINNT",
    }

    mozbuild_args = {
        "MOZ_DEBUG": "1" if input_vars.get("is_debug") else None,
        "OS_TARGET": oses[input_vars["target_os"]],
        "CPU_ARCH": cpus.get(input_vars["target_cpu"], input_vars["target_cpu"]),
    }
    if "use_x11" in input_vars:
        mozbuild_args["MOZ_X11"] = "1" if input_vars["use_x11"] else None

    gn_out["mozbuild_args"] = mozbuild_args
    # Only keep targets gn_target transitively depends on.
    all_deps = find_deps(gn_result["targets"], gn_target)

    for target_fullname in all_deps:
        raw_spec = gn_result["targets"][target_fullname]

        if raw_spec["type"] == "action":
            # Special handling for the action type to avoid putting empty
            # arrays of args, script and outputs on all other types in `spec`.
            spec = {}
            for spec_attr in (
                "type",
                "args",
                "script",
                "outputs",
            ):
                spec[spec_attr] = raw_spec.get(spec_attr, [])
                if spec_attr == "outputs":
                    # Rebase outputs from an absolute path in the temp dir to a
                    # path relative to the target dir.
                    spec[spec_attr] = [
                        mozpath.relpath(d, path) for d in spec[spec_attr]
                    ]
            gn_out["targets"][target_fullname] = spec

        # TODO: 'executable' will need to be handled here at some point as well.
        if raw_spec["type"] not in ("static_library", "shared_library", "source_set"):
            continue

        spec = {}
        for spec_attr in (
            "type",
            "sources",
            "defines",
            "include_dirs",
            "cflags",
            "cflags_c",
            "cflags_cc",
            "cflags_objc",
            "cflags_objcc",
            "deps",
            "libs",
        ):
            spec[spec_attr] = raw_spec.get(spec_attr, [])
            if spec_attr == "defines":
                # Drop defines that embed machine- or toolchain-specific
                # values, which would make the output unstable.
                spec[spec_attr] = [
                    d
                    for d in spec[spec_attr]
                    if "CR_XCODE_VERSION" not in d
                    and "CR_SYSROOT_HASH" not in d
                    and "_FORTIFY_SOURCE" not in d
                ]
            if spec_attr == "include_dirs":
                # Rebase outputs from an absolute path in the temp dir to a path
                # relative to the target dir.
                spec[spec_attr] = [
                    d if gen_path != Path(d) else "!//gen" for d in spec[spec_attr]
                ]

        gn_out["targets"][target_fullname] = spec

    return gn_out
+
+
def process_gn_config(
    gn_config, topsrcdir, srcdir, non_unified_sources, sandbox_vars, mozilla_flags
):
    """Translate a json gn config into attributes that can be used to write
    out moz.build files for this configuration.

    Returns a dict with "mozbuild_args" (the config's platform arguments)
    and "dirs" (mapping relative source dir -> moz.build context attrs).

    Much of this code is based on similar functionality in `gyp_reader.py`.
    """
    mozbuild_attrs = {"mozbuild_args": gn_config.get("mozbuild_args", None), "dirs": {}}

    targets = gn_config["targets"]

    project_relsrcdir = mozpath.relpath(srcdir, topsrcdir)

    non_unified_sources = set([mozpath.normpath(s) for s in non_unified_sources])

    def target_info(fullname):
        # Fix: use the `fullname` argument instead of accidentally closing
        # over the `target_fullname` loop variable (callers always passed
        # the loop variable, so behavior is unchanged).
        path, name = fullname.split(":")
        # Stripping '//' gives us a path relative to the project root,
        # adding a suffix avoids name collisions with libraries already
        # in the tree (like "webrtc").
        return path.lstrip("//"), name + "_gn"

    def resolve_path(path):
        # GN will have resolved all these paths relative to the root of the
        # project indicated by "//".
        if path.startswith("//"):
            path = path[2:]
        if not path.startswith("/"):
            path = "/%s/%s" % (project_relsrcdir, path)
        return path

    # Process all targets from the given gn project and its dependencies.
    for target_fullname, spec in six.iteritems(targets):

        target_path, target_name = target_info(target_fullname)
        context_attrs = {}

        # Remove leading 'lib' from the target_name if any, and use as
        # library name.
        name = target_name
        if spec["type"] in ("static_library", "shared_library", "source_set", "action"):
            if name.startswith("lib"):
                name = name[3:]
            context_attrs["LIBRARY_NAME"] = six.ensure_text(name)
        else:
            raise Exception(
                "The following GN target type is not currently "
                'consumed by moz.build: "%s". It may need to be '
                "added, or you may need to re-run the "
                "`GnConfigGen` step." % spec["type"]
            )

        if spec["type"] == "shared_library":
            context_attrs["FORCE_SHARED_LIB"] = True

        if spec["type"] == "action" and "script" in spec:
            flags = [
                resolve_path(spec["script"]),
                resolve_path(""),
            ] + spec.get("args", [])
            context_attrs["GeneratedFile"] = {
                "script": "/python/mozbuild/mozbuild/action/file_generate_wrapper.py",
                "entry_point": "action",
                "outputs": [resolve_path(f) for f in spec["outputs"]],
                "flags": flags,
            }

        sources = []
        unified_sources = []
        extensions = set()
        use_defines_in_asflags = False

        for f in spec.get("sources", []):
            f = f.lstrip("//")
            ext = mozpath.splitext(f)[-1]
            extensions.add(ext)
            src = "%s/%s" % (project_relsrcdir, f)
            if ext == ".h" or ext == ".inc":
                continue
            elif ext == ".def":
                context_attrs["SYMBOLS_FILE"] = src
            elif ext != ".S" and src not in non_unified_sources:
                unified_sources.append("/%s" % src)
            else:
                sources.append("/%s" % src)
            # The Mozilla build system doesn't use DEFINES for building
            # ASFILES.
            if ext == ".s":
                use_defines_in_asflags = True

        context_attrs["SOURCES"] = sources
        context_attrs["UNIFIED_SOURCES"] = unified_sources

        context_attrs["DEFINES"] = {}
        for define in spec.get("defines", []):
            if "=" in define:
                name, value = define.split("=", 1)
                context_attrs["DEFINES"][name] = value
            else:
                context_attrs["DEFINES"][define] = True

        context_attrs["LOCAL_INCLUDES"] = []
        for include in spec.get("include_dirs", []):
            if include.startswith("!"):
                include = "!" + resolve_path(include[1:])
            else:
                include = resolve_path(include)
            # moz.build expects all LOCAL_INCLUDES to exist, so ensure they do.
            resolved = mozpath.abspath(mozpath.join(topsrcdir, include[1:]))
            if not os.path.exists(resolved):
                # GN files may refer to include dirs that are outside of the
                # tree or we simply didn't vendor. Print a warning in this case.
                if not resolved.endswith("gn-output/gen"):
                    print(
                        "Included path: '%s' does not exist, dropping include from GN "
                        "configuration." % resolved,
                        file=sys.stderr,
                    )
                continue
            context_attrs["LOCAL_INCLUDES"] += [include]

        context_attrs["ASFLAGS"] = spec.get("asflags_mozilla", [])
        if use_defines_in_asflags and context_attrs["DEFINES"]:
            context_attrs["ASFLAGS"] += ["-D" + d for d in context_attrs["DEFINES"]]
        # Pick the FLAGS variable(s) for each source extension present.
        suffix_map = {
            ".c": ("CFLAGS", ["cflags", "cflags_c"]),
            ".cpp": ("CXXFLAGS", ["cflags", "cflags_cc"]),
            ".cc": ("CXXFLAGS", ["cflags", "cflags_cc"]),
            ".m": ("CMFLAGS", ["cflags", "cflags_objc"]),
            ".mm": ("CMMFLAGS", ["cflags", "cflags_objcc"]),
        }
        variables = (suffix_map[e] for e in extensions if e in suffix_map)
        for (var, flag_keys) in variables:
            # Only flags explicitly allowed via mozilla_flags are kept.
            flags = [
                _f for _k in flag_keys for _f in spec.get(_k, []) if _f in mozilla_flags
            ]
            for f in flags:
                # the result may be a string or a list.
                if isinstance(f, six.string_types):
                    context_attrs.setdefault(var, []).append(f)
                else:
                    context_attrs.setdefault(var, []).extend(f)

        context_attrs["OS_LIBS"] = []
        for lib in spec.get("libs", []):
            lib_name = os.path.splitext(lib)[0]
            if lib.endswith(".framework"):
                context_attrs["OS_LIBS"] += ["-framework " + lib_name]
            else:
                context_attrs["OS_LIBS"] += [lib_name]

        # Add some features to all contexts. Put here in case LOCAL_INCLUDES
        # order matters.
        context_attrs["LOCAL_INCLUDES"] += [
            "!/ipc/ipdl/_ipdlheaders",
            "/ipc/chromium/src",
            "/tools/profiler/public",
        ]
        # These get set via VC project file settings for normal GYP builds.
        # TODO: Determine if these defines are needed for GN builds.
        if gn_config["mozbuild_args"]["OS_TARGET"] == "WINNT":
            context_attrs["DEFINES"]["UNICODE"] = True
            context_attrs["DEFINES"]["_UNICODE"] = True

        context_attrs["COMPILE_FLAGS"] = {"OS_INCLUDES": []}

        for key, value in sandbox_vars.items():
            if context_attrs.get(key) and isinstance(context_attrs[key], list):
                # If we have a key from sandbox_vars that's also been
                # populated here we use the value from sandbox_vars as our
                # basis rather than overriding outright.
                context_attrs[key] = value + context_attrs[key]
            elif context_attrs.get(key) and isinstance(context_attrs[key], dict):
                context_attrs[key].update(value)
            else:
                context_attrs[key] = value

        target_relsrcdir = mozpath.join(project_relsrcdir, target_path, target_name)
        mozbuild_attrs["dirs"][target_relsrcdir] = context_attrs

    return mozbuild_attrs
+
+
def find_common_attrs(config_attributes):
    """Return the intersection of the given attribute dicts, destructively
    pruning those common parts out of each input dict.

    Lists intersect by element count, dicts by key/value pair, scalars by
    equality. ``config_attributes`` must be non-empty. (The py2-era
    ``six.iteritems`` indirection was dropped; this file otherwise relies on
    Python 3 only.)
    """
    common_attrs = deepcopy(config_attributes[0])

    def make_intersection(reference, input_attrs):
        # Modifies `reference` so that after calling this function it only
        # contains parts it had in common with in `input_attrs`.

        for k, input_value in input_attrs.items():
            # Anything in `input_attrs` must match what's already in
            # `reference`.
            common_value = reference.get(k)
            if common_value:
                if isinstance(input_value, list):
                    reference[k] = [
                        i
                        for i in common_value
                        if input_value.count(i) == common_value.count(i)
                    ]
                elif isinstance(input_value, dict):
                    reference[k] = {
                        key: value
                        for key, value in common_value.items()
                        if key in input_value and value == input_value[key]
                    }
                elif input_value != common_value:
                    del reference[k]
            elif k in reference:
                del reference[k]

        # Additionally, any keys in `reference` that aren't in `input_attrs`
        # must be deleted.
        for k in set(reference.keys()) - set(input_attrs.keys()):
            del reference[k]

    def make_difference(reference, input_attrs):
        # Modifies `input_attrs` so that after calling this function it contains
        # no parts it has in common with in `reference`. list() because keys
        # may be deleted during iteration.
        for k, input_value in list(input_attrs.items()):
            common_value = reference.get(k)
            if common_value:
                if isinstance(input_value, list):
                    input_attrs[k] = [
                        i
                        for i in input_value
                        if common_value.count(i) != input_value.count(i)
                    ]
                elif isinstance(input_value, dict):
                    input_attrs[k] = {
                        key: value
                        for key, value in input_value.items()
                        if key not in common_value
                    }
                else:
                    del input_attrs[k]

    for config_attr_set in config_attributes[1:]:
        make_intersection(common_attrs, config_attr_set)

    for config_attr_set in config_attributes:
        make_difference(common_attrs, config_attr_set)

    return common_attrs
+
+
def write_mozbuild(
    topsrcdir,
    srcdir,
    non_unified_sources,
    gn_configs,
    mozilla_flags,
    write_mozbuild_variables,
):
    """Write a moz.build per target directory plus a root moz.build under
    ``srcdir``, factoring attributes common across all configurations into
    unconditional sections and the rest under CONFIG conditions."""

    all_mozbuild_results = []

    for gn_config in gn_configs:
        mozbuild_attrs = process_gn_config(
            gn_config,
            topsrcdir,
            srcdir,
            non_unified_sources,
            gn_config["sandbox_vars"],
            mozilla_flags,
        )
        all_mozbuild_results.append(mozbuild_attrs)

    # Translate {config -> {dirs -> build info}} into
    # {dirs -> [(config, build_info)]}
    configs_by_dir = defaultdict(list)
    for config_attrs in all_mozbuild_results:
        mozbuild_args = config_attrs["mozbuild_args"]
        dirs = config_attrs["dirs"]
        for d, build_data in dirs.items():
            configs_by_dir[d].append((mozbuild_args, build_data))

    # Track every moz.build we write so stale ones can be removed afterwards.
    mozbuilds = set()
    for relsrcdir, configs in sorted(configs_by_dir.items()):
        target_srcdir = mozpath.join(topsrcdir, relsrcdir)
        mkdir(target_srcdir)

        target_mozbuild = mozpath.join(target_srcdir, "moz.build")
        mozbuilds.add(target_mozbuild)
        with open(target_mozbuild, "w") as fh:
            mb = MozbuildWriter(fh)
            mb.write(license_header)
            mb.write("\n")
            mb.write(generated_header)

            try:
                if relsrcdir in write_mozbuild_variables["INCLUDE_TK_CFLAGS_DIRS"]:
                    mb.write('if CONFIG["MOZ_WIDGET_TOOLKIT"] == "gtk":\n')
                    mb.write('    CXXFLAGS += CONFIG["MOZ_GTK3_CFLAGS"]\n')
            except KeyError:
                pass

            all_args = [args for args, _ in configs]

            # Start with attributes that will be a part of the mozconfig
            # for every configuration, then factor by other potentially useful
            # combinations.
            # FIXME: this is a time-bomb. See bug 1775202.
            for attrs in (
                (),
                ("MOZ_DEBUG",),
                ("OS_TARGET",),
                ("CPU_ARCH",),
                ("MOZ_DEBUG", "OS_TARGET"),
                ("OS_TARGET", "MOZ_X11"),
                ("OS_TARGET", "CPU_ARCH"),
                ("OS_TARGET", "CPU_ARCH", "MOZ_X11"),
                ("OS_TARGET", "CPU_ARCH", "MOZ_DEBUG"),
                ("OS_TARGET", "CPU_ARCH", "MOZ_DEBUG", "MOZ_X11"),
            ):
                # Each distinct combination of values for `attrs` becomes a
                # condition block in the generated file; None normalizes to "".
                conditions = set()
                for args in all_args:
                    cond = tuple(((k, args.get(k) or "") for k in attrs))
                    conditions.add(cond)

                for cond in sorted(conditions):
                    common_attrs = find_common_attrs(
                        [
                            attrs
                            for args, attrs in configs
                            if all((args.get(k) or "") == v for k, v in cond)
                        ]
                    )
                    if any(common_attrs.values()):
                        if cond:
                            mb.write_condition(dict(cond))
                        mb.write_attrs(common_attrs)
                        if cond:
                            mb.terminate_condition()

            mb.finalize()

    dirs_mozbuild = mozpath.join(srcdir, "moz.build")
    mozbuilds.add(dirs_mozbuild)
    with open(dirs_mozbuild, "w") as fh:
        mb = MozbuildWriter(fh)
        mb.write(license_header)
        mb.write("\n")
        mb.write(generated_header)

        # Not every srcdir is present for every config, which needs to be
        # reflected in the generated root moz.build.
        dirs_by_config = {
            tuple(v["mozbuild_args"].items()): set(v["dirs"].keys())
            for v in all_mozbuild_results
        }

        for attrs in (
            (),
            ("OS_TARGET",),
            ("OS_TARGET", "CPU_ARCH"),
            ("OS_TARGET", "CPU_ARCH", "MOZ_X11"),
        ):

            conditions = set()
            for args in dirs_by_config.keys():
                cond = tuple(((k, dict(args).get(k) or "") for k in attrs))
                conditions.add(cond)

            for cond in sorted(conditions):
                common_dirs = None
                for args, dir_set in dirs_by_config.items():
                    if all((dict(args).get(k) or "") == v for k, v in cond):
                        if common_dirs is None:
                            common_dirs = deepcopy(dir_set)
                        else:
                            common_dirs &= dir_set

                for args, dir_set in dirs_by_config.items():
                    # NOTE(review): unlike the matching test above, this one
                    # does not normalize None with `or ""`, so configs whose
                    # value is None are not pruned here — confirm whether the
                    # asymmetry is intentional.
                    if all(dict(args).get(k) == v for k, v in cond):
                        dir_set -= common_dirs

                if common_dirs:
                    if cond:
                        mb.write_condition(dict(cond))
                    mb.write_mozbuild_list("DIRS", ["/%s" % d for d in common_dirs])
                    if cond:
                        mb.terminate_condition()

    # Remove possibly stale moz.builds
    for root, dirs, files in os.walk(srcdir):
        if "moz.build" in files:
            file = os.path.join(root, "moz.build")
            if file not in mozbuilds:
                os.unlink(file)
+
+
def generate_gn_config(
    srcdir,
    gn_binary,
    input_variables,
    sandbox_variables,
    gn_target,
):
    """Run `gn gen --ide=json` in a temporary directory with the given
    input variables and return the filtered config (see filter_gn_config).
    """

    def str_for_arg(v):
        # Render a python value as a GN literal.
        # NOTE(review): `v in (True, False)` also matches the ints 1 and 0
        # (since 1 == True in Python), so e.g. concurrent_links=1 below is
        # emitted as `true` rather than "1" — confirm this is intended
        # before changing it.
        if v in (True, False):
            return str(v).lower()
        return '"%s"' % v

    # Pin values that could otherwise vary by machine, presumably to keep
    # the generated configuration stable.
    input_variables = input_variables.copy()
    input_variables.update(
        {
            "concurrent_links": 1,
            "action_pool_depth": 1,
        }
    )

    if input_variables["target_os"] == "win":
        input_variables.update(
            {
                "visual_studio_path": "/",
                "visual_studio_version": 2015,
                "wdk_path": "/",
            }
        )
    if input_variables["target_os"] == "mac":
        input_variables.update(
            {
                "mac_sdk_path": "/",
                "enable_wmax_tokens": False,
            }
        )

    gn_args = "--args=%s" % " ".join(
        ["%s=%s" % (k, str_for_arg(v)) for k, v in six.iteritems(input_variables)]
    )
    with tempfile.TemporaryDirectory() as tempdir:
        # On Mac, `tempdir` starts with /var which is a symlink to /private/var.
        # We resolve the symlinks in `tempdir` here so later usage with
        # relpath() does not lead to unexpected results, should it be used
        # together with another path that has symlinks resolved.
        resolved_tempdir = Path(tempdir).resolve()
        gen_args = [gn_binary, "gen", str(resolved_tempdir), gn_args, "--ide=json"]
        print('Running "%s"' % " ".join(gen_args), file=sys.stderr)
        subprocess.check_call(gen_args, cwd=srcdir, stderr=subprocess.STDOUT)

        gn_config_file = resolved_tempdir / "project.json"

        with open(gn_config_file, "r") as fh:
            gn_out = json.load(fh)
            gn_out = filter_gn_config(
                resolved_tempdir, gn_out, sandbox_variables, input_variables, gn_target
            )
    return gn_out
+
+
def main():
    """Generate GN configs for every supported (debug, os, cpu, x11)
    combination described by the json config file, then write moz.build
    files from them."""
    parser = argparse.ArgumentParser()
    parser.add_argument("config", help="configuration in json format")
    args = parser.parse_args()

    gn_binary = bootstrap_toolchain("gn/gn") or which("gn")
    if not gn_binary:
        raise Exception("The GN program must be present to generate GN configs.")

    with open(args.config, "r") as fh:
        config = json.load(fh)

    # This file lives in python/mozbuild/mozbuild/, so four parents up is
    # the top of the source tree.
    topsrcdir = Path(__file__).parent.parent.parent.parent.resolve()

    # Enumerate the full matrix of configurations to generate.
    vars_set = []
    for is_debug in (True, False):
        for target_os in ("android", "linux", "mac", "openbsd", "win"):
            target_cpus = ["x64"]
            if target_os in ("android", "linux", "mac", "win", "openbsd"):
                target_cpus.append("arm64")
            if target_os in ("android", "linux"):
                target_cpus.append("arm")
            if target_os in ("android", "linux", "win"):
                target_cpus.append("x86")
            if target_os == "linux":
                target_cpus.extend(["ppc64", "riscv64", "mipsel", "mips64el"])
            for target_cpu in target_cpus:
                vars = {
                    "host_cpu": "x64",
                    "is_debug": is_debug,
                    "target_cpu": target_cpu,
                    "target_os": target_os,
                }
                if target_os == "linux":
                    # Linux is generated both with and without X11.
                    for use_x11 in (True, False):
                        vars["use_x11"] = use_x11
                        vars_set.append(vars.copy())
                else:
                    if target_os == "openbsd":
                        vars["use_x11"] = True
                    vars_set.append(vars)

    gn_configs = []
    for vars in vars_set:
        gn_configs.append(
            generate_gn_config(
                topsrcdir / config["target_dir"],
                gn_binary,
                vars,
                config["gn_sandbox_variables"],
                config["gn_target"],
            )
        )

    print("Writing moz.build files")
    write_mozbuild(
        topsrcdir,
        topsrcdir / config["target_dir"],
        config["non_unified_sources"],
        gn_configs,
        config["mozilla_flags"],
        config["write_mozbuild_variables"],
    )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/html_build_viewer.py b/python/mozbuild/mozbuild/html_build_viewer.py
new file mode 100644
index 0000000000..0582e6f1be
--- /dev/null
+++ b/python/mozbuild/mozbuild/html_build_viewer.py
@@ -0,0 +1,118 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This module contains code for running an HTTP server to view build info.
+import http.server
+import json
+import os
+
+import requests
+
+
class HTTPHandler(http.server.BaseHTTPRequestHandler):
    """Serves the build-resource index, registered JSON resources, and
    static viewer files from the wrapper's document root."""

    def do_GET(self):
        # `wrapper` is the BuildViewerServer that owns this HTTPServer.
        wrapper = self.server.wrapper
        p = self.path

        if p == "/build_resources.json":
            # Index listing every registered resource file.
            self.send_response(200)
            self.send_header("Content-Type", "application/json; charset=utf-8")
            self.end_headers()

            keys = sorted(wrapper.json_files.keys())
            index = json.dumps({"files": ["resources/%s" % k for k in keys]})
            self.wfile.write(index.encode("utf-8"))
            return

        if p.startswith("/resources/"):
            key = p[len("/resources/") :]

            if key not in wrapper.json_files:
                self.send_error(404)
                return

            self.send_response(200)
            self.send_header("Content-Type", "application/json; charset=utf-8")
            self.end_headers()

            self.wfile.write(wrapper.json_files[key])
            return

        if p == "/":
            p = "/build_resources.html"

        self.serve_docroot(wrapper.doc_root, p[1:])

    def do_POST(self):
        # A POST to /shutdown flags the wrapper's serving loop to stop.
        if self.path == "/shutdown":
            self.server.wrapper.do_shutdown = True
            self.send_response(200)
            return

        self.send_error(404)

    def serve_docroot(self, root, path):
        """Serve a file from within ``root``; 404 for missing or
        out-of-root paths, 500 for directories."""
        local_path = os.path.normpath(os.path.join(root, path))

        # Cheap security. This doesn't resolve symlinks, etc. But, it should be
        # acceptable since this server only runs locally.
        if not local_path.startswith(root):
            self.send_error(404)
            # Bug fix: without this return we would fall through and serve
            # the out-of-root file after already having sent the 404.
            return

        if not os.path.exists(local_path):
            self.send_error(404)
            return

        if os.path.isdir(local_path):
            self.send_error(500)
            return

        self.send_response(200)
        ct = "text/plain"
        if path.endswith(".html"):
            ct = "text/html"

        self.send_header("Content-Type", ct)
        self.end_headers()

        with open(local_path, "rb") as fh:
            self.wfile.write(fh.read())
+
+
class BuildViewerServer(object):
    """Small local HTTP server exposing build-resource JSON files and the
    static HTML viewer."""

    def __init__(self, address="localhost", port=0):
        # TODO use pkg_resources to obtain HTML resources.
        pkg_dir = os.path.dirname(os.path.abspath(__file__))
        doc_root = os.path.join(pkg_dir, "resources", "html-build-viewer")
        assert os.path.isdir(doc_root)

        self.doc_root = doc_root
        # Maps resource key -> JSON payload as *bytes*; the request handler
        # writes these values straight to a binary response stream.
        self.json_files = {}

        self.server = http.server.HTTPServer((address, port), HTTPHandler)
        self.server.wrapper = self
        self.do_shutdown = False

    @property
    def url(self):
        """Base URL the server is listening on."""
        hostname, port = self.server.server_address
        return "http://%s:%d/" % (hostname, port)

    def add_resource_json_file(self, key, path):
        """Register a resource JSON file with the server.

        The file will be made available under the name/key specified."""
        with open(path, "rb") as fh:
            self.json_files[key] = fh.read()

    def add_resource_json_url(self, key, url):
        """Register a resource JSON file at a URL."""
        r = requests.get(url)
        if r.status_code != 200:
            raise Exception("Non-200 HTTP response code")
        # Bug fix: store bytes (r.content), not str (r.text), to match
        # add_resource_json_file and the handler's binary wfile.write().
        self.json_files[key] = r.content

    def run(self):
        """Serve requests until a POST to /shutdown sets do_shutdown."""
        while not self.do_shutdown:
            self.server.handle_request()
diff --git a/python/mozbuild/mozbuild/jar.py b/python/mozbuild/mozbuild/jar.py
new file mode 100644
index 0000000000..f7d10f7fed
--- /dev/null
+++ b/python/mozbuild/mozbuild/jar.py
@@ -0,0 +1,648 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""jarmaker.py provides a python class to package up chrome content by
+processing jar.mn files.
+
+See the documentation for jar.mn on MDC for further details on the format.
+"""
+
+import errno
+import io
+import logging
+import os
+import re
+import sys
+from time import localtime
+
+import mozpack.path as mozpath
+import six
+from mozpack.files import FileFinder
+from MozZipFile import ZipFile
+from six import BytesIO
+
+from mozbuild.action.buildlist import addEntriesToListFile
+from mozbuild.preprocessor import Preprocessor
+from mozbuild.util import ensure_bytes
+
+if sys.platform == "win32":
+ from ctypes import WinError, windll
+
+ CreateHardLink = windll.kernel32.CreateHardLinkA
+
+__all__ = ["JarMaker"]
+
+
class ZipEntry(object):
    """Helper class for jar output.

    A minimal file-like wrapper around a zipfile entry: writes are buffered
    in memory and flushed into the archive as a single entry on close(),
    via ZipFile.writestr.
    """

    def __init__(self, name, zipfile):
        self._zip = zipfile
        self._entry_name = name
        self._buf = BytesIO()

    def write(self, content):
        """Append the given content to this zip entry"""
        self._buf.write(ensure_bytes(content))

    def close(self):
        """The close method writes the content back to the zip file."""
        self._zip.writestr(self._entry_name, self._buf.getvalue())
+
+
def getModTime(aPath):
    """Return the modification time of aPath as a time.struct_time.

    Paths that are not regular files map to the epoch, so they always
    compare older than any existing file.
    """
    if os.path.isfile(aPath):
        return localtime(os.stat(aPath).st_mtime)
    return localtime(0)
+
+
class JarManifestEntry(object):
    """One file entry from a jar.mn section: where the file goes in the
    jar (output), where it comes from (source), whether it is a locale
    resource, and whether it must be preprocessed."""

    def __init__(self, output, source, is_locale=False, preprocess=False):
        self.output, self.source = output, source
        self.is_locale, self.preprocess = is_locale, preprocess
+
+
class JarInfo(object):
    """Describes one jar section of a jar.mn manifest: base directory, jar
    name, optional relativesrcdir, chrome manifest lines and file entries.

    The constructor doubles as a copy constructor: pass an existing JarInfo
    (and no name) to inherit its base and name with fresh entry lists.
    """

    def __init__(self, base_or_jarinfo, name=None):
        if name is None:
            # Copy-construction from an existing JarInfo.
            assert isinstance(base_or_jarinfo, JarInfo)
            self.base, self.name = base_or_jarinfo.base, base_or_jarinfo.name
        else:
            assert not isinstance(base_or_jarinfo, JarInfo)
            self.base = base_or_jarinfo or ""
            # For compatibility with existing jar.mn files, if there is no
            # base, the jar name is under chrome/
            self.name = name if self.base else mozpath.join("chrome", name)
        self.relativesrcdir = None
        self.chrome_manifests = []
        self.entries = []
+
+
class DeprecatedJarManifest(Exception):
    """Raised when a jar.mn manifest uses syntax that is no longer
    supported (currently the "+" overwrite prefix on entries)."""

    pass
+
+
class JarManifestParser(object):
    """Parser for jar.mn manifests.

    A Preprocessor instance uses this object as its output: each processed
    manifest line is passed to write(). Iterating over the parser
    afterwards yields one JarInfo per jar section encountered.
    """

    # All patterns below are raw strings. The previous non-raw literals
    # relied on invalid escape sequences, which raise
    # DeprecationWarning/SyntaxWarning on modern Python; the regex
    # semantics are unchanged.

    # Blank lines and comment-only lines.
    ignore = re.compile(r"\s*(\#.*)?$")
    # Section headers: "name.jar:" or "[base/path] sub/name.jar:" (or a
    # comment line, which yields no "jarfile" group).
    jarline = re.compile(
        r"""
        (?:
         (?:\[(?P<base>[\w\d.\-\_\\\/{}@]+)\]\s*)? # optional [base/path]
         (?P<jarfile>[\w\d.\-\_\\\/{}]+).jar\:     # filename.jar:
        |
         (?:\s*(\#.*)?)                            # comment
        )\s*$                                      # whitespaces
        """,
        re.VERBOSE,
    )
    # "relativesrcdir <path>:" directives.
    relsrcline = re.compile(r"relativesrcdir\s+(?P<relativesrcdir>.+?):")
    # "% <chrome manifest entry>" lines.
    regline = re.compile(r"\%\s+(.*)$")
    entryre = r"(?P<optPreprocess>\*)?(?P<optOverwrite>\+?)\s+"
    # File entries: "<output>" or "<output> (<source>)", optionally
    # prefixed with "*" (preprocess) and/or the deprecated "+".
    entryline = re.compile(
        entryre
        + (
            r"(?P<output>[\w\d.\-\_\\\/\+\@]+)\s*"
            r"(\((?P<locale>\%?)(?P<source>[\w\d.\-\_\\\/\@\*]+)\))?\s*$"
        )
    )

    def __init__(self):
        self._current_jar = None
        self._jars = []

    def write(self, line):
        # A Preprocessor instance feeds the parser through calls to this method.

        # Ignore comments and empty lines
        if self.ignore.match(line):
            return

        # A jar manifest file can declare several different sections, each of
        # which applies to a given "jar file". Each of those sections starts
        # with "<name>.jar:", in which case the path is assumed relative to
        # a "chrome" directory, or "[<base/path>] <subpath/name>.jar:", where
        # a base directory is given (usually pointing at the root of the
        # application or addon) and the jar path is given relative to the base
        # directory.
        if self._current_jar is None:
            m = self.jarline.match(line)
            if not m:
                raise RuntimeError(line)
            if m.group("jarfile"):
                self._current_jar = JarInfo(m.group("base"), m.group("jarfile"))
                self._jars.append(self._current_jar)
            return

        # Within each section, there can be three different types of entries:

        # - indications of the relative source directory we pretend to be in
        # when considering localization files, in the following form;
        # "relativesrcdir <path>:"
        m = self.relsrcline.match(line)
        if m:
            if self._current_jar.chrome_manifests or self._current_jar.entries:
                self._current_jar = JarInfo(self._current_jar)
                self._jars.append(self._current_jar)
            self._current_jar.relativesrcdir = m.group("relativesrcdir")
            return

        # - chrome manifest entries, prefixed with "%".
        m = self.regline.match(line)
        if m:
            rline = " ".join(m.group(1).split())
            if rline not in self._current_jar.chrome_manifests:
                self._current_jar.chrome_manifests.append(rline)
            return

        # - entries indicating files to be part of the given jar. They are
        # formed thusly:
        # "<dest_path>"
        # or
        # "<dest_path> (<source_path>)"
        # The <dest_path> is where the file(s) will be put in the chrome jar.
        # The <source_path> is where the file(s) can be found in the source
        # directory. The <source_path> may start with a "%" for files part
        # of a localization directory, in which case the "%" counts as the
        # locale.
        # Each entry can be prefixed with "*" for preprocessing.
        m = self.entryline.match(line)
        if m:
            if m.group("optOverwrite"):
                raise DeprecatedJarManifest('The "+" prefix is not supported anymore')
            self._current_jar.entries.append(
                JarManifestEntry(
                    m.group("output"),
                    m.group("source") or mozpath.basename(m.group("output")),
                    is_locale=bool(m.group("locale")),
                    preprocess=bool(m.group("optPreprocess")),
                )
            )
            return

        # Nothing in the current section matched: assume the section ended
        # and re-parse the line as a new section header.
        self._current_jar = None
        self.write(line)

    def __iter__(self):
        """Iterate over the JarInfo sections parsed so far."""
        return iter(self._jars)
+
+
class JarMaker(object):
    """JarMaker reads jar.mn files and process those into jar files or
    flat directories, along with chrome.manifest files.
    """

    def __init__(
        self, outputFormat="flat", useJarfileManifest=True, useChromeManifest=False
    ):

        # One of "flat", "jar" or "symlink"; selects the OutputHelper_*
        # nested class used to write entries.
        self.outputFormat = outputFormat
        self.useJarfileManifest = useJarfileManifest
        self.useChromeManifest = useChromeManifest
        # Preprocessor template cloned for each manifest/entry that needs it.
        self.pp = Preprocessor()
        self.topsourcedir = None
        self.sourcedirs = []
        # None until set explicitly or derived via generateLocaleDirs().
        self.localedirs = None
        self.l10nbase = None
        self.relativesrcdir = None
        self.rootManifestAppId = None
        # Output paths already emitted; duplicates raise in _processEntryLine.
        self._seen_output = set()

    def getCommandLineParser(self):
        """Get a optparse.OptionParser for jarmaker.

        This OptionParser has the options for jarmaker as well as
        the options for the inner PreProcessor.
        """

        # HACK, we need to unescape the string variables we get,
        # the perl versions didn't grok strings right

        p = self.pp.getCommandLineParser(unescapeDefines=True)
        p.add_option(
            "-f",
            type="choice",
            default="jar",
            choices=("jar", "flat", "symlink"),
            help="fileformat used for output",
            metavar="[jar, flat, symlink]",
        )
        p.add_option("-v", action="store_true", dest="verbose", help="verbose output")
        p.add_option("-q", action="store_false", dest="verbose", help="verbose output")
        p.add_option(
            "-e",
            action="store_true",
            help="create chrome.manifest instead of jarfile.manifest",
        )
        p.add_option(
            "-s", type="string", action="append", default=[], help="source directory"
        )
        p.add_option("-t", type="string", help="top source directory")
        p.add_option(
            "-c",
            "--l10n-src",
            type="string",
            action="append",
            help="localization directory",
        )
        p.add_option(
            "--l10n-base",
            type="string",
            action="store",
            help="merged directory to be used for localization (requires relativesrcdir)",
        )
        p.add_option(
            "--relativesrcdir",
            type="string",
            help="relativesrcdir to be used for localization",
        )
        p.add_option("-d", type="string", help="base directory")
        p.add_option(
            "--root-manifest-entry-appid",
            type="string",
            help="add an app id specific root chrome manifest entry.",
        )
        return p

    # NOTE(review): the doZip parameter is accepted but never used in this
    # body; it appears to be kept for caller compatibility.
    def finalizeJar(
        self, jardir, jarbase, jarname, chromebasepath, register, doZip=True
    ):
        """Helper method to write out the chrome registration entries to
        jarfile.manifest or chrome.manifest, or both.

        The actual file processing is done in updateManifest.
        """

        # rewrite the manifest, if entries given
        if not register:
            return

        chromeManifest = os.path.join(jardir, jarbase, "chrome.manifest")

        if self.useJarfileManifest:
            # Per-jar manifest: chromebasepath gets an empty prefix.
            self.updateManifest(
                os.path.join(jardir, jarbase, jarname + ".manifest"),
                chromebasepath.format(""),
                register,
            )
            if jarname != "chrome":
                # Make the top-level chrome.manifest reference the per-jar one.
                addEntriesToListFile(
                    chromeManifest, ["manifest {0}.manifest".format(jarname)]
                )
        if self.useChromeManifest:
            chromebase = os.path.dirname(jarname) + "/"
            self.updateManifest(
                chromeManifest, chromebasepath.format(chromebase), register
            )

        # If requested, add a root chrome manifest entry (assumed to be in the parent directory
        # of chromeManifest) with the application specific id. In cases where we're building
        # lang packs, the root manifest must know about application sub directories.

        if self.rootManifestAppId:
            rootChromeManifest = os.path.join(
                os.path.normpath(os.path.dirname(chromeManifest)),
                "..",
                "chrome.manifest",
            )
            rootChromeManifest = os.path.normpath(rootChromeManifest)
            chromeDir = os.path.basename(
                os.path.dirname(os.path.normpath(chromeManifest))
            )
            logging.info(
                "adding '%s' entry to root chrome manifest appid=%s"
                % (chromeDir, self.rootManifestAppId)
            )
            addEntriesToListFile(
                rootChromeManifest,
                [
                    "manifest %s/chrome.manifest application=%s"
                    % (chromeDir, self.rootManifestAppId)
                ],
            )

    def updateManifest(self, manifestPath, chromebasepath, register):
        """updateManifest replaces the % in the chrome registration entries
        with the given chrome base path, and updates the given manifest file.
        """
        # dict.fromkeys deduplicates the rewritten entries while keeping
        # insertion order.
        myregister = dict.fromkeys(
            map(lambda s: s.replace("%", chromebasepath), register)
        )
        addEntriesToListFile(manifestPath, six.iterkeys(myregister))

    def makeJar(self, infile, jardir):
        """makeJar is the main entry point to JarMaker.

        It takes the input file, the output directory, the source dirs and the
        top source dir as argument, and optionally the l10n dirs.
        """

        # making paths absolute, guess srcdir if file and add to sourcedirs
        def _normpath(p):
            return os.path.normpath(os.path.abspath(p))

        self.topsourcedir = _normpath(self.topsourcedir)
        self.sourcedirs = [_normpath(p) for p in self.sourcedirs]
        if self.localedirs:
            self.localedirs = [_normpath(p) for p in self.localedirs]
        elif self.relativesrcdir:
            self.localedirs = self.generateLocaleDirs(self.relativesrcdir)
        if isinstance(infile, six.text_type):
            # infile is a path (as opposed to an open stream, e.g. stdin):
            # its directory is an implicit source dir.
            logging.info("processing " + infile)
            self.sourcedirs.append(_normpath(os.path.dirname(infile)))
        pp = self.pp.clone()
        # The JarManifestParser receives the preprocessed lines via write().
        pp.out = JarManifestParser()
        pp.do_include(infile)

        for info in pp.out:
            self.processJarSection(info, jardir)

    def generateLocaleDirs(self, relativesrcdir):
        """Compute the list of l10n source directories for relativesrcdir."""
        if os.path.basename(relativesrcdir) == "locales":
            # strip locales
            l10nrelsrcdir = os.path.dirname(relativesrcdir)
        else:
            l10nrelsrcdir = relativesrcdir
        locdirs = []

        # generate locales merge or en-US
        if self.l10nbase:
            locdirs.append(os.path.join(self.l10nbase, l10nrelsrcdir))
        else:
            # add en-US if it's not l10n
            locdirs.append(os.path.join(self.topsourcedir, relativesrcdir, "en-US"))
        return locdirs

    def processJarSection(self, jarinfo, jardir):
        """Internal method called by makeJar to actually process a section
        of a jar.mn file.
        """

        # chromebasepath is used for chrome registration manifests
        # {0} is getting replaced with chrome/ for chrome.manifest, and with
        # an empty string for jarfile.manifest

        chromebasepath = "{0}" + os.path.basename(jarinfo.name)
        if self.outputFormat == "jar":
            chromebasepath = "jar:" + chromebasepath + ".jar!"
        chromebasepath += "/"

        jarfile = os.path.join(jardir, jarinfo.base, jarinfo.name)
        # jf is only non-None in "jar" output mode; it is passed down so
        # error paths can close it before raising.
        jf = None
        if self.outputFormat == "jar":
            # jar
            jarfilepath = jarfile + ".jar"
            try:
                os.makedirs(os.path.dirname(jarfilepath))
            except OSError as error:
                if error.errno != errno.EEXIST:
                    raise
            jf = ZipFile(jarfilepath, "a", lock=True)
            outHelper = self.OutputHelper_jar(jf)
        else:
            # Dispatch to OutputHelper_flat or OutputHelper_symlink by name.
            outHelper = getattr(self, "OutputHelper_" + self.outputFormat)(jarfile)

        if jarinfo.relativesrcdir:
            self.localedirs = self.generateLocaleDirs(jarinfo.relativesrcdir)

        for e in jarinfo.entries:
            self._processEntryLine(e, outHelper, jf)

        self.finalizeJar(
            jardir, jarinfo.base, jarinfo.name, chromebasepath, jarinfo.chrome_manifests
        )
        if jf is not None:
            jf.close()

    def _processEntryLine(self, e, outHelper, jf):
        """Copy, symlink or preprocess a single JarManifestEntry into the
        output helper. Wildcard sources recurse with one concrete entry per
        matched file."""
        out = e.output
        src = e.source

        # pick the right sourcedir -- l10n, topsrc or src

        if e.is_locale:
            # If the file is a Fluent l10n resource, we want to skip the
            # 'en-US' fallbacking.
            #
            # To achieve that, we're testing if we have more than one localedir,
            # and if the last of those has 'en-US' in it.
            # If that's the case, we're removing the last one.
            if (
                e.source.endswith(".ftl")
                and len(self.localedirs) > 1
                and "en-US" in self.localedirs[-1]
            ):
                src_base = self.localedirs[:-1]
            else:
                src_base = self.localedirs
        elif src.startswith("/"):
            # path/in/jar/file_name.xul (/path/in/sourcetree/file_name.xul)
            # refers to a path relative to topsourcedir, use that as base
            # and strip the leading '/'
            src_base = [self.topsourcedir]
            src = src[1:]
        else:
            # use srcdirs and the objdir (current working dir) for relative paths
            src_base = self.sourcedirs + [os.getcwd()]

        if "*" in src:
            # Wildcard entry: expand to one entry per matched file.

            def _prefix(s):
                # Non-wildcard leading path components; they are stripped
                # from matched paths before joining with the output path.
                for p in s.split("/"):
                    if "*" not in p:
                        yield p + "/"

            prefix = "".join(_prefix(src))
            emitted = set()
            for _srcdir in src_base:
                finder = FileFinder(_srcdir)
                for path, _ in finder.find(src):
                    # If the path was already seen in one of the other source
                    # directories, skip it. That matches the non-wildcard case
                    # below, where we pick the first existing file.
                    reduced_path = path[len(prefix) :]
                    if reduced_path in emitted:
                        continue
                    emitted.add(reduced_path)
                    e = JarManifestEntry(
                        mozpath.join(out, reduced_path),
                        path,
                        is_locale=e.is_locale,
                        preprocess=e.preprocess,
                    )
                    self._processEntryLine(e, outHelper, jf)
            return

        # check if the source file exists
        realsrc = None
        for _srcdir in src_base:
            if os.path.isfile(os.path.join(_srcdir, src)):
                realsrc = os.path.join(_srcdir, src)
                break
        if realsrc is None:
            # Close the jar before raising so the partial archive is flushed.
            if jf is not None:
                jf.close()
            raise RuntimeError(
                'File "{0}" not found in {1}'.format(src, ", ".join(src_base))
            )

        if out in self._seen_output:
            raise RuntimeError("%s already added" % out)
        self._seen_output.add(out)

        if e.preprocess:
            outf = outHelper.getOutput(out, mode="w")
            inf = io.open(realsrc, encoding="utf-8")
            pp = self.pp.clone()
            if src[-4:] == ".css":
                # CSS uses "%" as the preprocessor marker instead of "#".
                pp.setMarker("%")
            pp.out = outf
            pp.do_include(inf)
            pp.failUnused(realsrc)
            outf.close()
            inf.close()
            return

        # copy or symlink if newer

        if getModTime(realsrc) > outHelper.getDestModTime(e.output):
            if self.outputFormat == "symlink":
                outHelper.symlink(realsrc, out)
                return
            outf = outHelper.getOutput(out)

            # open in binary mode, this can be images etc

            inf = open(realsrc, "rb")
            outf.write(inf.read())
            outf.close()
            inf.close()

    class OutputHelper_jar(object):
        """Provide getDestModTime and getOutput for a given jarfile."""

        def __init__(self, jarfile):
            self.jarfile = jarfile

        def getDestModTime(self, aPath):
            try:
                info = self.jarfile.getinfo(aPath)
                # date_time is a (year, month, day, hour, min, sec) tuple;
                # callers compare it against the struct_time from
                # getModTime(), which shares those leading fields, so plain
                # tuple comparison works.
                return info.date_time
            except Exception:
                # Entry not present (or unreadable): treat as epoch-old.
                return localtime(0)

        def getOutput(self, name, mode="wb"):
            # mode is ignored; ZipEntry always buffers bytes.
            return ZipEntry(name, self.jarfile)

    class OutputHelper_flat(object):
        """Provide getDestModTime and getOutput for a given flat
        output directory. The helper method ensureDirFor is used by
        the symlink subclass.
        """

        def __init__(self, basepath):
            self.basepath = basepath

        def getDestModTime(self, aPath):
            return getModTime(os.path.join(self.basepath, aPath))

        def getOutput(self, name, mode="wb"):
            out = self.ensureDirFor(name)

            # remove previous link or file
            try:
                os.remove(out)
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise
            if "b" in mode:
                return io.open(out, mode)
            else:
                # Text mode: force UTF-8 and Unix newlines for determinism.
                return io.open(out, mode, encoding="utf-8", newline="\n")

        def ensureDirFor(self, name):
            """Return the full output path for name, creating its parent
            directory if needed."""
            out = os.path.join(self.basepath, name)
            outdir = os.path.dirname(out)
            if not os.path.isdir(outdir):
                try:
                    os.makedirs(outdir)
                except OSError as error:
                    if error.errno != errno.EEXIST:
                        raise
            return out

    class OutputHelper_symlink(OutputHelper_flat):
        """Subclass of OutputHelper_flat that provides a helper for
        creating a symlink including creating the parent directories.
        """

        def symlink(self, src, dest):
            out = self.ensureDirFor(dest)

            # remove previous link or file
            try:
                os.remove(out)
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise
            if sys.platform != "win32":
                os.symlink(src, out)
            else:
                # On Win32, use ctypes to create a hardlink
                rv = CreateHardLink(ensure_bytes(out), ensure_bytes(src), None)
                if rv == 0:
                    raise WinError()
+
+
def main(args=None):
    """Command line entry point for JarMaker.

    args defaults to sys.argv when not given; note that in that case
    args[0] (the program name) is also parsed, matching the historical
    behavior of this entry point.
    """
    args = args or sys.argv
    jm = JarMaker()
    p = jm.getCommandLineParser()
    (options, args) = p.parse_args(args)
    jm.outputFormat = options.f
    jm.sourcedirs = options.s
    jm.topsourcedir = options.t
    if options.e:
        jm.useChromeManifest = True
        jm.useJarfileManifest = False
    if options.l10n_base:
        if not options.relativesrcdir:
            p.error("relativesrcdir required when using l10n-base")
        if options.l10n_src:
            p.error("both l10n-src and l10n-base are not supported")
        jm.l10nbase = options.l10n_base
    jm.relativesrcdir = options.relativesrcdir
    jm.localedirs = options.l10n_src
    if options.root_manifest_entry_appid:
        jm.rootManifestAppId = options.root_manifest_entry_appid
    # -v forces DEBUG, -q forces WARN; otherwise default to INFO.
    noise = logging.INFO
    if options.verbose is not None:
        noise = options.verbose and logging.DEBUG or logging.WARN
    # The old `sys.version_info[:2] > (2, 3)` guard was always true on any
    # supported interpreter; basicConfig(format=...) is used unconditionally.
    # An unused normalization of options.t was also removed here.
    logging.basicConfig(format="%(message)s")
    logging.getLogger().setLevel(noise)
    if not args:
        infile = sys.stdin
    else:
        # Exactly one positional input file is expected.
        (infile,) = args
        infile = six.ensure_text(infile)
    jm.makeJar(infile, options.d)
diff --git a/python/mozbuild/mozbuild/mach_commands.py b/python/mozbuild/mozbuild/mach_commands.py
new file mode 100644
index 0000000000..2297d586b8
--- /dev/null
+++ b/python/mozbuild/mozbuild/mach_commands.py
@@ -0,0 +1,2941 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, # You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import errno
+import itertools
+import json
+import logging
+import operator
+import os
+import os.path
+import platform
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+import time
+from os import path
+from pathlib import Path
+
+import mozpack.path as mozpath
+import yaml
+from mach.decorators import (
+ Command,
+ CommandArgument,
+ CommandArgumentGroup,
+ SettingsProvider,
+ SubCommand,
+)
+from voluptuous import All, Boolean, Required, Schema
+
+import mozbuild.settings # noqa need @SettingsProvider hook to execute
+from mozbuild.base import (
+ BinaryNotFoundException,
+ BuildEnvironmentNotFoundException,
+ MozbuildObject,
+)
+from mozbuild.base import MachCommandConditions as conditions
+from mozbuild.util import MOZBUILD_METRICS_PATH
+
+here = os.path.abspath(os.path.dirname(__file__))
+
+EXCESSIVE_SWAP_MESSAGE = """
+===================
+PERFORMANCE WARNING
+
+Your machine experienced a lot of swap activity during the build. This is
+possibly a sign that your machine doesn't have enough physical memory or
+not enough available memory to perform the build. It's also possible some
+other system activity during the build is to blame.
+
+If you feel this message is not appropriate for your machine configuration,
+please file a Firefox Build System :: General bug at
+https://bugzilla.mozilla.org/enter_bug.cgi?product=Firefox%20Build%20System&component=General
+and tell us about your machine and build configuration so we can adjust the
+warning heuristic.
+===================
+"""
+
+
class StoreDebugParamsAndWarnAction(argparse.Action):
    """argparse action for the deprecated --debugparams flag.

    Behaves like a plain "store" action, but first prints a deprecation
    warning on stderr pointing users at --debugger-args.
    """

    def __call__(self, parser, namespace, values, option_string=None):
        deprecation_notice = (
            "The --debugparams argument is deprecated. Please "
            "use --debugger-args instead.\n\n"
        )
        sys.stderr.write(deprecation_notice)
        setattr(namespace, self.dest, values)
+
+
@Command(
    "watch",
    category="post-build",
    description="Watch and re-build (parts of) the tree.",
    conditions=[conditions.is_firefox],
    virtualenv_name="watch",
)
@CommandArgument(
    "-v",
    "--verbose",
    action="store_true",
    help="Verbose output for what commands the watcher is running.",
)
def watch(command_context, verbose=False):
    """Watch and re-build (parts of) the source tree.

    Returns 1 when watchman is unavailable, otherwise blocks in the
    faster-build daemon until interrupted.
    """
    # NOTE(review): `verbose` is accepted (and exposed via -v) but not
    # referenced in this body.
    if not conditions.is_artifact_build(command_context):
        # Warn but keep going: watching still covers the faster-build parts.
        print(
            "WARNING: mach watch only rebuilds the `mach build faster` parts of the tree!"
        )

    if not command_context.substs.get("WATCHMAN", None):
        print(
            "mach watch requires watchman to be installed and found at configure time. See "
            "https://developer.mozilla.org/docs/Mozilla/Developer_guide/Build_Instructions/Incremental_builds_with_filesystem_watching"  # noqa
        )
        return 1

    from mozbuild.faster_daemon import Daemon

    daemon = Daemon(command_context.config_environment)

    try:
        return daemon.watch()
    except KeyboardInterrupt:
        # Suppress ugly stack trace when user hits Ctrl-C.
        sys.exit(3)
+
+
+CARGO_CONFIG_NOT_FOUND_ERROR_MSG = """\
+The sub-command {subcommand} is not currently configured to be used with ./mach cargo.
+To do so, add the corresponding file in <mozilla-root-dir>/build/cargo, following other examples in this directory"""
+
+
def _cargo_config_yaml_schema():
    """Build the voluptuous Schema used to validate build/cargo/*.yaml
    configuration files for `mach cargo`."""

    def starts_with_cargo(s):
        # Validator: the command name must carry the "cargo-" prefix.
        if not s.startswith("cargo-"):
            raise ValueError
        return s

    return Schema(
        {
            # The name of the command (not checked for now, but maybe
            # later)
            Required("command"): All(str, starts_with_cargo),
            # Whether `make` should stop immediately in case
            # of error returned by the command. Default: False
            "continue_on_error": Boolean,
            # Whether this command requires pre_export and export build
            # targets to have run. Defaults to bool(cargo_build_flags).
            "requires_export": Boolean,
            # Build flags to use. If this variable is not
            # defined here, the build flags are generated automatically and are
            # the same as for `cargo build`. See available substitutions at the
            # end.
            "cargo_build_flags": [str],
            # Extra build flags to use. These flags are added
            # after the cargo_build_flags both when they are provided or
            # automatically generated. See available substitutions at the end.
            "cargo_extra_flags": [str],
            # Available substitutions for `cargo_*_flags`:
            # * {arch}: architecture target
            # * {crate}: current crate name
            # * {directory}: Directory of the current crate within the source tree
            # * {features}: Rust features (for `--features`)
            # * {manifest}: full path of `Cargo.toml` file
            # * {target}: `--lib` for library, `--bin CRATE` for executables
            # * {topsrcdir}: Top directory of sources
        }
    )
+
+
@Command(
    "cargo",
    category="build",
    description="Run `cargo <cargo_command>` on a given crate. Defaults to gkrust.",
    metrics_path=MOZBUILD_METRICS_PATH,
)
@CommandArgument(
    "cargo_command",
    default=None,
    help="Target to cargo, must be one of the commands in config/cargo/",
)
@CommandArgument(
    "--all-crates",
    action="store_true",
    help="Check all of the crates in the tree.",
)
@CommandArgument(
    "-p", "--package", default=None, help="The specific crate name to check."
)
@CommandArgument(
    "--jobs",
    "-j",
    default="0",
    nargs="?",
    metavar="jobs",
    type=int,
    help="Run the tests in parallel using multiple processes.",
)
@CommandArgument("-v", "--verbose", action="store_true", help="Verbose output.")
@CommandArgument(
    "--message-format-json",
    action="store_true",
    help="Emit error messages as JSON.",
)
@CommandArgument(
    "--continue-on-error",
    action="store_true",
    help="Do not return an error exit code if the subcommands errors out.",
)
@CommandArgument(
    "subcommand_args",
    nargs=argparse.REMAINDER,
    help="These arguments are passed as-is to the cargo subcommand.",
)
def cargo(
    command_context,
    cargo_command,
    all_crates=None,
    package=None,
    jobs=0,
    verbose=False,
    message_format_json=False,
    continue_on_error=False,
    subcommand_args=[],
):
    """Run a cargo subcommand, configured by build/cargo/cargo-<cmd>.yaml,
    against one or more in-tree crates via the make build glue.

    Returns 0 on success, non-zero on configuration or build failure.
    """
    # NOTE(review): subcommand_args uses a mutable default ([]); it is only
    # read and rebound below, never mutated, so this is harmless in practice.

    from mozbuild.controller.building import BuildDriver

    command_context.log_manager.enable_all_structured_loggers()

    topsrcdir = Path(mozpath.normpath(command_context.topsrcdir))
    cargodir = Path(topsrcdir / "build" / "cargo")

    # Load and validate the per-subcommand YAML configuration.
    cargo_command_basename = "cargo-" + cargo_command + ".yaml"
    cargo_command_fullname = Path(cargodir / cargo_command_basename)
    if path.exists(cargo_command_fullname):
        with open(cargo_command_fullname) as fh:
            yaml_config = yaml.load(fh, Loader=yaml.FullLoader)
        schema = _cargo_config_yaml_schema()
        schema(yaml_config)
        if not yaml_config:
            yaml_config = {}
    else:
        print(CARGO_CONFIG_NOT_FOUND_ERROR_MSG.format(subcommand=cargo_command))
        return 1

    # print("yaml_config = ", yaml_config)

    yaml_config.setdefault("continue_on_error", False)
    # The CLI flag or the YAML setting may each enable continue-on-error.
    continue_on_error = continue_on_error or yaml_config["continue_on_error"] is True

    # Flag lists from YAML are joined into single strings for make.
    cargo_build_flags = yaml_config.get("cargo_build_flags")
    if cargo_build_flags is not None:
        cargo_build_flags = " ".join(cargo_build_flags)
    cargo_extra_flags = yaml_config.get("cargo_extra_flags")
    if cargo_extra_flags is not None:
        cargo_extra_flags = " ".join(cargo_extra_flags)
    requires_export = yaml_config.get("requires_export", bool(cargo_build_flags))

    ret = 0
    if requires_export:
        # This directory is created during export. If it's not there,
        # export hasn't run already.
        deps = Path(command_context.topobjdir) / ".deps"
        if not deps.exists():
            build = command_context._spawn(BuildDriver)
            ret = build.build(
                command_context.metrics,
                what=["pre-export", "export"],
                jobs=jobs,
                verbose=verbose,
                mach_context=command_context._mach_context,
            )
    else:
        # No export needed; still make sure the tree is configured.
        try:
            command_context.config_environment
        except BuildEnvironmentNotFoundException:
            build = command_context._spawn(BuildDriver)
            ret = build.configure(
                command_context.metrics,
                buildstatus_messages=False,
            )
    if ret != 0:
        return ret

    # XXX duplication with `mach vendor rust`
    crates_and_roots = {
        "gkrust": {"directory": "toolkit/library/rust", "library": True},
        "gkrust-gtest": {"directory": "toolkit/library/gtest/rust", "library": True},
        "geckodriver": {"directory": "testing/geckodriver", "library": False},
    }

    if all_crates:
        crates = crates_and_roots.keys()
    elif package:
        crates = [package]
    else:
        crates = ["gkrust"]

    if subcommand_args:
        subcommand_args = " ".join(subcommand_args)

    for crate in crates:
        crate_info = crates_and_roots.get(crate, None)
        if not crate_info:
            print(
                "Cannot locate crate %s. Please check your spelling or "
                "add the crate information to the list." % crate
            )
            return 1

        # make targets implementing the cargo invocation for each flavor.
        targets = [
            "force-cargo-library-%s" % cargo_command,
            "force-cargo-host-library-%s" % cargo_command,
            "force-cargo-program-%s" % cargo_command,
            "force-cargo-host-program-%s" % cargo_command,
        ]

        directory = crate_info["directory"]
        # you can use these variables in 'cargo_build_flags'
        subst = {
            "arch": '"$(RUST_TARGET)"',
            "crate": crate,
            "directory": directory,
            "features": '"$(RUST_LIBRARY_FEATURES)"',
            "manifest": str(Path(topsrcdir / directory / "Cargo.toml")),
            "target": "--lib" if crate_info["library"] else "--bin " + crate,
            "topsrcdir": str(topsrcdir),
        }

        if subcommand_args:
            targets = targets + [
                "cargo_extra_cli_flags=%s" % (subcommand_args.format(**subst))
            ]
        if cargo_build_flags:
            targets = targets + [
                "cargo_build_flags=%s" % (cargo_build_flags.format(**subst))
            ]

        # Environment variables consumed by the make glue.
        append_env = {}
        if cargo_extra_flags:
            append_env["CARGO_EXTRA_FLAGS"] = cargo_extra_flags.format(**subst)
        if message_format_json:
            append_env["USE_CARGO_JSON_MESSAGE_FORMAT"] = "1"
        if continue_on_error:
            append_env["CARGO_CONTINUE_ON_ERROR"] = "1"
        if cargo_build_flags:
            append_env["CARGO_NO_AUTO_ARG"] = "1"
        else:
            append_env[
                "ADD_RUST_LTOABLE"
            ] = "force-cargo-library-{s:s} force-cargo-program-{s:s}".format(
                s=cargo_command
            )

        ret = command_context._run_make(
            srcdir=False,
            directory=directory,
            ensure_exit_code=0,
            silent=not verbose,
            print_directory=False,
            target=targets,
            num_jobs=jobs,
            append_env=append_env,
        )
        if ret != 0:
            return ret

    return 0
+
+
@SubCommand(
    "cargo",
    "vet",
    description="Run `cargo vet`.",
)
@CommandArgument("arguments", nargs=argparse.REMAINDER)
def cargo_vet(command_context, arguments, stdout=None, env=os.environ):
    """Run `cargo vet` in the top source directory with the given arguments.

    env defaults to the live os.environ; it is copied below before being
    modified, so the caller's environment is never mutated.
    """
    from mozbuild.bootstrap import bootstrap_toolchain

    # Logging of commands enables logging from `bootstrap_toolchain` that we
    # don't want to expose. Disable them temporarily.
    logger = logging.getLogger("gecko_taskgraph.generator")
    level = logger.getEffectiveLevel()
    logger.setLevel(logging.ERROR)

    env = env.copy()
    # Note: this local deliberately shadows the function name; it holds the
    # path to the bootstrapped cargo-vet toolchain, if any.
    cargo_vet = bootstrap_toolchain("cargo-vet")
    if cargo_vet:
        env["PATH"] = os.pathsep.join([cargo_vet, env["PATH"]])
    logger.setLevel(level)
    try:
        cargo = command_context.substs["CARGO"]
    except (BuildEnvironmentNotFoundException, KeyError):
        # Default if this tree isn't configured.
        from mozfile import which

        cargo = which("cargo", path=env["PATH"])
        if not cargo:
            raise OSError(
                errno.ENOENT,
                (
                    "Could not find 'cargo' on your $PATH. "
                    "Hint: have you run `mach build` or `mach configure`?"
                ),
            )

    locked = "--locked" in arguments
    if locked:
        # The use of --locked requires .cargo/config to exist, but other things,
        # like cargo update, don't want it there, so remove it once we're done.
        topsrcdir = Path(command_context.topsrcdir)
        shutil.copyfile(
            topsrcdir / ".cargo" / "config.in", topsrcdir / ".cargo" / "config"
        )

    try:
        res = subprocess.run(
            [cargo, "vet"] + arguments,
            cwd=command_context.topsrcdir,
            stdout=stdout,
            env=env,
        )
    finally:
        # Always clean up the temporary .cargo/config created above.
        if locked:
            (topsrcdir / ".cargo" / "config").unlink()

    # When the function is invoked without stdout set (the default when running
    # as a mach subcommand), exit with the returncode from cargo vet.
    # When the function is invoked with stdout (direct function call), return
    # the full result from subprocess.run.
    return res if stdout else res.returncode
+
+
@Command(
    "doctor",
    category="devenv",
    description="Diagnose and fix common development environment issues.",
)
@CommandArgument(
    "--fix",
    default=False,
    action="store_true",
    help="Attempt to fix found problems.",
)
@CommandArgument(
    "--verbose",
    default=False,
    action="store_true",
    help="Print verbose information found by checks.",
)
def doctor(command_context, fix=False, verbose=False):
    """Diagnose common build environment problems"""
    from mozbuild.doctor import run_doctor

    # Gather the tree locations and mozconfig arguments the checks need,
    # then delegate entirely to run_doctor.
    doctor_kwargs = {
        "topsrcdir": command_context.topsrcdir,
        "topobjdir": command_context.topobjdir,
        "configure_args": command_context.mozconfig["configure_args"],
        "fix": fix,
        "verbose": verbose,
    }
    return run_doctor(**doctor_kwargs)
+
+
+CLOBBER_CHOICES = {"objdir", "python", "gradle"}
+
+
@Command(
    "clobber",
    category="build",
    description="Clobber the tree (delete the object directory).",
    no_auto_log=True,
)
@CommandArgument(
    "what",
    default=["objdir", "python"],
    nargs="*",
    help="Target to clobber, must be one of {{{}}} (default "
    "objdir and python).".format(", ".join(CLOBBER_CHOICES)),
)
@CommandArgument("--full", action="store_true", help="Perform a full clobber")
def clobber(command_context, what, full=False):
    """Clean up the source and object directories.

    Performing builds and running various commands generate various files.

    Sometimes it is necessary to clean up these files in order to make
    things work again. This command can be used to perform that cleanup.

    The `objdir` target removes most files in the current object directory
    (where build output is stored). Some files (like Visual Studio project
    files) are not removed by default. If you would like to remove the
    object directory in its entirety, run with `--full`.

    The `python` target will clean up Python's generated files (virtualenvs,
    ".pyc", "__pycache__", etc).

    The `gradle` target will remove the "gradle" subdirectory of the object
    directory.

    By default, the command clobbers the `objdir` and `python` targets.
    """
    # Validate requested targets before deleting anything.
    what = set(what)
    invalid = what - CLOBBER_CHOICES
    if invalid:
        print(
            "Unknown clobber target(s): {}. Choose from {{{}}}".format(
                ", ".join(invalid), ", ".join(CLOBBER_CHOICES)
            )
        )
        return 1

    ret = 0
    if "objdir" in what:
        from mozbuild.controller.clobber import Clobberer

        try:
            substs = command_context.substs
        except BuildEnvironmentNotFoundException:
            # No configured build environment yet; proceed with empty substs.
            substs = {}

        try:
            Clobberer(
                command_context.topsrcdir, command_context.topobjdir, substs
            ).remove_objdir(full)
        except OSError as e:
            if sys.platform.startswith("win"):
                # winerror 5/32: a file is locked/in use (per the user-facing
                # message below), so report cleanly instead of tracebacking.
                if isinstance(e, WindowsError) and e.winerror in (5, 32):
                    command_context.log(
                        logging.ERROR,
                        "file_access_error",
                        {"error": e},
                        "Could not clobber because a file was in use. If the "
                        "application is running, try closing it. {error}",
                    )
                    return 1
            raise

    if "python" in what:
        # Prefer the VCS's purge facility so ignored files are handled
        # consistently; fall back to a plain `find` otherwise.
        if conditions.is_hg(command_context):
            cmd = [
                "hg",
                "--config",
                "extensions.purge=",
                "purge",
                "--all",
                "-I",
                "glob:**.py[cdo]",
                "-I",
                "glob:**/__pycache__",
            ]
        elif conditions.is_git(command_context):
            cmd = ["git", "clean", "-d", "-f", "-x", "*.py[cdo]", "*/__pycache__/*"]
        else:
            # NOTE(review): assumes a POSIX `find` on PATH — confirm behavior
            # for non-VCS checkouts on Windows.
            cmd = ["find", ".", "-type", "f", "-name", "*.py[cdo]", "-delete"]
        subprocess.call(cmd, cwd=command_context.topsrcdir)
        # Second pass: remove the now-empty __pycache__ directories.
        cmd = [
            "find",
            ".",
            "-type",
            "d",
            "-name",
            "__pycache__",
            "-empty",
            "-delete",
        ]
        ret = subprocess.call(cmd, cwd=command_context.topsrcdir)
        shutil.rmtree(
            mozpath.join(command_context.topobjdir, "_virtualenvs"),
            ignore_errors=True,
        )

    if "gradle" in what:
        shutil.rmtree(
            mozpath.join(command_context.topobjdir, "gradle"), ignore_errors=True
        )

    return ret
+
+
@Command(
    "show-log", category="post-build", description="Display mach logs", no_auto_log=True
)
@CommandArgument(
    "log_file",
    nargs="?",
    type=argparse.FileType("rb"),
    help="Filename to read log data from. Defaults to the log of the last "
    "mach command.",
)
def show_log(command_context, log_file=None):
    """Show mach logs.

    If we're in a terminal context, the log is piped to 'less'
    for more convenient viewing.
    (https://man7.org/linux/man-pages/man1/less.1.html)

    :param log_file: binary file object containing the JSON-lines log.
        Defaults to the log of the last mach command.
    """
    if not log_file:
        path = command_context._get_state_filename("last_log.json")
        log_file = open(path, "rb")

    try:
        if os.isatty(sys.stdout.fileno()):
            env = dict(os.environ)
            if "LESS" not in env:
                # Sensible default flags if none have been set in the user
                # environment.
                env["LESS"] = "FRX"
            less = subprocess.Popen(
                ["less"], stdin=subprocess.PIPE, env=env, encoding="UTF-8"
            )

            # Track what we swap out so the finally block below can restore it
            # even if something fails before the swap happens (previously this
            # could raise NameError from the finally block).
            original_handler = None
            original_logging_raise_exceptions = logging.raiseExceptions

            try:
                # Create a new logger handler with the stream being the stdin
                # of our 'less' process so that we can pipe the logger output
                # into 'less'.
                less_handler = logging.StreamHandler(stream=less.stdin)
                less_handler.setFormatter(
                    command_context.log_manager.terminal_handler.formatter
                )
                less_handler.setLevel(
                    command_context.log_manager.terminal_handler.level
                )

                # Replace the existing terminal handler with the new one for
                # 'less' while still keeping the original one to set back
                # later.
                original_handler = command_context.log_manager.replace_terminal_handler(
                    less_handler
                )

                # We need to explicitly disable raising exceptions inside
                # logging so that we can catch them here ourselves to ignore
                # the ones we want.
                logging.raiseExceptions = False

                # Parses the log file line by line and streams
                # (to less.stdin) the relevant records we want.
                handle_log_file(command_context, log_file)

                # At this point we've piped the entire log file to
                # 'less', so we can close the input stream.
                less.stdin.close()

                # Wait for the user to manually terminate `less`.
                less.wait()
            except OSError as os_error:
                # (POSIX) errno.EPIPE: BrokenPipeError: [Errno 32] Broken pipe
                # (Windows) errno.EINVAL: OSError: [Errno 22] Invalid argument
                #
                # If the user manually terminates 'less' before the entire log
                # file is piped (without scrolling close enough to the bottom)
                # we will get one of these errors (depends on the OS) because
                # the logger will still attempt to stream to the now invalid
                # less.stdin. To prevent a bunch of errors being shown after a
                # user terminates 'less', we just catch the first of those
                # exceptions here, and stop parsing the log file.
                if os_error.errno not in (errno.EPIPE, errno.EINVAL):
                    raise
            finally:
                # Ensure these values are changed back to the originals,
                # regardless of outcome.
                if original_handler is not None:
                    command_context.log_manager.replace_terminal_handler(
                        original_handler
                    )
                logging.raiseExceptions = original_logging_raise_exceptions
        else:
            # Not in a terminal context, so just handle the log file with the
            # default stream without piping it to a pager (less).
            handle_log_file(command_context, log_file)
    finally:
        # The file was opened either above or by argparse.FileType; close it
        # in both cases instead of leaking the descriptor.
        log_file.close()
+
+
def handle_log_file(command_context, log_file):
    """Replay a structured mach log through the current logger.

    Each line of *log_file* is a JSON array ``[created, action, params]``.
    The first record's timestamp anchors the terminal formatter's relative
    time display; every record carrying a ``"line"`` param is re-emitted as
    an INFO-level log record.
    """
    logger = command_context._logger
    formatter = command_context.log_manager.terminal_handler.formatter

    anchor = 0
    for raw_line in log_file:
        created, action, params = json.loads(raw_line)
        if not anchor:
            # First record seen: remember it and anchor the formatter.
            anchor = created
            formatter.start_time = created
        if "line" not in params:
            continue
        logger.handle(
            logging.makeLogRecord(
                {
                    "created": created,
                    "name": logger.name,
                    "levelno": logging.INFO,
                    "msg": "{line}",
                    "params": params,
                    "action": action,
                }
            )
        )
+
+
+# Provide commands for inspecting warnings.
+
+
def database_path(command_context):
    """Return the path of the compiler-warnings database in the mach state dir."""
    filename = "warnings.json"
    return command_context._get_state_filename(filename)
+
+
def get_warnings_database(command_context):
    """Load the persisted WarningsDatabase, or return an empty one if the
    database file does not exist yet."""
    from mozbuild.compilation.warnings import WarningsDatabase

    database = WarningsDatabase()

    db_path = database_path(command_context)
    if os.path.exists(db_path):
        database.load_from_file(db_path)

    return database
+
+
@Command(
    "warnings-summary",
    category="post-build",
    description="Show a summary of compiler warnings.",
)
@CommandArgument(
    "-C",
    "--directory",
    default=None,
    help="Change to a subdirectory of the build directory first.",
)
@CommandArgument(
    "report",
    default=None,
    nargs="?",
    help="Warnings report to display. If not defined, show the most recent report.",
)
def summary(command_context, directory=None, report=None):
    """Print a per-flag count of compiler warnings plus a grand total."""
    database = get_warnings_database(command_context)

    # Optionally restrict the summary to a subdirectory of the source tree.
    dirpath = None
    if directory:
        dirpath = join_ensure_dir(command_context.topsrcdir, directory)
        if not dirpath:
            return 1

    counts = database.type_counts(dirpath)

    total = 0
    for warning_type, count in sorted(counts.items(), key=operator.itemgetter(1)):
        print("%d\t%s" % (count, warning_type))
        total += count

    print("%d\tTotal" % total)
+
+
@Command(
    "warnings-list",
    category="post-build",
    description="Show a list of compiler warnings.",
)
@CommandArgument(
    "-C",
    "--directory",
    default=None,
    help="Change to a subdirectory of the build directory first.",
)
@CommandArgument(
    "--flags", default=None, nargs="+", help="Which warnings flags to match."
)
@CommandArgument(
    "report",
    default=None,
    nargs="?",
    help="Warnings report to display. If not defined, show the most recent report.",
)
def list_warnings(command_context, directory=None, flags=None, report=None):
    """Print each recorded compiler warning, one per line, optionally
    filtered by source subdirectory and/or warning flag.
    """
    database = get_warnings_database(command_context)

    by_name = sorted(database.warnings)

    topsrcdir = mozpath.normpath(command_context.topsrcdir)

    if directory:
        # Normalize and validate the filter directory; fail if it's missing.
        directory = mozpath.normsep(directory)
        dirpath = join_ensure_dir(topsrcdir, directory)
        if not dirpath:
            return 1

    if flags:
        # Flatten lists of flags.
        flags = set(itertools.chain(*[flaglist.split(",") for flaglist in flags]))

    for warning in by_name:
        filename = mozpath.normsep(warning["filename"])

        if filename.startswith(topsrcdir):
            # Strip the topsrcdir prefix (and its trailing separator) so
            # output paths are relative to the source tree.
            filename = filename[len(topsrcdir) + 1 :]

        if directory and not filename.startswith(directory):
            continue

        if flags and warning["flag"] not in flags:
            continue

        # Include the column when the compiler reported one.
        if warning["column"] is not None:
            print(
                "%s:%d:%d [%s] %s"
                % (
                    filename,
                    warning["line"],
                    warning["column"],
                    warning["flag"],
                    warning["message"],
                )
            )
        else:
            print(
                "%s:%d [%s] %s"
                % (filename, warning["line"], warning["flag"], warning["message"])
            )
+
+
def join_ensure_dir(dir1, dir2):
    """Join two path fragments and return the result only if it names an
    existing directory; otherwise print an error and return None."""
    candidate = mozpath.join(mozpath.normpath(dir1), mozpath.normsep(dir2))
    if not os.path.isdir(candidate):
        print("Specified directory not found.")
        return None
    return candidate
+
+
@Command("gtest", category="testing", description="Run GTest unit tests (C++ tests).")
@CommandArgument(
    "gtest_filter",
    default="*",
    nargs="?",
    metavar="gtest_filter",
    help="test_filter is a ':'-separated list of wildcard patterns "
    "(called the positive patterns), optionally followed by a '-' "
    "and another ':'-separated pattern list (called the negative patterns)."
    "Test names are of the format SUITE.NAME. Use --list-tests to see all.",
)
@CommandArgument("--list-tests", action="store_true", help="list all available tests")
@CommandArgument(
    "--jobs",
    "-j",
    default="1",
    nargs="?",
    metavar="jobs",
    type=int,
    help="Run the tests in parallel using multiple processes.",
)
@CommandArgument(
    "--tbpl-parser",
    "-t",
    action="store_true",
    help="Output test results in a format that can be parsed by TBPL.",
)
@CommandArgument(
    "--shuffle",
    "-s",
    action="store_true",
    help="Randomize the execution order of tests.",
)
@CommandArgument(
    "--enable-webrender",
    action="store_true",
    default=False,
    dest="enable_webrender",
    help="Enable the WebRender compositor in Gecko.",
)
@CommandArgumentGroup("Android")
@CommandArgument(
    "--package",
    default="org.mozilla.geckoview.test_runner",
    group="Android",
    help="Package name of test app.",
)
@CommandArgument(
    "--adbpath", dest="adb_path", group="Android", help="Path to adb binary."
)
@CommandArgument(
    "--deviceSerial",
    dest="device_serial",
    group="Android",
    help="adb serial number of remote device. "
    "Required when more than one device is connected to the host. "
    "Use 'adb devices' to see connected devices.",
)
@CommandArgument(
    "--remoteTestRoot",
    dest="remote_test_root",
    group="Android",
    help="Remote directory to use as test root (eg. /data/local/tmp/test_root).",
)
@CommandArgument(
    "--libxul", dest="libxul_path", group="Android", help="Path to gtest libxul.so."
)
@CommandArgument(
    "--no-install",
    action="store_true",
    default=False,
    group="Android",
    help="Skip the installation of the APK.",
)
@CommandArgumentGroup("debugging")
@CommandArgument(
    "--debug",
    action="store_true",
    group="debugging",
    help="Enable the debugger. Not specifying a --debugger option will result in "
    "the default debugger being used.",
)
@CommandArgument(
    "--debugger",
    default=None,
    type=str,
    group="debugging",
    help="Name of debugger to use.",
)
@CommandArgument(
    "--debugger-args",
    default=None,
    metavar="params",
    type=str,
    group="debugging",
    help="Command-line arguments to pass to the debugger itself; "
    "split as the Bourne shell would.",
)
def gtest(
    command_context,
    shuffle,
    jobs,
    gtest_filter,
    list_tests,
    tbpl_parser,
    enable_webrender,
    package,
    adb_path,
    device_serial,
    remote_test_root,
    libxul_path,
    no_install,
    debug,
    debugger,
    debugger_args,
):
    """Run the GTest (C++) unit tests against the current build.

    Builds the gtest-enabled libxul first, then either dispatches to the
    Android runner (android_gtest) or launches the desktop binary with
    -unittest, optionally sharded across multiple processes via --jobs.
    Returns a process exit code (0 on success).
    """

    # We lazy build gtest because it's slow to link
    try:
        command_context.config_environment
    except Exception:
        print("Please run |./mach build| before |./mach gtest|.")
        return 1

    res = command_context._mach_context.commands.dispatch(
        "build", command_context._mach_context, what=["recurse_gtest"]
    )
    if res:
        print("Could not build xul-gtest")
        return res

    # On macOS the app bundle must be repackaged so the gtest libxul is
    # picked up.
    if command_context.substs.get("MOZ_WIDGET_TOOLKIT") == "cocoa":
        command_context._run_make(
            directory="browser/app", target="repackage", ensure_exit_code=True
        )

    cwd = os.path.join(command_context.topobjdir, "_tests", "gtest")

    if not os.path.isdir(cwd):
        os.makedirs(cwd)

    if conditions.is_android(command_context):
        # Android supports neither sharding nor the desktop debugger options.
        if jobs != 1:
            print("--jobs is not supported on Android and will be ignored")
        if debug or debugger or debugger_args:
            print("--debug options are not supported on Android and will be ignored")
        from mozrunner.devices.android_device import InstallIntent

        return android_gtest(
            command_context,
            cwd,
            shuffle,
            gtest_filter,
            package,
            adb_path,
            device_serial,
            remote_test_root,
            libxul_path,
            InstallIntent.NO if no_install else InstallIntent.YES,
        )

    # Desktop path from here on: warn about Android-only options.
    if (
        package
        or adb_path
        or device_serial
        or remote_test_root
        or libxul_path
        or no_install
    ):
        print("One or more Android-only options will be ignored")

    app_path = command_context.get_binary_path("app")
    args = [app_path, "-unittest", "--gtest_death_test_style=threadsafe"]

    if (
        sys.platform.startswith("win")
        and "MOZ_LAUNCHER_PROCESS" in command_context.defines
    ):
        args.append("--wait-for-browser")

    if list_tests:
        args.append("--gtest_list_tests")

    if debug or debugger or debugger_args:
        args = _prepend_debugger_args(args, debugger, debugger_args)
        if not args:
            return 1

    # Use GTest environment variable to control test execution
    # For details see:
    # https://google.github.io/googletest/advanced.html#running-test-programs-advanced-options
    gtest_env = {"GTEST_FILTER": gtest_filter}

    # Note: we must normalize the path here so that gtest on Windows sees
    # a MOZ_GMP_PATH which has only Windows dir seperators, because
    # nsIFile cannot open the paths with non-Windows dir seperators.
    xre_path = os.path.join(os.path.normpath(command_context.topobjdir), "dist", "bin")
    gtest_env["MOZ_XRE_DIR"] = xre_path
    gtest_env["MOZ_GMP_PATH"] = os.pathsep.join(
        os.path.join(xre_path, p, "1.0") for p in ("gmp-fake", "gmp-fakeopenh264")
    )

    gtest_env["MOZ_RUN_GTEST"] = "True"

    if shuffle:
        gtest_env["GTEST_SHUFFLE"] = "True"

    if tbpl_parser:
        gtest_env["MOZ_TBPL_PARSER"] = "True"

    if enable_webrender:
        gtest_env["MOZ_WEBRENDER"] = "1"
        gtest_env["MOZ_ACCELERATED"] = "1"
    else:
        gtest_env["MOZ_WEBRENDER"] = "0"

    # Single-job runs go through run_process with output passed through.
    if jobs == 1:
        return command_context.run_process(
            args=args,
            append_env=gtest_env,
            cwd=cwd,
            ensure_exit_code=False,
            pass_thru=True,
        )

    import functools

    from mozprocess import ProcessHandlerMixin

    def handle_line(job_id, line):
        # Prepend the jobId
        line = "[%d] %s" % (job_id + 1, line.strip())
        command_context.log(logging.INFO, "GTest", {"line": line}, "{line}")

    # Parallel runs use GTest's built-in sharding (one shard per process).
    gtest_env["GTEST_TOTAL_SHARDS"] = str(jobs)
    processes = {}
    for i in range(0, jobs):
        gtest_env["GTEST_SHARD_INDEX"] = str(i)
        processes[i] = ProcessHandlerMixin(
            [app_path, "-unittest"],
            cwd=cwd,
            env=gtest_env,
            processOutputLine=[functools.partial(handle_line, i)],
            universal_newlines=True,
        )
        processes[i].run()

    # The exit code is the last nonzero shard status, if any.
    exit_code = 0
    for process in processes.values():
        status = process.wait()
        if status:
            exit_code = status

    # Clamp error code to 255 to prevent overflowing multiple of
    # 256 into 0
    if exit_code > 255:
        exit_code = 255

    return exit_code
+
+
def android_gtest(
    command_context,
    test_dir,
    shuffle,
    gtest_filter,
    package,
    adb_path,
    device_serial,
    remote_test_root,
    libxul_path,
    install,
):
    """Run the GTest suite on a connected Android device.

    Sets up mozlog logging, verifies (and optionally installs) the test app
    on the device, fills in default paths for adb and the gtest libxul.so,
    then delegates the actual run to testing/gtest/remotegtests.py.

    :param install: an InstallIntent value controlling whether the APK is
        (re)installed before running.
    :return: 0 if the remote gtest run succeeded, 1 otherwise.
    """
    # setup logging for mozrunner
    from mozlog.commandline import setup_logging

    format_args = {"level": command_context._mach_context.settings["test"]["level"]}
    default_format = command_context._mach_context.settings["test"]["format"]
    setup_logging("mach-gtest", {}, {default_format: sys.stdout}, format_args)

    # ensure that a device is available and test app is installed
    from mozrunner.devices.android_device import get_adb_path, verify_android_device

    verify_android_device(
        command_context, install=install, app=package, device_serial=device_serial
    )

    if not adb_path:
        adb_path = get_adb_path(command_context)
    if not libxul_path:
        libxul_path = os.path.join(
            command_context.topobjdir, "dist", "bin", "gtest", "libxul.so"
        )

    # Load remotegtests.py from the source tree. importlib replaces the
    # deprecated `imp` module (removed in Python 3.12) previously used here;
    # registering the module in sys.modules preserves the old
    # `import remotegtests` visibility for any downstream code.
    import importlib.util

    path = os.path.join("testing", "gtest", "remotegtests.py")
    spec = importlib.util.spec_from_file_location("remotegtests", path)
    remotegtests = importlib.util.module_from_spec(spec)
    sys.modules["remotegtests"] = remotegtests
    spec.loader.exec_module(remotegtests)

    # run gtest via remotegtests.py
    exit_code = 0
    tester = remotegtests.RemoteGTests()
    if not tester.run_gtest(
        test_dir,
        shuffle,
        gtest_filter,
        package,
        adb_path,
        device_serial,
        remote_test_root,
        libxul_path,
        None,
    ):
        exit_code = 1
    tester.cleanup()

    return exit_code
+
+
@Command(
    "package",
    category="post-build",
    description="Package the built product for distribution as an APK, DMG, etc.",
)
@CommandArgument(
    "-v",
    "--verbose",
    action="store_true",
    help="Verbose output for what commands the packaging process is running.",
)
def package(command_context, verbose=False):
    """Package the built product for distribution.

    Runs the top-level `package` make target and notifies the user on
    success; returns make's exit status.
    """
    status = command_context._run_make(
        directory=".", target="package", silent=not verbose, ensure_exit_code=False
    )
    if status == 0:
        command_context.notify("Packaging complete")
    return status
+
+
+def _get_android_install_parser():
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ "--app",
+ default="org.mozilla.geckoview_example",
+ help="Android package to install (default: org.mozilla.geckoview_example)",
+ )
+ parser.add_argument(
+ "--verbose",
+ "-v",
+ action="store_true",
+ help="Print verbose output when installing.",
+ )
+ parser.add_argument(
+ "--aab",
+ action="store_true",
+ help="Install as AAB (Android App Bundle)",
+ )
+ return parser
+
+
def setup_install_parser():
    """Select the `mach install` argument parser for the current build.

    Android builds get the Android-specific options; all other builds take
    no extra arguments.
    """
    build = MozbuildObject.from_environment(cwd=here)
    if not conditions.is_android(build):
        return argparse.ArgumentParser()
    return _get_android_install_parser()
+
+
@Command(
    "install",
    category="post-build",
    conditions=[conditions.has_build],
    parser=setup_install_parser,
    description="Install the package on the machine (or device in the case of Android).",
)
def install(command_context, **kwargs):
    """Install a package.

    On Android the APK is installed on the connected device; elsewhere the
    top-level `install` make target is run.
    """
    if conditions.is_android(command_context):
        from mozrunner.devices.android_device import (
            InstallIntent,
            verify_android_device,
        )

        # NOTE(review): this yields a bool (True when verify succeeds), so
        # the `== 0` success check below is only hit on failure — preserved
        # from the original; confirm intent upstream.
        status = (
            verify_android_device(command_context, install=InstallIntent.YES, **kwargs)
            == 0
        )
    else:
        status = command_context._run_make(
            directory=".", target="install", ensure_exit_code=False
        )

    if status == 0:
        command_context.notify("Install complete")
    return status
+
+
@SettingsProvider
class RunSettings:
    """Declares the user-configurable machrc settings consumed by `mach run`."""

    # Each entry is (setting glob, type, user-facing description).
    config_settings = [
        (
            "runprefs.*",
            "string",
            """
Pass a pref into Firefox when using `mach run`, of the form `foo.bar=value`.
Prefs will automatically be cast into the appropriate type. Integers can be
single quoted to force them to be strings.
""".strip(),
        )
    ]
+
+
+def _get_android_run_parser():
+ parser = argparse.ArgumentParser()
+ group = parser.add_argument_group("The compiled program")
+ group.add_argument(
+ "--app",
+ default="org.mozilla.geckoview_example",
+ help="Android package to run (default: org.mozilla.geckoview_example)",
+ )
+ group.add_argument(
+ "--intent",
+ default="android.intent.action.VIEW",
+ help="Android intent action to launch with "
+ "(default: android.intent.action.VIEW)",
+ )
+ group.add_argument(
+ "--setenv",
+ dest="env",
+ action="append",
+ default=[],
+ help="Set target environment variable, like FOO=BAR",
+ )
+ group.add_argument(
+ "--profile",
+ "-P",
+ default=None,
+ help="Path to Gecko profile, like /path/to/host/profile "
+ "or /path/to/target/profile",
+ )
+ group.add_argument("--url", default=None, help="URL to open")
+ group.add_argument(
+ "--aab",
+ action="store_true",
+ default=False,
+ help="Install app ass App Bundle (AAB).",
+ )
+ group.add_argument(
+ "--no-install",
+ action="store_true",
+ default=False,
+ help="Do not try to install application on device before running "
+ "(default: False)",
+ )
+ group.add_argument(
+ "--no-wait",
+ action="store_true",
+ default=False,
+ help="Do not wait for application to start before returning "
+ "(default: False)",
+ )
+ group.add_argument(
+ "--enable-fission",
+ action="store_true",
+ help="Run the program with Fission (site isolation) enabled.",
+ )
+ group.add_argument(
+ "--fail-if-running",
+ action="store_true",
+ default=False,
+ help="Fail if application is already running (default: False)",
+ )
+ group.add_argument(
+ "--restart",
+ action="store_true",
+ default=False,
+ help="Stop the application if it is already running (default: False)",
+ )
+
+ group = parser.add_argument_group("Debugging")
+ group.add_argument("--debug", action="store_true", help="Enable the lldb debugger.")
+ group.add_argument(
+ "--debugger",
+ default=None,
+ type=str,
+ help="Name of lldb compatible debugger to use.",
+ )
+ group.add_argument(
+ "--debugger-args",
+ default=None,
+ metavar="params",
+ type=str,
+ help="Command-line arguments to pass to the debugger itself; "
+ "split as the Bourne shell would.",
+ )
+ group.add_argument(
+ "--no-attach",
+ action="store_true",
+ default=False,
+ help="Start the debugging servers on the device but do not "
+ "attach any debuggers.",
+ )
+ group.add_argument(
+ "--use-existing-process",
+ action="store_true",
+ default=False,
+ help="Select an existing process to debug.",
+ )
+ return parser
+
+
def _get_jsshell_run_parser():
    """Build the argument parser used by `mach run` for JS-shell-only builds."""
    parser = argparse.ArgumentParser()

    program_group = parser.add_argument_group("the compiled program")
    program_group.add_argument(
        "params",
        nargs="...",
        default=[],
        help="Command-line arguments to be passed through to the program. Not "
        "specifying a --profile or -P option will result in a temporary profile "
        "being used.",
    )

    debug_group = parser.add_argument_group("debugging")
    debug_group.add_argument(
        "--debug",
        action="store_true",
        help="Enable the debugger. Not specifying a --debugger option will result "
        "in the default debugger being used.",
    )
    debug_group.add_argument(
        "--debugger", default=None, type=str, help="Name of debugger to use."
    )
    debug_group.add_argument(
        "--debugger-args",
        default=None,
        metavar="params",
        type=str,
        help="Command-line arguments to pass to the debugger itself; "
        "split as the Bourne shell would.",
    )
    # Deprecated spelling of --debugger-args; hidden from --help output.
    debug_group.add_argument(
        "--debugparams",
        action=StoreDebugParamsAndWarnAction,
        default=None,
        type=str,
        dest="debugger_args",
        help=argparse.SUPPRESS,
    )

    return parser
+
+
def _get_desktop_run_parser():
    """Build the argument parser used by `mach run` for desktop builds.

    Three option groups: the compiled program (launch/profile/pref options),
    debugging (debugger selection and arguments), and DMD (dark matter
    detector heap profiling).
    """
    parser = argparse.ArgumentParser()
    group = parser.add_argument_group("the compiled program")
    group.add_argument(
        "params",
        nargs="...",
        default=[],
        help="Command-line arguments to be passed through to the program. Not "
        "specifying a --profile or -P option will result in a temporary profile "
        "being used.",
    )
    group.add_argument("--packaged", action="store_true", help="Run a packaged build.")
    group.add_argument(
        "--app", help="Path to executable to run (default: output of ./mach build)"
    )
    group.add_argument(
        "--remote",
        "-r",
        action="store_true",
        help="Do not pass the --no-remote argument by default.",
    )
    group.add_argument(
        "--background",
        "-b",
        action="store_true",
        help="Do not pass the --foreground argument by default on Mac.",
    )
    group.add_argument(
        "--noprofile",
        "-n",
        action="store_true",
        help="Do not pass the --profile argument by default.",
    )
    group.add_argument(
        "--disable-e10s",
        action="store_true",
        help="Run the program with electrolysis disabled.",
    )
    group.add_argument(
        "--enable-crash-reporter",
        action="store_true",
        help="Run the program with the crash reporter enabled.",
    )
    group.add_argument(
        "--disable-fission",
        action="store_true",
        help="Run the program with Fission (site isolation) disabled.",
    )
    group.add_argument(
        "--setpref",
        action="append",
        default=[],
        help="Set the specified pref before starting the program. Can be set "
        "multiple times. Prefs can also be set in ~/.mozbuild/machrc in the "
        "[runprefs] section - see `./mach settings` for more information.",
    )
    group.add_argument(
        "--temp-profile",
        action="store_true",
        help="Run the program using a new temporary profile created inside "
        "the objdir.",
    )
    group.add_argument(
        "--macos-open",
        action="store_true",
        help="On macOS, run the program using the open(1) command. Per open(1), "
        "the browser is launched \"just as if you had double-clicked the file's "
        'icon". The browser can not be launched under a debugger with this '
        "option.",
    )

    group = parser.add_argument_group("debugging")
    group.add_argument(
        "--debug",
        action="store_true",
        help="Enable the debugger. Not specifying a --debugger option will result "
        "in the default debugger being used.",
    )
    group.add_argument(
        "--debugger", default=None, type=str, help="Name of debugger to use."
    )
    group.add_argument(
        "--debugger-args",
        default=None,
        metavar="params",
        type=str,
        help="Command-line arguments to pass to the debugger itself; "
        "split as the Bourne shell would.",
    )
    # Deprecated alias for --debugger-args; hidden from --help output.
    group.add_argument(
        "--debugparams",
        action=StoreDebugParamsAndWarnAction,
        default=None,
        type=str,
        dest="debugger_args",
        help=argparse.SUPPRESS,
    )

    group = parser.add_argument_group("DMD")
    group.add_argument(
        "--dmd",
        action="store_true",
        help="Enable DMD. The following arguments have no effect without this.",
    )
    group.add_argument(
        "--mode",
        choices=["live", "dark-matter", "cumulative", "scan"],
        help="Profiling mode. The default is 'dark-matter'.",
    )
    group.add_argument(
        "--stacks",
        choices=["partial", "full"],
        help="Allocation stack trace coverage. The default is 'partial'.",
    )
    group.add_argument(
        "--show-dump-stats", action="store_true", help="Show stats when doing dumps."
    )

    return parser
+
+
def setup_run_parser():
    """Pick the `mach run` argument parser matching the current build type."""
    build = MozbuildObject.from_environment(cwd=here)
    if conditions.is_android(build):
        parser_factory = _get_android_run_parser
    elif conditions.is_jsshell(build):
        parser_factory = _get_jsshell_run_parser
    else:
        parser_factory = _get_desktop_run_parser
    return parser_factory()
+
+
@Command(
    "run",
    category="post-build",
    conditions=[conditions.has_build_or_shell],
    parser=setup_run_parser,
    description="Run the compiled program, possibly under a debugger or DMD.",
)
def run(command_context, **kwargs):
    """Run the compiled program.

    Dispatches to the Android, JS-shell, or desktop runner depending on the
    build type; **kwargs are the options produced by setup_run_parser.
    """
    if conditions.is_android(command_context):
        return _run_android(command_context, **kwargs)
    if conditions.is_jsshell(command_context):
        return _run_jsshell(command_context, **kwargs)
    return _run_desktop(command_context, **kwargs)
+
+
+def _run_android(
+ command_context,
+ app="org.mozilla.geckoview_example",
+ intent=None,
+ env=[],
+ profile=None,
+ url=None,
+ aab=False,
+ no_install=None,
+ no_wait=None,
+ fail_if_running=None,
+ restart=None,
+ enable_fission=False,
+ debug=False,
+ debugger=None,
+ debugger_args=None,
+ no_attach=False,
+ use_existing_process=False,
+):
+ from mozrunner.devices.android_device import (
+ InstallIntent,
+ _get_device,
+ verify_android_device,
+ )
+ from six.moves import shlex_quote
+
+ if app == "org.mozilla.geckoview_example":
+ activity_name = "org.mozilla.geckoview_example.GeckoViewActivity"
+ elif app == "org.mozilla.geckoview.test_runner":
+ activity_name = "org.mozilla.geckoview.test_runner.TestRunnerActivity"
+ elif "fennec" in app or "firefox" in app:
+ activity_name = "org.mozilla.gecko.BrowserApp"
+ else:
+ raise RuntimeError("Application not recognized: {}".format(app))
+
+ # If we want to debug an existing process, we implicitly do not want
+ # to kill it and pave over its installation with a new one.
+ if debug and use_existing_process:
+ no_install = True
+
+ # `verify_android_device` respects `DEVICE_SERIAL` if it is set and sets it otherwise.
+ verify_android_device(
+ command_context,
+ app=app,
+ aab=aab,
+ debugger=debug,
+ install=InstallIntent.NO if no_install else InstallIntent.YES,
+ )
+ device_serial = os.environ.get("DEVICE_SERIAL")
+ if not device_serial:
+ print("No ADB devices connected.")
+ return 1
+
+ device = _get_device(command_context.substs, device_serial=device_serial)
+
+ if debug:
+ # This will terminate any existing processes, so we skip it when we
+ # want to attach to an existing one.
+ if not use_existing_process:
+ command_context.log(
+ logging.INFO,
+ "run",
+ {"app": app},
+ "Setting {app} as the device debug app",
+ )
+ device.shell("am set-debug-app -w --persistent %s" % app)
+ else:
+ # Make sure that the app doesn't block waiting for jdb
+ device.shell("am clear-debug-app")
+
+ if not debug or not use_existing_process:
+ args = []
+ if profile:
+ if os.path.isdir(profile):
+ host_profile = profile
+ # Always /data/local/tmp, rather than `device.test_root`, because
+ # GeckoView only takes its configuration file from /data/local/tmp,
+ # and we want to follow suit.
+ target_profile = "/data/local/tmp/{}-profile".format(app)
+ device.rm(target_profile, recursive=True, force=True)
+ device.push(host_profile, target_profile)
+ command_context.log(
+ logging.INFO,
+ "run",
+ {
+ "host_profile": host_profile,
+ "target_profile": target_profile,
+ },
+ 'Pushed profile from host "{host_profile}" to '
+ 'target "{target_profile}"',
+ )
+ else:
+ target_profile = profile
+ command_context.log(
+ logging.INFO,
+ "run",
+ {"target_profile": target_profile},
+ 'Using profile from target "{target_profile}"',
+ )
+
+ args = ["--profile", shlex_quote(target_profile)]
+
+ # FIXME: When android switches to using Fission by default,
+ # MOZ_FORCE_DISABLE_FISSION will need to be configured correctly.
+ if enable_fission:
+ env.append("MOZ_FORCE_ENABLE_FISSION=1")
+
+ extras = {}
+ for i, e in enumerate(env):
+ extras["env{}".format(i)] = e
+ if args:
+ extras["args"] = " ".join(args)
+
+ if env or args:
+ restart = True
+
+ if restart:
+ fail_if_running = False
+ command_context.log(
+ logging.INFO,
+ "run",
+ {"app": app},
+ "Stopping {app} to ensure clean restart.",
+ )
+ device.stop_application(app)
+
+ # We'd prefer to log the actual `am start ...` command, but it's not trivial
+ # to wire the device's logger to mach's logger.
+ command_context.log(
+ logging.INFO,
+ "run",
+ {"app": app, "activity_name": activity_name},
+ "Starting {app}/{activity_name}.",
+ )
+
+ device.launch_application(
+ app_name=app,
+ activity_name=activity_name,
+ intent=intent,
+ extras=extras,
+ url=url,
+ wait=not no_wait,
+ fail_if_running=fail_if_running,
+ )
+
+ if not debug:
+ return 0
+
+ from mozrunner.devices.android_device import run_lldb_server
+
+ socket_file = run_lldb_server(app, command_context.substs, device_serial)
+ if not socket_file:
+ command_context.log(
+ logging.ERROR,
+ "run",
+ {"msg": "Failed to obtain a socket file!"},
+ "{msg}",
+ )
+ return 1
+
+ # Give lldb-server a chance to start
+ command_context.log(
+ logging.INFO,
+ "run",
+ {"msg": "Pausing to ensure lldb-server has started..."},
+ "{msg}",
+ )
+ time.sleep(1)
+
+ if use_existing_process:
+
+ def _is_geckoview_process(proc_name, pkg_name):
+ if not proc_name.startswith(pkg_name):
+ # Definitely not our package
+ return False
+ if len(proc_name) == len(pkg_name):
+ # Parent process from our package
+ return True
+ if proc_name[len(pkg_name)] == ":":
+ # Child process from our package
+ return True
+ # Process name is a prefix of our package name
+ return False
+
+ # If we're going to attach to an existing process, we need to know
+ # who we're attaching to. Obtain a list of all processes associated
+ # with our desired app.
+ proc_list = [
+ proc[:-1]
+ for proc in device.get_process_list()
+ if _is_geckoview_process(proc[1], app)
+ ]
+
+ if not proc_list:
+ command_context.log(
+ logging.ERROR,
+ "run",
+ {"app": app},
+ "No existing {app} processes found",
+ )
+ return 1
+ elif len(proc_list) == 1:
+ pid = proc_list[0][0]
+ else:
+ # Prompt the user to determine which process we should use
+ entries = [
+ "%2d: %6d %s" % (n, p[0], p[1])
+ for n, p in enumerate(proc_list, start=1)
+ ]
+ prompt = "\n".join(["\nPlease select a process:\n"] + entries) + "\n\n"
+ valid_range = range(1, len(proc_list) + 1)
+
+ while True:
+ response = int(input(prompt).strip())
+ if response in valid_range:
+ break
+ command_context.log(
+ logging.ERROR, "run", {"msg": "Invalid response"}, "{msg}"
+ )
+ pid = proc_list[response - 1][0]
+ else:
+ # We're not using an existing process, so there should only be our
+ # parent process at this time.
+ pids = device.pidof(app_name=app)
+ if len(pids) != 1:
+ command_context.log(
+ logging.ERROR,
+ "run",
+ {"msg": "Not sure which pid to attach to!"},
+ "{msg}",
+ )
+ return 1
+ pid = pids[0]
+
+ command_context.log(
+ logging.INFO, "run", {"pid": str(pid)}, "Debuggee pid set to {pid}..."
+ )
+
+ lldb_connect_url = "unix-abstract-connect://" + socket_file
+ local_jdb_port = device.forward("tcp:0", "jdwp:%d" % pid)
+
+ if no_attach:
+ command_context.log(
+ logging.INFO,
+ "run",
+ {"pid": str(pid), "url": lldb_connect_url},
+ "To debug native code, connect lldb to {url} and attach to pid {pid}",
+ )
+ command_context.log(
+ logging.INFO,
+ "run",
+ {"port": str(local_jdb_port)},
+ "To debug Java code, connect jdb using tcp to localhost:{port}",
+ )
+ return 0
+
+ # Beyond this point we want to be able to automatically clean up after ourselves,
+ # so we enter the following try block.
+ try:
+ command_context.log(
+ logging.INFO, "run", {"msg": "Starting debugger..."}, "{msg}"
+ )
+
+ if not use_existing_process:
+ # The app is waiting for jdb to attach and will not continue running
+ # until we do so.
+ def _jdb_ping(local_jdb_port):
+ jdb_process = subprocess.Popen(
+ ["jdb", "-attach", "localhost:%d" % local_jdb_port],
+ stdin=subprocess.PIPE,
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.DEVNULL,
+ encoding="utf-8",
+ )
+ # Wait a bit to provide enough time for jdb and lldb to connect
+ # to the debuggee
+ time.sleep(5)
+ # NOTE: jdb cannot detach while the debuggee is frozen in lldb,
+ # so its process might not necessarily exit immediately once the
+ # quit command has been issued.
+ jdb_process.communicate(input="quit\n")
+
+ # We run this in the background while lldb attaches in the foreground
+ from threading import Thread
+
+ jdb_thread = Thread(target=_jdb_ping, args=[local_jdb_port])
+ jdb_thread.start()
+
+ LLDBINIT = """
+settings set target.inline-breakpoint-strategy always
+settings append target.exec-search-paths {obj_xul}
+settings append target.exec-search-paths {obj_mozglue}
+settings append target.exec-search-paths {obj_nss}
+platform select remote-android
+platform connect {connect_url}
+process attach {continue_flag}-p {pid!s}
+""".lstrip()
+
+ obj_xul = os.path.join(command_context.topobjdir, "toolkit", "library", "build")
+ obj_mozglue = os.path.join(command_context.topobjdir, "mozglue", "build")
+ obj_nss = os.path.join(command_context.topobjdir, "security")
+
+ if use_existing_process:
+ continue_flag = ""
+ else:
+ # Tell lldb to continue after attaching; instead we'll break at
+ # the initial SEGVHandler, similarly to how things work when we
+ # attach using Android Studio. Doing this gives Android a chance
+ # to dismiss the "Waiting for Debugger" dialog.
+ continue_flag = "-c "
+
+ try:
+ # Write out our lldb startup commands to a temp file. We'll pass its
+ # name to lldb on its command line.
+ with tempfile.NamedTemporaryFile(
+ mode="wt", encoding="utf-8", newline="\n", delete=False
+ ) as tmp:
+ tmp_lldb_start_script = tmp.name
+ tmp.write(
+ LLDBINIT.format(
+ obj_xul=obj_xul,
+ obj_mozglue=obj_mozglue,
+ obj_nss=obj_nss,
+ connect_url=lldb_connect_url,
+ continue_flag=continue_flag,
+ pid=pid,
+ )
+ )
+
+ our_debugger_args = "-s %s" % tmp_lldb_start_script
+ if debugger_args:
+ full_debugger_args = " ".join([debugger_args, our_debugger_args])
+ else:
+ full_debugger_args = our_debugger_args
+
+ args = _prepend_debugger_args([], debugger, full_debugger_args)
+ if not args:
+ return 1
+
+ return command_context.run_process(
+ args=args, ensure_exit_code=False, pass_thru=True
+ )
+ finally:
+ os.remove(tmp_lldb_start_script)
+ finally:
+ device.remove_forwards("tcp:%d" % local_jdb_port)
+ device.shell("pkill -f lldb-server", enable_run_as=True)
+ if not use_existing_process:
+ device.shell("am clear-debug-app")
+
+
def _run_jsshell(command_context, params, debug, debugger, debugger_args):
    """Launch the standalone JS shell, optionally under a debugger.

    :param params: Extra command-line arguments for the shell.
    :param debug: Truthy to run under a debugger.
    :param debugger: Debugger name, or empty to auto-detect.
    :param debugger_args: Extra arguments for the debugger itself.
    :returns: The shell's exit status, or 1 if the binary or a usable
        debugger could not be found.
    """
    try:
        binpath = command_context.get_binary_path("app")
    except BinaryNotFoundException as e:
        command_context.log(logging.ERROR, "run", {"error": str(e)}, "ERROR: {error}")
        command_context.log(logging.INFO, "run", {"help": e.help()}, "{help}")
        return 1

    args = [binpath] + list(params or [])

    extra_env = {"RUST_BACKTRACE": "full"}

    if debug or debugger or debugger_args:
        # Mach's decorated terminal output confuses Emacs' compile buffers.
        if "INSIDE_EMACS" in os.environ:
            command_context.log_manager.terminal_handler.setLevel(logging.WARNING)

        import mozdebug

        if not debugger:
            # No debugger name was provided. Look for the default one on
            # the current OS.
            debugger = mozdebug.get_default_debugger_name(
                mozdebug.DebuggerSearch.KeepLooking
            )

        debuggerInfo = None
        if debugger:
            debuggerInfo = mozdebug.get_debugger_info(debugger, debugger_args)

        if not debuggerInfo:
            print("Could not find a suitable debugger in your PATH.")
            return 1

        # Launch the debugger with the shell command line appended.
        args = [debuggerInfo.path] + debuggerInfo.args + args

    return command_context.run_process(
        args=args, ensure_exit_code=False, pass_thru=True, append_env=extra_env
    )
+
+
def _run_desktop(
    command_context,
    params,
    packaged,
    app,
    remote,
    background,
    noprofile,
    disable_e10s,
    enable_crash_reporter,
    disable_fission,
    setpref,
    temp_profile,
    macos_open,
    debug,
    debugger,
    debugger_args,
    dmd,
    mode,
    stacks,
    show_dump_stats,
):
    """Run a desktop Firefox build, optionally under a debugger and/or DMD.

    Builds the browser command line (profile, env vars, debugger prefix,
    DMD options) and hands it to ``run_process``.

    :returns: The browser's exit status, or 1 on configuration errors
        (missing binary, bad --setpref usage, unusable debugger, ...).
    """
    from mozprofile import Preferences, Profile

    try:
        if packaged:
            binpath = command_context.get_binary_path(where="staged-package")
        else:
            binpath = app or command_context.get_binary_path("app")
    except BinaryNotFoundException as e:
        command_context.log(logging.ERROR, "run", {"error": str(e)}, "ERROR: {error}")
        if packaged:
            command_context.log(
                logging.INFO,
                "run",
                {
                    "help": "It looks like your build isn't packaged. "
                    "You can run |./mach package| to package it."
                },
                "{help}",
            )
        else:
            command_context.log(logging.INFO, "run", {"help": e.help()}, "{help}")
        return 1

    args = []
    if macos_open:
        # `open` cannot hand the process to a debugger.
        if debug:
            print(
                "The browser can not be launched in the debugger "
                "when using the macOS open command."
            )
            return 1
        try:
            # Recover the .app bundle path from the binary path.
            m = re.search(r"^.+\.app", binpath)
            apppath = m.group(0)
            args = ["open", apppath, "--args"]
        except Exception as e:
            print(
                "Couldn't get the .app path from the binary path. "
                "The macOS open option can only be used on macOS"
            )
            print(e)
            return 1
    else:
        args = [binpath]

    if params:
        args.extend(params)

    if not remote:
        args.append("-no-remote")

    if not background and sys.platform == "darwin":
        args.append("-foreground")

    if (
        sys.platform.startswith("win")
        and "MOZ_LAUNCHER_PROCESS" in command_context.defines
    ):
        args.append("-wait-for-browser")

    no_profile_option_given = all(
        p not in params for p in ["-profile", "--profile", "-P"]
    )
    no_backgroundtask_mode_option_given = all(
        p not in params for p in ["-backgroundtask", "--backgroundtask"]
    )
    if (
        no_profile_option_given
        and no_backgroundtask_mode_option_given
        and not noprofile
    ):
        # Build a development profile with quality-of-life prefs, user
        # settings from ~/.mozbuild, and any --setpref overrides.
        prefs = {
            "browser.aboutConfig.showWarning": False,
            "browser.shell.checkDefaultBrowser": False,
            "general.warnOnAboutConfig": False,
        }
        prefs.update(command_context._mach_context.settings.runprefs)
        prefs.update([p.split("=", 1) for p in setpref])
        for pref in prefs:
            prefs[pref] = Preferences.cast(prefs[pref])

        tmpdir = os.path.join(command_context.topobjdir, "tmp")
        if not os.path.exists(tmpdir):
            os.makedirs(tmpdir)

        if temp_profile:
            path = tempfile.mkdtemp(dir=tmpdir, prefix="profile-")
        else:
            path = os.path.join(tmpdir, "profile-default")

        profile = Profile(path, preferences=prefs)
        args.append("-profile")
        args.append(profile.profile)

    if not no_profile_option_given and setpref:
        print("setpref is only supported if a profile is not specified")
        return 1

    some_debugging_option = debug or debugger or debugger_args

    # By default, because Firefox is a GUI app, on Windows it will not
    # 'create' a console to which stdout/stderr is printed. This means
    # printf/dump debugging is invisible. We default to adding the
    # -attach-console argument to fix this. We avoid this if we're launched
    # under a debugger (which can do its own picking up of stdout/stderr).
    # We also check for both the -console and -attach-console flags:
    # -console causes Firefox to create a separate window;
    # -attach-console just ends us up with output that gets relayed via mach.
    # We shouldn't override the user using -console. For more info, see
    # https://bugzilla.mozilla.org/show_bug.cgi?id=1257155
    if (
        sys.platform.startswith("win")
        and not some_debugging_option
        and "-console" not in args
        and "--console" not in args
        and "-attach-console" not in args
        and "--attach-console" not in args
    ):
        args.append("-attach-console")

    extra_env = {
        "MOZ_DEVELOPER_REPO_DIR": command_context.topsrcdir,
        "MOZ_DEVELOPER_OBJ_DIR": command_context.topobjdir,
        "RUST_BACKTRACE": "full",
    }

    if not enable_crash_reporter:
        extra_env["MOZ_CRASHREPORTER_DISABLE"] = "1"
    else:
        extra_env["MOZ_CRASHREPORTER"] = "1"

    if disable_e10s:
        version_file = os.path.join(
            command_context.topsrcdir, "browser", "config", "version.txt"
        )
        # Fixed: the original opened this file without ever closing it,
        # leaking the handle; use a context manager instead.
        with open(version_file, "r") as f:
            extra_env["MOZ_FORCE_DISABLE_E10S"] = f.read().strip()

    if disable_fission:
        extra_env["MOZ_FORCE_DISABLE_FISSION"] = "1"

    if some_debugging_option:
        if "INSIDE_EMACS" in os.environ:
            command_context.log_manager.terminal_handler.setLevel(logging.WARNING)

        import mozdebug

        if not debugger:
            # No debugger name was provided. Look for the default ones on
            # current OS.
            debugger = mozdebug.get_default_debugger_name(
                mozdebug.DebuggerSearch.KeepLooking
            )

        if debugger:
            debuggerInfo = mozdebug.get_debugger_info(debugger, debugger_args)

        if not debugger or not debuggerInfo:
            print("Could not find a suitable debugger in your PATH.")
            return 1

        # Parameters come from the CLI. We need to convert them before
        # their use.
        if debugger_args:
            from mozbuild import shellutil

            try:
                debugger_args = shellutil.split(debugger_args)
            except shellutil.MetaCharacterException as e:
                print(
                    "The --debugger-args you passed require a real shell to parse them."
                )
                print("(We can't handle the %r character.)" % e.char)
                return 1

        # Prepend the debugger args.
        args = [debuggerInfo.path] + debuggerInfo.args + args

    if dmd:
        # Assemble the DMD configuration string; "1" enables defaults.
        dmd_params = []

        if mode:
            dmd_params.append("--mode=" + mode)
        if stacks:
            dmd_params.append("--stacks=" + stacks)
        if show_dump_stats:
            dmd_params.append("--show-dump-stats=yes")

        if dmd_params:
            extra_env["DMD"] = " ".join(dmd_params)
        else:
            extra_env["DMD"] = "1"

    return command_context.run_process(
        args=args, ensure_exit_code=False, pass_thru=True, append_env=extra_env
    )
+
+
@Command(
    "buildsymbols",
    category="post-build",
    description="Produce a package of Breakpad-format symbols.",
)
def buildsymbols(command_context):
    """Produce a package of debug symbols suitable for use with Breakpad."""
    # Everything is delegated to the top-level `buildsymbols` make target.
    make_kwargs = dict(directory=".", target="buildsymbols", ensure_exit_code=False)
    return command_context._run_make(**make_kwargs)
+
+
@Command(
    "environment",
    category="build-dev",
    description="Show info about the mach and build environment.",
)
@CommandArgument(
    "--format",
    default="pretty",
    choices=["pretty", "json"],
    help="Print data in the given format.",
)
@CommandArgument("--output", "-o", type=str, help="Output to the given file.")
@CommandArgument("--verbose", "-v", action="store_true", help="Print verbose output.")
def environment(command_context, format, output=None, verbose=False):
    """Dump mach/build environment information, either prettily or as JSON."""
    formatters = {
        "pretty": _environment_pretty,
        "json": _environment_json,
    }
    func = formatters[format.replace(".", "_")]

    if not output:
        return func(command_context, sys.stdout, verbose)

    # We want to preserve mtimes if the output file already exists
    # and the content hasn't changed.
    from mozbuild.util import FileAvoidWrite

    with FileAvoidWrite(output) as out:
        return func(command_context, out, verbose)
+
+
def _environment_pretty(command_context, out, verbose):
    """Write a human-readable summary of the environment to *out*."""
    state_dir = command_context._mach_context.state_dir

    # Each entry renders as "<label>:\n\t<value>".
    for label, value in (
        ("platform", platform.platform()),
        ("python version", sys.version),
        ("python prefix", sys.prefix),
        ("mach cwd", command_context._mach_context.cwd),
        ("os cwd", os.getcwd()),
        ("mach directory", command_context._mach_context.topdir),
        ("state directory", state_dir),
    ):
        print("%s:\n\t%s" % (label, value), file=out)

    print("object directory:\n\t%s" % command_context.topobjdir, file=out)

    mozconfig = command_context.mozconfig
    if mozconfig["path"]:
        print("mozconfig path:\n\t%s" % mozconfig["path"], file=out)
        if mozconfig["configure_args"]:
            print("mozconfig configure args:", file=out)
            for arg in mozconfig["configure_args"]:
                print("\t%s" % arg, file=out)

        if mozconfig["make_extra"]:
            print("mozconfig extra make args:", file=out)
            for arg in mozconfig["make_extra"]:
                print("\t%s" % arg, file=out)

        if mozconfig["make_flags"]:
            print("mozconfig make flags:", file=out)
            for arg in mozconfig["make_flags"]:
                print("\t%s" % arg, file=out)

    # The config environment only exists after configure has run.
    config = None
    try:
        config = command_context.config_environment
    except Exception:
        pass

    if config:
        print("config topsrcdir:\n\t%s" % config.topsrcdir, file=out)
        print("config topobjdir:\n\t%s" % config.topobjdir, file=out)

        if verbose:
            print("config substitutions:", file=out)
            for k in sorted(config.substs):
                print("\t%s: %s" % (k, config.substs[k]), file=out)

            print("config defines:", file=out)
            for k in sorted(config.defines):
                print("\t%s" % k, file=out)
+
+
def _environment_json(command_context, out, verbose):
    """Write a JSON representation of the environment to *out*."""
    import json

    class EnvironmentEncoder(json.JSONEncoder):
        # Teach the encoder to serialize MozbuildObject instances and sets,
        # which the stock encoder cannot handle.
        def default(self, obj):
            if isinstance(obj, set):
                return list(obj)
            if isinstance(obj, MozbuildObject):
                result = {
                    "topsrcdir": obj.topsrcdir,
                    "topobjdir": obj.topobjdir,
                    "mozconfig": obj.mozconfig,
                }
                if verbose:
                    result["substs"] = obj.substs
                    result["defines"] = obj.defines
                return result
            return json.JSONEncoder.default(self, obj)

    json.dump(command_context, out, cls=EnvironmentEncoder, sort_keys=True)
+
+
@Command(
    "repackage",
    category="misc",
    description="Repackage artifacts into different formats.",
)
def repackage(command_context):
    """Repackages artifacts into different formats.

    This is generally used after packages are signed by the signing
    scriptworkers in order to bundle things up into shippable formats, such as a
    .dmg on OSX or an installer exe on Windows.
    """
    # Invoked without a subcommand; just point at the available ones.
    usage = "Usage: ./mach repackage [dmg|pkg|installer|mar] [args...]"
    print(usage)
+
+
@SubCommand(
    "repackage",
    "deb",
    description="Repackage a tar file into a .deb for Linux",
    virtualenv_name="repackage-deb",
)
@CommandArgument(
    "--input", "-i", type=str, required=True, help="Input tarfile filename"
)
@CommandArgument("--output", "-o", type=str, required=True, help="Output .deb filename")
@CommandArgument("--arch", type=str, required=True, help="One of ['x86', 'x86_64']")
@CommandArgument(
    "--version",
    type=str,
    required=True,
    help="The Firefox version used to create the installer",
)
@CommandArgument(
    "--build-number",
    type=str,
    required=True,
    help="The release's build number",
)
@CommandArgument(
    "--templates",
    type=str,
    required=True,
    help="Location of the templates used to generate the debian/ directory files",
)
@CommandArgument(
    "--release-product",
    type=str,
    required=True,
    help="The product being shipped. Used to disambiguate beta/devedition etc.",
)
@CommandArgument(
    "--release-type",
    type=str,
    required=True,
    help="The release being shipped. Used to disambiguate nightly/try etc.",
)
def repackage_deb(
    command_context,
    input,
    output,
    arch,
    version,
    build_number,
    templates,
    release_product,
    release_type,
):
    """Repackage a Firefox tarball into a Linux .deb package."""
    if not os.path.exists(input):
        print("Input file does not exist: %s" % input)
        return 1

    from fluent.runtime.fallback import FluentLocalization, FluentResourceLoader

    # Alias the implementation so it does not shadow this command function.
    from mozbuild.repackaging.deb import repackage_deb as repackage_deb_impl

    # The debian/ control files are generated from in-tree templates.
    template_dir = os.path.join(command_context.topsrcdir, templates)

    repackage_deb_impl(
        command_context.log,
        input,
        output,
        template_dir,
        arch,
        version,
        build_number,
        release_product,
        release_type,
        FluentLocalization,
        FluentResourceLoader,
    )
+
+
@SubCommand(
    "repackage",
    "deb-l10n",
    description="Repackage a .xpi langpack file into a .deb for Linux",
)
@CommandArgument(
    "--input-xpi-file", type=str, required=True, help="Path to the XPI file"
)
@CommandArgument(
    "--input-tar-file",
    type=str,
    required=True,
    help="Path to tar archive that contains application.ini",
)
@CommandArgument(
    "--version",
    type=str,
    required=True,
    help="The Firefox version used to create the installer",
)
@CommandArgument(
    "--build-number",
    type=str,
    required=True,
    help="The release's build number",
)
@CommandArgument("--output", "-o", type=str, required=True, help="Output filename")
@CommandArgument(
    "--templates",
    type=str,
    required=True,
    help="Location of the templates used to generate the debian/ directory files",
)
def repackage_deb_l10n(
    command_context,
    input_xpi_file,
    input_tar_file,
    output,
    version,
    build_number,
    templates,
):
    """Repackage a langpack XPI plus application tarball into a .deb."""
    # Both inputs must exist before handing off to the packaging code.
    for path in (input_xpi_file, input_tar_file):
        if not os.path.exists(path):
            print("Input file does not exist: %s" % path)
            return 1

    from mozbuild.repackaging.deb import repackage_deb_l10n as repackage_impl

    template_dir = os.path.join(command_context.topsrcdir, templates)

    repackage_impl(
        input_xpi_file, input_tar_file, output, template_dir, version, build_number
    )
+
+
@SubCommand("repackage", "dmg", description="Repackage a tar file into a .dmg for OSX")
@CommandArgument("--input", "-i", type=str, required=True, help="Input filename")
@CommandArgument("--output", "-o", type=str, required=True, help="Output filename")
def repackage_dmg(command_context, input, output):
    """Repackage a tarball into a macOS .dmg disk image."""
    from mozbuild.repackaging.dmg import repackage_dmg as repackage_dmg_impl

    # Fail fast on a missing input rather than deep inside the packaging code.
    if not os.path.exists(input):
        print("Input file does not exist: %s" % input)
        return 1

    repackage_dmg_impl(input, output)
+
+
@SubCommand("repackage", "pkg", description="Repackage a tar file into a .pkg for OSX")
@CommandArgument("--input", "-i", type=str, required=True, help="Input filename")
@CommandArgument("--output", "-o", type=str, required=True, help="Output filename")
def repackage_pkg(command_context, input, output):
    """Repackage a tarball into a macOS .pkg installer."""
    from mozbuild.repackaging.pkg import repackage_pkg as repackage_pkg_impl

    # Fail fast on a missing input rather than deep inside the packaging code.
    if not os.path.exists(input):
        print("Input file does not exist: %s" % input)
        return 1

    repackage_pkg_impl(input, output)
+
+
@SubCommand(
    "repackage", "installer", description="Repackage into a Windows installer exe"
)
@CommandArgument(
    "--tag",
    type=str,
    required=True,
    help="The .tag file used to build the installer",
)
@CommandArgument(
    "--setupexe",
    type=str,
    required=True,
    help="setup.exe file inside the installer",
)
@CommandArgument(
    "--package",
    type=str,
    required=False,
    help="Optional package .zip for building a full installer",
)
@CommandArgument("--output", "-o", type=str, required=True, help="Output filename")
@CommandArgument(
    "--package-name",
    type=str,
    required=False,
    help="Name of the package being rebuilt",
)
@CommandArgument(
    "--sfx-stub", type=str, required=True, help="Path to the self-extraction stub."
)
@CommandArgument(
    "--use-upx",
    required=False,
    action="store_true",
    help="Run UPX on the self-extraction stub.",
)
def repackage_installer(
    command_context,
    tag,
    setupexe,
    package,
    output,
    package_name,
    sfx_stub,
    use_upx,
):
    """Build a Windows installer .exe from a (signed) package."""
    # Thin CLI wrapper: all of the work happens in mozbuild.repackaging.
    from mozbuild.repackaging.installer import repackage_installer as installer_impl

    installer_impl(
        topsrcdir=command_context.topsrcdir,
        tag=tag,
        setupexe=setupexe,
        package=package,
        output=output,
        package_name=package_name,
        sfx_stub=sfx_stub,
        use_upx=use_upx,
    )
+
+
@SubCommand("repackage", "msi", description="Repackage into a MSI")
@CommandArgument(
    "--wsx",
    type=str,
    required=True,
    help="The wsx file used to build the installer",
)
@CommandArgument(
    "--version",
    type=str,
    required=True,
    help="The Firefox version used to create the installer",
)
@CommandArgument(
    "--locale", type=str, required=True, help="The locale of the installer"
)
@CommandArgument(
    "--arch", type=str, required=True, help="The architecture you are building."
)
@CommandArgument("--setupexe", type=str, required=True, help="setup.exe installer")
@CommandArgument("--candle", type=str, required=False, help="location of candle binary")
@CommandArgument("--light", type=str, required=False, help="location of light binary")
@CommandArgument("--output", "-o", type=str, required=True, help="Output filename")
def repackage_msi(
    command_context,
    wsx,
    version,
    locale,
    arch,
    setupexe,
    candle,
    light,
    output,
):
    """Build a Windows MSI from a setup.exe using the WiX toolchain."""
    # Thin CLI wrapper: all of the work happens in mozbuild.repackaging.
    from mozbuild.repackaging.msi import repackage_msi as msi_impl

    msi_impl(
        topsrcdir=command_context.topsrcdir,
        wsx=wsx,
        version=version,
        locale=locale,
        arch=arch,
        setupexe=setupexe,
        candle=candle,
        light=light,
        output=output,
    )
+
+
@SubCommand("repackage", "msix", description="Repackage into an MSIX")
@CommandArgument(
    "--input",
    type=str,
    help="Package (ZIP) or directory to repackage. Defaults to $OBJDIR/dist/bin",
)
@CommandArgument(
    "--version",
    type=str,
    help="The Firefox version used to create the package "
    "(Default: generated from package 'application.ini')",
)
@CommandArgument(
    "--channel",
    type=str,
    choices=["official", "beta", "aurora", "nightly", "unofficial"],
    help="Release channel.",
)
@CommandArgument(
    "--distribution-dir",
    metavar="DISTRIBUTION",
    nargs="*",
    dest="distribution_dirs",
    default=[],
    help="List of distribution directories to include.",
)
@CommandArgument(
    "--arch",
    type=str,
    choices=["x86", "x86_64", "aarch64"],
    help="The architecture you are building.",
)
@CommandArgument(
    "--vendor",
    type=str,
    default="Mozilla",
    required=False,
    help="The vendor to use in the Package/Identity/Name string to use in the App Manifest."
    + " Defaults to 'Mozilla'.",
)
@CommandArgument(
    "--identity-name",
    type=str,
    default=None,
    required=False,
    help="The Package/Identity/Name string to use in the App Manifest."
    + " Defaults to '<vendor>.Firefox', '<vendor>.FirefoxBeta', etc.",
)
@CommandArgument(
    "--publisher",
    type=str,
    # This default is baked into enough places under `browser/` that we need
    # not extract a constant.
    default="CN=Mozilla Corporation, OU=MSIX Packaging",
    required=False,
    help="The Package/Identity/Publisher string to use in the App Manifest."
    + " It must match the subject on the certificate used for signing.",
)
@CommandArgument(
    "--publisher-display-name",
    type=str,
    default="Mozilla Corporation",
    required=False,
    help="The Package/Properties/PublisherDisplayName string to use in the App Manifest. "
    + " Defaults to 'Mozilla Corporation'.",
)
@CommandArgument(
    "--makeappx",
    type=str,
    default=None,
    help="makeappx/makemsix binary name (required if you haven't run configure)",
)
@CommandArgument(
    "--verbose",
    default=False,
    action="store_true",
    help="Be verbose. (Default: false)",
)
@CommandArgument(
    "--output", "-o", type=str, help="Output filename (Default: auto-generated)"
)
@CommandArgument(
    "--sign",
    default=False,
    action="store_true",
    help="Sign repackaged MSIX with self-signed certificate for local testing. "
    "(Default: false)",
)
def repackage_msix(
    command_context,
    input,
    version=None,
    channel=None,
    distribution_dirs=None,
    arch=None,
    identity_name=None,
    vendor=None,
    publisher=None,
    publisher_display_name=None,
    verbose=False,
    output=None,
    makeappx=None,
    sign=False,
):
    """Repackage a ZIP package or objdir layout into a Windows MSIX.

    Missing --input/--channel/--arch values are inferred for local builds;
    returns 1 when they cannot be determined.
    """
    from mozbuild.repackaging.msix import repackage_msix

    # Fixed: the original signature used the mutable default
    # `distribution_dirs=[]`, which is shared across calls; normalize a
    # `None` sentinel instead (behavior for callers is unchanged).
    if distribution_dirs is None:
        distribution_dirs = []

    command_context._set_log_level(verbose)

    # Maps MOZ_UPDATE_CHANNEL values to MSIX channel names.
    firefox_to_msix_channel = {
        "release": "official",
        "beta": "beta",
        "aurora": "aurora",
        "nightly": "nightly",
    }

    if not input:
        if os.path.exists(command_context.bindir):
            input = command_context.bindir
        else:
            command_context.log(
                logging.ERROR,
                "repackage-msix-no-input",
                {},
                "No build found in objdir, please run ./mach build or pass --input",
            )
            return 1

    if not os.path.exists(input):
        command_context.log(
            logging.ERROR,
            "repackage-msix-invalid-input",
            {"input": input},
            "Input file or directory for msix repackaging does not exist: {input}",
        )
        return 1

    if not channel:
        # Only try to guess the channel when this is clearly a local build.
        if input.endswith("bin"):
            channel = firefox_to_msix_channel.get(
                command_context.defines.get("MOZ_UPDATE_CHANNEL"), "unofficial"
            )
        else:
            command_context.log(
                logging.ERROR,
                "repackage-msix-invalid-channel",
                {},
                "Could not determine channel, please set --channel",
            )
            return 1

    if not arch:
        # Only try to guess the arch when this is clearly a local build.
        if input.endswith("bin"):
            if command_context.substs["TARGET_CPU"] in ("i686", "x86_64", "aarch64"):
                arch = command_context.substs["TARGET_CPU"].replace("i686", "x86")

        if not arch:
            command_context.log(
                logging.ERROR,
                "repackage-msix-couldnt-detect-arch",
                {},
                "Could not automatically detect architecture for msix repackaging. "
                "Please pass --arch",
            )
            return 1

    output = repackage_msix(
        input,
        command_context.topsrcdir,
        channel=channel,
        arch=arch,
        displayname=identity_name,
        vendor=vendor,
        publisher=publisher,
        publisher_display_name=publisher_display_name,
        version=version,
        distribution_dirs=distribution_dirs,
        # Configure this run.
        force=True,
        verbose=verbose,
        log=command_context.log,
        output=output,
        makeappx=makeappx,
    )

    if sign:
        # Sign with a locally-generated self-signed certificate for testing.
        repackage_sign_msix(command_context, output, force=False, verbose=verbose)

    command_context.log(
        logging.INFO,
        "msix",
        {"output": output},
        "Wrote MSIX: {output}",
    )
+
+
@SubCommand("repackage", "sign-msix", description="Sign an MSIX for local testing")
@CommandArgument("--input", type=str, required=True, help="MSIX to sign.")
@CommandArgument(
    "--force",
    default=False,
    action="store_true",
    help="Force recreating self-signed certificate. (Default: false)",
)
@CommandArgument(
    "--verbose",
    default=False,
    action="store_true",
    help="Be verbose. (Default: false)",
)
def repackage_sign_msix(command_context, input, force=False, verbose=False):
    """Sign an MSIX with a self-signed certificate for local testing."""
    from mozbuild.repackaging.msix import sign_msix

    command_context._set_log_level(verbose)
    sign_msix(input, force=force, log=command_context.log, verbose=verbose)
    return 0
+
+
@SubCommand("repackage", "mar", description="Repackage into complete MAR file")
@CommandArgument("--input", "-i", type=str, required=True, help="Input filename")
@CommandArgument("--mar", type=str, required=True, help="Mar binary path")
@CommandArgument("--output", "-o", type=str, required=True, help="Output filename")
@CommandArgument(
    "--arch", type=str, required=True, help="The architecture you are building."
)
@CommandArgument("--mar-channel-id", type=str, help="Mar channel id")
def repackage_mar(command_context, input, mar, output, arch, mar_channel_id):
    """Repackage a package file into a complete MAR update file."""
    # Thin CLI wrapper: all of the work happens in mozbuild.repackaging.
    from mozbuild.repackaging.mar import repackage_mar as mar_impl

    mar_impl(
        command_context.topsrcdir,
        input,
        mar,
        output,
        arch=arch,
        mar_channel_id=mar_channel_id,
    )
+
+
@Command(
    "package-multi-locale",
    category="post-build",
    description="Package a multi-locale version of the built product "
    "for distribution as an APK, DMG, etc.",
)
@CommandArgument(
    "--locales",
    metavar="LOCALES",
    nargs="+",
    required=True,
    help="List of locales to package",
)
@CommandArgument(
    "--verbose", action="store_true", help="Log informative status messages."
)
def package_l10n(command_context, verbose=False, locales=None):
    """Package a multi-locale build for the configured MOZ_BUILD_APP.

    Builds per-locale chrome resources, then re-runs the platform's
    packaging steps with MOZ_CHROME_MULTILOCALE set.

    :returns: 0 on success, 1 when the build configuration cannot be
        localized (artifact builds).
    """
    # Fixed: the original signature used the mutable default `locales=[]`;
    # normalize a `None` sentinel instead (the CLI argument is required, so
    # callers are unaffected).
    if locales is None:
        locales = []

    if "RecursiveMake" not in command_context.substs["BUILD_BACKENDS"]:
        print(
            "Artifact builds do not support localization. "
            "If you know what you are doing, you can use:\n"
            "ac_add_options --disable-compile-environment\n"
            "export BUILD_BACKENDS=FasterMake,RecursiveMake\n"
            "in your mozconfig."
        )
        return 1

    # en-US is always packaged, so it is filtered out of the requested list.
    locales = sorted(locale for locale in locales if locale != "en-US")

    append_env = {
        # We are only (re-)packaging, we don't want to (re-)build
        # anything inside Gradle.
        "GRADLE_INVOKED_WITHIN_MACH_BUILD": "1",
        "MOZ_CHROME_MULTILOCALE": " ".join(locales),
    }

    command_context.log(
        logging.INFO,
        "package-multi-locale",
        {"locales": locales},
        "Processing chrome Gecko resources for locales {locales}",
    )
    command_context._run_make(
        directory=command_context.topobjdir,
        target=["chrome-{}".format(locale) for locale in locales],
        append_env=append_env,
        pass_thru=False,
        print_directory=False,
        ensure_exit_code=True,
    )

    if command_context.substs["MOZ_BUILD_APP"] == "mobile/android":
        command_context.log(
            logging.INFO,
            "package-multi-locale",
            {},
            "Invoking `mach android assemble-app`",
        )
        command_context.run_process(
            [
                mozpath.join(command_context.topsrcdir, "mach"),
                "android",
                "assemble-app",
            ],
            append_env=append_env,
            pass_thru=True,
            ensure_exit_code=True,
            cwd=mozpath.join(command_context.topsrcdir),
        )

    if command_context.substs["MOZ_BUILD_APP"] == "browser":
        command_context.log(
            logging.INFO, "package-multi-locale", {}, "Repackaging browser"
        )
        command_context._run_make(
            directory=mozpath.join(command_context.topobjdir, "browser", "app"),
            target=["tools"],
            append_env=append_env,
            pass_thru=True,
            ensure_exit_code=True,
        )

    command_context.log(
        logging.INFO,
        "package-multi-locale",
        {},
        "Invoking multi-locale `mach package`",
    )
    target = ["package"]
    if command_context.substs["MOZ_BUILD_APP"] == "mobile/android":
        target.append("AB_CD=multi")

    command_context._run_make(
        directory=command_context.topobjdir,
        target=target,
        append_env=append_env,
        pass_thru=True,
        ensure_exit_code=True,
    )

    if command_context.substs["MOZ_BUILD_APP"] == "mobile/android":
        command_context.log(
            logging.INFO,
            "package-multi-locale",
            {},
            "Invoking `mach android archive-geckoview`",
        )
        command_context.run_process(
            [
                mozpath.join(command_context.topsrcdir, "mach"),
                "android",
                "archive-geckoview",
            ],
            append_env=append_env,
            pass_thru=True,
            ensure_exit_code=True,
            cwd=mozpath.join(command_context.topsrcdir),
        )

        # This is tricky: most Android build commands will regenerate the
        # omnijar, producing a `res/multilocale.txt` that does not contain the
        # set of locales packaged by this command. To avoid regenerating, we
        # set a special environment variable.
        print(
            "Execute `env MOZ_CHROME_MULTILOCALE='{}' ".format(
                append_env["MOZ_CHROME_MULTILOCALE"]
            )
            + "mach android install-geckoview_example` "
            + "to install the multi-locale geckoview_example and test APKs."
        )

    return 0
+
+
def _prepend_debugger_args(args, debugger, debugger_args):
    """
    Given an array with program arguments, prepend arguments to run it under a
    debugger.

    :param args: The executable and arguments used to run the process normally.
    :param debugger: The debugger to use, or empty to use the default debugger.
    :param debugger_args: Any additional parameters to pass to the debugger.
    :returns: The combined argument list, or ``None`` on failure.
    """
    import mozdebug

    if not debugger:
        # No debugger name was provided. Look for the default one on the
        # current OS.
        debugger = mozdebug.get_default_debugger_name(
            mozdebug.DebuggerSearch.KeepLooking
        )

    debuggerInfo = None
    if debugger:
        debuggerInfo = mozdebug.get_debugger_info(debugger, debugger_args)

    if not debuggerInfo:
        print("Could not find a suitable debugger in your PATH.")
        return None

    # Parameters come from the CLI; validate that a plain split can handle
    # them before relying on them.
    if debugger_args:
        from mozbuild import shellutil

        try:
            debugger_args = shellutil.split(debugger_args)
        except shellutil.MetaCharacterException as e:
            print("The --debugger_args you passed require a real shell to parse them.")
            print("(We can't handle the %r character.)" % e.char)
            return None

    # Launch the debugger with the original command line appended.
    return [debuggerInfo.path] + debuggerInfo.args + args
diff --git a/python/mozbuild/mozbuild/makeutil.py b/python/mozbuild/mozbuild/makeutil.py
new file mode 100644
index 0000000000..76691c5fa1
--- /dev/null
+++ b/python/mozbuild/mozbuild/makeutil.py
@@ -0,0 +1,209 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import re
+from collections.abc import Iterable
+
+import six
+
+
class Makefile(object):
    """Provides an interface for writing simple makefiles.

    Instances of this class are created, populated with rules and raw
    statements, then written out with :meth:`dump`.
    """

    def __init__(self):
        # Ordered mix of Rule instances and raw statement strings.
        self._statements = []

    def create_rule(self, targets=()):
        """
        Create a new rule in the makefile for the given targets.
        Returns the corresponding Rule instance.
        """
        targets = list(targets)
        for target in targets:
            # Python 3 only: plain str replaces the former six.text_type check.
            assert isinstance(target, str)
        rule = Rule(targets)
        self._statements.append(rule)
        return rule

    def add_statement(self, statement):
        """
        Add a raw statement in the makefile. Meant to be used for
        simple variable assignments.
        """
        assert isinstance(statement, str)
        self._statements.append(statement)

    def dump(self, fh, removal_guard=True):
        """
        Dump all the rules to the given file handle. Optionally (and by
        default), add guard rules for file removals (empty rules for other
        rules' dependencies, which keep make from erroring out when a
        dependency file disappears).
        """
        all_deps = set()
        all_targets = set()
        for statement in self._statements:
            if isinstance(statement, Rule):
                statement.dump(fh)
                all_deps.update(statement.dependencies())
                all_targets.update(statement.targets())
            else:
                fh.write("%s\n" % statement)
        if removal_guard:
            # One empty rule covering every dependency that is not itself
            # a target.
            guard = Rule(sorted(all_deps - all_targets))
            guard.dump(fh)
+
+
class _SimpleOrderedSet(object):
    """
    Simple insertion-ordered set, specialized for use in Rule below only.
    It doesn't expose a complete API, and normalizes path separators
    to "/" at insertion.
    """

    def __init__(self):
        self._ordered = []
        self._seen = set()

    def __bool__(self):
        return len(self._seen) > 0

    # Python 2 spelling kept for parity with the original definition.
    __nonzero__ = __bool__

    def __iter__(self):
        return iter(self._ordered)

    def __contains__(self, item):
        return item in self._seen

    def update(self, iterable):
        """Append each unseen item from *iterable*, normalizing os.sep."""
        for item in iterable:
            normalized = item.replace(os.sep, "/")
            if normalized not in self._seen:
                self._seen.add(normalized)
                self._ordered.append(normalized)
+
+
class Rule(object):
    """Class handling simple rules in the form:
    target1 target2 ... : dep1 dep2 ...
    command1 command2 ...
    """

    def __init__(self, targets=()):
        self._targets = _SimpleOrderedSet()
        self._dependencies = _SimpleOrderedSet()
        self._commands = []
        self.add_targets(targets)

    def add_targets(self, targets):
        """Add additional targets to the rule."""
        # A bare string is iterable but almost certainly a caller bug
        # (it would be split into characters), so reject it explicitly.
        # Python 3 only: str replaces the former six.string_types check.
        assert isinstance(targets, Iterable) and not isinstance(targets, str)
        targets = list(targets)
        for target in targets:
            assert isinstance(target, str)
        self._targets.update(targets)
        return self

    def add_dependencies(self, deps):
        """Add dependencies to the rule."""
        assert isinstance(deps, Iterable) and not isinstance(deps, str)
        deps = list(deps)
        for dep in deps:
            assert isinstance(dep, str)
        self._dependencies.update(deps)
        return self

    def add_commands(self, commands):
        """Add commands to the rule."""
        assert isinstance(commands, Iterable) and not isinstance(commands, str)
        commands = list(commands)
        for command in commands:
            assert isinstance(command, str)
        self._commands.extend(commands)
        return self

    def targets(self):
        """Return an iterator on the rule targets."""
        # Ensure the returned iterator is actually just that, an iterator.
        # Avoids caller fiddling with the set itself.
        return iter(self._targets)

    def dependencies(self):
        """Return an iterator on the rule dependencies.

        Dependencies that are also targets of this rule are filtered out.
        """
        return iter(d for d in self._dependencies if d not in self._targets)

    def commands(self):
        """Return an iterator on the rule commands."""
        return iter(self._commands)

    def dump(self, fh):
        """
        Dump the rule to the given file handle. Rules without targets
        produce no output.
        """
        if not self._targets:
            return
        fh.write("%s:" % " ".join(self._targets))
        if self._dependencies:
            fh.write(" %s" % " ".join(self.dependencies()))
        fh.write("\n")
        for cmd in self._commands:
            fh.write("\t%s\n" % cmd)
+
+
# Split on the first colon NOT followed by a slash/backslash, so Windows
# drive letters in paths (e.g. "c:/foo") are not mistaken for the
# target/dependency separator.
_depfilesplitter = re.compile(r":(?![\\/])")


def read_dep_makefile(fh):
    """
    Read the file handle containing a dep makefile (simple makefile only
    containing dependencies) and return an iterator of the corresponding
    Rules it contains. Ignores removal guard rules (rules with no
    dependencies).

    Raises if the file ends in the middle of a line continuation.
    """

    rule = ""
    # Iterate the handle lazily instead of materializing the whole file
    # with readlines().
    for line in fh:
        # Accept byte streams too; dep files are UTF-8 text.
        if isinstance(line, bytes):
            line = line.decode("utf-8")
        # Dep makefiles carry no commands, so no line should be a recipe.
        assert not line.startswith("\t")
        line = line.strip()
        if line.endswith("\\"):
            # Line continuation: accumulate and keep reading.
            rule += line[:-1]
            continue
        rule += line
        split_rule = _depfilesplitter.split(rule, 1)
        if len(split_rule) > 1 and split_rule[1].strip():
            yield Rule(split_rule[0].strip().split()).add_dependencies(
                split_rule[1].strip().split()
            )
        rule = ""

    if rule:
        raise Exception("Makefile finishes with a backslash. Expected more input.")
+
+
def write_dep_makefile(fh, target, deps):
    """
    Write a Makefile containing only target's dependencies to the file handle
    specified.
    """
    makefile = Makefile()
    makefile.create_rule(targets=[target]).add_dependencies(deps)
    makefile.dump(fh, removal_guard=True)
diff --git a/python/mozbuild/mozbuild/mozconfig.py b/python/mozbuild/mozbuild/mozconfig.py
new file mode 100644
index 0000000000..5cb5a5e859
--- /dev/null
+++ b/python/mozbuild/mozbuild/mozconfig.py
@@ -0,0 +1,403 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import re
+import subprocess
+import sys
+import traceback
+from pathlib import Path
+from textwrap import dedent
+
+import six
+from mozboot.mozconfig import find_mozconfig
+from mozpack import path as mozpath
+
# User-facing error shown when the shell evaluation of a mozconfig exits
# with a non-zero status (see MozconfigLoader.read_mozconfig).
MOZCONFIG_BAD_EXIT_CODE = """
Evaluation of your mozconfig exited with an error. This could be triggered
by a command inside your mozconfig failing. Please change your mozconfig
to not error and/or to catch errors in executed commands.
""".strip()

# User-facing error shown when the loader script's output cannot be parsed
# (see MozconfigLoader._parse_loader_output).
MOZCONFIG_BAD_OUTPUT = """
Evaluation of your mozconfig produced unexpected output. This could be
triggered by a command inside your mozconfig failing or producing some warnings
or error messages. Please change your mozconfig to not error and/or to catch
errors in executed commands.
""".strip()
+
+
class MozconfigLoadException(Exception):
    """Raised when a mozconfig could not be loaded properly.

    This typically indicates a malformed or misbehaving mozconfig file.

    Attributes:
        path -- path of the mozconfig that failed to load.
        output -- optional list of output lines captured while evaluating
            the mozconfig; appended to the exception message when present.
    """

    def __init__(self, path, message, output=None):
        self.path = path
        self.output = output

        message = (
            dedent(
                """
            Error loading mozconfig: {path}

            {message}
            """
            )
            .format(path=self.path, message=message)
            .lstrip()
        )

        if self.output:
            # Python 3 only: decode any bytes lines directly instead of
            # going through six.ensure_text.
            message += dedent(
                """
            mozconfig output:

            {output}
            """
            ).format(
                output="\n".join(
                    s.decode("utf-8") if isinstance(s, bytes) else s
                    for s in self.output
                )
            )

        # super() replaces the legacy Exception.__init__(self, ...) form.
        super().__init__(message)
+
+
class MozconfigLoader(object):
    """Handles loading and parsing of mozconfig files.

    A mozconfig is a shell script, so it is evaluated via the companion
    ``mozconfig_loader`` shell script and the delimited output of that
    script is parsed back into a static data structure.
    """

    # Matches make-style variable assignments ("FOO = bar", "FOO ?= bar",
    # "FOO := bar") in mk_add_options payloads.
    # NOTE(review): this pattern is not a raw string, so "\s" depends on
    # Python preserving unknown escape sequences; it should be r"""...""".
    RE_MAKE_VARIABLE = re.compile(
        """
        ^\s* # Leading whitespace
        (?P<var>[a-zA-Z_0-9]+) # Variable name
        \s* [?:]?= \s* # Assignment operator surrounded by optional
                       # spaces
        (?P<value>.*$)""",  # Everything else (likely the value)
        re.VERBOSE,
    )

    # Shell-internal variables that change between any two `set` dumps and
    # therefore must be ignored when diffing before/after state.
    IGNORE_SHELL_VARIABLES = {"_", "BASH_ARGV", "BASH_ARGV0", "BASH_ARGC"}

    # Environment variables whose values are recorded even when unmodified
    # (see the comment in diff_vars below).
    ENVIRONMENT_VARIABLES = {"CC", "CXX", "CFLAGS", "CXXFLAGS", "LDFLAGS", "MOZ_OBJDIR"}

    # Sentinel passed to read_mozconfig() to request mozconfig discovery.
    AUTODETECT = object()

    def __init__(self, topsrcdir):
        # Absolute path to the top of the source tree; used as the cwd for
        # mozconfig evaluation and for @TOPSRCDIR@ expansion.
        self.topsrcdir = topsrcdir

    @property
    def _loader_script(self):
        # Path of the mozconfig_loader shell script shipped alongside this
        # module.
        our_dir = os.path.abspath(os.path.dirname(__file__))

        return os.path.join(our_dir, "mozconfig_loader")

    def read_mozconfig(self, path=None):
        """Read the contents of a mozconfig into a data structure.

        This takes the path to a mozconfig to load. If the given path is
        AUTODETECT, will try to find a mozconfig from the environment using
        find_mozconfig().

        mozconfig files are shell scripts. So, we can't just parse them.
        Instead, we run the shell script in a wrapper which allows us to record
        state from execution. Thus, the output from a mozconfig is a friendly
        static data structure.

        Raises MozconfigLoadException when evaluation fails or produces
        unparsable output.
        """
        if path is self.AUTODETECT:
            path = find_mozconfig(self.topsrcdir)
        if isinstance(path, Path):
            path = str(path)

        result = {
            "path": path,
            "topobjdir": None,
            "configure_args": None,
            "make_flags": None,
            "make_extra": None,
            "env": None,
            "vars": None,
        }

        if path is None:
            # No mozconfig: the only configuration that can still apply is
            # MOZ_OBJDIR from the environment.
            if "MOZ_OBJDIR" in os.environ:
                result["topobjdir"] = os.environ["MOZ_OBJDIR"]
            return result

        path = mozpath.normsep(path)

        result["configure_args"] = []
        result["make_extra"] = []
        result["make_flags"] = []

        # Since mozconfig_loader is a shell script, running it "normally"
        # actually leads to two shell executions on Windows. Avoid this by
        # directly calling sh mozconfig_loader.
        shell = "sh"
        env = dict(os.environ)
        env["PYTHONIOENCODING"] = "utf-8"

        if "MOZILLABUILD" in os.environ:
            # Prefer the MozillaBuild-provided shell (msys2 if present,
            # otherwise the older msys layout) and put its bin dirs first
            # on PATH.
            mozillabuild = os.environ["MOZILLABUILD"]
            if (Path(mozillabuild) / "msys2").exists():
                shell = mozillabuild + "/msys2/usr/bin/sh"
            else:
                shell = mozillabuild + "/msys/bin/sh"
            prefer_mozillabuild_path = [
                os.path.dirname(shell),
                str(Path(mozillabuild) / "bin"),
                env["PATH"],
            ]
            env["PATH"] = os.pathsep.join(prefer_mozillabuild_path)
        if sys.platform == "win32":
            shell = shell + ".exe"

        # Arguments consumed by mozconfig_loader: topsrcdir, the mozconfig
        # path, and a python + script pair used to dump the environment.
        command = [
            mozpath.normsep(shell),
            mozpath.normsep(self._loader_script),
            mozpath.normsep(self.topsrcdir),
            mozpath.normsep(path),
            mozpath.normsep(sys.executable),
            mozpath.join(mozpath.dirname(self._loader_script), "action", "dump_env.py"),
        ]

        try:
            # We need to capture stderr because that's where the shell sends
            # errors if execution fails.
            output = six.ensure_text(
                subprocess.check_output(
                    command,
                    stderr=subprocess.STDOUT,
                    cwd=self.topsrcdir,
                    env=env,
                    universal_newlines=True,
                    encoding="utf-8",
                )
            )
        except subprocess.CalledProcessError as e:
            lines = e.output.splitlines()

            # Output before actual execution shouldn't be relevant.
            try:
                index = lines.index("------END_BEFORE_SOURCE")
                lines = lines[index + 1 :]
            except ValueError:
                pass

            raise MozconfigLoadException(path, MOZCONFIG_BAD_EXIT_CODE, lines)

        try:
            parsed = self._parse_loader_output(output)
        except AssertionError:
            # _parse_loader_output uses assertions to verify the
            # well-formedness of the shell output; when these fail, it
            # generally means there was a problem with the output, but we
            # include the assertion traceback just to be sure.
            print("Assertion failed in _parse_loader_output:")
            traceback.print_exc()
            raise MozconfigLoadException(
                path, MOZCONFIG_BAD_OUTPUT, output.splitlines()
            )

        def diff_vars(vars_before, vars_after):
            # Compute added/removed/modified/unmodified variables between
            # the pre- and post-source `set` dumps.
            set1 = set(vars_before.keys()) - self.IGNORE_SHELL_VARIABLES
            set2 = set(vars_after.keys()) - self.IGNORE_SHELL_VARIABLES
            added = set2 - set1
            removed = set1 - set2
            maybe_modified = set1 & set2
            changed = {"added": {}, "removed": {}, "modified": {}, "unmodified": {}}

            for key in added:
                changed["added"][key] = vars_after[key]

            for key in removed:
                changed["removed"][key] = vars_before[key]

            for key in maybe_modified:
                if vars_before[key] != vars_after[key]:
                    changed["modified"][key] = (vars_before[key], vars_after[key])
                elif key in self.ENVIRONMENT_VARIABLES:
                    # In order for irrelevant environment variable changes not
                    # to incur in re-running configure, only a set of
                    # environment variables are stored when they are
                    # unmodified. Otherwise, changes such as using a different
                    # terminal window, or even rebooting, would trigger
                    # reconfigures.
                    changed["unmodified"][key] = vars_after[key]

            return changed

        result["env"] = diff_vars(parsed["env_before"], parsed["env_after"])

        # Environment variables also appear as shell variables, but that's
        # uninteresting duplication of information. Filter them out.
        def filt(x, y):
            return {k: v for k, v in x.items() if k not in y}

        result["vars"] = diff_vars(
            filt(parsed["vars_before"], parsed["env_before"]),
            filt(parsed["vars_after"], parsed["env_after"]),
        )

        result["configure_args"] = [self._expand(o) for o in parsed["ac"]]

        if "MOZ_OBJDIR" in parsed["env_before"]:
            result["topobjdir"] = parsed["env_before"]["MOZ_OBJDIR"]

        mk = [self._expand(o) for o in parsed["mk"]]

        for o in mk:
            match = self.RE_MAKE_VARIABLE.match(o)

            if match is None:
                result["make_extra"].append(o)
                continue

            name, value = match.group("var"), match.group("value")

            if name == "MOZ_MAKE_FLAGS":
                result["make_flags"] = value.split()
                continue

            if name == "MOZ_OBJDIR":
                result["topobjdir"] = value
                if parsed["env_before"].get("MOZ_PROFILE_GENERATE") == "1":
                    # If MOZ_OBJDIR is specified in the mozconfig, we need to
                    # make sure that the '/instrumented' directory gets appended
                    # for the first build to avoid an objdir mismatch when
                    # running 'mach package' on Windows.
                    result["topobjdir"] = mozpath.join(
                        result["topobjdir"], "instrumented"
                    )
                continue

            result["make_extra"].append(o)

        return result

    def _parse_loader_output(self, output):
        """Parse the delimited output of the mozconfig_loader script.

        Returns a dict with ac/mk option lists and the shell/environment
        variable dumps taken before and after sourcing the mozconfig.
        Raises AssertionError on malformed output (handled by the caller).
        """
        mk_options = []
        ac_options = []
        before_source = {}
        after_source = {}
        env_before_source = {}
        env_after_source = {}

        # Parsing state: the lines accumulated for the current section,
        # the section name, and the name of a multi-line variable being
        # accumulated (if any).
        current = None
        current_type = None
        in_variable = None

        for line in output.splitlines():

            if not line:
                continue

            if line.startswith("------BEGIN_"):
                assert current_type is None
                assert current is None
                assert not in_variable
                current_type = line[len("------BEGIN_") :]
                current = []
                continue

            if line.startswith("------END_"):
                assert not in_variable
                section = line[len("------END_") :]
                assert current_type == section

                if current_type == "AC_OPTION":
                    ac_options.append("\n".join(current))
                elif current_type == "MK_OPTION":
                    mk_options.append("\n".join(current))

                current = None
                current_type = None
                continue

            assert current_type is not None

            vars_mapping = {
                "BEFORE_SOURCE": before_source,
                "AFTER_SOURCE": after_source,
                "ENV_BEFORE_SOURCE": env_before_source,
                "ENV_AFTER_SOURCE": env_after_source,
            }

            if current_type in vars_mapping:
                # mozconfigs are sourced using the Bourne shell (or at least
                # in Bourne shell mode). This means |set| simply lists
                # variables from the current shell (not functions). (Note that
                # if Bash is installed in /bin/sh it acts like regular Bourne
                # and doesn't print functions.) So, lines should have the
                # form:
                #
                #  key='value'
                #  key=value
                #
                # The only complication is multi-line variables. Those have the
                # form:
                #
                #  key='first
                #  second'

                # TODO Bug 818377 Properly handle multi-line variables of form:
                # $ foo="a='b'
                # c='d'"
                # $ set
                # foo='a='"'"'b'"'"'
                # c='"'"'d'"'"

                name = in_variable
                value = None
                if in_variable:
                    # Reached the end of a multi-line variable.
                    if line.endswith("'") and not line.endswith("\\'"):
                        current.append(line[:-1])
                        value = "\n".join(current)
                        in_variable = None
                    else:
                        current.append(line)
                        continue
                else:
                    equal_pos = line.find("=")

                    if equal_pos < 1:
                        # TODO log warning?
                        continue

                    name = line[0:equal_pos]
                    value = line[equal_pos + 1 :]

                    if len(value):
                        has_quote = value[0] == "'"

                        if has_quote:
                            value = value[1:]

                        # Lines with a quote not ending in a quote are multi-line.
                        if has_quote and not value.endswith("'"):
                            in_variable = name
                            current.append(value)
                            continue
                        else:
                            value = value[:-1] if has_quote else value

                assert name is not None

                vars_mapping[current_type][name] = value

                current = []

                continue

            current.append(line)

        return {
            "mk": mk_options,
            "ac": ac_options,
            "vars_before": before_source,
            "vars_after": after_source,
            "env_before": env_before_source,
            "env_after": env_after_source,
        }

    def _expand(self, s):
        """Expand the @TOPSRCDIR@ placeholder in loader output."""
        return s.replace("@TOPSRCDIR@", self.topsrcdir)
diff --git a/python/mozbuild/mozbuild/mozconfig_loader b/python/mozbuild/mozbuild/mozconfig_loader
new file mode 100755
index 0000000000..29355c69a2
--- /dev/null
+++ b/python/mozbuild/mozbuild/mozconfig_loader
@@ -0,0 +1,48 @@
#!/bin/sh
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

# This script provides an execution environment for mozconfig scripts.
# This script is not meant to be called by users. Instead, some
# higher-level driver invokes it and parses the machine-tailored output.
#
# Arguments: $1 topsrcdir, $2 mozconfig path, $3 python executable,
# $4 dump_env.py script.

set -e

ac_add_options() {
  for _mozconfig_opt; do
    echo "------BEGIN_AC_OPTION"
    # Quote the expansion so whitespace inside an option is emitted
    # verbatim instead of being collapsed by word splitting.
    echo "$_mozconfig_opt"
    echo "------END_AC_OPTION"
  done
}

mk_add_options() {
  for _mozconfig_opt; do
    echo "------BEGIN_MK_OPTION"
    echo "$_mozconfig_opt"
    echo "------END_MK_OPTION"
  done
}

# Dump the environment and shell variables before sourcing the mozconfig
# so the caller can diff them against the post-source state.
echo "------BEGIN_ENV_BEFORE_SOURCE"
"$3" "$4"
echo "------END_ENV_BEFORE_SOURCE"

echo "------BEGIN_BEFORE_SOURCE"
set
echo "------END_BEFORE_SOURCE"

# Expose topsrcdir to the mozconfig while it is sourced, then remove it
# so it does not pollute the AFTER_SOURCE variable dump.
topsrcdir="$1"

. "$2"

unset topsrcdir

echo "------BEGIN_AFTER_SOURCE"
set
echo "------END_AFTER_SOURCE"

echo "------BEGIN_ENV_AFTER_SOURCE"
"$3" "$4"
echo "------END_ENV_AFTER_SOURCE"
diff --git a/python/mozbuild/mozbuild/mozinfo.py b/python/mozbuild/mozbuild/mozinfo.py
new file mode 100644
index 0000000000..42edabe32d
--- /dev/null
+++ b/python/mozbuild/mozbuild/mozinfo.py
@@ -0,0 +1,163 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This module produces a JSON file that provides basic build info and
+# configuration metadata.
+
+import json
+import os
+import platform
+import re
+
+import six
+
+
def build_dict(config, env=os.environ):
    """
    Build a dict containing data about the build configuration.

    :param config: object exposing ``substs`` (a mapping of configure
        substitutions), ``topsrcdir`` and ``mozconfig`` attributes.
    :param env: accepted for API compatibility; not read by this function.
    :raises Exception: when a required substitution is missing.
    """
    substs = config.substs

    # Check that all required variables are present first.
    required = ["TARGET_CPU", "OS_TARGET"]
    missing = [r for r in required if r not in substs]
    if missing:
        # These come from config.substs, not the process environment, so
        # say so (the previous message misleadingly blamed the environment).
        raise Exception(
            "Missing required configuration substitutions: %s" % ", ".join(missing)
        )

    d = {}
    d["topsrcdir"] = config.topsrcdir

    if config.mozconfig:
        d["mozconfig"] = config.mozconfig

    # os: map the configure OS_TARGET to the short names used in tests.
    o = substs["OS_TARGET"]
    known_os = {"Linux": "linux", "WINNT": "win", "Darwin": "mac", "Android": "android"}
    if o in known_os:
        d["os"] = known_os[o]
    else:
        # Allow unknown values, just lowercase them.
        d["os"] = o.lower()

    # Widget toolkit, just pass the value directly through.
    d["toolkit"] = substs.get("MOZ_WIDGET_TOOLKIT")

    # Application name
    if "MOZ_APP_NAME" in substs:
        d["appname"] = substs["MOZ_APP_NAME"]

    # Build app name
    if "MOZ_BUILD_APP" in substs:
        d["buildapp"] = substs["MOZ_BUILD_APP"]

    # processor
    p = substs["TARGET_CPU"]
    # do some slight massaging for some values
    # TODO: retain specific values in case someone wants them?
    if p.startswith("arm"):
        p = "arm"
    elif re.match("i[3-9]86", p):
        p = "x86"
    d["processor"] = p
    # hardcoded list of 64-bit CPUs
    if p in ["x86_64", "ppc64", "aarch64"]:
        d["bits"] = 64
    # hardcoded list of known 32-bit CPUs
    elif p in ["x86", "arm", "ppc"]:
        d["bits"] = 32
    # other CPUs will wind up with unknown bits

    # Feature flags: each is True only when explicitly enabled in substs.
    d["debug"] = substs.get("MOZ_DEBUG") == "1"
    d["nightly_build"] = substs.get("NIGHTLY_BUILD") == "1"
    d["early_beta_or_earlier"] = substs.get("EARLY_BETA_OR_EARLIER") == "1"
    d["release_or_beta"] = substs.get("RELEASE_OR_BETA") == "1"
    d["devedition"] = substs.get("MOZ_DEV_EDITION") == "1"
    d["pgo"] = substs.get("MOZ_PGO") == "1"
    d["crashreporter"] = bool(substs.get("MOZ_CRASHREPORTER"))
    d["normandy"] = substs.get("MOZ_NORMANDY") == "1"
    d["datareporting"] = bool(substs.get("MOZ_DATA_REPORTING"))
    d["healthreport"] = substs.get("MOZ_SERVICES_HEALTHREPORT") == "1"
    d["sync"] = substs.get("MOZ_SERVICES_SYNC") == "1"
    # FIXME(emilio): We need to update a lot of WPT expectations before removing this.
    d["stylo"] = True
    d["asan"] = substs.get("MOZ_ASAN") == "1"
    d["tsan"] = substs.get("MOZ_TSAN") == "1"
    d["ubsan"] = substs.get("MOZ_UBSAN") == "1"
    d["telemetry"] = substs.get("MOZ_TELEMETRY_REPORTING") == "1"
    d["tests_enabled"] = substs.get("ENABLE_TESTS") == "1"
    d["bin_suffix"] = substs.get("BIN_SUFFIX", "")
    d["require_signing"] = substs.get("MOZ_REQUIRE_SIGNING") == "1"
    d["official"] = bool(substs.get("MOZILLA_OFFICIAL"))
    d["updater"] = substs.get("MOZ_UPDATER") == "1"
    d["artifact"] = substs.get("MOZ_ARTIFACT_BUILDS") == "1"
    d["ccov"] = substs.get("MOZ_CODE_COVERAGE") == "1"
    d["cc_type"] = substs.get("CC_TYPE")
    d["domstreams"] = substs.get("MOZ_DOM_STREAMS") == "1"
    d["isolated_process"] = (
        substs.get("MOZ_ANDROID_CONTENT_SERVICE_ISOLATED_PROCESS") == "1"
    )

    def guess_platform():
        # Map the collected os/bits/asan flags onto an automation-style
        # platform name (e.g. "linux64", "win32", "android-aarch64").
        if d["buildapp"] == "browser":
            p = d["os"]
            if p == "mac":
                p = "macosx64"
            elif d["bits"] == 64:
                p = "{}64".format(p)
            elif p in ("win",):
                p = "{}32".format(p)

            if d["asan"]:
                p = "{}-asan".format(p)

            return p

        if d["buildapp"] == "mobile/android":
            if d["processor"] == "x86":
                return "android-x86"
            if d["processor"] == "x86_64":
                return "android-x86_64"
            if d["processor"] == "aarch64":
                return "android-aarch64"
            return "android-arm"

    def guess_buildtype():
        if d["debug"]:
            return "debug"
        if d["pgo"]:
            return "pgo"
        return "opt"

    # if buildapp or bits are unknown, we don't have a configuration similar to
    # any in automation and the guesses are useless.
    if "buildapp" in d and (d["os"] == "mac" or "bits" in d):
        d["platform_guess"] = guess_platform()
        d["buildtype_guess"] = guess_buildtype()

    if (
        d.get("buildapp", "") == "mobile/android"
        and "MOZ_ANDROID_MIN_SDK_VERSION" in substs
    ):
        d["android_min_sdk"] = substs["MOZ_ANDROID_MIN_SDK_VERSION"]

    d["is_ubuntu"] = "Ubuntu" in platform.version()

    return d
+
+
def write_mozinfo(file, config, env=os.environ):
    """Write JSON data about the configuration specified in config and an
    environment variable dict to ``|file|``, which may be a filename or file-like
    object.
    See build_dict for information about what environment variables are used,
    and what keys are produced.
    """
    build_conf = build_dict(config, env)
    if isinstance(file, str):
        # Use a context manager so the handle is closed (and flushed) even
        # on error; the previous open() leaked the file object.
        with open(file, "wt") as fh:
            json.dump(build_conf, fh, sort_keys=True, indent=4)
    else:
        json.dump(build_conf, file, sort_keys=True, indent=4)
diff --git a/python/mozbuild/mozbuild/nodeutil.py b/python/mozbuild/mozbuild/nodeutil.py
new file mode 100644
index 0000000000..8ec724ab89
--- /dev/null
+++ b/python/mozbuild/mozbuild/nodeutil.py
@@ -0,0 +1,126 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import platform
+import subprocess
+from distutils.version import StrictVersion
+
+from mozboot.util import get_tools_dir
+from mozfile import which
+from six import PY3
+
# Minimum tool versions accepted by find_node_executable / find_npm_executable.
# NOTE(review): distutils (StrictVersion) is deprecated and removed in
# Python 3.12; consider packaging.version as a replacement — confirm before
# changing.
NODE_MIN_VERSION = StrictVersion("12.22.12")
NPM_MIN_VERSION = StrictVersion("6.14.16")
+
+
def find_node_paths():
    """Determine the possible paths for node executables.

    Returns a list of paths, which includes the build state directory.
    """
    mozbuild_tools_dir = get_tools_dir()

    if platform.system() == "Windows":
        mozbuild_node_path = os.path.join(mozbuild_tools_dir, "node")
    else:
        mozbuild_node_path = os.path.join(mozbuild_tools_dir, "node", "bin")

    # We still fallback to the PATH, since on OSes that don't have toolchain
    # artifacts available to download, Node may be coming from $PATH.
    # PATH can legitimately be unset in stripped-down environments; default
    # to "" instead of crashing with AttributeError on None.
    paths = [mozbuild_node_path] + os.environ.get("PATH", "").split(os.pathsep)

    if platform.system() == "Windows":
        system_drive = os.environ.get("SystemDrive")
        if system_drive:
            paths.append("%s\\nodejs" % system_drive)
        # Any of these variables may be absent (e.g. 32-bit Windows has no
        # PROGRAMW6432); os.path.join(None, ...) would raise TypeError.
        for var in ("ProgramFiles", "PROGRAMW6432", "PROGRAMFILES"):
            base = os.environ.get(var)
            if base:
                paths.append(os.path.join(base, "nodejs"))

    return paths
+
+
def check_executable_version(exe, wrap_call_with_node=False):
    """Determine the version of a Node executable by invoking it.

    :param exe: path of the executable to query (node itself, or an npm
        script).
    :param wrap_call_with_node: when True and not on Windows, run *exe*
        through node, since npm is a script outside Windows.

    May raise ``subprocess.CalledProcessError`` or ``ValueError`` on failure.
    """
    out = None
    # npm may be a script (Except on Windows), so we must call it with node.
    if wrap_call_with_node and platform.system() != "Windows":
        binary, _ = find_node_executable()
        if binary:
            out = (
                subprocess.check_output(
                    # This codebase is Python 3 only, so pass True directly
                    # instead of the former six.PY3 flag.
                    [binary, exe, "--version"], universal_newlines=True
                )
                .lstrip("v")
                .rstrip()
            )

    # If we can't find node, or we don't need to wrap it, fallback to calling
    # direct.
    if not out:
        out = (
            subprocess.check_output([exe, "--version"], universal_newlines=True)
            .lstrip("v")
            .rstrip()
        )
    return StrictVersion(out)
+
+
def find_node_executable(nodejs_exe=None, min_version=NODE_MIN_VERSION):
    """Find a Node executable from the mozbuild directory.

    :param nodejs_exe: explicit path to a node binary. When None (the
        default), the NODEJS environment variable is consulted. Reading
        the environment at call time — rather than once at import time,
        as the previous ``os.environ.get("NODEJS")`` default did — means
        callers that set NODEJS after this module is imported are honored.
    :param min_version: minimum acceptable version.

    Returns a tuple containing the path to an executable binary and a
    version tuple. Both tuple entries will be None if a Node executable
    could not be resolved.
    """
    if nodejs_exe is None:
        nodejs_exe = os.environ.get("NODEJS")

    if nodejs_exe:
        try:
            version = check_executable_version(nodejs_exe)
        except (subprocess.CalledProcessError, ValueError):
            return None, None

        if version >= min_version:
            return nodejs_exe, version.version

        return None, None

    # "node" is searched on the assumption that on systems (probably linux
    # distros) where a program called "node" does something else, NODEJS
    # can be used to point at the right binary.
    return find_executable("node", min_version)
+
+
def find_npm_executable(min_version=NPM_MIN_VERSION):
    """Find an npm executable from the mozbuild directory.

    npm is invoked through node for the version check (outside Windows it
    is a script, not a native executable).

    Returns a tuple containing the path to an executable binary and a
    version tuple. Both tuple entries will be None if an npm executable
    could not be resolved.
    """
    return find_executable("npm", min_version, True)
+
+
def find_executable(name, min_version, use_node_for_version_check=False):
    """Locate *name* on the node search paths and validate its version.

    Returns (path, version-tuple), or (None, None) when the executable is
    missing, fails to run, or is older than *min_version*.
    """
    search_paths = find_node_paths()
    binary = which(name, path=search_paths)
    if not binary:
        return None, None

    # Verify we can invoke the executable and its version is acceptable.
    try:
        found = check_executable_version(binary, use_node_for_version_check)
    except (subprocess.CalledProcessError, ValueError):
        return None, None

    if found < min_version:
        return None, None

    return binary, found.version
diff --git a/python/mozbuild/mozbuild/preprocessor.py b/python/mozbuild/mozbuild/preprocessor.py
new file mode 100644
index 0000000000..193eb58475
--- /dev/null
+++ b/python/mozbuild/mozbuild/preprocessor.py
@@ -0,0 +1,938 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+This is a very primitive line based preprocessor, for times when using
+a C preprocessor isn't an option.
+
+It currently supports the following grammar for expressions, whitespace is
+ignored:
+
+expression :
+ and_cond ( '||' expression ) ? ;
+and_cond:
+ test ( '&&' and_cond ) ? ;
+test:
+ unary ( ( '==' | '!=' ) unary ) ? ;
+unary :
+ '!'? value ;
+value :
+ [0-9]+ # integer
+ | 'defined(' \w+ ')'
+ | \w+ # string identifier or value;
+"""
+
+import errno
+import io
+import os
+import re
+import sys
+from optparse import OptionParser
+
+import six
+from mozpack.path import normsep
+
+from mozbuild.makeutil import Makefile
+
+# hack around win32 mangling our line endings
+# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/65443
+if sys.platform == "win32":
+ import msvcrt
+
+ msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
+ os.linesep = "\n"
+
+
+__all__ = ["Context", "Expression", "Preprocessor", "preprocess"]
+
+
def _to_text(a):
    # We end up converting a lot of different types (text_type, binary_type,
    # int, etc.) to Unicode in this script. This function handles all of those
    # possibilities.
    if isinstance(a, (six.text_type, six.binary_type)):
        # Bytes are decoded to text rather than stringified via repr().
        return six.ensure_text(a)
    # Everything else (ints, bools, ...) goes through the text constructor.
    return six.text_type(a)
+
+
def path_starts_with(path, prefix):
    """Return True if *path* equals *prefix* or lives underneath it.

    Comparison is component-wise and case-normalized, so "/a/bc" does not
    count as starting with "/a/b".
    """
    if os.altsep:
        # Fold the alternate separator (e.g. "/" on Windows) into os.sep.
        path = path.replace(os.altsep, os.sep)
        prefix = prefix.replace(os.altsep, os.sep)
    path_parts = [os.path.normcase(component) for component in path.split(os.sep)]
    prefix_parts = [os.path.normcase(component) for component in prefix.split(os.sep)]
    return path_parts[: len(prefix_parts)] == prefix_parts
+
+
class Expression:
    """Parser/evaluator for the preprocessor's expression mini-language.

    All regex patterns are raw strings; the originals relied on invalid
    escape sequences like "\\s" in plain strings, which is deprecated and a
    future SyntaxError.
    """

    def __init__(self, expression_string):
        """
        Create a new expression with this string.
        The expression will already be parsed into an Abstract Syntax Tree.
        """
        self.content = expression_string
        self.offset = 0
        self.__ignore_whitespace()
        self.e = self.__get_logical_or()
        # Leftover unparsed content means the expression was malformed.
        if self.content:
            raise Expression.ParseError(self)

    def __get_logical_or(self):
        """
        Production: and_cond ( '||' expression ) ?
        """
        if not len(self.content):
            return None
        rv = Expression.__AST("logical_op")
        # test
        rv.append(self.__get_logical_and())
        self.__ignore_whitespace()
        if self.content[:2] != "||":
            # no logical op needed, short cut to our prime element
            return rv[0]
        # append operator
        rv.append(Expression.__ASTLeaf("op", self.content[:2]))
        self.__strip(2)
        self.__ignore_whitespace()
        rv.append(self.__get_logical_or())
        self.__ignore_whitespace()
        return rv

    def __get_logical_and(self):
        """
        Production: test ( '&&' and_cond ) ?
        """
        if not len(self.content):
            return None
        rv = Expression.__AST("logical_op")
        # test
        rv.append(self.__get_equality())
        self.__ignore_whitespace()
        if self.content[:2] != "&&":
            # no logical op needed, short cut to our prime element
            return rv[0]
        # append operator
        rv.append(Expression.__ASTLeaf("op", self.content[:2]))
        self.__strip(2)
        self.__ignore_whitespace()
        rv.append(self.__get_logical_and())
        self.__ignore_whitespace()
        return rv

    def __get_equality(self):
        """
        Production: unary ( ( '==' | '!=' ) unary ) ?
        """
        if not len(self.content):
            return None
        rv = Expression.__AST("equality")
        # unary
        rv.append(self.__get_unary())
        self.__ignore_whitespace()
        if not re.match(r"[=!]=", self.content):
            # no equality needed, short cut to our prime unary
            return rv[0]
        # append operator
        rv.append(Expression.__ASTLeaf("op", self.content[:2]))
        self.__strip(2)
        self.__ignore_whitespace()
        rv.append(self.__get_unary())
        self.__ignore_whitespace()
        return rv

    def __get_unary(self):
        """
        Production: '!'? value
        """
        # eat whitespace right away, too
        not_ws = re.match(r"!\s*", self.content)
        if not not_ws:
            return self.__get_value()
        rv = Expression.__AST("not")
        self.__strip(not_ws.end())
        rv.append(self.__get_value())
        self.__ignore_whitespace()
        return rv

    def __get_value(self):
        r"""
        Production: ( [0-9]+ | 'defined(' \w+ ')' | \w+ )
        Note that the order is important, and the expression is kind-of
        ambiguous as \w includes 0-9. One could make it unambiguous by
        removing 0-9 from the first char of a string literal.
        """
        rv = None
        m = re.match(r"defined\s*\(\s*(\w+)\s*\)", self.content)
        if m:
            word_len = m.end()
            rv = Expression.__ASTLeaf("defined", m.group(1))
        else:
            word_len = re.match(r"[0-9]*", self.content).end()
            if word_len:
                value = int(self.content[:word_len])
                rv = Expression.__ASTLeaf("int", value)
            else:
                word_len = re.match(r"\w*", self.content).end()
                if word_len:
                    rv = Expression.__ASTLeaf("string", self.content[:word_len])
                else:
                    raise Expression.ParseError(self)
        self.__strip(word_len)
        self.__ignore_whitespace()
        return rv

    def __ignore_whitespace(self):
        ws_len = re.match(r"\s*", self.content).end()
        self.__strip(ws_len)
        return

    def __strip(self, length):
        """
        Remove a given amount of chars from the input and update
        the offset.
        """
        self.content = self.content[length:]
        self.offset += length

    def evaluate(self, context):
        """
        Evaluate the expression with the given context
        """

        # Helper function to evaluate __get_equality results
        def eval_equality(tok):
            left = opmap[tok[0].type](tok[0])
            right = opmap[tok[2].type](tok[2])
            rv = left == right
            if tok[1].value == "!=":
                rv = not rv
            return rv

        # Helper function to evaluate __get_logical_and and __get_logical_or results
        def eval_logical_op(tok):
            left = opmap[tok[0].type](tok[0])
            right = opmap[tok[2].type](tok[2])
            if tok[1].value == "&&":
                return left and right
            elif tok[1].value == "||":
                return left or right
            raise Expression.ParseError(self)

        # Mapping from token types to evaluator functions
        # Apart from (non-)equality, all these can be simple lambda forms.
        opmap = {
            "logical_op": eval_logical_op,
            "equality": eval_equality,
            "not": lambda tok: not opmap[tok[0].type](tok[0]),
            "string": lambda tok: context[tok.value],
            "defined": lambda tok: tok.value in context,
            "int": lambda tok: tok.value,
        }

        return opmap[self.e.type](self.e)

    class __AST(list):
        """
        Internal class implementing Abstract Syntax Tree nodes
        """

        def __init__(self, type):
            self.type = type
            # Zero-argument super(): the old super(self.__class__, self) form
            # recurses forever if this class is ever subclassed.  The original
            # also passed ``self`` (still empty here) as the iterable to
            # list.__init__, which was a no-op; start from an empty list.
            super().__init__()

    class __ASTLeaf:
        """
        Internal class implementing Abstract Syntax Tree leafs
        """

        def __init__(self, type, value):
            self.value = value
            self.type = type

        def __str__(self):
            return self.value.__str__()

        def __repr__(self):
            return self.value.__repr__()

    class ParseError(Exception):
        """
        Error raised when parsing fails.
        It has two members, offset and content, which give the offset of the
        error and the offending content.
        """

        def __init__(self, expression):
            self.offset = expression.offset
            self.content = expression.content[:3]

        def __str__(self):
            return 'Unexpected content at offset {0}, "{1}"'.format(
                self.offset, self.content
            )
+
+
class Context(dict):
    """
    This class holds variable values by subclassing dict, and while it
    truthfully reports True and False on

    name in context

    it returns the variable name itself on

    context["name"]

    to reflect the ambiguity between string literals and preprocessor
    variables.
    """

    def __getitem__(self, key):
        # Defined variables resolve to their stored value; undefined names
        # evaluate to themselves (i.e. behave like string literals).
        if key in self:
            # Zero-argument super() instead of super(self.__class__, self),
            # which recurses infinitely if Context is subclassed.
            return super().__getitem__(key)
        return key
+
+
+class Preprocessor:
+ """
+ Class for preprocessing text files.
+ """
+
+ class Error(RuntimeError):
+ def __init__(self, cpp, MSG, context):
+ self.file = cpp.context["FILE"]
+ self.line = cpp.context["LINE"]
+ self.key = MSG
+ RuntimeError.__init__(self, (self.file, self.line, self.key, context))
+
+ def __init__(self, defines=None, marker="#"):
+ self.context = Context()
+ self.context.update({"FILE": "", "LINE": 0, "DIRECTORY": os.path.abspath(".")})
+ try:
+ # Can import globally because of bootstrapping issues.
+ from buildconfig import topobjdir, topsrcdir
+ except ImportError:
+ # Allow this script to still work independently of a configured objdir.
+ topsrcdir = topobjdir = None
+ self.topsrcdir = topsrcdir
+ self.topobjdir = topobjdir
+ self.curdir = "."
+ self.actionLevel = 0
+ self.disableLevel = 0
+ # ifStates can be
+ # 0: hadTrue
+ # 1: wantsTrue
+ # 2: #else found
+ self.ifStates = []
+ self.checkLineNumbers = False
+
+ # A list of (filter_name, filter_function) pairs.
+ self.filters = []
+
+ self.cmds = {}
+ for cmd, level in (
+ ("define", 0),
+ ("undef", 0),
+ ("if", sys.maxsize),
+ ("ifdef", sys.maxsize),
+ ("ifndef", sys.maxsize),
+ ("else", 1),
+ ("elif", 1),
+ ("elifdef", 1),
+ ("elifndef", 1),
+ ("endif", sys.maxsize),
+ ("expand", 0),
+ ("literal", 0),
+ ("filter", 0),
+ ("unfilter", 0),
+ ("include", 0),
+ ("includesubst", 0),
+ ("error", 0),
+ ):
+ self.cmds[cmd] = (level, getattr(self, "do_" + cmd))
+ self.out = sys.stdout
+ self.setMarker(marker)
+ self.varsubst = re.compile("@(?P<VAR>\w+)@", re.U)
+ self.includes = set()
+ self.silenceMissingDirectiveWarnings = False
+ if defines:
+ self.context.update(defines)
+
+ def failUnused(self, file):
+ msg = None
+ if self.actionLevel == 0 and not self.silenceMissingDirectiveWarnings:
+ msg = "no preprocessor directives found"
+ elif self.actionLevel == 1:
+ msg = "no useful preprocessor directives found"
+ if msg:
+
+ class Fake(object):
+ pass
+
+ fake = Fake()
+ fake.context = {
+ "FILE": file,
+ "LINE": None,
+ }
+ raise Preprocessor.Error(fake, msg, None)
+
+ def setMarker(self, aMarker):
+ """
+ Set the marker to be used for processing directives.
+ Used for handling CSS files, with pp.setMarker('%'), for example.
+ The given marker may be None, in which case no markers are processed.
+ """
+ self.marker = aMarker
+ if aMarker:
+ instruction_prefix = "\s*{0}"
+ instruction_cmd = "(?P<cmd>[a-z]+)(?:\s+(?P<args>.*?))?\s*$"
+ instruction_fmt = instruction_prefix + instruction_cmd
+ ambiguous_fmt = instruction_prefix + "\s+" + instruction_cmd
+
+ self.instruction = re.compile(instruction_fmt.format(aMarker))
+ self.comment = re.compile(aMarker, re.U)
+ self.ambiguous_comment = re.compile(ambiguous_fmt.format(aMarker))
+ else:
+
+ class NoMatch(object):
+ def match(self, *args):
+ return False
+
+ self.instruction = self.comment = NoMatch()
+
+ def setSilenceDirectiveWarnings(self, value):
+ """
+ Sets whether missing directive warnings are silenced, according to
+ ``value``. The default behavior of the preprocessor is to emit
+ such warnings.
+ """
+ self.silenceMissingDirectiveWarnings = value
+
+ def addDefines(self, defines):
+ """
+ Adds the specified defines to the preprocessor.
+ ``defines`` may be a dictionary object or an iterable of key/value pairs
+ (as tuples or other iterables of length two)
+ """
+ self.context.update(defines)
+
+ def clone(self):
+ """
+ Create a clone of the current processor, including line ending
+ settings, marker, variable definitions, output stream.
+ """
+ rv = Preprocessor()
+ rv.context.update(self.context)
+ rv.setMarker(self.marker)
+ rv.out = self.out
+ return rv
+
+ def processFile(self, input, output, depfile=None):
+ """
+ Preprocesses the contents of the ``input`` stream and writes the result
+ to the ``output`` stream. If ``depfile`` is set, the dependencies of
+ ``output`` file are written to ``depfile`` in Makefile format.
+ """
+ self.out = output
+
+ self.do_include(input, False)
+ self.failUnused(input.name)
+
+ if depfile:
+ mk = Makefile()
+ mk.create_rule([output.name]).add_dependencies(self.includes)
+ mk.dump(depfile)
+
+ def computeDependencies(self, input):
+ """
+ Reads the ``input`` stream, and computes the dependencies for that input.
+ """
+ try:
+ old_out = self.out
+ self.out = None
+ self.do_include(input, False)
+
+ return self.includes
+ finally:
+ self.out = old_out
+
+ def applyFilters(self, aLine):
+ for f in self.filters:
+ aLine = f[1](aLine)
+ return aLine
+
+ def noteLineInfo(self):
+ # Record the current line and file. Called once before transitioning
+ # into or out of an included file and after writing each line.
+ self.line_info = self.context["FILE"], self.context["LINE"]
+
+ def write(self, aLine):
+ """
+ Internal method for handling output.
+ """
+ if not self.out:
+ return
+
+ next_line, next_file = self.context["LINE"], self.context["FILE"]
+ if self.checkLineNumbers:
+ expected_file, expected_line = self.line_info
+ expected_line += 1
+ if (
+ expected_line != next_line
+ or expected_file
+ and expected_file != next_file
+ ):
+ self.out.write(
+ '//@line {line} "{file}"\n'.format(line=next_line, file=next_file)
+ )
+ self.noteLineInfo()
+
+ filteredLine = self.applyFilters(aLine)
+ if filteredLine != aLine:
+ self.actionLevel = 2
+ self.out.write(filteredLine)
+
+ def handleCommandLine(self, args, defaultToStdin=False):
+ """
+ Parse a commandline into this parser.
+ Uses OptionParser internally, no args mean sys.argv[1:].
+ """
+
+ def get_output_file(path, encoding=None):
+ if encoding is None:
+ encoding = "utf-8"
+ dir = os.path.dirname(path)
+ if dir:
+ try:
+ os.makedirs(dir)
+ except OSError as error:
+ if error.errno != errno.EEXIST:
+ raise
+ return io.open(path, "w", encoding=encoding, newline="\n")
+
+ p = self.getCommandLineParser()
+ options, args = p.parse_args(args=args)
+ out = self.out
+ depfile = None
+
+ if options.output:
+ out = get_output_file(options.output, options.output_encoding)
+ elif options.output_encoding:
+ raise Preprocessor.Error(
+ self, "--output-encoding doesn't work without --output", None
+ )
+ if defaultToStdin and len(args) == 0:
+ args = [sys.stdin]
+ if options.depend:
+ raise Preprocessor.Error(self, "--depend doesn't work with stdin", None)
+ if options.depend:
+ if not options.output:
+ raise Preprocessor.Error(
+ self, "--depend doesn't work with stdout", None
+ )
+ depfile = get_output_file(options.depend)
+
+ if args:
+ for f in args:
+ if not isinstance(f, io.TextIOBase):
+ f = io.open(f, "r", encoding="utf-8")
+ with f as input_:
+ self.processFile(input=input_, output=out)
+ if depfile:
+ mk = Makefile()
+ mk.create_rule([six.ensure_text(options.output)]).add_dependencies(
+ self.includes
+ )
+ mk.dump(depfile)
+ depfile.close()
+
+ if options.output:
+ out.close()
+
+ def getCommandLineParser(self, unescapeDefines=False):
+ escapedValue = re.compile('".*"$')
+ numberValue = re.compile("\d+$")
+
+ def handleD(option, opt, value, parser):
+ vals = value.split("=", 1)
+ if len(vals) == 1:
+ vals.append(1)
+ elif unescapeDefines and escapedValue.match(vals[1]):
+ # strip escaped string values
+ vals[1] = vals[1][1:-1]
+ elif numberValue.match(vals[1]):
+ vals[1] = int(vals[1])
+ self.context[vals[0]] = vals[1]
+
+ def handleU(option, opt, value, parser):
+ del self.context[value]
+
+ def handleF(option, opt, value, parser):
+ self.do_filter(value)
+
+ def handleMarker(option, opt, value, parser):
+ self.setMarker(value)
+
+ def handleSilenceDirectiveWarnings(option, opt, value, parse):
+ self.setSilenceDirectiveWarnings(True)
+
+ p = OptionParser()
+ p.add_option(
+ "-D",
+ action="callback",
+ callback=handleD,
+ type="string",
+ metavar="VAR[=VAL]",
+ help="Define a variable",
+ )
+ p.add_option(
+ "-U",
+ action="callback",
+ callback=handleU,
+ type="string",
+ metavar="VAR",
+ help="Undefine a variable",
+ )
+ p.add_option(
+ "-F",
+ action="callback",
+ callback=handleF,
+ type="string",
+ metavar="FILTER",
+ help="Enable the specified filter",
+ )
+ p.add_option(
+ "-o",
+ "--output",
+ type="string",
+ default=None,
+ metavar="FILENAME",
+ help="Output to the specified file instead of stdout",
+ )
+ p.add_option(
+ "--depend",
+ type="string",
+ default=None,
+ metavar="FILENAME",
+ help="Generate dependencies in the given file",
+ )
+ p.add_option(
+ "--marker",
+ action="callback",
+ callback=handleMarker,
+ type="string",
+ help="Use the specified marker instead of #",
+ )
+ p.add_option(
+ "--silence-missing-directive-warnings",
+ action="callback",
+ callback=handleSilenceDirectiveWarnings,
+ help="Don't emit warnings about missing directives",
+ )
+ p.add_option(
+ "--output-encoding",
+ type="string",
+ default=None,
+ metavar="ENCODING",
+ help="Encoding to use for the output",
+ )
+ return p
+
+ def handleLine(self, aLine):
+ """
+ Handle a single line of input (internal).
+ """
+ if self.actionLevel == 0 and self.comment.match(aLine):
+ self.actionLevel = 1
+ m = self.instruction.match(aLine)
+ if m:
+ args = None
+ cmd = m.group("cmd")
+ try:
+ args = m.group("args")
+ except IndexError:
+ pass
+ if cmd not in self.cmds:
+ raise Preprocessor.Error(self, "INVALID_CMD", aLine)
+ level, cmd = self.cmds[cmd]
+ if level >= self.disableLevel:
+ cmd(args)
+ if cmd != "literal":
+ self.actionLevel = 2
+ elif self.disableLevel == 0:
+ if self.comment.match(aLine):
+ # make sure the comment is not ambiguous with a command
+ m = self.ambiguous_comment.match(aLine)
+ if m:
+ cmd = m.group("cmd")
+ if cmd in self.cmds:
+ raise Preprocessor.Error(self, "AMBIGUOUS_COMMENT", aLine)
+ else:
+ self.write(aLine)
+
+ # Instruction handlers
+ # These are named do_'instruction name' and take one argument
+
+ # Variables
+ def do_define(self, args):
+ m = re.match("(?P<name>\w+)(?:\s(?P<value>.*))?", args, re.U)
+ if not m:
+ raise Preprocessor.Error(self, "SYNTAX_DEF", args)
+ val = ""
+ if m.group("value"):
+ val = self.applyFilters(m.group("value"))
+ try:
+ val = int(val)
+ except Exception:
+ pass
+ self.context[m.group("name")] = val
+
+ def do_undef(self, args):
+ m = re.match("(?P<name>\w+)$", args, re.U)
+ if not m:
+ raise Preprocessor.Error(self, "SYNTAX_DEF", args)
+ if args in self.context:
+ del self.context[args]
+
+ # Logic
+ def ensure_not_else(self):
+ if len(self.ifStates) == 0 or self.ifStates[-1] == 2:
+ sys.stderr.write(
+ "WARNING: bad nesting of #else in %s\n" % self.context["FILE"]
+ )
+
+ def do_if(self, args, replace=False):
+ if self.disableLevel and not replace:
+ self.disableLevel += 1
+ return
+ val = None
+ try:
+ e = Expression(args)
+ val = e.evaluate(self.context)
+ except Exception:
+ # XXX do real error reporting
+ raise Preprocessor.Error(self, "SYNTAX_ERR", args)
+ if isinstance(val, six.text_type) or isinstance(val, six.binary_type):
+ # we're looking for a number value, strings are false
+ val = False
+ if not val:
+ self.disableLevel = 1
+ if replace:
+ if val:
+ self.disableLevel = 0
+ self.ifStates[-1] = self.disableLevel
+ else:
+ self.ifStates.append(self.disableLevel)
+
+ def do_ifdef(self, args, replace=False):
+ if self.disableLevel and not replace:
+ self.disableLevel += 1
+ return
+ if re.search("\W", args, re.U):
+ raise Preprocessor.Error(self, "INVALID_VAR", args)
+ if args not in self.context:
+ self.disableLevel = 1
+ if replace:
+ if args in self.context:
+ self.disableLevel = 0
+ self.ifStates[-1] = self.disableLevel
+ else:
+ self.ifStates.append(self.disableLevel)
+
+ def do_ifndef(self, args, replace=False):
+ if self.disableLevel and not replace:
+ self.disableLevel += 1
+ return
+ if re.search("\W", args, re.U):
+ raise Preprocessor.Error(self, "INVALID_VAR", args)
+ if args in self.context:
+ self.disableLevel = 1
+ if replace:
+ if args not in self.context:
+ self.disableLevel = 0
+ self.ifStates[-1] = self.disableLevel
+ else:
+ self.ifStates.append(self.disableLevel)
+
+ def do_else(self, args, ifState=2):
+ self.ensure_not_else()
+ hadTrue = self.ifStates[-1] == 0
+ self.ifStates[-1] = ifState # in-else
+ if hadTrue:
+ self.disableLevel = 1
+ return
+ self.disableLevel = 0
+
+ def do_elif(self, args):
+ if self.disableLevel == 1:
+ if self.ifStates[-1] == 1:
+ self.do_if(args, replace=True)
+ else:
+ self.do_else(None, self.ifStates[-1])
+
+ def do_elifdef(self, args):
+ if self.disableLevel == 1:
+ if self.ifStates[-1] == 1:
+ self.do_ifdef(args, replace=True)
+ else:
+ self.do_else(None, self.ifStates[-1])
+
+ def do_elifndef(self, args):
+ if self.disableLevel == 1:
+ if self.ifStates[-1] == 1:
+ self.do_ifndef(args, replace=True)
+ else:
+ self.do_else(None, self.ifStates[-1])
+
+ def do_endif(self, args):
+ if self.disableLevel > 0:
+ self.disableLevel -= 1
+ if self.disableLevel == 0:
+ self.ifStates.pop()
+
+ # output processing
+ def do_expand(self, args):
+ lst = re.split("__(\w+)__", args, re.U)
+
+ def vsubst(v):
+ if v in self.context:
+ return _to_text(self.context[v])
+ return ""
+
+ for i in range(1, len(lst), 2):
+ lst[i] = vsubst(lst[i])
+ lst.append("\n") # add back the newline
+ self.write(six.moves.reduce(lambda x, y: x + y, lst, ""))
+
+ def do_literal(self, args):
+ self.write(args + "\n")
+
+ def do_filter(self, args):
+ filters = [f for f in args.split(" ") if hasattr(self, "filter_" + f)]
+ if len(filters) == 0:
+ return
+ current = dict(self.filters)
+ for f in filters:
+ current[f] = getattr(self, "filter_" + f)
+ self.filters = [(fn, current[fn]) for fn in sorted(current.keys())]
+ return
+
+ def do_unfilter(self, args):
+ filters = args.split(" ")
+ current = dict(self.filters)
+ for f in filters:
+ if f in current:
+ del current[f]
+ self.filters = [(fn, current[fn]) for fn in sorted(current.keys())]
+ return
+
+ # Filters
+ #
+ # emptyLines: Strips blank lines from the output.
+ def filter_emptyLines(self, aLine):
+ if aLine == "\n":
+ return ""
+ return aLine
+
+ # dumbComments: Empties out lines that consists of optional whitespace
+ # followed by a `//`.
+ def filter_dumbComments(self, aLine):
+ return re.sub("^\s*//.*", "", aLine)
+
+ # substitution: variables wrapped in @ are replaced with their value.
    def filter_substitution(self, aLine, fatal=True):
        """Filter: replace ``@VAR@`` markers with the variable's value.

        When ``fatal`` is true, an unknown variable raises UNDEFINED_VAR;
        otherwise the ``@VAR@`` marker is left in place unchanged.
        """
        def repl(matchobj):
            varname = matchobj.group("VAR")
            if varname in self.context:
                return _to_text(self.context[varname])
            if fatal:
                raise Preprocessor.Error(self, "UNDEFINED_VAR", varname)
            # Non-fatal: keep the original @VAR@ text verbatim.
            return matchobj.group(0)

        return self.varsubst.sub(repl, aLine)
+
+ # attemptSubstitution: variables wrapped in @ are replaced with their
+ # value, or an empty string if the variable is not defined.
+ def filter_attemptSubstitution(self, aLine):
+ return self.filter_substitution(aLine, fatal=False)
+
+ # File ops
+ def do_include(self, args, filters=True):
+ """
+ Preprocess a given file.
+ args can either be a file name, or a file-like object.
+ Files should be opened, and will be closed after processing.
+ """
+ isName = isinstance(args, six.string_types)
+ oldCheckLineNumbers = self.checkLineNumbers
+ self.checkLineNumbers = False
+ if isName:
+ try:
+ args = _to_text(args)
+ if filters:
+ args = self.applyFilters(args)
+ if not os.path.isabs(args):
+ args = os.path.join(self.curdir, args)
+ args = io.open(args, "r", encoding="utf-8")
+ except Preprocessor.Error:
+ raise
+ except Exception:
+ raise Preprocessor.Error(self, "FILE_NOT_FOUND", _to_text(args))
+ self.checkLineNumbers = bool(
+ re.search("\.(js|jsm|java|webidl)(?:\.in)?$", args.name)
+ )
+ oldFile = self.context["FILE"]
+ oldLine = self.context["LINE"]
+ oldDir = self.context["DIRECTORY"]
+ oldCurdir = self.curdir
+ self.noteLineInfo()
+
+ if args.isatty():
+ # we're stdin, use '-' and '' for file and dir
+ self.context["FILE"] = "-"
+ self.context["DIRECTORY"] = ""
+ self.curdir = "."
+ else:
+ abspath = os.path.abspath(args.name)
+ self.curdir = os.path.dirname(abspath)
+ self.includes.add(six.ensure_text(abspath))
+ if self.topobjdir and path_starts_with(abspath, self.topobjdir):
+ abspath = "$OBJDIR" + normsep(abspath[len(self.topobjdir) :])
+ elif self.topsrcdir and path_starts_with(abspath, self.topsrcdir):
+ abspath = "$SRCDIR" + normsep(abspath[len(self.topsrcdir) :])
+ self.context["FILE"] = abspath
+ self.context["DIRECTORY"] = os.path.dirname(abspath)
+ self.context["LINE"] = 0
+
+ for l in args:
+ self.context["LINE"] += 1
+ self.handleLine(l)
+ if isName:
+ args.close()
+
+ self.context["FILE"] = oldFile
+ self.checkLineNumbers = oldCheckLineNumbers
+ self.context["LINE"] = oldLine
+ self.context["DIRECTORY"] = oldDir
+ self.curdir = oldCurdir
+
+ def do_includesubst(self, args):
+ args = self.filter_substitution(args)
+ self.do_include(args)
+
+ def do_error(self, args):
+ raise Preprocessor.Error(self, "Error: ", _to_text(args))
+
+
def preprocess(includes=None, defines=None, output=sys.stdout, marker="#"):
    """Preprocess each path in ``includes`` into ``output``.

    ``defines`` seeds the preprocessor context.  Returns the set of files
    read (``pp.includes``).

    The historical defaults ``includes=[sys.stdin]`` and ``defines={}`` were
    mutable default arguments; they are now constructed per call.  (The
    ``[sys.stdin]`` default was unusable anyway, since each entry is passed
    to ``io.open`` as a path.)
    """
    if includes is None:
        includes = [sys.stdin]
    if defines is None:
        defines = {}
    pp = Preprocessor(defines=defines, marker=marker)
    for f in includes:
        with io.open(f, "r", encoding="utf-8") as input:
            pp.processFile(input=input, output=output)
    return pp.includes
+
+
+# Keep this module independently executable.
+if __name__ == "__main__":
+ pp = Preprocessor()
+ pp.handleCommandLine(None, True)
diff --git a/python/mozbuild/mozbuild/pythonutil.py b/python/mozbuild/mozbuild/pythonutil.py
new file mode 100644
index 0000000000..a3540647f9
--- /dev/null
+++ b/python/mozbuild/mozbuild/pythonutil.py
@@ -0,0 +1,23 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+
+
def iter_modules_in_path(*paths):
    """Yield the source file of every loaded module living under *paths*.

    Paths are compared absolute and case-normalized; compiled ``.pyc`` files
    are mapped back to their ``.py`` source.
    """
    paths = [os.path.abspath(os.path.normcase(p)) + os.sep for p in paths]
    # Snapshot sys.modules: an import triggered while we iterate would mutate
    # the dict and raise RuntimeError on a live iterator.
    for module in list(sys.modules.values()):
        # One getattr covers both "no __file__ attribute" and "__file__ is
        # None" — the original checked the None case twice.
        path = getattr(module, "__file__", None)
        if path is None:
            continue

        if path.endswith(".pyc"):
            path = path[:-1]
        path = os.path.abspath(os.path.normcase(path))

        if any(path.startswith(p) for p in paths):
            yield path
diff --git a/python/mozbuild/mozbuild/repackaging/__init__.py b/python/mozbuild/mozbuild/repackaging/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/repackaging/__init__.py
diff --git a/python/mozbuild/mozbuild/repackaging/application_ini.py b/python/mozbuild/mozbuild/repackaging/application_ini.py
new file mode 100644
index 0000000000..f11c94f781
--- /dev/null
+++ b/python/mozbuild/mozbuild/repackaging/application_ini.py
@@ -0,0 +1,66 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozpack.files import FileFinder
+from six import string_types
+from six.moves import configparser
+
+
def get_application_ini_value(
    finder_or_application_directory, section, value, fallback=None
):
    """Find string with given `section` and `value` in any `application.ini`
    under given directory or finder.

    If string is not found and `fallback` is given, find string with given
    `section` and `fallback` instead.

    Raises an `Exception` if no string is found."""

    # Delegate to the multi-value variant with a single spec dict and take
    # its first (and only) result; any failure raised by the generator
    # propagates through next().
    return next(
        get_application_ini_values(
            finder_or_application_directory,
            dict(section=section, value=value, fallback=fallback),
        )
    )
+
+
def get_application_ini_values(finder_or_application_directory, *args):
    """Find multiple strings for given `section` and `value` pairs.
    Additional `args` should be dictionaries with keys `section`, `value`,
    and optional `fallback`. Returns an iterable of strings, one for each
    dictionary provided.

    `fallback` is treated as with `get_application_ini_value`.

    Raises an `Exception` if any string is not found."""

    # Accept either a directory path or an existing FileFinder.
    if isinstance(finder_or_application_directory, string_types):
        finder = FileFinder(finder_or_application_directory)
    else:
        finder = finder_or_application_directory

    # Packages usually have a top-level `firefox/` directory; search below it.
    for p, f in finder.find("**/application.ini"):
        data = f.open().read().decode("utf-8")
        parser = configparser.ConfigParser()
        parser.read_string(data)

        for d in args:
            rc = None
            try:
                rc = parser.get(d["section"], d["value"])
            except configparser.NoOptionError:
                # Fall back to the alternate option name only if one was
                # supplied; otherwise surface the NoOptionError.
                if "fallback" not in d:
                    raise
                else:
                    rc = parser.get(d["section"], d["fallback"])

            # NOTE(review): parser.get returning None here means the option
            # resolved to nothing; the message below predates this check and
            # reads as if no application.ini was found at all.
            if rc is None:
                raise Exception("Input does not contain an application.ini file")

            yield rc

        # Process only the first `application.ini`.
        break
diff --git a/python/mozbuild/mozbuild/repackaging/deb.py b/python/mozbuild/mozbuild/repackaging/deb.py
new file mode 100644
index 0000000000..3e01680437
--- /dev/null
+++ b/python/mozbuild/mozbuild/repackaging/deb.py
@@ -0,0 +1,694 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import datetime
+import json
+import logging
+import os
+import shutil
+import subprocess
+import tarfile
+import tempfile
+import zipfile
+from email.utils import format_datetime
+from pathlib import Path
+from string import Template
+
+import mozfile
+import mozpack.path as mozpath
+import requests
+from mozilla_version.gecko import GeckoVersion
+from redo import retry
+
+from mozbuild.repackaging.application_ini import get_application_ini_values
+
+
+class NoDebPackageFound(Exception):
+    """Raised when no .deb is found after calling dpkg-buildpackage"""
+
+    def __init__(self, deb_file_path) -> None:
+        # `deb_file_path` is the output file dpkg-buildpackage was expected
+        # to produce but did not.
+        super().__init__(
+            f"No {deb_file_path} package found after calling dpkg-buildpackage"
+        )
+
+
+class HgServerError(Exception):
+    """Raised when Hg responds with an error code that is not 404 (i.e. when there is an outage)"""
+
+    def __init__(self, msg) -> None:
+        # `msg` is the fully formatted description of the failed request.
+        super().__init__(msg)
+
+
+# Maps mozilla-style architecture names to Debian package architecture names.
+_DEB_ARCH = {
+    "all": "all",
+    "x86": "i386",
+    "x86_64": "amd64",
+}
+# At the moment the Firefox build baseline is jessie.
+# The debian-repackage image defined in taskcluster/docker/debian-repackage/Dockerfile
+# bootstraps the /srv/jessie-i386 and /srv/jessie-amd64 chroot environments we use to
+# create the `.deb` repackages. By running the repackage using chroot we generate shared
+# library dependencies that match the Firefox build baseline
+# defined in taskcluster/scripts/misc/build-sysroot.sh
+_DEB_DIST = "jessie"
+
+
+def repackage_deb(
+    log,
+    infile,
+    output,
+    template_dir,
+    arch,
+    version,
+    build_number,
+    release_product,
+    release_type,
+    fluent_localization,
+    fluent_resource_loader,
+):
+    """Repackage a Firefox tarball `infile` into a Debian package at `output`.
+
+    Renders the packaging templates from `template_dir`, injects the partner
+    distribution folder and a localized desktop entry file, then builds the
+    `.deb` with dpkg-buildpackage (inside a chroot when one is available).
+    Raises an `Exception` if `infile` is not a tarball.
+    """
+    if not tarfile.is_tarfile(infile):
+        raise Exception("Input file %s is not a valid tarfile." % infile)
+
+    tmpdir = _create_temporary_directory(arch)
+    source_dir = os.path.join(tmpdir, "source")
+    try:
+        mozfile.extract_tarball(infile, source_dir)
+        application_ini_data = _extract_application_ini_data(infile)
+        build_variables = _get_build_variables(
+            application_ini_data,
+            arch,
+            version,
+            build_number,
+            # Substituted by dpkg-shlibdeps at build time with the shared
+            # library dependencies of the shipped binaries.
+            depends="${shlibs:Depends},",
+        )
+
+        _copy_plain_deb_config(template_dir, source_dir)
+        _render_deb_templates(template_dir, source_dir, build_variables)
+
+        app_name = application_ini_data["name"]
+        # Marker file shipped inside the application directory; presumably
+        # lets the browser detect it is running from a packaged install —
+        # confirm against the consumer of `is-packaged-app`.
+        with open(
+            mozpath.join(source_dir, app_name.lower(), "is-packaged-app"), "w"
+        ) as f:
+            f.write("This is a packaged app.\n")
+
+        _inject_deb_distribution_folder(source_dir, app_name)
+        _inject_deb_desktop_entry_file(
+            log,
+            source_dir,
+            build_variables,
+            release_product,
+            release_type,
+            fluent_localization,
+            fluent_resource_loader,
+        )
+        _generate_deb_archive(
+            source_dir,
+            target_dir=tmpdir,
+            output_file_path=output,
+            build_variables=build_variables,
+            arch=arch,
+        )
+
+    finally:
+        shutil.rmtree(tmpdir)
+
+
+def repackage_deb_l10n(
+    input_xpi_file, input_tar_file, output, template_dir, version, build_number
+):
+    """Repackage a langpack .xpi into an architecture-independent `.deb`.
+
+    The langpack is installed under firefox/distribution/extensions and the
+    package is made to depend on the browser package (the RemotingName from
+    `input_tar_file`'s application.ini).
+    """
+    # Langpacks contain no native code, hence "all" (arch-independent).
+    arch = "all"
+
+    tmpdir = _create_temporary_directory(arch)
+    source_dir = os.path.join(tmpdir, "source")
+    try:
+        langpack_metadata = _extract_langpack_metadata(input_xpi_file)
+        langpack_dir = mozpath.join(source_dir, "firefox", "distribution", "extensions")
+        application_ini_data = _extract_application_ini_data(input_tar_file)
+        langpack_id = langpack_metadata["langpack_id"]
+        build_variables = _get_build_variables(
+            application_ini_data,
+            arch,
+            version,
+            build_number,
+            depends=application_ini_data["remoting_name"],
+            # Debian package names are only lowercase
+            package_name_suffix=f"-l10n-{langpack_id.lower()}",
+            description_suffix=f" - {langpack_metadata['description']}",
+        )
+        _copy_plain_deb_config(template_dir, source_dir)
+        _render_deb_templates(template_dir, source_dir, build_variables)
+
+        os.makedirs(langpack_dir, exist_ok=True)
+        shutil.copy(
+            input_xpi_file,
+            mozpath.join(
+                langpack_dir,
+                f"{langpack_metadata['browser_specific_settings']['gecko']['id']}.xpi",
+            ),
+        )
+        _generate_deb_archive(
+            source_dir=source_dir,
+            target_dir=tmpdir,
+            output_file_path=output,
+            build_variables=build_variables,
+            arch=arch,
+        )
+    finally:
+        shutil.rmtree(tmpdir)
+
+
+def _extract_application_ini_data(input_tar_file):
+    """Extract the single `application.ini` from `input_tar_file` and parse it.
+
+    Raises `ValueError` when the archive contains zero or more than one
+    `application.ini`.  Returns the dict produced by
+    `_extract_application_ini_data_from_directory`.
+    """
+    with tempfile.TemporaryDirectory() as d:
+        with tarfile.open(input_tar_file) as tar:
+            application_ini_files = [
+                tar_info
+                for tar_info in tar.getmembers()
+                if tar_info.name.endswith("/application.ini")
+            ]
+            if len(application_ini_files) == 0:
+                raise ValueError(
+                    f"Cannot find any application.ini file in archive {input_tar_file}"
+                )
+            if len(application_ini_files) > 1:
+                raise ValueError(
+                    f"Too many application.ini files found in archive {input_tar_file}. "
+                    f"Found: {application_ini_files}"
+                )
+
+            # Extract preserving the member's internal path below `d`.
+            tar.extract(application_ini_files[0], path=d)
+
+        return _extract_application_ini_data_from_directory(d)
+
+
+def _extract_application_ini_data_from_directory(application_directory):
+    """Read the [App] metadata from an application.ini below
+    `application_directory`.
+
+    Returns a dict with keys name, display_name (CodeName, falling back to
+    Name), vendor, remoting_name, build_id and timestamp (the BuildID parsed
+    as a naive datetime).
+    """
+    values = get_application_ini_values(
+        application_directory,
+        dict(section="App", value="Name"),
+        dict(section="App", value="CodeName", fallback="Name"),
+        dict(section="App", value="Vendor"),
+        dict(section="App", value="RemotingName"),
+        dict(section="App", value="BuildID"),
+    )
+
+    # `values` is a generator; consume it in the same order as requested.
+    data = {
+        "name": next(values),
+        "display_name": next(values),
+        "vendor": next(values),
+        "remoting_name": next(values),
+        "build_id": next(values),
+    }
+    # BuildID has the form YYYYMMDDhhmmss; the resulting datetime is naive.
+    data["timestamp"] = datetime.datetime.strptime(data["build_id"], "%Y%m%d%H%M%S")
+
+    return data
+
+
+def _get_build_variables(
+    application_ini_data,
+    arch,
+    version_string,
+    build_number,
+    depends,
+    package_name_suffix="",
+    description_suffix="",
+):
+    """Derive the DEB_* substitution variables consumed by the packaging
+    templates (see `_render_deb_templates`).
+
+    Nightly versions are suffixed with the BuildID, release versions with
+    `~build<build_number>`.
+    """
+    version = GeckoVersion.parse(version_string)
+    # Nightlies don't have build numbers
+    deb_pkg_version = (
+        f"{version}~{application_ini_data['build_id']}"
+        if version.is_nightly
+        else f"{version}~build{build_number}"
+    )
+    # Debian package names are lowercase-only.
+    remoting_name = application_ini_data["remoting_name"].lower()
+
+    return {
+        "DEB_DESCRIPTION": f"{application_ini_data['vendor']} {application_ini_data['display_name']}"
+        f"{description_suffix}",
+        "DEB_PKG_INSTALL_PATH": f"usr/lib/{remoting_name}",
+        "DEB_PKG_NAME": f"{remoting_name}{package_name_suffix}",
+        "DEB_PKG_VERSION": deb_pkg_version,
+        # RFC 2822 date for debian/changelog, derived from the BuildID.
+        "DEB_CHANGELOG_DATE": format_datetime(application_ini_data["timestamp"]),
+        "DEB_ARCH_NAME": _DEB_ARCH[arch],
+        "DEB_DEPENDS": depends,
+    }
+
+
+def _copy_plain_deb_config(input_template_dir, source_dir):
+    """Copy every non-template file (anything not ending in `.in`) from
+    `input_template_dir` into `source_dir`/debian unchanged."""
+    template_dir_filenames = os.listdir(input_template_dir)
+    plain_filenames = [
+        mozpath.basename(filename)
+        for filename in template_dir_filenames
+        if not filename.endswith(".in")
+    ]
+    os.makedirs(mozpath.join(source_dir, "debian"), exist_ok=True)
+
+    for filename in plain_filenames:
+        shutil.copy(
+            mozpath.join(input_template_dir, filename),
+            mozpath.join(source_dir, "debian", filename),
+        )
+
+
+def _render_deb_templates(
+    input_template_dir, source_dir, build_variables, exclude_file_names=None
+):
+    """Render every `*.in` template from `input_template_dir` into
+    `source_dir`/debian, dropping the `.in` suffix (via `Path.stem`).
+
+    `build_variables` feeds `string.Template.substitute`, which raises
+    `KeyError` for any placeholder missing from the dict.  Files listed in
+    `exclude_file_names` are skipped.
+    """
+    exclude_file_names = [] if exclude_file_names is None else exclude_file_names
+
+    template_dir_filenames = os.listdir(input_template_dir)
+    template_filenames = [
+        mozpath.basename(filename)
+        for filename in template_dir_filenames
+        if filename.endswith(".in") and filename not in exclude_file_names
+    ]
+    os.makedirs(mozpath.join(source_dir, "debian"), exist_ok=True)
+
+    for file_name in template_filenames:
+        with open(mozpath.join(input_template_dir, file_name)) as f:
+            template = Template(f.read())
+        with open(mozpath.join(source_dir, "debian", Path(file_name).stem), "w") as f:
+            f.write(template.substitute(build_variables))
+
+
+def _inject_deb_distribution_folder(source_dir, app_name):
+    """Clone the mozilla-partners `deb` repository and copy its
+    distribution/ folder into the application directory.
+
+    NOTE(review): the clone is not pinned to a tag or revision, so builds
+    pick up whatever the default branch currently holds — confirm this is
+    intended.
+    """
+    with tempfile.TemporaryDirectory() as git_clone_dir:
+        subprocess.check_call(
+            [
+                "git",
+                "clone",
+                "https://github.com/mozilla-partners/deb.git",
+                git_clone_dir,
+            ],
+        )
+        shutil.copytree(
+            mozpath.join(git_clone_dir, "desktop/deb/distribution"),
+            mozpath.join(source_dir, app_name.lower(), "distribution"),
+        )
+
+
+def _inject_deb_desktop_entry_file(
+    log,
+    source_dir,
+    build_variables,
+    release_product,
+    release_type,
+    fluent_localization,
+    fluent_resource_loader,
+):
+    """Generate the localized .desktop file and write it into
+    `source_dir`/debian as `<DEB_PKG_NAME>.desktop`."""
+    desktop_entry_file_text = _generate_browser_desktop_entry_file_text(
+        log,
+        build_variables,
+        release_product,
+        release_type,
+        fluent_localization,
+        fluent_resource_loader,
+    )
+    desktop_entry_file_filename = f"{build_variables['DEB_PKG_NAME']}.desktop"
+    os.makedirs(mozpath.join(source_dir, "debian"), exist_ok=True)
+    with open(
+        mozpath.join(source_dir, "debian", desktop_entry_file_filename), "w"
+    ) as f:
+        f.write(desktop_entry_file_text)
+
+
+def _generate_browser_desktop_entry_file_text(
+    log,
+    build_variables,
+    release_product,
+    release_type,
+    fluent_localization,
+    fluent_resource_loader,
+):
+    """Return the full text of the browser's .desktop file, with entries
+    localized for every available locale."""
+    localizations = _create_fluent_localizations(
+        fluent_resource_loader, fluent_localization, release_type, release_product, log
+    )
+    desktop_entry = _generate_browser_desktop_entry(build_variables, localizations)
+    # `_generate_browser_desktop_entry` returns a list of lines.
+    desktop_entry_file_text = "\n".join(desktop_entry)
+    return desktop_entry_file_text
+
+
+def _create_fluent_localizations(
+ fluent_resource_loader, fluent_localization, release_type, release_product, log
+):
+ brand_fluent_filename = "brand.ftl"
+ l10n_central_url = "https://hg.mozilla.org/l10n-central"
+ desktop_entry_fluent_filename = "linuxDesktopEntry.ftl"
+
+ l10n_dir = tempfile.mkdtemp()
+
+ loader = fluent_resource_loader(os.path.join(l10n_dir, "{locale}"))
+
+ localizations = {}
+ linux_l10n_changesets = _load_linux_l10n_changesets(
+ "browser/locales/l10n-changesets.json"
+ )
+ locales = ["en-US"]
+ locales.extend(linux_l10n_changesets.keys())
+ en_US_brand_fluent_filename = _get_en_US_brand_fluent_filename(
+ brand_fluent_filename, release_type, release_product
+ )
+
+ for locale in locales:
+ locale_dir = os.path.join(l10n_dir, locale)
+ os.mkdir(locale_dir)
+ localized_desktop_entry_filename = os.path.join(
+ locale_dir, desktop_entry_fluent_filename
+ )
+ if locale == "en-US":
+ en_US_desktop_entry_fluent_filename = os.path.join(
+ "browser/locales/en-US/browser", desktop_entry_fluent_filename
+ )
+ shutil.copyfile(
+ en_US_desktop_entry_fluent_filename,
+ localized_desktop_entry_filename,
+ )
+ else:
+ non_en_US_desktop_entry_fluent_filename = os.path.join(
+ "browser/browser", desktop_entry_fluent_filename
+ )
+ non_en_US_fluent_resource_file_url = os.path.join(
+ l10n_central_url,
+ locale,
+ "raw-file",
+ linux_l10n_changesets[locale]["revision"],
+ non_en_US_desktop_entry_fluent_filename,
+ )
+ response = requests.get(non_en_US_fluent_resource_file_url)
+ response = retry(
+ requests.get,
+ args=[non_en_US_fluent_resource_file_url],
+ attempts=5,
+ sleeptime=3,
+ jitter=2,
+ )
+ mgs = "Missing {fluent_resource_file_name} for {locale}: received HTTP {status_code} for GET {resource_file_url}"
+ params = {
+ "fluent_resource_file_name": desktop_entry_fluent_filename,
+ "locale": locale,
+ "resource_file_url": non_en_US_fluent_resource_file_url,
+ "status_code": response.status_code,
+ }
+ action = "repackage-deb"
+ if response.status_code == 404:
+ log(
+ logging.WARNING,
+ action,
+ params,
+ mgs,
+ )
+ continue
+ if response.status_code != 200:
+ log(
+ logging.ERROR,
+ action,
+ params,
+ mgs,
+ )
+ raise HgServerError(mgs.format(**params))
+
+ with open(localized_desktop_entry_filename, "w", encoding="utf-8") as f:
+ f.write(response.text)
+
+ shutil.copyfile(
+ en_US_brand_fluent_filename,
+ os.path.join(locale_dir, brand_fluent_filename),
+ )
+
+ fallbacks = [locale]
+ if locale != "en-US":
+ fallbacks.append("en-US")
+ localizations[locale] = fluent_localization(
+ fallbacks, [desktop_entry_fluent_filename, brand_fluent_filename], loader
+ )
+
+ return localizations
+
+
+def _get_en_US_brand_fluent_filename(
+    brand_fluent_filename, release_type, release_product
+):
+    """Return the in-tree en-US brand.ftl path matching the release channel.
+
+    nightly -> nightly branding; firefox beta -> official; devedition beta
+    -> aurora; everything else -> unofficial.
+    """
+    branding_fluent_filename_template = os.path.join(
+        "browser/branding/{brand}/locales/en-US", brand_fluent_filename
+    )
+    if release_type == "nightly":
+        return branding_fluent_filename_template.format(brand="nightly")
+    elif release_type == "beta" and release_product == "firefox":
+        return branding_fluent_filename_template.format(brand="official")
+    elif release_type == "beta" and release_product == "devedition":
+        return branding_fluent_filename_template.format(brand="aurora")
+    else:
+        return branding_fluent_filename_template.format(brand="unofficial")
+
+
+def _load_linux_l10n_changesets(l10n_changesets_filename):
+    """Load the l10n changesets JSON and keep only locales that ship on at
+    least one linux* platform."""
+    with open(l10n_changesets_filename) as l10n_changesets_file:
+        l10n_changesets = json.load(l10n_changesets_file)
+        return {
+            locale: changeset
+            for locale, changeset in l10n_changesets.items()
+            if any(platform.startswith("linux") for platform in changeset["platforms"])
+        }
+
+
+def _generate_browser_desktop_entry(build_variables, localizations):
+    """Build the browser's .desktop file content as a list of lines.
+
+    Emits one [Desktop Entry] section followed by a [Desktop Action ...]
+    section per action, with Name/Comment/etc. localized for every locale
+    in `localizations` (see `_desktop_entry_section`).
+    """
+    mime_types = [
+        "application/json",
+        "application/pdf",
+        "application/rdf+xml",
+        "application/rss+xml",
+        "application/x-xpinstall",
+        "application/xhtml+xml",
+        "application/xml",
+        "audio/flac",
+        "audio/ogg",
+        "audio/webm",
+        "image/avif",
+        "image/gif",
+        "image/jpeg",
+        "image/png",
+        "image/svg+xml",
+        "image/webp",
+        "text/html",
+        "text/xml",
+        "video/ogg",
+        "video/webm",
+        "x-scheme-handler/chrome",
+        "x-scheme-handler/http",
+        "x-scheme-handler/https",
+    ]
+
+    categories = [
+        "GNOME",
+        "GTK",
+        "Network",
+        "WebBrowser",
+    ]
+
+    # Desktop actions: `message` is a Fluent message id resolved per locale.
+    actions = [
+        {
+            "name": "new-window",
+            "message": "desktop-action-new-window-name",
+            "command": f"{build_variables['DEB_PKG_NAME']} --new-window %u",
+        },
+        {
+            "name": "new-private-window",
+            "message": "desktop-action-new-private-window-name",
+            "command": f"{build_variables['DEB_PKG_NAME']} --private-window %u",
+        },
+        {
+            "name": "open-profile-manager",
+            "message": "desktop-action-open-profile-manager",
+            "command": f"{build_variables['DEB_PKG_NAME']} --ProfileManager",
+        },
+    ]
+
+    desktop_entry = _desktop_entry_section(
+        "Desktop Entry",
+        [
+            {
+                "key": "Version",
+                "value": "1.0",
+            },
+            {
+                "key": "Type",
+                "value": "Application",
+            },
+            {
+                "key": "Exec",
+                "value": f"{build_variables['DEB_PKG_NAME']} %u",
+            },
+            {
+                "key": "Terminal",
+                "value": "false",
+            },
+            {
+                "key": "X-MultipleArgs",
+                "value": "false",
+            },
+            {
+                "key": "Icon",
+                "value": build_variables["DEB_PKG_NAME"],
+            },
+            {
+                "key": "StartupWMClass",
+                "value": build_variables["DEB_PKG_NAME"],
+            },
+            {
+                "key": "Categories",
+                "value": _desktop_entry_list(categories),
+            },
+            {
+                "key": "MimeType",
+                "value": _desktop_entry_list(mime_types),
+            },
+            {
+                "key": "StartupNotify",
+                "value": "true",
+            },
+            {
+                "key": "Actions",
+                "value": _desktop_entry_list([action["name"] for action in actions]),
+            },
+            # "l10n": True marks keys whose value is a Fluent message id.
+            {"key": "Name", "value": "desktop-entry-name", "l10n": True},
+            {"key": "Comment", "value": "desktop-entry-comment", "l10n": True},
+            {"key": "GenericName", "value": "desktop-entry-generic-name", "l10n": True},
+            {"key": "Keywords", "value": "desktop-entry-keywords", "l10n": True},
+            {
+                "key": "X-GNOME-FullName",
+                "value": "desktop-entry-x-gnome-full-name",
+                "l10n": True,
+            },
+        ],
+        localizations,
+    )
+
+    for action in actions:
+        desktop_entry.extend(
+            _desktop_entry_section(
+                f"Desktop Action {action['name']}",
+                [
+                    {
+                        "key": "Name",
+                        "value": action["message"],
+                        "l10n": True,
+                    },
+                    {
+                        "key": "Exec",
+                        "value": action["command"],
+                    },
+                ],
+                localizations,
+            )
+        )
+
+    return desktop_entry
+
+
+def _desktop_entry_list(iterable):
+    """Render `iterable` as a desktop-entry list value: items joined by ';'
+    and terminated by a trailing ';'."""
+    delimiter = ";"
+    return f"{delimiter.join(iterable)}{delimiter}"
+
+
+def _desktop_entry_attribute(key, value, locale=None, localizations=None):
+    """Render one `Key=Value` line of a desktop entry.
+
+    Without a locale the value is emitted verbatim.  With a locale the value
+    is treated as a Fluent message id: en-US supplies the unsuffixed default
+    line, other locales get a `Key[ll_CC]=...` suffixed line.
+    """
+    if not locale and not localizations:
+        return f"{key}={value}"
+    if locale and locale == "en-US":
+        return f"{key}={localizations[locale].format_value(value)}"
+    else:
+        return f"{key}[{locale.replace('-', '_')}]={localizations[locale].format_value(value)}"
+
+
+def _desktop_entry_section(header, attributes, localizations):
+    """Render a `[header]` section as a list of lines: plain attributes
+    first, then each localized attribute expanded once per locale, ending
+    with a blank separator line."""
+    desktop_entry_section = [f"[{header}]"]
+    l10n_attributes = [attribute for attribute in attributes if attribute.get("l10n")]
+    non_l10n_attributes = [
+        attribute for attribute in attributes if not attribute.get("l10n")
+    ]
+    for attribute in non_l10n_attributes:
+        desktop_entry_section.append(
+            _desktop_entry_attribute(attribute["key"], attribute["value"])
+        )
+    for attribute in l10n_attributes:
+        for locale in localizations:
+            desktop_entry_section.append(
+                _desktop_entry_attribute(
+                    attribute["key"], attribute["value"], locale, localizations
+                )
+            )
+    desktop_entry_section.append("")
+    return desktop_entry_section
+
+
+def _generate_deb_archive(
+    source_dir, target_dir, output_file_path, build_variables, arch
+):
+    """Run dpkg-buildpackage in `source_dir` and move the produced .deb to
+    `output_file_path`.
+
+    dpkg-buildpackage writes the package into the parent directory of the
+    source tree, i.e. `target_dir`.  Raises `NoDebPackageFound` when the
+    expected file is missing.
+    """
+    command = _get_command(arch)
+    subprocess.check_call(command, cwd=source_dir)
+    deb_arch = _DEB_ARCH[arch]
+    deb_file_name = f"{build_variables['DEB_PKG_NAME']}_{build_variables['DEB_PKG_VERSION']}_{deb_arch}.deb"
+    deb_file_path = mozpath.join(target_dir, deb_file_name)
+
+    if not os.path.exists(deb_file_path):
+        raise NoDebPackageFound(deb_file_path)
+
+    # Print the package metadata into the build log for auditing.
+    subprocess.check_call(["dpkg-deb", "--info", deb_file_path])
+    shutil.move(deb_file_path, output_file_path)
+
+
+def _get_command(arch):
+    """Build the dpkg-buildpackage command line for `arch`, wrapped in a
+    chroot invocation when a matching chroot exists (see
+    `_is_chroot_available`)."""
+    deb_arch = _DEB_ARCH[arch]
+    command = [
+        "dpkg-buildpackage",
+        # TODO: Use long options once we stop supporting Debian Jesse. They're more
+        # explicit.
+        #
+        # Long options were added in dpkg 1.18.8 which is part of Debian Stretch.
+        #
+        # https://git.dpkg.org/cgit/dpkg/dpkg.git/commit/?h=1.18.x&id=293bd243a19149165fc4fd8830b16a51d471a5e9
+        # https://packages.debian.org/stretch/dpkg-dev
+        "-us",  # --unsigned-source
+        "-uc",  # --unsigned-changes
+        "-b",  # --build=binary
+    ]
+
+    if deb_arch != "all":
+        command.append(f"--host-arch={deb_arch}")
+
+    if _is_chroot_available(arch):
+        flattened_command = " ".join(command)
+        # The temporary build directory was created under the chroot's /tmp
+        # (see `_create_temporary_directory`), hence the glob below.
+        command = [
+            "chroot",
+            _get_chroot_path(arch),
+            "bash",
+            "-c",
+            f"cd /tmp/*/source; {flattened_command}",
+        ]
+
+    return command
+
+
+def _create_temporary_directory(arch):
+    """Create the working directory — inside the chroot's /tmp when a chroot
+    for `arch` exists, so the chrooted build can reach it."""
+    if _is_chroot_available(arch):
+        return tempfile.mkdtemp(dir=f"{_get_chroot_path(arch)}/tmp")
+    else:
+        return tempfile.mkdtemp()
+
+
+def _is_chroot_available(arch):
+    """Return True when the /srv chroot for `arch` exists on this host."""
+    return os.path.isdir(_get_chroot_path(arch))
+
+
+def _get_chroot_path(arch):
+    """Return the chroot path for `arch`; arch-independent ("all") packages
+    build in the amd64 chroot."""
+    deb_arch = "amd64" if arch == "all" else _DEB_ARCH[arch]
+    return f"/srv/{_DEB_DIST}-{deb_arch}"
+
+
+_MANIFEST_FILE_NAME = "manifest.json"
+
+
+def _extract_langpack_metadata(input_xpi_file):
+    """Extract and parse manifest.json from a langpack .xpi; returns the
+    parsed dict."""
+    with tempfile.TemporaryDirectory() as d:
+        # NOTE(review): `zip` shadows the builtin of the same name.
+        with zipfile.ZipFile(input_xpi_file) as zip:
+            zip.extract(_MANIFEST_FILE_NAME, path=d)
+
+        with open(mozpath.join(d, _MANIFEST_FILE_NAME)) as f:
+            return json.load(f)
diff --git a/python/mozbuild/mozbuild/repackaging/dmg.py b/python/mozbuild/mozbuild/repackaging/dmg.py
new file mode 100644
index 0000000000..883927f214
--- /dev/null
+++ b/python/mozbuild/mozbuild/repackaging/dmg.py
@@ -0,0 +1,56 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import tarfile
+from pathlib import Path
+
+import mozfile
+from mozpack.dmg import create_dmg
+
+from mozbuild.bootstrap import bootstrap_toolchain
+from mozbuild.repackaging.application_ini import get_application_ini_value
+
+
+def repackage_dmg(infile, output):
+    """Repackage a macOS tarball `infile` into a .dmg at `output`.
+
+    Resolves the dmg/hfsplus tooling via the build bootstrap, extracts the
+    tarball and hands the tree to `create_dmg`.  Raises an `Exception` if
+    `infile` is not a tarball or any tool cannot be found.
+    """
+
+    if not tarfile.is_tarfile(infile):
+        raise Exception("Input file %s is not a valid tarfile." % infile)
+
+    # Resolve required tools
+    dmg_tool = bootstrap_toolchain("dmg/dmg")
+    if not dmg_tool:
+        raise Exception("DMG tool not found")
+    hfs_tool = bootstrap_toolchain("dmg/hfsplus")
+    if not hfs_tool:
+        raise Exception("HFS tool not found")
+    mkfshfs_tool = bootstrap_toolchain("hfsplus/newfs_hfs")
+    if not mkfshfs_tool:
+        raise Exception("MKFSHFS tool not found")
+
+    with mozfile.TemporaryDirectory() as tmp:
+        tmpdir = Path(tmp)
+        mozfile.extract_tarball(infile, tmpdir)
+
+        # Remove the /Applications symlink. If we don't, an rsync command in
+        # create_dmg() will break, and create_dmg() re-creates the symlink anyway.
+        # NOTE(review): the symlink is named " " (a single space) in the dmg
+        # layout — confirm that name is still accurate for current builds.
+        symlink = tmpdir / " "
+        if symlink.is_file():
+            symlink.unlink()
+
+        volume_name = get_application_ini_value(
+            str(tmpdir), "App", "CodeName", fallback="Name"
+        )
+
+        # The extra_files argument is empty [] because they are already a part
+        # of the original dmg produced by the build, and they remain in the
+        # tarball generated by the signing task.
+        create_dmg(
+            source_directory=tmpdir,
+            output_dmg=Path(output),
+            volume_name=volume_name,
+            extra_files=[],
+            dmg_tool=Path(dmg_tool),
+            hfs_tool=Path(hfs_tool),
+            mkfshfs_tool=Path(mkfshfs_tool),
+        )
diff --git a/python/mozbuild/mozbuild/repackaging/installer.py b/python/mozbuild/mozbuild/repackaging/installer.py
new file mode 100644
index 0000000000..9bd17613bf
--- /dev/null
+++ b/python/mozbuild/mozbuild/repackaging/installer.py
@@ -0,0 +1,55 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import shutil
+import tempfile
+import zipfile
+
+import mozpack.path as mozpath
+
+from mozbuild.action.exe_7z_archive import archive_exe
+from mozbuild.util import ensureParentDir
+
+
+def repackage_installer(
+    topsrcdir, tag, setupexe, package, output, package_name, sfx_stub, use_upx
+):
+    """Repackage a Windows zip `package` plus `setupexe` into a 7z
+    self-extracting installer at `output`.
+
+    `package` and `package_name` must be given together (or both omitted).
+    `tag` and `sfx_stub` are passed through to `archive_exe`.
+    """
+    if package and not zipfile.is_zipfile(package):
+        raise Exception("Package file %s is not a valid .zip file." % package)
+    if package is not None and package_name is None:
+        raise Exception("Package name must be provided, if a package is provided.")
+    if package is None and package_name is not None:
+        raise Exception(
+            "Package name must not be provided, if a package is not provided."
+        )
+
+    # We need the full path for the tag and output, since we chdir later.
+    tag = mozpath.realpath(tag)
+    output = mozpath.realpath(output)
+    ensureParentDir(output)
+
+    tmpdir = tempfile.mkdtemp()
+    old_cwd = os.getcwd()
+    try:
+        if package:
+            z = zipfile.ZipFile(package)
+            z.extractall(tmpdir)
+            z.close()
+
+        # Copy setup.exe into the root of the install dir, alongside the
+        # package.
+        shutil.copyfile(setupexe, mozpath.join(tmpdir, mozpath.basename(setupexe)))
+
+        # archive_exe requires us to be in the directory where the package is
+        # unpacked (the tmpdir)
+        os.chdir(tmpdir)
+
+        sfx_package = mozpath.join(topsrcdir, sfx_stub)
+
+        archive_exe(package_name, tag, sfx_package, output, use_upx)
+
+    finally:
+        # Restore the working directory before removing the tree we may
+        # still be standing in.
+        os.chdir(old_cwd)
+        shutil.rmtree(tmpdir)
diff --git a/python/mozbuild/mozbuild/repackaging/mar.py b/python/mozbuild/mozbuild/repackaging/mar.py
new file mode 100644
index 0000000000..f215c17238
--- /dev/null
+++ b/python/mozbuild/mozbuild/repackaging/mar.py
@@ -0,0 +1,93 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import shutil
+import subprocess
+import sys
+import tarfile
+import tempfile
+import zipfile
+from pathlib import Path
+
+import mozfile
+import mozpack.path as mozpath
+
+from mozbuild.repackaging.application_ini import get_application_ini_value
+from mozbuild.util import ensureParentDir
+
+# xz BCJ (branch/call/jump) filter options per target architecture.
+_BCJ_OPTIONS = {
+    "x86": ["--x86"],
+    # NOTE(review): x86_64 also uses the x86 BCJ filter — presumably
+    # intentional; confirm against the update-packaging scripts.
+    "x86_64": ["--x86"],
+    "aarch64": [],
+    # macOS Universal Builds
+    "macos-x86_64-aarch64": [],
+}
+
+
+def repackage_mar(topsrcdir, package, mar, output, arch=None, mar_channel_id=None):
+ if not zipfile.is_zipfile(package) and not tarfile.is_tarfile(package):
+ raise Exception("Package file %s is not a valid .zip or .tar file." % package)
+ if arch and arch not in _BCJ_OPTIONS:
+ raise Exception(
+ "Unknown architecture {}, available architectures: {}".format(
+ arch, list(_BCJ_OPTIONS.keys())
+ )
+ )
+
+ ensureParentDir(output)
+ tmpdir = tempfile.mkdtemp()
+ try:
+ if tarfile.is_tarfile(package):
+ filelist = mozfile.extract_tarball(package, tmpdir)
+ else:
+ z = zipfile.ZipFile(package)
+ z.extractall(tmpdir)
+ filelist = z.namelist()
+ z.close()
+
+ toplevel_dirs = set([mozpath.split(f)[0] for f in filelist])
+ excluded_stuff = set([" ", ".background", ".DS_Store", ".VolumeIcon.icns"])
+ toplevel_dirs = toplevel_dirs - excluded_stuff
+ # Make sure the .zip file just contains a directory like 'firefox/' at
+ # the top, and find out what it is called.
+ if len(toplevel_dirs) != 1:
+ raise Exception(
+ "Package file is expected to have a single top-level directory"
+ "(eg: 'firefox'), not: %s" % toplevel_dirs
+ )
+ ffxdir = mozpath.join(tmpdir, toplevel_dirs.pop())
+
+ make_full_update = mozpath.join(
+ topsrcdir, "tools/update-packaging/make_full_update.sh"
+ )
+
+ env = os.environ.copy()
+ env["MOZ_PRODUCT_VERSION"] = get_application_ini_value(tmpdir, "App", "Version")
+ env["MAR"] = mozpath.normpath(mar)
+ if arch:
+ env["BCJ_OPTIONS"] = " ".join(_BCJ_OPTIONS[arch])
+ if mar_channel_id:
+ env["MAR_CHANNEL_ID"] = mar_channel_id
+ # The Windows build systems have xz installed but it isn't in the path
+ # like it is on Linux and Mac OS X so just use the XZ env var so the mar
+ # generation scripts can find it.
+ xz_path = mozpath.join(topsrcdir, "xz/xz.exe")
+ if os.path.exists(xz_path):
+ env["XZ"] = mozpath.normpath(xz_path)
+
+ cmd = [make_full_update, output, ffxdir]
+ if sys.platform == "win32":
+ # make_full_update.sh is a bash script, and Windows needs to
+ # explicitly call out the shell to execute the script from Python.
+
+ mozillabuild = os.environ["MOZILLABUILD"]
+ if (Path(mozillabuild) / "msys2").exists():
+ cmd.insert(0, mozillabuild + "/msys2/usr/bin/bash.exe")
+ else:
+ cmd.insert(0, mozillabuild + "/msys/bin/bash.exe")
+ subprocess.check_call(cmd, env=env)
+
+ finally:
+ shutil.rmtree(tmpdir)
diff --git a/python/mozbuild/mozbuild/repackaging/msi.py b/python/mozbuild/mozbuild/repackaging/msi.py
new file mode 100644
index 0000000000..b0b1b09983
--- /dev/null
+++ b/python/mozbuild/mozbuild/repackaging/msi.py
@@ -0,0 +1,122 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+from xml.dom import minidom
+
+import mozpack.path as mozpath
+
+from mozbuild.util import ensureParentDir
+
+# Maps mozilla-style architecture names to WiX/MSI architecture names.
+_MSI_ARCH = {
+    "x86": "x86",
+    "x86_64": "x64",
+}
+
+
+def update_wsx(wfile, pvalues):
+    """Insert a WiX `<?define k = "v"?>` preprocessing instruction for each
+    item of `pvalues` at the top of the .wsx document `wfile`, rewriting the
+    file in place.  Returns `wfile`."""
+
+    parsed = minidom.parse(wfile)
+
+    # Add each pre-processing option from the `pvalues` dictionary to the
+    # wsx xml doc, before the document's root element.
+    for k, v in pvalues.items():
+        entry = parsed.createProcessingInstruction("define", k + ' = "' + v + '"')
+        root = parsed.firstChild
+        parsed.insertBefore(entry, root)
+    # write out xml to new wfile
+    new_w_file = wfile + ".new"
+    with open(new_w_file, "w") as fh:
+        parsed.writexml(fh)
+    # Atomically replace the original with the rewritten document.
+    shutil.move(new_w_file, wfile)
+    return wfile
+
+
+def repackage_msi(
+    topsrcdir, wsx, version, locale, arch, setupexe, candle, light, output
+):
+    """Build an MSI wrapper around `setupexe` using the WiX toolset.
+
+    Preprocesses the `wsx` source with version/locale/arch defines, compiles
+    it with candle.exe and links it with light.exe, then moves the result to
+    `output`.  Windows-only; `candle`/`light` default to being found on PATH
+    when passed as None.
+    """
+    if sys.platform != "win32":
+        raise Exception("repackage msi only works on windows")
+    if not os.path.isdir(topsrcdir):
+        raise Exception("%s does not exist." % topsrcdir)
+    if not os.path.isfile(wsx):
+        raise Exception("%s does not exist." % wsx)
+    if version is None:
+        raise Exception("version name must be provided.")
+    if locale is None:
+        raise Exception("locale name must be provided.")
+    if arch is None or arch not in _MSI_ARCH.keys():
+        raise Exception(
+            "arch name must be provided and one of {}.".format(_MSI_ARCH.keys())
+        )
+    if not os.path.isfile(setupexe):
+        raise Exception("%s does not exist." % setupexe)
+    if candle is not None and not os.path.isfile(candle):
+        raise Exception("%s does not exist." % candle)
+    if light is not None and not os.path.isfile(light):
+        raise Exception("%s does not exist." % light)
+    embeddedVersion = "0.0.0.0"
+    # Version string cannot contain 'a' or 'b' when embedding in msi manifest.
+    if "a" not in version and "b" not in version:
+        if version.endswith("esr"):
+            parts = version[:-3].split(".")
+        else:
+            parts = version.split(".")
+        # Pad to the four-component x.x.x.x form MSI requires.
+        while len(parts) < 4:
+            parts.append("0")
+        embeddedVersion = ".".join(parts)
+
+    wsx = mozpath.realpath(wsx)
+    setupexe = mozpath.realpath(setupexe)
+    output = mozpath.realpath(output)
+    ensureParentDir(output)
+
+    # NOTE(review): always true here — the same condition already raised at
+    # the top of the function; presumably defensive/leftover.
+    if sys.platform == "win32":
+        tmpdir = tempfile.mkdtemp()
+        old_cwd = os.getcwd()
+        try:
+            wsx_file = os.path.split(wsx)[1]
+            shutil.copy(wsx, tmpdir)
+            temp_wsx_file = os.path.join(tmpdir, wsx_file)
+            temp_wsx_file = mozpath.realpath(temp_wsx_file)
+            pre_values = {
+                "Vendor": "Mozilla",
+                "BrandFullName": "Mozilla Firefox",
+                "Version": version,
+                "AB_CD": locale,
+                "Architecture": _MSI_ARCH[arch],
+                "ExeSourcePath": setupexe,
+                "EmbeddedVersionCode": embeddedVersion,
+            }
+            # update wsx file with inputs from
+            newfile = update_wsx(temp_wsx_file, pre_values)
+            wix_object_file = os.path.join(tmpdir, "installer.wixobj")
+            env = os.environ.copy()
+            if candle is None:
+                candle = "candle.exe"
+            cmd = [candle, "-out", wix_object_file, newfile]
+            subprocess.check_call(cmd, env=env)
+            wix_installer = wix_object_file.replace(".wixobj", ".msi")
+            if light is None:
+                light = "light.exe"
+            light_cmd = [
+                light,
+                "-cultures:neutral",
+                "-sw1076",
+                "-sw1079",
+                "-out",
+                wix_installer,
+                wix_object_file,
+            ]
+            subprocess.check_call(light_cmd, env=env)
+            os.remove(wix_object_file)
+            # mv file to output dir
+            shutil.move(wix_installer, output)
+        finally:
+            # NOTE(review): the cwd is never changed in this function, so the
+            # chdir restore is purely defensive — confirm.
+            os.chdir(old_cwd)
+            shutil.rmtree(tmpdir)
diff --git a/python/mozbuild/mozbuild/repackaging/msix.py b/python/mozbuild/mozbuild/repackaging/msix.py
new file mode 100644
index 0000000000..707096c499
--- /dev/null
+++ b/python/mozbuild/mozbuild/repackaging/msix.py
@@ -0,0 +1,1193 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+r"""Repackage ZIP archives (or directories) into MSIX App Packages.
+
+# Known issues
+
+- The icons in the Start Menu have a solid colour tile behind them. I think
+ this is an issue with plating.
+"""
+
+import functools
+import itertools
+import logging
+import os
+import re
+import shutil
+import subprocess
+import sys
+import time
+import urllib
+from collections import defaultdict
+from pathlib import Path
+
+import mozpack.path as mozpath
+from mach.util import get_state_dir
+from mozfile import which
+from mozpack.copier import FileCopier
+from mozpack.files import FileFinder, JarFinder
+from mozpack.manifests import InstallManifest
+from mozpack.mozjar import JarReader
+from mozpack.packager.unpack import UnpackFinder
+from six.moves import shlex_quote
+
+from mozbuild.repackaging.application_ini import get_application_ini_values
+from mozbuild.util import ensureParentDir
+
+
+def log_copy_result(log, elapsed, destdir, result):
+ COMPLETE = (
+ "Elapsed: {elapsed:.2f}s; From {dest}: Kept {existing} existing; "
+ "Added/updated {updated}; "
+ "Removed {rm_files} files and {rm_dirs} directories."
+ )
+ copy_result = COMPLETE.format(
+ elapsed=elapsed,
+ dest=destdir,
+ existing=result.existing_files_count,
+ updated=result.updated_files_count,
+ rm_files=result.removed_files_count,
+ rm_dirs=result.removed_directories_count,
+ )
+ log(logging.INFO, "msix", {"copy_result": copy_result}, "{copy_result}")
+
+
+# See https://docs.microsoft.com/en-us/uwp/schemas/appxpackage/uapmanifestschema/element-identity.
+_MSIX_ARCH = {"x86": "x86", "x86_64": "x64", "aarch64": "arm64"}
+
+
+@functools.lru_cache(maxsize=None)
+def sdk_tool_search_path():
+ from mozbuild.configure import ConfigureSandbox
+
+ sandbox = ConfigureSandbox({}, argv=["configure"])
+ sandbox.include_file(
+ str(Path(__file__).parent.parent.parent.parent.parent / "moz.configure")
+ )
+ return sandbox._value_for(sandbox["sdk_bin_path"]) + [
+ "c:/Windows/System32/WindowsPowershell/v1.0"
+ ]
+
+
+def find_sdk_tool(binary, log=None):
+ if binary.lower().endswith(".exe"):
+ binary = binary[:-4]
+
+ maybe = os.environ.get(binary.upper())
+ if maybe:
+ log(
+ logging.DEBUG,
+ "msix",
+ {"binary": binary, "path": maybe},
+ "Found {binary} in environment: {path}",
+ )
+ return mozpath.normsep(maybe)
+
+ maybe = which(binary, extra_search_dirs=sdk_tool_search_path())
+ if maybe:
+ log(
+ logging.DEBUG,
+ "msix",
+ {"binary": binary, "path": maybe},
+ "Found {binary} on path: {path}",
+ )
+ return mozpath.normsep(maybe)
+
+ return None
+
+
+def get_embedded_version(version, buildid):
+ r"""Turn a display version into "dotted quad" notation.
+
+ N.b.: some parts of the MSIX packaging ecosystem require the final part of
+ the dotted quad to be identically 0, so we enforce that here.
+ """
+
+ # It's irritating to roll our own version parsing, but the tree doesn't seem
+ # to contain exactly what we need at this time.
+ version = version.rsplit("esr", 1)[0]
+ alpha = "a" in version
+
+ tail = None
+ if "a" in version:
+ head, tail = version.rsplit("a", 1)
+ if tail != "1":
+ # Disallow anything beyond `X.Ya1`.
+ raise ValueError(
+ f"Alpha version not of the form X.0a1 is not supported: {version}"
+ )
+ tail = buildid
+ elif "b" in version:
+ head, tail = version.rsplit("b", 1)
+ if len(head.split(".")) > 2:
+ raise ValueError(
+ f"Beta version not of the form X.YbZ is not supported: {version}"
+ )
+ elif "rc" in version:
+ head, tail = version.rsplit("rc", 1)
+ if len(head.split(".")) > 2:
+ raise ValueError(
+ f"Release candidate version not of the form X.YrcZ is not supported: {version}"
+ )
+ else:
+ head = version
+
+ components = (head.split(".") + ["0", "0", "0"])[:3]
+ if tail:
+ components[2] = tail
+
+ if alpha:
+ # Nightly builds are all `X.0a1`, which isn't helpful. Include build ID
+ # to disambiguate. But each part of the dotted quad is 16 bits, so we
+ # have to squash.
+ if components[1] != "0":
+ # Disallow anything beyond `X.0a1`.
+ raise ValueError(
+ f"Alpha version not of the form X.0a1 is not supported: {version}"
+ )
+
+ # Last two digits only to save space. Nightly builds in 2066 and 2099
+ # will be impacted, but future us can deal with that.
+ year = buildid[2:4]
+ if year[0] == "0":
+ # Avoid leading zero, like `.0YMm`.
+ year = year[1:]
+ month = buildid[4:6]
+ day = buildid[6:8]
+ if day[0] == "0":
+ # Avoid leading zero, like `.0DHh`.
+ day = day[1:]
+ hour = buildid[8:10]
+
+ components[1] = "".join((year, month))
+ components[2] = "".join((day, hour))
+
+ version = "{}.{}.{}.0".format(*components)
+
+ return version
+
+
+def get_appconstants_sys_mjs_values(finder, *args):
+ r"""Extract values, such as the display version like `MOZ_APP_VERSION_DISPLAY:
+ "...";`, from the omnijar. This allows to determine the beta number, like
+ `X.YbW`, where the regular beta version is only `X.Y`. Takes a list of
+ names and returns an iterator of the unique such value found for each name.
+ Raises an exception if a name is not found or if multiple values are found.
+ """
+ lines = defaultdict(list)
+ for _, f in finder.find("**/modules/AppConstants.sys.mjs"):
+ # MOZ_OFFICIAL_BRANDING is split across two lines, so remove line breaks
+ # immediately following ":"s so those values can be read.
+ data = f.open().read().decode("utf-8").replace(":\n", ":")
+ for line in data.splitlines():
+ for arg in args:
+ if arg in line:
+ lines[arg].append(line)
+
+ for arg in args:
+ (value,) = lines[arg] # We expect exactly one definition.
+ _, _, value = value.partition(":")
+ value = value.strip().strip('",;')
+ yield value
+
+
+def get_branding(use_official, topsrcdir, build_app, finder, log=None):
+ """Figure out which branding directory to use."""
+ conf_vars = mozpath.join(topsrcdir, build_app, "confvars.sh")
+
+ def conf_vars_value(key):
+ lines = open(conf_vars).readlines()
+ for line in lines:
+ line = line.strip()
+ if line and line[0] == "#":
+ continue
+ if key not in line:
+ continue
+ _, _, value = line.partition("=")
+ if not value:
+ continue
+ log(
+ logging.INFO,
+ "msix",
+ {"key": key, "conf_vars": conf_vars, "value": value},
+ "Read '{key}' from {conf_vars}: {value}",
+ )
+ return value
+ log(
+ logging.ERROR,
+ "msix",
+ {"key": key, "conf_vars": conf_vars},
+ "Unable to find '{key}' in {conf_vars}!",
+ )
+
+ # Branding defaults
+ branding_reason = "No branding set"
+ branding = conf_vars_value("MOZ_BRANDING_DIRECTORY")
+
+ if use_official:
+ # Read MOZ_OFFICIAL_BRANDING_DIRECTORY from confvars.sh
+ branding_reason = "'MOZ_OFFICIAL_BRANDING' set"
+ branding = conf_vars_value("MOZ_OFFICIAL_BRANDING_DIRECTORY")
+ else:
+ # Check if --with-branding was used when building
+ log(
+ logging.INFO,
+ "msix",
+ {},
+ "Checking buildconfig.html for --with-branding build flag.",
+ )
+ for _, f in finder.find("**/chrome/toolkit/content/global/buildconfig.html"):
+ data = f.open().read().decode("utf-8")
+ match = re.search(r"--with-branding=([a-z/]+)", data)
+ if match:
+ branding_reason = "'--with-branding' set"
+ branding = match.group(1)
+
+ log(
+ logging.INFO,
+ "msix",
+ {
+ "branding_reason": branding_reason,
+ "branding": branding,
+ },
+ "{branding_reason}; Using branding from '{branding}'.",
+ )
+ return mozpath.join(topsrcdir, branding)
+
+
+def unpack_msix(input_msix, output, log=None, verbose=False):
+ r"""Unpack the given MSIX to the given output directory.
+
+    MSIX packages are ZIP files, but they are Zip64/version 4.5 ZIP files, which
+    `mozjar.py` doesn't yet handle. Unpack using `unzip{.exe}` for simplicity.
+
+ In addition, file names inside the MSIX package are URL quoted. URL unquote
+ here.
+ """
+
+ log(
+ logging.INFO,
+ "msix",
+ {
+ "input_msix": input_msix,
+ "output": output,
+ },
+ "Unpacking input MSIX '{input_msix}' to directory '{output}'",
+ )
+
+ unzip = find_sdk_tool("unzip.exe", log=log)
+ if not unzip:
+ raise ValueError("unzip is required; set UNZIP or PATH")
+
+ subprocess.check_call(
+ [unzip, input_msix, "-d", output] + (["-q"] if not verbose else []),
+ universal_newlines=True,
+ )
+
+ # Sanity check: is this an MSIX?
+ temp_finder = FileFinder(output)
+ if not temp_finder.contains("AppxManifest.xml"):
+ raise ValueError("MSIX file does not contain 'AppxManifest.xml'?")
+
+ # Files in the MSIX are URL encoded/quoted; unquote here.
+ for dirpath, dirs, files in os.walk(output):
+        # This is one way to update (in place, as required by os.walk) the variable `dirs`
+        # while iterating over both it and `files`.
+ for i, (p, var) in itertools.chain(
+ enumerate((f, files) for f in files), enumerate((g, dirs) for g in dirs)
+ ):
+ q = urllib.parse.unquote(p)
+ if p != q:
+ log(
+ logging.DEBUG,
+ "msix",
+ {
+ "dirpath": dirpath,
+ "p": p,
+ "q": q,
+ },
+ "URL unquoting '{p}' -> '{q}' in {dirpath}",
+ )
+
+ var[i] = q
+ os.rename(os.path.join(dirpath, p), os.path.join(dirpath, q))
+
+ # The "package root" of our MSIX packages is like "Mozilla Firefox Beta Package Root", i.e., it
+ # varies by channel. This is an easy way to determine it.
+ for p, _ in temp_finder.find("**/application.ini"):
+ relpath = os.path.split(p)[0]
+
+ # The application executable, like `firefox.exe`, is in this directory.
+ return mozpath.normpath(mozpath.join(output, relpath))
+
+
+def repackage_msix(
+ dir_or_package,
+ topsrcdir,
+ channel=None,
+ distribution_dirs=[],
+ version=None,
+ vendor=None,
+ displayname=None,
+ app_name=None,
+ identity=None,
+ publisher=None,
+ publisher_display_name="Mozilla Corporation",
+ arch=None,
+ output=None,
+ force=False,
+ log=None,
+ verbose=False,
+ makeappx=None,
+):
+ if not channel:
+ raise Exception("channel is required")
+ if channel not in (
+ "official",
+ "beta",
+ "aurora",
+ "nightly",
+ "unofficial",
+ ):
+ raise Exception("channel is unrecognized: {}".format(channel))
+
+ # TODO: maybe we can fish this from the package directly? Maybe from a DLL,
+ # maybe from application.ini?
+ if arch is None or arch not in _MSIX_ARCH.keys():
+ raise Exception(
+ "arch name must be provided and one of {}.".format(_MSIX_ARCH.keys())
+ )
+
+ if not os.path.exists(dir_or_package):
+ raise Exception("{} does not exist".format(dir_or_package))
+
+ if (
+ os.path.isfile(dir_or_package)
+ and os.path.splitext(dir_or_package)[1] == ".msix"
+ ):
+ # The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE.
+ msix_dir = mozpath.normsep(
+ mozpath.join(
+ get_state_dir(),
+ "cache",
+ "mach-msix",
+ "msix-unpack",
+ )
+ )
+
+ if os.path.exists(msix_dir):
+ shutil.rmtree(msix_dir)
+ ensureParentDir(msix_dir)
+
+ dir_or_package = unpack_msix(dir_or_package, msix_dir, log=log, verbose=verbose)
+
+ log(
+ logging.INFO,
+ "msix",
+ {
+ "input": dir_or_package,
+ },
+ "Adding files from '{input}'",
+ )
+
+ if os.path.isdir(dir_or_package):
+ finder = FileFinder(dir_or_package)
+ else:
+ finder = JarFinder(dir_or_package, JarReader(dir_or_package))
+
+ values = get_application_ini_values(
+ finder,
+ dict(section="App", value="CodeName", fallback="Name"),
+ dict(section="App", value="Vendor"),
+ )
+
+ first = next(values)
+ if not displayname:
+ displayname = "Mozilla {}".format(first)
+
+ if channel == "beta":
+ # Release (official) and Beta share branding. Differentiate Beta a little bit.
+ displayname += " Beta"
+
+ second = next(values)
+ vendor = vendor or second
+
+ # For `AppConstants.sys.mjs` and `brand.properties`, which are in the omnijar in packaged
+ # builds. The nested langpack XPI files can't be read by `mozjar.py`.
+ unpack_finder = UnpackFinder(finder, unpack_xpi=False)
+
+ values = get_appconstants_sys_mjs_values(
+ unpack_finder,
+ "MOZ_OFFICIAL_BRANDING",
+ "MOZ_BUILD_APP",
+ "MOZ_APP_NAME",
+ "MOZ_APP_VERSION_DISPLAY",
+ "MOZ_BUILDID",
+ )
+ try:
+ use_official_branding = {"true": True, "false": False}[next(values)]
+ except KeyError as err:
+ raise Exception(
+ f"Unexpected value '{err.args[0]}' found for 'MOZ_OFFICIAL_BRANDING'."
+ ) from None
+
+ build_app = next(values)
+
+ _temp = next(values)
+ if not app_name:
+ app_name = _temp
+
+ if not version:
+ display_version = next(values)
+ buildid = next(values)
+ version = get_embedded_version(display_version, buildid)
+ log(
+ logging.INFO,
+ "msix",
+ {
+ "version": version,
+ "display_version": display_version,
+ "buildid": buildid,
+ },
+ "AppConstants.sys.mjs display version is '{display_version}' and build ID is"
+ + " '{buildid}': embedded version will be '{version}'",
+ )
+
+ # TODO: Bug 1721922: localize this description via Fluent.
+ lines = []
+ for _, f in unpack_finder.find("**/chrome/en-US/locale/branding/brand.properties"):
+ lines.extend(
+ line
+ for line in f.open().read().decode("utf-8").splitlines()
+ if "brandFullName" in line
+ )
+ (brandFullName,) = lines # We expect exactly one definition.
+ _, _, brandFullName = brandFullName.partition("=")
+ brandFullName = brandFullName.strip()
+
+ if channel == "beta":
+ # Release (official) and Beta share branding. Differentiate Beta a little bit.
+ brandFullName += " Beta"
+
+ branding = get_branding(
+ use_official_branding, topsrcdir, build_app, unpack_finder, log
+ )
+ if not os.path.isdir(branding):
+ raise Exception("branding dir {} does not exist".format(branding))
+
+ template = os.path.join(topsrcdir, build_app, "installer", "windows", "msix")
+
+ # Discard everything after a '#' comment character.
+ locale_allowlist = set(
+ locale.partition("#")[0].strip().lower()
+ for locale in open(os.path.join(template, "msix-all-locales")).readlines()
+ if locale.partition("#")[0].strip()
+ )
+
+ # The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE.
+ output_dir = mozpath.normsep(
+ mozpath.join(
+ get_state_dir(), "cache", "mach-msix", "msix-temp-{}".format(channel)
+ )
+ )
+
+ # Like 'Firefox Package Root', 'Firefox Nightly Package Root', 'Firefox Beta
+ # Package Root'. This is `BrandFullName` in the installer, and we want to
+ # be close but to not match. By not matching, we hope to prevent confusion
+ # and/or errors between regularly installed builds and App Package builds.
+ instdir = "{} Package Root".format(displayname)
+
+ # The standard package name is like "CompanyNoSpaces.ProductNoSpaces".
+ identity = identity or "{}.{}".format(vendor, displayname).replace(" ", "")
+
+ # We might want to include the publisher ID hash here. I.e.,
+ # "__{publisherID}". My locally produced MSIX was named like
+ # `Mozilla.MozillaFirefoxNightly_89.0.0.0_x64__4gf61r4q480j0`, suggesting also a
+ # missing field, but it's not necessary, since this is just an output file name.
+ package_output_name = "{identity}_{version}_{arch}".format(
+ identity=identity, version=version, arch=_MSIX_ARCH[arch]
+ )
+ # The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE.
+ default_output = mozpath.normsep(
+ mozpath.join(
+ get_state_dir(), "cache", "mach-msix", "{}.msix".format(package_output_name)
+ )
+ )
+ output = output or default_output
+ log(logging.INFO, "msix", {"output": output}, "Repackaging to: {output}")
+
+ m = InstallManifest()
+ m.add_copy(mozpath.join(template, "Resources.pri"), "Resources.pri")
+
+ m.add_pattern_copy(mozpath.join(branding, "msix", "Assets"), "**", "Assets")
+ m.add_pattern_copy(mozpath.join(template, "VFS"), "**", "VFS")
+
+ copier = FileCopier()
+
+ # TODO: Bug 1710147: filter out MSVCRT files and use a dependency instead.
+ for p, f in finder:
+ if not os.path.isdir(dir_or_package):
+ # In archived builds, `p` is like "firefox/firefox.exe"; we want just "firefox.exe".
+ pp = os.path.relpath(p, app_name)
+ else:
+ # In local builds and unpacked MSIX directories, `p` is like "firefox.exe" already.
+ pp = p
+
+ if pp.startswith("distribution"):
+ # Treat any existing distribution as a distribution directory,
+ # potentially with language packs. This makes it easy to repack
+ # unpacked MSIXes.
+ distribution_dir = mozpath.join(dir_or_package, "distribution")
+ if distribution_dir not in distribution_dirs:
+ distribution_dirs.append(distribution_dir)
+
+ continue
+
+ copier.add(mozpath.normsep(mozpath.join("VFS", "ProgramFiles", instdir, pp)), f)
+
+ # Locales to declare as supported in `AppxManifest.xml`.
+ locales = set(["en-US"])
+
+ for distribution_dir in [
+ mozpath.join(template, "distribution")
+ ] + distribution_dirs:
+ log(
+ logging.INFO,
+ "msix",
+ {"dir": distribution_dir},
+ "Adding distribution files from {dir}",
+ )
+
+ # In automation, we have no easy way to remap the names of artifacts fetched from dependent
+ # tasks. In particular, langpacks will be named like `target.langpack.xpi`. The fetch
+ # tasks do allow us to put them in a per-locale directory, so that the entire set can be
+ # fetched. Here we remap the names.
+ finder = FileFinder(distribution_dir)
+
+ for p, f in finder:
+ locale = None
+ if os.path.basename(p) == "target.langpack.xpi":
+ # Turn "/path/to/LOCALE/target.langpack.xpi" into "LOCALE". This is how langpacks
+ # are presented in CI.
+ base, locale = os.path.split(os.path.dirname(p))
+
+ # Like "locale-LOCALE/langpack-LOCALE@firefox.mozilla.org.xpi". This is what AMO
+ # serves and how flatpak builds name langpacks, but not how snap builds name
+ # langpacks. I can't explain the discrepancy.
+ dest = mozpath.normsep(
+ mozpath.join(
+ base,
+ f"locale-{locale}",
+ f"langpack-{locale}@{app_name}.mozilla.org.xpi",
+ )
+ )
+
+ log(
+ logging.DEBUG,
+ "msix",
+ {"path": p, "dest": dest},
+ "Renaming langpack {path} to {dest}",
+ )
+
+ elif os.path.basename(p).startswith("langpack-"):
+ # Turn "/path/to/langpack-LOCALE@firefox.mozilla.org.xpi" into "LOCALE". This is
+ # how langpacks are presented from an unpacked MSIX.
+ _, _, locale = os.path.basename(p).partition("langpack-")
+ locale, _, _ = locale.partition("@")
+ dest = p
+
+ else:
+ dest = p
+
+ if locale:
+ locale = locale.strip().lower()
+ locales.add(locale)
+ log(
+ logging.DEBUG,
+ "msix",
+ {"locale": locale, "dest": dest},
+ "Distributing locale '{locale}' from {dest}",
+ )
+
+ dest = mozpath.normsep(
+ mozpath.join("VFS", "ProgramFiles", instdir, "distribution", dest)
+ )
+ if copier.contains(dest):
+ log(
+ logging.INFO,
+ "msix",
+ {"dest": dest, "path": mozpath.join(finder.base, p)},
+ "Skipping duplicate: {dest} from {path}",
+ )
+ continue
+
+ log(
+ logging.DEBUG,
+ "msix",
+ {"dest": dest, "path": mozpath.join(finder.base, p)},
+ "Adding distribution path: {dest} from {path}",
+ )
+
+ copier.add(
+ dest,
+ f,
+ )
+
+ locales.remove("en-US")
+
+ # Windows MSIX packages support a finite set of locales: see
+ # https://docs.microsoft.com/en-us/windows/uwp/publish/supported-languages, which is encoded in
+ # https://searchfox.org/mozilla-central/source/browser/installer/windows/msix/msix-all-locales.
+ # We distribute all of the langpacks supported by the release channel in our MSIX, which is
+ # encoded in https://searchfox.org/mozilla-central/source/browser/locales/all-locales. But we
+ # only advertise support in the App manifest for the intersection of that set and the set of
+ # supported locales.
+ #
+ # We distribute all langpacks to avoid the following issue. Suppose a user manually installs a
+ # langpack that is not supported by Windows, and then updates the installed MSIX package. MSIX
+ # package upgrades are essentially paveover installs, so there is no opportunity for Firefox to
+ # update the langpack before the update. But, since all langpacks are bundled with the MSIX,
+ # that langpack will be up-to-date, preventing one class of YSOD.
+ unadvertised = set()
+ if locale_allowlist:
+ unadvertised = locales - locale_allowlist
+ locales = locales & locale_allowlist
+ for locale in sorted(unadvertised):
+ log(
+ logging.INFO,
+ "msix",
+ {"locale": locale},
+ "Not advertising distributed locale '{locale}' that is not recognized by Windows",
+ )
+
+ locales = ["en-US"] + list(sorted(locales))
+ resource_language_list = "\n".join(
+ f' <Resource Language="{locale}" />' for locale in locales
+ )
+
+ defines = {
+ "APPX_ARCH": _MSIX_ARCH[arch],
+ "APPX_DISPLAYNAME": brandFullName,
+ "APPX_DESCRIPTION": brandFullName,
+ # Like 'Mozilla.MozillaFirefox', 'Mozilla.MozillaFirefoxBeta', or
+ # 'Mozilla.MozillaFirefoxNightly'.
+ "APPX_IDENTITY": identity,
+ # Like 'Firefox Package Root', 'Firefox Nightly Package Root', 'Firefox
+ # Beta Package Root'. See above.
+ "APPX_INSTDIR": instdir,
+ # Like 'Firefox%20Package%20Root'.
+ "APPX_INSTDIR_QUOTED": urllib.parse.quote(instdir),
+ "APPX_PUBLISHER": publisher,
+ "APPX_PUBLISHER_DISPLAY_NAME": publisher_display_name,
+ "APPX_RESOURCE_LANGUAGE_LIST": resource_language_list,
+ "APPX_VERSION": version,
+ "MOZ_APP_DISPLAYNAME": displayname,
+ "MOZ_APP_NAME": app_name,
+ # Keep synchronized with `toolkit\mozapps\notificationserver\NotificationComServer.cpp`.
+ "MOZ_INOTIFICATIONACTIVATION_CLSID": "916f9b5d-b5b2-4d36-b047-03c7a52f81c8",
+ }
+
+ m.add_preprocess(
+ mozpath.join(template, "AppxManifest.xml.in"),
+ "AppxManifest.xml",
+ [],
+ defines=defines,
+ marker="<!-- #", # So that we can have well-formed XML.
+ )
+ m.populate_registry(copier)
+
+ output_dir = mozpath.abspath(output_dir)
+ ensureParentDir(output_dir)
+
+ start = time.monotonic()
+ result = copier.copy(
+ output_dir, remove_empty_directories=True, skip_if_older=not force
+ )
+ if log:
+ log_copy_result(log, time.monotonic() - start, output_dir, result)
+
+ if verbose:
+ # Dump AppxManifest.xml contents for ease of debugging.
+ log(logging.DEBUG, "msix", {}, "AppxManifest.xml")
+ log(logging.DEBUG, "msix", {}, ">>>")
+ for line in open(mozpath.join(output_dir, "AppxManifest.xml")).readlines():
+ log(logging.DEBUG, "msix", {}, line[:-1]) # Drop trailing line terminator.
+ log(logging.DEBUG, "msix", {}, "<<<")
+
+ if not makeappx:
+ makeappx = find_sdk_tool("makeappx.exe", log=log)
+ if not makeappx:
+ raise ValueError(
+ "makeappx is required; " "set MAKEAPPX or WINDOWSSDKDIR or PATH"
+ )
+
+ # `makeappx.exe` supports both slash and hyphen style arguments; `makemsix`
+ # supports only hyphen style. `makeappx.exe` allows to overwrite and to
+ # provide more feedback, so we prefer invoking with these flags. This will
+ # also accommodate `wine makeappx.exe`.
+ stdout = subprocess.run(
+ [makeappx], check=False, capture_output=True, universal_newlines=True
+ ).stdout
+ is_makeappx = "MakeAppx Tool" in stdout
+
+ if is_makeappx:
+ args = [makeappx, "pack", "/d", output_dir, "/p", output, "/overwrite"]
+ else:
+ args = [makeappx, "pack", "-d", output_dir, "-p", output]
+ if verbose and is_makeappx:
+ args.append("/verbose")
+ joined = " ".join(shlex_quote(arg) for arg in args)
+ log(logging.INFO, "msix", {"args": args, "joined": joined}, "Invoking: {joined}")
+
+ sys.stdout.flush() # Otherwise the subprocess output can be interleaved.
+ if verbose:
+ subprocess.check_call(args, universal_newlines=True)
+ else:
+ # Suppress output unless we fail.
+ try:
+ subprocess.check_output(args, universal_newlines=True)
+ except subprocess.CalledProcessError as e:
+ sys.stderr.write(e.output)
+ raise
+
+ return output
+
+
+def _sign_msix_win(output, force, log, verbose):
+ powershell_exe = find_sdk_tool("powershell.exe", log=log)
+ if not powershell_exe:
+ raise ValueError("powershell is required; " "set POWERSHELL or PATH")
+
+ def powershell(argstring, check=True):
+ "Invoke `powershell.exe`. Arguments are given as a string to allow consumer to quote."
+ args = [powershell_exe, "-c", argstring]
+ joined = " ".join(shlex_quote(arg) for arg in args)
+ log(
+ logging.INFO, "msix", {"args": args, "joined": joined}, "Invoking: {joined}"
+ )
+ return subprocess.run(
+ args, check=check, universal_newlines=True, capture_output=True
+ ).stdout
+
+ signtool = find_sdk_tool("signtool.exe", log=log)
+ if not signtool:
+ raise ValueError(
+ "signtool is required; " "set SIGNTOOL or WINDOWSSDKDIR or PATH"
+ )
+
+ # Our first order of business is to find, or generate, a (self-signed)
+ # certificate.
+
+ # These are baked into enough places under `browser/` that we need not
+ # extract constants.
+ vendor = "Mozilla"
+ publisher = "CN=Mozilla Corporation, OU=MSIX Packaging"
+ friendly_name = "Mozilla Corporation MSIX Packaging Test Certificate"
+
+ # The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE.
+ crt_path = mozpath.join(
+ get_state_dir(),
+ "cache",
+ "mach-msix",
+ "{}.crt".format(friendly_name).replace(" ", "_").lower(),
+ )
+ crt_path = mozpath.abspath(crt_path)
+ ensureParentDir(crt_path)
+
+ pfx_path = crt_path.replace(".crt", ".pfx")
+
+ # TODO: maybe use an actual password. For now, just something that won't be
+ # brute-forced.
+ password = "193dbfc6-8ff7-4a95-8f32-6b4468626bd0"
+
+ if force or not os.path.isfile(crt_path):
+ log(
+ logging.INFO,
+ "msix",
+ {"crt_path": crt_path},
+ "Creating new self signed certificate at: {}".format(crt_path),
+ )
+
+ thumbprints = [
+ thumbprint.strip()
+ for thumbprint in powershell(
+ (
+ "Get-ChildItem -Path Cert:\CurrentUser\My"
+ '| Where-Object {{$_.Subject -Match "{}"}}'
+ '| Where-Object {{$_.FriendlyName -Match "{}"}}'
+ "| Select-Object -ExpandProperty Thumbprint"
+ ).format(vendor, friendly_name)
+ ).splitlines()
+ ]
+ if len(thumbprints) > 1:
+ raise Exception(
+ "Multiple certificates with friendly name found: {}".format(
+ friendly_name
+ )
+ )
+
+ if len(thumbprints) == 1:
+ thumbprint = thumbprints[0]
+ else:
+ thumbprint = None
+
+ if not thumbprint:
+ thumbprint = (
+ powershell(
+ (
+ 'New-SelfSignedCertificate -Type Custom -Subject "{}" '
+ '-KeyUsage DigitalSignature -FriendlyName "{}"'
+ " -CertStoreLocation Cert:\CurrentUser\My"
+ ' -TextExtension @("2.5.29.37={{text}}1.3.6.1.5.5.7.3.3", '
+ '"2.5.29.19={{text}}")'
+ "| Select-Object -ExpandProperty Thumbprint"
+ ).format(publisher, friendly_name)
+ )
+ .strip()
+ .upper()
+ )
+
+ if not thumbprint:
+ raise Exception(
+ "Failed to find or create certificate with friendly name: {}".format(
+ friendly_name
+ )
+ )
+
+ powershell(
+ 'Export-Certificate -Cert Cert:\CurrentUser\My\{} -FilePath "{}"'.format(
+ thumbprint, crt_path
+ )
+ )
+ log(
+ logging.INFO,
+ "msix",
+ {"crt_path": crt_path},
+ "Exported public certificate: {crt_path}",
+ )
+
+ powershell(
+ (
+ 'Export-PfxCertificate -Cert Cert:\CurrentUser\My\{} -FilePath "{}"'
+ ' -Password (ConvertTo-SecureString -String "{}" -Force -AsPlainText)'
+ ).format(thumbprint, pfx_path, password)
+ )
+ log(
+ logging.INFO,
+ "msix",
+ {"pfx_path": pfx_path},
+ "Exported private certificate: {pfx_path}",
+ )
+
+ # Second, to find the right thumbprint to use. We do this here in case
+ # we're coming back to an existing certificate.
+
+ log(
+ logging.INFO,
+ "msix",
+ {"crt_path": crt_path},
+ "Signing with existing self signed certificate: {crt_path}",
+ )
+
+ thumbprints = [
+ thumbprint.strip()
+ for thumbprint in powershell(
+ 'Get-PfxCertificate -FilePath "{}" | Select-Object -ExpandProperty Thumbprint'.format(
+ crt_path
+ )
+ ).splitlines()
+ ]
+ if len(thumbprints) > 1:
+ raise Exception("Multiple thumbprints found for PFX: {}".format(pfx_path))
+ if len(thumbprints) == 0:
+ raise Exception("No thumbprints found for PFX: {}".format(pfx_path))
+ thumbprint = thumbprints[0]
+ log(
+ logging.INFO,
+ "msix",
+ {"thumbprint": thumbprint},
+ "Signing with certificate with thumbprint: {thumbprint}",
+ )
+
+ # Third, do the actual signing.
+
+ args = [
+ signtool,
+ "sign",
+ "/a",
+ "/fd",
+ "SHA256",
+ "/f",
+ pfx_path,
+ "/p",
+ password,
+ output,
+ ]
+ if not verbose:
+ subprocess.check_call(args, universal_newlines=True)
+ else:
+        # Suppress output unless we fail. NOTE(review): this branch runs when verbose is True — inverted vs. repackage_msix (which uses check_call when verbose); confirm intent.
+ try:
+ subprocess.check_output(args, universal_newlines=True)
+ except subprocess.CalledProcessError as e:
+ sys.stderr.write(e.output)
+ raise
+
+ # As a convenience to the user, tell how to use this certificate if it's not
+ # already trusted, and how to work with MSIX files more generally.
+ if verbose:
+ root_thumbprints = [
+ root_thumbprint.strip()
+ for root_thumbprint in powershell(
+ "Get-ChildItem -Path Cert:\LocalMachine\Root\{} "
+ "| Select-Object -ExpandProperty Thumbprint".format(thumbprint),
+ check=False,
+ ).splitlines()
+ ]
+ if thumbprint not in root_thumbprints:
+ log(
+ logging.INFO,
+ "msix",
+ {"thumbprint": thumbprint},
+ "Certificate with thumbprint not found in trusted roots: {thumbprint}",
+ )
+ log(
+ logging.INFO,
+ "msix",
+ {"crt_path": crt_path, "output": output},
+ r"""\
+# Usage
+To trust this certificate (requires an elevated shell):
+powershell -c 'Import-Certificate -FilePath "{crt_path}" -Cert Cert:\LocalMachine\Root\'
+To verify this MSIX signature exists and is trusted:
+powershell -c 'Get-AuthenticodeSignature -FilePath "{output}" | Format-List *'
+To install this MSIX:
+powershell -c 'Add-AppPackage -path "{output}"'
+To see details after installing:
+powershell -c 'Get-AppPackage -name Mozilla.MozillaFirefox(Beta,...)'
+ """.strip(),
+ )
+
+ return 0
+
+
+def _sign_msix_posix(output, force, log, verbose):
+ makeappx = find_sdk_tool("makeappx", log=log)
+
+ if not makeappx:
+ raise ValueError("makeappx is required; " "set MAKEAPPX or PATH")
+
+ openssl = find_sdk_tool("openssl", log=log)
+
+ if not openssl:
+ raise ValueError("openssl is required; " "set OPENSSL or PATH")
+
+ if "sign" not in subprocess.run(makeappx, capture_output=True).stdout.decode(
+ "utf-8"
+ ):
+ raise ValueError(
+ "makeappx must support 'sign' operation. ",
+ "You probably need to build Mozilla's version of it: ",
+ "https://github.com/mozilla/msix-packaging/tree/johnmcpms/signing",
+ )
+
+ def run_openssl(args, check=True, capture_output=True):
+ full_args = [openssl, *args]
+ joined = " ".join(shlex_quote(arg) for arg in full_args)
+ log(
+ logging.INFO,
+ "msix",
+ {"args": args},
+ f"Invoking: {joined}",
+ )
+ return subprocess.run(
+ full_args,
+ check=check,
+ capture_output=capture_output,
+ universal_newlines=True,
+ )
+
+ # These are baked into enough places under `browser/` that we need not
+ # extract constants.
+ cn = "Mozilla Corporation"
+ ou = "MSIX Packaging"
+ friendly_name = "Mozilla Corporation MSIX Packaging Test Certificate"
+ # Password is needed when generating the cert, but
+ # "makeappx" explicitly does _not_ support passing it
+ # so it ends up getting removed when we create the pfx
+ password = "temp"
+
+ cache_dir = mozpath.join(get_state_dir(), "cache", "mach-msix")
+ ca_crt_path = mozpath.join(cache_dir, "MozillaMSIXCA.cer")
+ ca_key_path = mozpath.join(cache_dir, "MozillaMSIXCA.key")
+ csr_path = mozpath.join(cache_dir, "MozillaMSIX.csr")
+ crt_path = mozpath.join(cache_dir, "MozillaMSIX.cer")
+ key_path = mozpath.join(cache_dir, "MozillaMSIX.key")
+ pfx_path = mozpath.join(
+ cache_dir,
+ "{}.pfx".format(friendly_name).replace(" ", "_").lower(),
+ )
+ pfx_path = mozpath.abspath(pfx_path)
+ ensureParentDir(pfx_path)
+
+ if force or not os.path.isfile(pfx_path):
+ log(
+ logging.INFO,
+ "msix",
+ {"pfx_path": pfx_path},
+ "Creating new self signed certificate at: {}".format(pfx_path),
+ )
+
+ # Ultimately, we only end up using the CA certificate
+ # and the pfx (aka pkcs12) bundle containing the signing key
+ # and certificate. The other things we create along the way
+ # are not used for subsequent signing for testing.
+ # To get those, we have to do a few things:
+ # 1) Create a new CA key and certificate
+ # 2) Create a new signing key
+ # 3) Create a CSR with that signing key
+ # 4) Create the certificate with the CA key+cert from the CSR
+ # 5) Convert the signing key and certificate to a pfx bundle
+ args = [
+ "req",
+ "-x509",
+ "-days",
+ "7200",
+ "-sha256",
+ "-newkey",
+ "rsa:4096",
+ "-keyout",
+ ca_key_path,
+ "-out",
+ ca_crt_path,
+ "-outform",
+ "PEM",
+ "-subj",
+ f"/OU={ou} CA/CN={cn} CA",
+ "-passout",
+ f"pass:{password}",
+ ]
+ run_openssl(args)
+ args = [
+ "genrsa",
+ "-des3",
+ "-out",
+ key_path,
+ "-passout",
+ f"pass:{password}",
+ ]
+ run_openssl(args)
+ args = [
+ "req",
+ "-new",
+ "-key",
+ key_path,
+ "-out",
+ csr_path,
+ "-subj",
+ # We actually want these in the opposite order, to match what's
+ # included in the AppxManifest. Openssl ends up reversing these
+            # for some reason, so we list them backwards here.
+ f"/OU={ou}/CN={cn}",
+ "-passin",
+ f"pass:{password}",
+ ]
+ run_openssl(args)
+ args = [
+ "x509",
+ "-req",
+ "-sha256",
+ "-days",
+ "7200",
+ "-in",
+ csr_path,
+ "-CA",
+ ca_crt_path,
+ "-CAcreateserial",
+ "-CAkey",
+ ca_key_path,
+ "-out",
+ crt_path,
+ "-outform",
+ "PEM",
+ "-passin",
+ f"pass:{password}",
+ ]
+ run_openssl(args)
+ args = [
+ "pkcs12",
+ "-export",
+ "-inkey",
+ key_path,
+ "-in",
+ crt_path,
+ "-name",
+ friendly_name,
+ "-passin",
+ f"pass:{password}",
+ # All three of these options (-keypbe, -certpbe, and -passout)
+ # are necessary to create a pfx bundle that won't even prompt
+ # for a password. If we miss one, we will still get a password
+ # prompt for the blank password.
+ "-keypbe",
+ "NONE",
+ "-certpbe",
+ "NONE",
+ "-passout",
+ "pass:",
+ "-out",
+ pfx_path,
+ ]
+ run_openssl(args)
+
+ args = [makeappx, "sign", "-p", output, "-c", pfx_path]
+ if not verbose:
+ subprocess.check_call(
+ args,
+ universal_newlines=True,
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.DEVNULL,
+ )
+ else:
+ # Suppress output unless we fail.
+ try:
+ subprocess.check_output(args, universal_newlines=True)
+ except subprocess.CalledProcessError as e:
+ sys.stderr.write(e.output)
+ raise
+
+ if verbose:
+ log(
+ logging.INFO,
+ "msix",
+ {
+ "ca_crt_path": ca_crt_path,
+ "ca_crt": mozpath.basename(ca_crt_path),
+ "output_path": output,
+ "output": mozpath.basename(output),
+ },
+ r"""\
+# Usage
+First, transfer the root certificate ({ca_crt_path}) and signed MSIX
+({output_path}) to a Windows machine.
+To trust this certificate ({ca_crt_path}), run the following in an elevated shell:
+powershell -c 'Import-Certificate -FilePath "{ca_crt}" -Cert Cert:\LocalMachine\Root\'
+To verify this MSIX signature exists and is trusted:
+powershell -c 'Get-AuthenticodeSignature -FilePath "{output}" | Format-List *'
+To install this MSIX:
+powershell -c 'Add-AppPackage -path "{output}"'
+To see details after installing:
+powershell -c 'Get-AppPackage -name Mozilla.MozillaFirefox(Beta,...)'
+ """.strip(),
+ )
+
+
+def sign_msix(output, force=False, log=None, verbose=False):
+ """Sign an MSIX with a locally generated self-signed certificate."""
+
+ if sys.platform.startswith("win"):
+ return _sign_msix_win(output, force, log, verbose)
+ else:
+ return _sign_msix_posix(output, force, log, verbose)
diff --git a/python/mozbuild/mozbuild/repackaging/pkg.py b/python/mozbuild/mozbuild/repackaging/pkg.py
new file mode 100644
index 0000000000..e7699ce5c4
--- /dev/null
+++ b/python/mozbuild/mozbuild/repackaging/pkg.py
@@ -0,0 +1,46 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import shutil
+import tarfile
+from pathlib import Path
+
+import mozfile
+from mozpack.pkg import create_pkg
+
+from mozbuild.bootstrap import bootstrap_toolchain
+
+
def repackage_pkg(infile, output):
    """Repackage a tarball containing a single ``.app`` bundle into a macOS
    ``.pkg`` installer at *output*.

    Requires the bootstrapped ``xar`` and ``mkbom`` toolchain artifacts plus a
    system ``cpio`` binary; raises if any of them (or a unique ``.app``) is
    missing.
    """
    if not tarfile.is_tarfile(infile):
        raise Exception("Input file %s is not a valid tarfile." % infile)

    # Locate all external tools up front so we fail before extracting anything.
    xar_tool = bootstrap_toolchain("xar/xar")
    if not xar_tool:
        raise Exception("Could not find xar tool.")
    mkbom_tool = bootstrap_toolchain("mkbom/mkbom")
    if not mkbom_tool:
        raise Exception("Could not find mkbom tool.")
    # Note: CPIO isn't standard on all OS's
    cpio_tool = shutil.which("cpio")
    if not cpio_tool:
        raise Exception("Could not find cpio.")

    with mozfile.TemporaryDirectory() as scratch_dir:
        mozfile.extract_tarball(infile, scratch_dir)

        bundles = list(Path(scratch_dir).glob("*.app"))
        if len(bundles) != 1:
            raise Exception(
                "Input file should contain a single .app file. %s found."
                % len(bundles)
            )
        # glob() already yields Path objects, so the bundle needs no rewrapping.
        create_pkg(
            source_app=bundles[0],
            output_pkg=Path(output),
            mkbom_tool=Path(mkbom_tool),
            xar_tool=Path(xar_tool),
            cpio_tool=Path(cpio_tool),
        )
diff --git a/python/mozbuild/mozbuild/repackaging/test/python.ini b/python/mozbuild/mozbuild/repackaging/test/python.ini
new file mode 100644
index 0000000000..f51fad30a3
--- /dev/null
+++ b/python/mozbuild/mozbuild/repackaging/test/python.ini
@@ -0,0 +1,4 @@
+[DEFAULT]
+subsuite = mozbuild
+
+[test_msix.py]
diff --git a/python/mozbuild/mozbuild/repackaging/test/test_msix.py b/python/mozbuild/mozbuild/repackaging/test/test_msix.py
new file mode 100644
index 0000000000..f6735dcc75
--- /dev/null
+++ b/python/mozbuild/mozbuild/repackaging/test/test_msix.py
@@ -0,0 +1,53 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+
+from mozunit import main
+
+from mozbuild.repackaging.msix import get_embedded_version
+
+
class TestMSIX(unittest.TestCase):
    def test_embedded_version(self):
        """Test embedded version extraction."""

        def check(buildid, version_string, expected):
            # Placeholder letters pass through get_embedded_version verbatim.
            version = get_embedded_version(version_string, buildid)
            self.assertEqual(version, expected)
            # Some parts of the MSIX packaging ecosystem require the final
            # digit in the dotted quad to be 0.
            self.assertTrue(version.endswith(".0"))

        buildid = "YYYY0M0D0HMmSs"
        for version_string, expected in (
            ("X.0a1", "X.YY0M.D0H.0"),
            ("X.YbZ", "X.Y.Z.0"),
            ("X.Yesr", "X.Y.0.0"),
            ("X.Y.Zesr", "X.Y.Z.0"),
            ("X.YrcZ", "X.Y.Z.0"),
            ("X.Y", "X.Y.0.0"),
            ("X.Y.Z", "X.Y.Z.0"),
        ):
            check(buildid, version_string, expected)

        # Nightly versions fold the buildid into the quad; use a second
        # buildid shape to cover the alternate field widths.
        buildid = "YYYYMmDdHhMmSs"
        check(buildid, "X.0a1", "X.YYMm.DdHh.0")

        # Version strings outside the recognized release patterns are rejected.
        for version_string in ("X.Ya1", "X.0a2", "X.Y.ZbW", "X.Y.ZrcW"):
            with self.assertRaises(ValueError):
                get_embedded_version(version_string, buildid)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/resources/html-build-viewer/build_resources.html b/python/mozbuild/mozbuild/resources/html-build-viewer/build_resources.html
new file mode 100644
index 0000000000..9daf30178b
--- /dev/null
+++ b/python/mozbuild/mozbuild/resources/html-build-viewer/build_resources.html
@@ -0,0 +1,694 @@
+<!-- This Source Code Form is subject to the terms of the Mozilla Public
+ - License, v. 2.0. If a copy of the MPL was not distributed with this
+ - file, You can obtain one at http://mozilla.org/MPL/2.0/. -->
+<!DOCTYPE html>
+<html lang="en">
+ <head>
+ <title>Build System Resource Usage</title>
+
+ <meta charset='utf-8'>
+ <script src="https://d3js.org/d3.v3.min.js" charset="utf-8"></script>
+ <link rel="stylesheet" href="https://firefoxux.github.io/design-tokens/photon-colors/photon-colors.css" charset="utf-8">
+ <style>
+
+svg {
+ overflow: visible;
+}
+body {
+ background-color: var(--grey-20);
+ font-family: sans-serif;
+ padding: 30px 80px;
+ display: flex;
+ flex-direction: column;
+}
+
+h1 {
+ font-size: 2.3rem;
+ margin: 40px 0 20px;
+}
+
+.dashboard-card {
+ padding: 32px 80px;
+ background-color: var(--white-100);
+ border-radius: 8px;
+ margin: 60px 0;
+}
+
+h2 {
+ color: var(--grey-80);
+ margin-bottom: 30px;
+}
+
+.chart {
+ padding-top: 20px;
+}
+
+.grid-list ul {
+ list-style: none;
+ padding: 0;
+ margin: 0;
+ display: grid;
+ grid-template-columns: max-content max-content max-content;
+ gap: 15px 4px;
+}
+
+.grid-list ul li {
+ grid-column: 1 / -1;
+ display: grid;
+ grid-template-columns: subgrid;
+ place-items: baseline;
+}
+
+.grid-list ul li .label {
+ color: var(--grey-50);
+ padding-right: 40px;
+}
+
+.grid-list ul li .value {
+ justify-self: end;
+ font-size: 1.3em;
+}
+
+.axis path,
+.axis line {
+ fill: none;
+ stroke: #000;
+ shape-rendering: crispEdges;
+}
+
+.grid {
+ stroke: gray;
+ stroke-dasharray: 3, 2;
+ opacity: 0.4;
+}
+
+.area {
+ fill: var(--blue-50);
+}
+
+.graphs {
+ text-anchor: end;
+}
+
+.timeline {
+ fill: var(--blue-40);
+ stroke: var(--blue-70);
+ stroke-width: 1;
+}
+
+.short {
+ fill: var(--grey-50);
+ stroke: var(--blue-70);
+ stroke-width: 1;
+}
+
+#tooltip {
+ z-index: 10;
+ position: fixed;
+ background: var(--white-100);
+ padding: 20px;
+ border-radius: 5px;
+ border: 1px solid var(--grey-20);
+}
+
+.align-self-start {
+ align-self: start;
+}
+
+/* utility classes from the Firefox Photon style guide "https://design.firefox.com/photon/" */
+
+.shadow-10 {
+ box-shadow: 0 1px 4px var(--grey-90-a10);
+}
+
+.shadow-20 {
+ box-shadow: 0 2px 8px var(--grey-90-a10);
+}
+
+.shadow-30 {
+ box-shadow: 0 4px 16px var(--grey-90-a10);
+}
+
+.shadow-custom {
+ box-shadow: 0 8px 12px 1px rgba(29,17,51,.04),0 3px 16px 2px rgba(9,32,77,.12),0 5px 10px -3px rgba(29,17,51,.12);
+}
+
+
+ </style>
+ </head>
+ <body>
+ <script>
+var currentResources;
+
+/**
+ * Interface for a build resources JSON file.
+ */
+function BuildResources(data) {
+ if (data.version < 1 || data.version > 3) {
+ throw new Error("Unsupported version of the JSON format: " + data.version);
+ }
+
+ this.resources = [];
+
+ var cpu_fields = data.cpu_times_fields;
+ var io_fields = data.io_fields;
+ var virt_fields = data.virt_fields;
+ var swap_fields = data.swap_fields;
+
+ function convert(dest, source, sourceKey, destKey, fields) {
+ var i = 0;
+ fields.forEach(function (field) {
+ dest[destKey][field] = source[sourceKey][i];
+ i++;
+ });
+ }
+
+ var offset = data.start;
+ var cpu_times_totals = {};
+
+ cpu_fields.forEach(function (field) {
+ cpu_times_totals[field] = 0;
+ });
+
+ this.ioTotal = {};
+ var i = 0;
+ io_fields.forEach(function (field) {
+ this.ioTotal[field] = data.overall.io[i];
+ i++;
+ }.bind(this));
+
+ data.samples.forEach(function (sample) {
+ var entry = {
+ start: sample.start - offset,
+ end: sample.end - offset,
+ duration: sample.duration,
+ cpu_percent: sample.cpu_percent_mean,
+ cpu_times: {},
+ cpu_times_percents: {},
+ io: {},
+ virt: {},
+ swap: {},
+ };
+
+ convert(entry, sample, "cpu_times_sum", "cpu_times", cpu_fields);
+ convert(entry, sample, "io", "io", io_fields);
+ convert(entry, sample, "virt", "virt", virt_fields);
+ convert(entry, sample, "swap", "swap", swap_fields);
+
+ var total = 0;
+ for (var k in entry.cpu_times) {
+ cpu_times_totals[k] += entry.cpu_times[k];
+ total += entry.cpu_times[k];
+ }
+
+ for (var k in entry.cpu_times) {
+ if (total == 0) {
+ if (k == "idle") {
+ entry.cpu_times_percents[k] = 100;
+ } else {
+ entry.cpu_times_percents[k] = 0;
+ }
+ } else {
+ entry.cpu_times_percents[k] = entry.cpu_times[k] / total * 100;
+ }
+ }
+
+ this.resources.push(entry);
+ }.bind(this));
+
+ this.virt_fields = virt_fields;
+ this.cpu_times_fields = [];
+
+ // Filter out CPU fields that have no values.
+ for (var k in cpu_times_totals) {
+ var v = cpu_times_totals[k];
+ if (v) {
+ this.cpu_times_fields.push(k);
+ continue;
+ }
+
+ this.resources.forEach(function (entry) {
+ delete entry.cpu_times[k];
+ delete entry.cpu_times_percents[k];
+ });
+ }
+
+ this.offset = offset;
+ this.data = data;
+}
+
BuildResources.prototype = Object.freeze({
  // Absolute build start time, in seconds since the epoch.
  get start() {
    return this.data.start;
  },

  get startDate() {
    return new Date(this.start * 1000);
  },

  // Absolute build end time, in seconds since the epoch.
  get end() {
    return this.data.end;
  },

  get endDate() {
    return new Date(this.end * 1000);
  },

  // Total wall time of the build, in seconds.
  get duration() {
    return this.data.duration;
  },

  // Relative start time of every sample, in sample order.
  get sample_times() {
    var times = [];
    this.resources.forEach(function (sample) {
      times.push(sample.start);
    });

    return times;
  },

  // Mean CPU utilization over the whole build.
  get cpuPercent() {
    return this.data.overall.cpu_percent_mean;
  },

  // Names of the build phases ("tiers"), in execution order.
  get tiers() {
    var t = [];

    this.data.phases.forEach(function (e) {
      t.push(e.name);
    });

    return t;
  },

  // Look up a phase record by name; returns undefined when not found.
  getTier: function (tier) {
    for (var i = 0; i < this.data.phases.length; i++) {
      var t = this.data.phases[i];

      if (t.name == tier) {
        return t;
      }
    }
  },
});
+
// Axis tick formatter: render the tick value with a trailing percent sign.
function format_percent(d, i) {
  return `${d}%`;
}
+
// Re-render all charts on window resize so chart widths track the viewport.
const updateChartsOnResizeWindow = () => addEventListener('resize', updateResourcesGraph);
updateChartsOnResizeWindow();

// layout spacing to set charts widths
const marginBodyX = 8;      // default browser body margin
const paddingBodyX = 80;    // body horizontal padding (see CSS above)
const marginCardsX = 0;
const paddingCardsX = 80;   // .dashboard-card horizontal padding (see CSS above)
+
// Redraw every chart and refresh the summary panel from `currentResources`.
// Invoked on initial load, when new data is selected, and on window resize.
function updateResourcesGraph() {
  renderResources("cpu_graph", currentResources, 400, "cpu_times_fields", "cpu_times_percents", 100, format_percent, [
    ["nice", "#0d9fff"],
    ["irq", "#ff0d9f"],
    ["softirq", "#ff0d9f"],
    ["steal", "#000000"],
    ["guest", "#000000"],
    ["guest_nice", "#000000"],
    ["system", "var(--purple-80)"],
    ["iowait", "#ff0d25"],
    ["user", "var(--magenta-50)"],
    ["idle", "var(--grey-20)"],
  ]);
  // On macos, there doesn't seem to be a combination of values that sums up to
  // the total, so just use the percentage. Only macos has a "wired" value.
  if ('wired' in currentResources.resources[0].virt) {
    // Fix: the var() references below were missing their closing parenthesis
    // and only rendered because CSS implicitly closes open constructs at the
    // end of a declaration.
    renderResources("mem_graph", currentResources, 200, "virt_fields", "virt", 100, format_percent, [
      ["percent", "var(--blue-50)"],
    ]);
  } else {
    renderResources("mem_graph", currentResources, 200, "virt_fields", "virt", currentResources.resources[0].virt['total'], d3.format("s"), [
      ["used", "var(--blue-50)"],
      ["buffers", "#f65c5c"],
      ["cached", "var(--orange-50)"],
      ["free", "var(--grey-20)"],
    ]);
  }
  renderTimeline("tiers", currentResources);
  // Summary panel values.
  document.getElementById("wall_time").textContent = Math.round(currentResources.duration * 100) / 100;
  document.getElementById("start_date").textContent = currentResources.startDate.toLocaleString();
  document.getElementById("end_date").textContent = currentResources.endDate.toLocaleString();
  document.getElementById("cpu_percent").textContent = Math.round(currentResources.cpuPercent * 100) / 100;
  document.getElementById("write_bytes").textContent = currentResources.ioTotal["write_bytes"];
  document.getElementById("read_bytes").textContent = currentResources.ioTotal["read_bytes"];
  document.getElementById("write_time").textContent = currentResources.ioTotal["write_time"];
  document.getElementById("read_time").textContent = currentResources.ioTotal["read_time"];
}
+
// Fetch the resource JSON at `key` and (re)render all charts from it.
function renderKey(key) {
  d3.json(key, function onResource(error, response) {
    if (error) {
      alert("Data not available. Is the server still running?");
      return;
    }

    currentResources = new BuildResources(response);
    updateResourcesGraph();
  });
}
+
// Render a stacked-area chart into the element with the given `id`.
//   resources  - BuildResources instance
//   height     - total chart height in px (margins included)
//   fields_attr- BuildResources attribute naming the valid field list
//   data_attr  - per-sample attribute holding the values to plot
//   max_value  - y-axis domain maximum
//   tick_format- y-axis tick formatter
//   layers     - [name, color] pairs in stacking order
function renderResources(id, resources, height, fields_attr, data_attr, max_value, tick_format, layers) {
  document.getElementById(id).innerHTML = "";

  const margin = {top: 20, right: 20, bottom: 20, left: 50};
  const width = window.innerWidth - 2 * (marginBodyX + paddingBodyX + marginCardsX + paddingCardsX) - margin.left;
  var heightChart = height - margin.top - margin.bottom;

  // x maps relative sample time to pixels; y maps values to pixels.
  var x = d3.scale.linear()
      .range([0, width])
      .domain(d3.extent(resources.resources, function (d) { return d.start; }))
      ;
  var y = d3.scale.linear()
      .range([heightChart, 0])
      .domain([0, max_value])
      ;

  var xAxis = d3.svg.axis()
      .scale(x)
      .orient("bottom")
      ;
  var yAxis = d3.svg.axis()
      .scale(y)
      .orient("left")
      .tickFormat(tick_format)
      ;

  // Area generator; y0/y is the stacked baseline/height computed by d3.
  var area = d3.svg.area()
      .x(function (d) { return x(d.start); })
      .y0(function(d) { return y(d.y0); })
      .y1(function(d) { return y(d.y0 + d.y); })
      ;

  var stack = d3.layout.stack()
      .values(function (d) { return d.values; })
      ;

  // Manually control the layer order because we want it consistent and want
  // to inject some sanity.
  var layers = layers.filter(function (l) {
    return resources[fields_attr].indexOf(l[0]) != -1;
  });

  // Draw a legend.
  var legend = d3.select("#" + id)
      .append("svg")
      .attr("width", width + margin.left + margin.right)
      .attr("height", 15)
      .append("g")
      .attr("class", "legend")
      ;

  legend.selectAll("g")
    .data(layers)
    .enter()
    .append("g")
    .each(function (d, i) {
      // One color swatch + label per layer, laid out horizontally.
      var g = d3.select(this);
      g.append("rect")
        .attr("x", i * 100 + 20)
        .attr("y", 0)
        .attr("width", 10)
        .attr("height", 10)
        .style("fill", d[1])
        ;
      g.append("text")
        .attr("x", i * 100 + 40)
        .attr("y", 10)
        .attr("height", 10)
        .attr("width", 70)
        .text(d[0])
        ;
    })
    ;

  var svg = d3.select("#" + id).append("svg")
      .attr("width", width)
      .attr("height", heightChart + margin.top + margin.bottom)
    .append("g")
      .attr("transform", "translate(" + margin.left + "," + margin.top + ")")
      ;

  // Reshape the samples into one {name, color, values} series per layer.
  var data = stack(layers.map(function (layer) {
    return {
      name: layer[0],
      color: layer[1],
      values: resources.resources.map(function (d) {
        return {
          start: d.start,
          y: d[data_attr][layer[0]],
        };
      }),
    };
  }));

  var graphs = svg.selectAll(".graphs")
    .data(data)
    .enter().append("g")
    .attr("class", "graphs")
    ;

  graphs.append("path")
    .attr("class", "area")
    .attr("d", function (d) { return area(d.values); })
    .style("fill", function (d) { return d.color; })
    ;

  svg.append("g")
    .attr("class", "x axis")
    .attr("transform", "translate(0," + heightChart + ")")
    .call(xAxis)
    ;

  // Full-height/width grid lines reuse the axes with blanked tick labels.
  svg.append("g")
    .attr("class", "x grid")
    .attr("transform", "translate(0," + heightChart + ")")
    .call(xAxis.tickSize(-heightChart, 0, 0).tickFormat(""))
    ;

  svg.append("g")
    .attr("class", "y axis")
    .call(yAxis)
    ;

  svg.append("g")
    .attr("class", "y grid")
    .call(yAxis.tickSize(-width, 0, 0).tickFormat(""))
    ;
}
+
// Render a row of rectangles, one per build tier, into the element with the
// given `id`.  Uses the same x scale as the resource charts so times line up,
// and wires up a hover tooltip showing per-tier details.
function renderTimeline(id, resources) {
  document.getElementById(id).innerHTML = "";

  var margin = {top: 20, right: 20, bottom: 20, left: 50};
  const width = window.innerWidth - 2 * (marginBodyX + paddingBodyX + marginCardsX + paddingCardsX) - margin.left;

  var x = d3.scale.linear()
      .range([0, width])
      .domain(d3.extent(resources.resources, function (d) { return d.start; }))
      ;
  // Now we render a timeline of sorts of the tiers
  // There is a row of rectangles that visualize divisions between the
  // different items. We use the same x scale as the resource graph so times
  // line up properly.
  // Fix: declare with `var` — this previously leaked an implicit global.
  var svg = d3.select("#" + id).append("svg")
      .attr("width", width)
      .attr("height", 100)
      .append("g")
      ;

  resources.tiers.forEach(function (t, i) {
    var tier = resources.getTier(t);

    // Tier timestamps are absolute; shift them into chart-relative time.
    var x_start = x(tier.start - resources.offset);
    var x_end = x(tier.end - resources.offset);

    svg.append("rect")
      .attr("x", x_start)
      .attr("y", 20)
      .attr("height", 30)
      .attr("width", x_end - x_start)
      .attr("class", "timeline tier")
      .attr("tier", t)
      ;
  });

  // Resolve the tier record for a hovered rect via its "tier" attribute.
  function getEntry(element) {
    var tier = element.getAttribute("tier");

    var entry = resources.getTier(tier);
    entry.tier = tier;

    return entry;
  }

  d3.selectAll(".timeline")
    .on("mouseenter", function () {
      var entry = getEntry(this);

      d3.select("#tt_tier").html(entry.tier);
      d3.select("#tt_duration").html(entry.duration || "n/a");
      d3.select("#tt_cpu_percent").html(entry.cpu_percent_mean || "n/a");

      d3.select("#tooltip").style("display", "");
    })
    .on("mouseleave", function () {
      var tooltip = d3.select("#tooltip");
      tooltip.style("display", "none");
    })
    .on("mousemove", function () {
      var e = d3.event;
      // Fix: declare with `var` — this previously leaked an implicit global.
      // Flip the tooltip to the left of the cursor in the right half of the
      // window so it stays on-screen.
      var x_offset = 10;

      if (e.clientX > window.innerWidth / 2) {
        x_offset = -150;
      }

      d3.select("#tooltip")
        .style("left", (e.clientX + x_offset) + "px")
        .style("top", (e.clientY + 10) + "px")
        ;
    })
    ;
}
+
// Load `data` into the page.  `data` is either an index of resource JSON
// files ({files: [...]}) or a resource-usage object itself.
function initData(data) {
  var list = d3.select("#list");
  // Clear the list if it wasn't already empty.
  list.selectAll("*").remove();
  list.style("display", "none");

  if (!data) {
    return;
  }
  // If the data contains a list of files, use that list.
  // Otherwise, we expect it's directly resources info data.
  if (Object.keys(data).length == 1 && "files" in data) {
    if (data.files.length > 1) {
      // Fix: declare the loop variable — it previously leaked an implicit
      // global (and would throw in strict mode).
      for (const file of data.files) {
        list.append("option").attr("value", file).text(file);
      }
      list.style("display", "inline");
    }
    // NOTE(review): assumes data.files is non-empty; an empty list would
    // fetch `undefined` here — confirm callers never pass one.
    renderKey(data.files[0]);
  } else {
    currentResources = new BuildResources(data);
    updateResourcesGraph();
  }
}
+
// On load: wire up the file <select> and fetch the default
// build_resources.json served next to this page.
document.addEventListener("DOMContentLoaded", function() {
  var list = d3.select("#list");
  list.on("change", function() {renderKey(this.value);})
  d3.json("build_resources.json", function onList(error, response) {
    initData(response);
  });
}, false);

// Allow dropping a text/uri-list of resource files onto the page.
document.addEventListener("drop", function(event) {
  event.preventDefault();
  var uris = event.dataTransfer.getData("text/uri-list");
  if (uris) {
    var data = {
      // Lines starting with "#" are comments per the text/uri-list format.
      files: uris.split(/\r\n|\r|\n/).filter(uri => !uri.startsWith("#")),
    };
    initData(data);
  }
}, false);

document.addEventListener("dragover", function(event) {
  // prevent default to allow drop
  event.preventDefault();
}, false);
+
+ </script>
+ <h1>Build Resource Usage Report</h1>
+
+ <div id="tooltip" class="shadow-30" style="display: none;">
+ <div class="grid-list"><ul>
+ <li>
+ <div class="label">Tier</div>
+ <div class="value" id="tt_tier"></div>
+ </li>
+ <li>
+ <div class="label">Duration</div>
+ <div class="value" id="tt_duration"></div>
+ </li>
+ <li>
+ <div class="label">CPU %</div>
+ <div class="value" id="tt_cpu_percent"></div>
+ </li>
+ </ul></div>
+ </div>
+
+ <select id="list" style="display: none"></select>
+ <div class="dashboard-card shadow-10">
+ <h2>CPU</h2>
+ <div id="cpu_graph" class="chart"></div>
+ </div>
+ <div class="dashboard-card shadow-10">
+ <h2>Memory</h2>
+ <div id="mem_graph" class="chart"></div>
+ </div>
+ <div class="dashboard-card shadow-10">
+ <h2>Tiers</h2>
+ <div id="tiers"></div>
+ </div>
+ <div class="dashboard-card shadow-10 align-self-start">
+ <h2>Summary</h2>
+ <div id="summary" class="grid-list" style="padding-top: 20px">
+ <ul>
+ <li>
+ <div class="label">Wall Time (s)</div>
+ <div class="value" id="wall_time"></div>
+ <div class="unit">s</div>
+ </li>
+ <li>
+ <div class="label">Start Date</div>
+ <div class="value" id="start_date"></div>
+ <div class="unit"></div>
+ </li>
+ <li>
+ <div class="label">End Date</div>
+ <div class="value" id="end_date"></div>
+ <div class="unit"></div>
+ </li>
+ <li>
+ <div class="label">CPU %</div>
+ <div class="value" id="cpu_percent"></div>
+ <div class="unit">%</div>
+ </li>
+ <li>
+ <div class="label">Write Bytes</div>
+ <div class="value" id="write_bytes"></div>
+ <div class="unit">B</div>
+ </li>
+ <li>
+ <div class="label">Read Bytes</div>
+ <div class="value" id="read_bytes"></div>
+ <div class="unit">B</div>
+ </li>
+ <li>
+ <div class="label">Write Time</div>
+ <div class="value" id="write_time"></div>
+ <div class="unit"></div>
+ </li>
+ <li>
+ <div class="label">Read Time</div>
+ <div class="value" id="read_time"></div>
+ <div class="unit"></div>
+ </li>
+ </ul>
+ </div>
+ </div>
+ </body>
+</html>
diff --git a/python/mozbuild/mozbuild/schedules.py b/python/mozbuild/mozbuild/schedules.py
new file mode 100644
index 0000000000..5f484ed377
--- /dev/null
+++ b/python/mozbuild/mozbuild/schedules.py
@@ -0,0 +1,77 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Constants for SCHEDULES configuration in moz.build files and for
+skip-unless-schedules optimizations in task-graph generation.
+"""
+
+# TODO: ideally these lists could be specified in moz.build itself
+
+# Inclusive components are those which are scheduled when certain files are
+# changed, but do not run by default. These are generally added to
+# `SCHEDULES.inclusive` using `+=`, but can also be used as exclusive
+# components for files which *only* affect the named component.
INCLUSIVE_COMPONENTS = [
    "docs",
    "py-lint",
    "js-lint",
    "yaml-lint",
    # inclusive test suites -- these *only* run when certain files have changed
    "jittest",
    "test-verify",
    "test-verify-gpu",
    "test-verify-wpt",
    "test-coverage",
    "test-coverage-wpt",
    "jsreftest",
    "android-hw-gfx",
    "rusttests",
]
# Sorted so consumers (docs, error messages) see a stable ordering.
INCLUSIVE_COMPONENTS = sorted(INCLUSIVE_COMPONENTS)

# Exclusive components are those which are scheduled by default, but for which
# some files *only* affect that component. For example, most files affect all
# platforms, but platform-specific files exclusively affect a single platform.
# These components are assigned to `SCHEDULES.exclusive` with `=`. Each comment
# denotes a new mutually exclusive set of groups that tasks can belong to.
EXCLUSIVE_COMPONENTS = [
    # os families
    "android",
    "linux",
    "macosx",
    "windows",
    # broad test harness categories
    "awsy",
    "condprofile",
    "cppunittest",
    "firefox-ui",
    "fuzztest",
    "geckoview-junit",
    "gtest",
    "marionette",
    "mochitest",
    "raptor",
    "reftest",
    "talos",
    "telemetry-tests-client",
    "xpcshell",
    "xpcshell-coverage",
    "web-platform-tests",
    # specific test suites
    "crashtest",
    "mochitest-a11y",
    "mochitest-browser-a11y",
    "mochitest-browser-media",
    "mochitest-browser-chrome",
    "mochitest-chrome",
    "mochitest-plain",
    "web-platform-tests-crashtest",
    "web-platform-tests-print-reftest",
    "web-platform-tests-reftest",
    "web-platform-tests-wdspec",
    "nss",
]
EXCLUSIVE_COMPONENTS = sorted(EXCLUSIVE_COMPONENTS)
# Every recognized SCHEDULES component, inclusive first.
ALL_COMPONENTS = INCLUSIVE_COMPONENTS + EXCLUSIVE_COMPONENTS
diff --git a/python/mozbuild/mozbuild/settings.py b/python/mozbuild/mozbuild/settings.py
new file mode 100644
index 0000000000..e17a5cb400
--- /dev/null
+++ b/python/mozbuild/mozbuild/settings.py
@@ -0,0 +1,30 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mach.decorators import SettingsProvider
+
+
@SettingsProvider
class TelemetrySettings:
    """Mach settings controlling build-system telemetry submission.

    Each entry is ``(path, type, description[, default])``.  The deprecated
    ``build.telemetry`` setting intentionally has no default.
    """

    config_settings = [
        (
            # NOTE(review): the description says "telemetry.is_enabled" but the
            # replacement setting defined below is "mach_telemetry.is_enabled";
            # confirm which name is canonical.
            "build.telemetry",
            "boolean",
            "Enable submission of build system telemetry "
            '(Deprecated, replaced by "telemetry.is_enabled")',
        ),
        (
            # Master switch: telemetry is only submitted when this is True.
            "mach_telemetry.is_enabled",
            "boolean",
            "Build system telemetry is allowed",
            False,
        ),
        (
            # Whether the user has already been taken through the opt-in flow.
            "mach_telemetry.is_set_up",
            "boolean",
            "The telemetry setup workflow has been completed "
            "(e.g.: user has been prompted to opt-in)",
            False,
        ),
    ]
diff --git a/python/mozbuild/mozbuild/shellutil.py b/python/mozbuild/mozbuild/shellutil.py
new file mode 100644
index 0000000000..36665cf4b1
--- /dev/null
+++ b/python/mozbuild/mozbuild/shellutil.py
@@ -0,0 +1,210 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import re
+
+
+def _tokens2re(**tokens):
+ # Create a pattern for non-escaped tokens, in the form:
+ # (?<!\\)(?:a|b|c...)
+ # This is meant to match patterns a, b, or c, or ... if they are not
+ # preceded by a backslash.
+ # where a, b, c... are in the form
+ # (?P<name>pattern)
+ # which matches the pattern and captures it in a named match group.
+ # The group names and patterns are given as arguments.
+ all_tokens = "|".join(
+ "(?P<%s>%s)" % (name, value) for name, value in tokens.items()
+ )
+ nonescaped = r"(?<!\\)(?:%s)" % all_tokens
+
+ # The final pattern matches either the above pattern, or an escaped
+ # backslash, captured in the "escape" match group.
+ return re.compile("(?:%s|%s)" % (nonescaped, r"(?P<escape>\\\\)"))
+
+
# Tokens with special meaning while scanning an unquoted context.
UNQUOTED_TOKENS_RE = _tokens2re(
    whitespace=r"[\t\r\n ]+",
    quote=r'[\'"]',
    comment="#",
    special=r"[<>&|`(){}$;\*\?]",
    backslashed=r"\\[^\\]",
)

# Tokens with special meaning inside a double-quoted string.
DOUBLY_QUOTED_TOKENS_RE = _tokens2re(
    quote='"',
    backslashedquote=r'\\"',
    # Fix: use a raw string — "\$" is an invalid escape sequence and emits a
    # SyntaxWarning on modern Python; the matched pattern is unchanged.
    special=r"\$",
    backslashed=r'\\[^\\"]',
)

# Line continuations (backslash-newline) are removed before parsing.
ESCAPED_NEWLINES_RE = re.compile(r"\\\n")

# This regexp contains the same characters as all those listed in
# UNQUOTED_TOKENS_RE. Please keep in sync.
SHELL_QUOTE_RE = re.compile(r"[\\\t\r\n \'\"#<>&|`(){}$;\*\?]")
+
+
class MetaCharacterException(Exception):
    """Raised when an unescaped shell metacharacter is encountered.

    The offending character is available as the ``char`` attribute.
    """

    def __init__(self, char):
        # Fix: pass the character to Exception so str(exc) / tracebacks are
        # not empty; previously args was never populated.
        super().__init__(char)
        self.char = char
+
+
class _ClineSplitter(object):
    """
    Parses a given command line string and creates a list of command
    and arguments, with wildcard expansion.
    """

    def __init__(self, cline):
        # Current in-progress argument; None when between arguments.
        self.arg = None
        # Unconsumed remainder of the command line.
        self.cline = cline
        # Finalized arguments, in order.
        self.result = []
        self._parse_unquoted()

    def _push(self, str):
        """
        Push the given string as part of the current argument
        """
        if self.arg is None:
            self.arg = ""
        self.arg += str

    def _next(self):
        """
        Finalize current argument, effectively adding it to the list.
        """
        if self.arg is None:
            return
        self.result.append(self.arg)
        self.arg = None

    def _parse_unquoted(self):
        """
        Parse command line remainder in the context of an unquoted string.
        """
        while self.cline:
            # Find the next token
            m = UNQUOTED_TOKENS_RE.search(self.cline)
            # If we find none, the remainder of the string can be pushed to
            # the current argument and the argument finalized
            if not m:
                self._push(self.cline)
                break
            # The beginning of the string, up to the found token, is part of
            # the current argument
            if m.start():
                self._push(self.cline[: m.start()])
            self.cline = self.cline[m.end() :]

            # Exactly one named group matched; dispatch on its name.
            match = {name: value for name, value in m.groupdict().items() if value}
            if "quote" in match:
                # " or ' start a quoted string
                if match["quote"] == '"':
                    self._parse_doubly_quoted()
                else:
                    self._parse_quoted()
            elif "comment" in match:
                # Comments are ignored. The current argument can be finalized,
                # and parsing stopped.
                break
            elif "special" in match:
                # Unquoted, non-escaped special characters need to be sent to a
                # shell.
                raise MetaCharacterException(match["special"])
            elif "whitespace" in match:
                # Whitespaces terminate current argument.
                self._next()
            elif "escape" in match:
                # Escaped backslashes turn into a single backslash
                self._push("\\")
            elif "backslashed" in match:
                # Backslashed characters are unbackslashed
                # e.g. echo \a -> a
                self._push(match["backslashed"][1])
            else:
                raise Exception("Shouldn't reach here")
        # Flush any trailing argument left open when input ran out.
        if self.arg:
            self._next()

    def _parse_quoted(self):
        # Single quoted strings are preserved, except for the final quote
        index = self.cline.find("'")
        if index == -1:
            raise Exception("Unterminated quoted string in command")
        self._push(self.cline[:index])
        self.cline = self.cline[index + 1 :]

    def _parse_doubly_quoted(self):
        if not self.cline:
            raise Exception("Unterminated quoted string in command")
        while self.cline:
            m = DOUBLY_QUOTED_TOKENS_RE.search(self.cline)
            if not m:
                raise Exception("Unterminated quoted string in command")
            # Literal text up to the token belongs to the current argument.
            self._push(self.cline[: m.start()])
            self.cline = self.cline[m.end() :]
            match = {name: value for name, value in m.groupdict().items() if value}
            if "quote" in match:
                # a double quote ends the quoted string, so go back to
                # unquoted parsing
                return
            elif "special" in match:
                # Unquoted, non-escaped special characters in a doubly quoted
                # string still have a special meaning and need to be sent to a
                # shell.
                raise MetaCharacterException(match["special"])
            elif "escape" in match:
                # Escaped backslashes turn into a single backslash
                self._push("\\")
            elif "backslashedquote" in match:
                # Backslashed double quotes are un-backslashed
                self._push('"')
            elif "backslashed" in match:
                # Backslashed characters are kept backslashed
                self._push(match["backslashed"])
+
+
def split(cline):
    """
    Split the given command line string.
    """
    # Strip escaped newlines (line continuations) before tokenizing.
    joined = ESCAPED_NEWLINES_RE.sub("", cline)
    splitter = _ClineSplitter(joined)
    return splitter.result
+
+
def _quote(s):
    """Given a string, returns a version that can be used literally on a shell
    command line, enclosing it with single quotes if necessary.

    As a special case, if given an int, returns a string containing the int,
    not enclosed in quotes.
    """
    if type(s) == int:
        return "%d" % s

    # Quoting is unnecessary for a non-empty string containing no shell
    # metacharacters that does not start with "~" (home-dir expansion).
    needs_quoting = not s or SHELL_QUOTE_RE.search(s) or s.startswith("~")
    if not needs_quoting:
        return s

    # A single-quoted string may contain anything except the single quote
    # itself, which can't even be escaped: close the string, emit an escaped
    # quote, and reopen it.  Build replacement pieces through the value's own
    # type so str subclasses keep working.
    kind = type(s)
    escaped = s.replace(kind("'"), kind("'\\''"))
    return kind("'%s'") % escaped
+
+
def quote(*strings):
    """Given one or more strings, returns a quoted string that can be used
    literally on a shell command line.

    >>> quote('a', 'b')
    "a b"
    >>> quote('a b', 'c')
    "'a b' c"
    """
    quoted = [_quote(s) for s in strings]
    return " ".join(quoted)
+
+
+__all__ = ["MetaCharacterException", "split", "quote"]
diff --git a/python/mozbuild/mozbuild/sphinx.py b/python/mozbuild/mozbuild/sphinx.py
new file mode 100644
index 0000000000..4d7afb621c
--- /dev/null
+++ b/python/mozbuild/mozbuild/sphinx.py
@@ -0,0 +1,293 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import importlib
+from pathlib import Path
+
+from docutils import nodes
+from docutils.parsers.rst import Directive
+from mots.config import FileConfig
+from mots.directory import Directory
+from mots.export import export_to_format
+from sphinx.util.docstrings import prepare_docstring
+from sphinx.util.docutils import ReferenceRole
+
+
+def function_reference(f, attr, args, doc):
+ lines = []
+
+ lines.extend(
+ [
+ f,
+ "-" * len(f),
+ "",
+ ]
+ )
+
+ docstring = prepare_docstring(doc)
+
+ lines.extend(
+ [
+ docstring[0],
+ "",
+ ]
+ )
+
+ arg_types = []
+
+ for t in args:
+ if isinstance(t, list):
+ inner_types = [t2.__name__ for t2 in t]
+ arg_types.append(" | ".join(inner_types))
+ continue
+
+ arg_types.append(t.__name__)
+
+ arg_s = "(%s)" % ", ".join(arg_types)
+
+ lines.extend(
+ [
+ ":Arguments: %s" % arg_s,
+ "",
+ ]
+ )
+
+ lines.extend(docstring[1:])
+ lines.append("")
+
+ return lines
+
+
+def variable_reference(v, st_type, in_type, doc):
+ lines = [
+ v,
+ "-" * len(v),
+ "",
+ ]
+
+ docstring = prepare_docstring(doc)
+
+ lines.extend(
+ [
+ docstring[0],
+ "",
+ ]
+ )
+
+ lines.extend(
+ [
+ ":Storage Type: ``%s``" % st_type.__name__,
+ ":Input Type: ``%s``" % in_type.__name__,
+ "",
+ ]
+ )
+
+ lines.extend(docstring[1:])
+ lines.append("")
+
+ return lines
+
+
+def special_reference(v, func, typ, doc):
+ lines = [
+ v,
+ "-" * len(v),
+ "",
+ ]
+
+ docstring = prepare_docstring(doc)
+
+ lines.extend(
+ [
+ docstring[0],
+ "",
+ ":Type: ``%s``" % typ.__name__,
+ "",
+ ]
+ )
+
+ lines.extend(docstring[1:])
+ lines.append("")
+
+ return lines
+
+
+def format_module(m):
+ lines = []
+
+ lines.extend(
+ [
+ ".. note::",
+ " moz.build files' implementation includes a ``Path`` class.",
+ ]
+ )
+ path_docstring_minus_summary = prepare_docstring(m.Path.__doc__)[2:]
+ lines.extend([" " + line for line in path_docstring_minus_summary])
+
+ for subcontext, cls in sorted(m.SUBCONTEXTS.items()):
+ lines.extend(
+ [
+ ".. _mozbuild_subcontext_%s:" % subcontext,
+ "",
+ "Sub-Context: %s" % subcontext,
+ "=============" + "=" * len(subcontext),
+ "",
+ ]
+ )
+ lines.extend(prepare_docstring(cls.__doc__))
+ if lines[-1]:
+ lines.append("")
+
+ for k, v in sorted(cls.VARIABLES.items()):
+ lines.extend(variable_reference(k, *v))
+
+ lines.extend(
+ [
+ "Variables",
+ "=========",
+ "",
+ ]
+ )
+
+ for v in sorted(m.VARIABLES):
+ lines.extend(variable_reference(v, *m.VARIABLES[v]))
+
+ lines.extend(
+ [
+ "Functions",
+ "=========",
+ "",
+ ]
+ )
+
+ for func in sorted(m.FUNCTIONS):
+ lines.extend(function_reference(func, *m.FUNCTIONS[func]))
+
+ lines.extend(
+ [
+ "Special Variables",
+ "=================",
+ "",
+ ]
+ )
+
+ for v in sorted(m.SPECIAL_VARIABLES):
+ lines.extend(special_reference(v, *m.SPECIAL_VARIABLES[v]))
+
+ return lines
+
+
+def find_mots_config_path(app):
+ """Find and return mots config path if it exists."""
+ base_path = Path(app.srcdir).parent
+ config_path = base_path / "mots.yaml"
+ if config_path.exists():
+ return config_path
+
+
+def export_mots(config_path):
+ """Load mots configuration and export it to file."""
+ # Load from disk and initialize configuration and directory.
+ config = FileConfig(config_path)
+ config.load()
+ directory = Directory(config)
+ directory.load()
+
+ # Fetch file format (i.e., "rst") and export path.
+ frmt = config.config["export"]["format"]
+ path = config_path.parent / config.config["export"]["path"]
+
+ # Generate output.
+ output = export_to_format(directory, frmt)
+
+ # Create export directory if it does not exist.
+ path.parent.mkdir(parents=True, exist_ok=True)
+
+ # Write changes to disk.
+ with path.open("w", encoding="utf-8") as f:
+ f.write(output)
+
+
+class MozbuildSymbols(Directive):
+ """Directive to insert mozbuild sandbox symbol information."""
+
+ required_arguments = 1
+
+ def run(self):
+ module = importlib.import_module(self.arguments[0])
+ fname = module.__file__
+ if fname.endswith(".pyc"):
+ fname = fname[0:-1]
+
+ self.state.document.settings.record_dependencies.add(fname)
+
+ # We simply format out the documentation as rst then feed it back
+ # into the parser for conversion. We don't even emit ourselves, so
+ # there's no record of us.
+ self.state_machine.insert_input(format_module(module), fname)
+
+ return []
+
+
+class Searchfox(ReferenceRole):
+ """Role which links a relative path from the source to it's searchfox URL.
+
+ Can be used like:
+
+ See :searchfox:`browser/base/content/browser-places.js` for more details.
+
+ Will generate a link to
+ ``https://searchfox.org/mozilla-central/source/browser/base/content/browser-places.js``
+
+ The example above will use the path as the text, to use custom text:
+
+ See :searchfox:`this file <browser/base/content/browser-places.js>` for
+ more details.
+
+ To specify a different source tree:
+
+ See :searchfox:`mozilla-beta:browser/base/content/browser-places.js`
+ for more details.
+ """
+
+ def run(self):
+ base = "https://searchfox.org/{source}/source/{path}"
+
+ if ":" in self.target:
+ source, path = self.target.split(":", 1)
+ else:
+ source = "mozilla-central"
+ path = self.target
+
+ url = base.format(source=source, path=path)
+
+ if self.has_explicit_title:
+ title = self.title
+ else:
+ title = path
+
+ node = nodes.reference(self.rawtext, title, refuri=url, **self.options)
+ return [node], []
+
+
+def setup(app):
+ from moztreedocs import manager
+
+ app.add_directive("mozbuildsymbols", MozbuildSymbols)
+ app.add_role("searchfox", Searchfox())
+
+ # Unlike typical Sphinx installs, our documentation is assembled from
+ # many sources and staged in a common location. This arguably isn't a best
+ # practice, but it was the easiest to implement at the time.
+ #
+ # Here, we invoke our custom code for staging/generating all our
+ # documentation.
+
+ # Export and write "governance" documentation to disk.
+ config_path = find_mots_config_path(app)
+ if config_path:
+ export_mots(config_path)
+
+ manager.generate_docs(app)
+ app.srcdir = manager.staging_dir
diff --git a/python/mozbuild/mozbuild/telemetry.py b/python/mozbuild/mozbuild/telemetry.py
new file mode 100644
index 0000000000..d656a9a2aa
--- /dev/null
+++ b/python/mozbuild/mozbuild/telemetry.py
@@ -0,0 +1,264 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+This file contains functions used for telemetry.
+"""
+
+import math
+import os
+import platform
+import sys
+
+import distro
+import mozpack.path as mozpath
+
+from .base import BuildEnvironmentNotFoundException
+
+
+def cpu_brand_linux():
+ """
+ Read the CPU brand string out of /proc/cpuinfo on Linux.
+ """
+ with open("/proc/cpuinfo", "r") as f:
+ for line in f:
+ if line.startswith("model name"):
+ _, brand = line.split(": ", 1)
+ return brand.rstrip()
+ # not found?
+ return None
+
+
+def cpu_brand_windows():
+ """
+ Read the CPU brand string from the registry on Windows.
+ """
+ try:
+ import _winreg
+ except ImportError:
+ import winreg as _winreg
+
+ try:
+ h = _winreg.OpenKey(
+ _winreg.HKEY_LOCAL_MACHINE,
+ r"HARDWARE\DESCRIPTION\System\CentralProcessor\0",
+ )
+ (brand, ty) = _winreg.QueryValueEx(h, "ProcessorNameString")
+ if ty == _winreg.REG_SZ:
+ return brand
+ except WindowsError:
+ pass
+ return None
+
+
+def cpu_brand_mac():
+ """
+ Get the CPU brand string via sysctl on macos.
+ """
+ import ctypes
+ import ctypes.util
+
+ libc = ctypes.cdll.LoadLibrary(ctypes.util.find_library("c"))
+ # First, find the required buffer size.
+ bufsize = ctypes.c_size_t(0)
+ result = libc.sysctlbyname(
+ b"machdep.cpu.brand_string", None, ctypes.byref(bufsize), None, 0
+ )
+ if result != 0:
+ return None
+ bufsize.value += 1
+ buf = ctypes.create_string_buffer(bufsize.value)
+ # Now actually get the value.
+ result = libc.sysctlbyname(
+ b"machdep.cpu.brand_string", buf, ctypes.byref(bufsize), None, 0
+ )
+ if result != 0:
+ return None
+
+ return buf.value.decode()
+
+
+def get_cpu_brand():
+ """
+ Get the CPU brand string as returned by CPUID.
+ """
+ return {
+ "Linux": cpu_brand_linux,
+ "Windows": cpu_brand_windows,
+ "Darwin": cpu_brand_mac,
+ }.get(platform.system(), lambda: None)()
+
+
+def get_os_name():
+ return {"Linux": "linux", "Windows": "windows", "Darwin": "macos"}.get(
+ platform.system(), "other"
+ )
+
+
+def get_psutil_stats():
+ """Return whether psutil exists and its associated stats.
+
+ @returns (bool, int, int, int) whether psutil exists, the logical CPU count,
+ physical CPU count, and total number of bytes of memory.
+ """
+ try:
+ import psutil
+
+ return (
+ True,
+ psutil.cpu_count(),
+ psutil.cpu_count(logical=False),
+ psutil.virtual_memory().total,
+ )
+ except ImportError:
+ return False, None, None, None
+
+
+def get_system_info():
+ """
+ Gather info to fill the `system` keys in the schema.
+ """
+ # Normalize OS names a bit, and bucket non-tier-1 platforms into "other".
+ has_psutil, logical_cores, physical_cores, memory_total = get_psutil_stats()
+ info = {"os": get_os_name()}
+ if has_psutil:
+ # `total` on Linux is gathered from /proc/meminfo's `MemTotal`, which is the
+ # total amount of physical memory minus some kernel usage, so round up to the
+ # nearest GB to get a sensible answer.
+ info["memory_gb"] = int(math.ceil(float(memory_total) / (1024 * 1024 * 1024)))
+ info["logical_cores"] = logical_cores
+ if physical_cores is not None:
+ info["physical_cores"] = physical_cores
+ cpu_brand = get_cpu_brand()
+ if cpu_brand is not None:
+ info["cpu_brand"] = cpu_brand
+ # TODO: drive_is_ssd, virtual_machine: https://bugzilla.mozilla.org/show_bug.cgi?id=1481613
+ return info
+
+
+def get_build_opts(substs):
+ """
+ Translate selected items from `substs` into `build_opts` keys in the schema.
+ """
+ try:
+ opts = {
+ k: ty(substs.get(s, None))
+ for (k, s, ty) in (
+ # Selected substitutions.
+ ("artifact", "MOZ_ARTIFACT_BUILDS", bool),
+ ("debug", "MOZ_DEBUG", bool),
+ ("opt", "MOZ_OPTIMIZE", bool),
+ ("ccache", "CCACHE", bool),
+ ("sccache", "MOZ_USING_SCCACHE", bool),
+ )
+ }
+ compiler = substs.get("CC_TYPE", None)
+ if compiler:
+ opts["compiler"] = str(compiler)
+ if substs.get("CXX_IS_ICECREAM", None):
+ opts["icecream"] = True
+ return opts
+ except BuildEnvironmentNotFoundException:
+ return {}
+
+
+def get_build_attrs(attrs):
+ """
+ Extracts clobber and cpu usage info from command attributes.
+ """
+ res = {}
+ clobber = attrs.get("clobber")
+ if clobber:
+ res["clobber"] = clobber
+ usage = attrs.get("usage")
+ if usage:
+ cpu_percent = usage.get("cpu_percent")
+ if cpu_percent:
+ res["cpu_percent"] = int(round(cpu_percent))
+ return res
+
+
+def filter_args(command, argv, topsrcdir, topobjdir, cwd=None):
+ """
+ Given the full list of command-line arguments, remove anything up to and including `command`,
+ and attempt to filter absolute pathnames out of any arguments after that.
+ """
+ if cwd is None:
+ cwd = os.getcwd()
+
+ # Each key is a pathname and the values are replacement sigils
+ paths = {
+ topsrcdir: "$topsrcdir/",
+ topobjdir: "$topobjdir/",
+ mozpath.normpath(os.path.expanduser("~")): "$HOME/",
+ # This might override one of the existing entries, that's OK.
+ # We don't use a sigil here because we treat all arguments as potentially relative
+ # paths, so we'd like to get them back as they were specified.
+ mozpath.normpath(cwd): "",
+ }
+
+ args = list(argv)
+ while args:
+ a = args.pop(0)
+ if a == command:
+ break
+
+ def filter_path(p):
+ p = mozpath.abspath(p)
+ base = mozpath.basedir(p, paths.keys())
+ if base:
+ return paths[base] + mozpath.relpath(p, base)
+ # Best-effort.
+ return "<path omitted>"
+
+ return [filter_path(arg) for arg in args]
+
+
+def get_distro_and_version():
+ if sys.platform.startswith("linux"):
+ dist, version, _ = distro.linux_distribution(full_distribution_name=False)
+ return dist, version
+ elif sys.platform.startswith("darwin"):
+ return "macos", platform.mac_ver()[0]
+ elif sys.platform.startswith("win32") or sys.platform.startswith("msys"):
+ ver = sys.getwindowsversion()
+ return "windows", "%s.%s.%s" % (ver.major, ver.minor, ver.build)
+ else:
+ return sys.platform, ""
+
+
+def get_shell_info():
+ """Returns if the current shell was opened by vscode and if it's a SSH connection"""
+
+ return (
+ True if "vscode" in os.getenv("TERM_PROGRAM", "") else False,
+ bool(os.getenv("SSH_CLIENT", False)),
+ )
+
+
+def get_vscode_running():
+ """Return if the vscode is currently running."""
+ try:
+ import psutil
+
+ for proc in psutil.process_iter():
+ try:
+ # On Windows we have "Code.exe"
+ # On MacOS we have "Code Helper (Renderer)"
+ # On Linux we have ""
+ if (
+ proc.name == "Code.exe"
+ or proc.name == "Code Helper (Renderer)"
+ or proc.name == "code"
+ ):
+ return True
+ except Exception:
+ # may not be able to access process info for all processes
+ continue
+ except Exception:
+ # On some platforms, sometimes, the generator throws an
+ # exception preventing us to enumerate.
+ return False
+
+ return False
diff --git a/python/mozbuild/mozbuild/test/__init__.py b/python/mozbuild/mozbuild/test/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/__init__.py
diff --git a/python/mozbuild/mozbuild/test/action/data/html_fragment_preprocesor/example_basic.xml b/python/mozbuild/mozbuild/test/action/data/html_fragment_preprocesor/example_basic.xml
new file mode 100644
index 0000000000..251b4a3069
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/action/data/html_fragment_preprocesor/example_basic.xml
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- This Source Code Form is subject to the terms of the Mozilla Public
+ - License, v. 2.0. If a copy of the MPL was not distributed with this
+ - file, You can obtain one at http://mozilla.org/MPL/2.0/. -->
+
+<template xmlns="http://www.w3.org/1999/xhtml">
+ <div class="main">
+ <p>Hello World</p>
+ </div>
+</template>
diff --git a/python/mozbuild/mozbuild/test/action/data/html_fragment_preprocesor/example_multiple_templates.xml b/python/mozbuild/mozbuild/test/action/data/html_fragment_preprocesor/example_multiple_templates.xml
new file mode 100644
index 0000000000..2e249aec63
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/action/data/html_fragment_preprocesor/example_multiple_templates.xml
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- This Source Code Form is subject to the terms of the Mozilla Public
+ - License, v. 2.0. If a copy of the MPL was not distributed with this
+ - file, You can obtain one at http://mozilla.org/MPL/2.0/. -->
+
+<template xmlns="http://www.w3.org/1999/xhtml">
+ <template doctype="true">
+ <![CDATA[
+ <!DOCTYPE bindings [
+ <!ENTITY % exampleDTD SYSTEM "chrome://global/locale/example.dtd">
+ %exampleDTD;
+ ]>
+ ]]>
+ </template>
+ <template name="alpha">
+ <div class="main">
+ <p>Hello World</p>
+ </div>
+ </template>
+ <template name="beta">
+ <div class="body">
+ <p>Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.</p>
+ </div>
+ </template>
+ <template name="charlie">
+ <div class="footer">
+ <p>Goodbye</p>
+ </div>
+ </template>
+</template>
diff --git a/python/mozbuild/mozbuild/test/action/data/html_fragment_preprocesor/example_xul.xml b/python/mozbuild/mozbuild/test/action/data/html_fragment_preprocesor/example_xul.xml
new file mode 100644
index 0000000000..5e0ea0b34a
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/action/data/html_fragment_preprocesor/example_xul.xml
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- This Source Code Form is subject to the terms of the Mozilla Public
+ - License, v. 2.0. If a copy of the MPL was not distributed with this
+ - file, You can obtain one at http://mozilla.org/MPL/2.0/. -->
+
+<template xmlns="http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul" xmlns:html="http://www.w3.org/1999/xhtml">
+ <html:link href="chrome://global/skin/example.css" rel="stylesheet"/>
+ <hbox id="label-box" part="label-box" flex="1" role="none">
+ <image part="icon" role="none"/>
+ <label id="label" part="label" crop="end" flex="1" role="none"/>
+ <label id="highlightable-label" part="label" crop="end" flex="1" role="none"/>
+ </hbox>
+ <html:slot/>
+</template>
diff --git a/python/mozbuild/mozbuild/test/action/data/invalid/region.properties b/python/mozbuild/mozbuild/test/action/data/invalid/region.properties
new file mode 100644
index 0000000000..d4d8109b69
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/action/data/invalid/region.properties
@@ -0,0 +1,12 @@
+# A region.properties file with invalid unicode byte sequences. The
+# sequences were cribbed from Markus Kuhn's "UTF-8 decoder capability
+# and stress test", available at
+# http://www.cl.cam.ac.uk/~mgk25/ucs/examples/UTF-8-test.txt
+
+# 3.5 Impossible bytes |
+# |
+# The following two bytes cannot appear in a correct UTF-8 string |
+# |
+# 3.5.1 fe = "þ" |
+# 3.5.2 ff = "ÿ" |
+# 3.5.3 fe fe ff ff = "þþÿÿ" |
diff --git a/python/mozbuild/mozbuild/test/action/data/node/node-test-script.js b/python/mozbuild/mozbuild/test/action/data/node/node-test-script.js
new file mode 100644
index 0000000000..f6dbfcc594
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/action/data/node/node-test-script.js
@@ -0,0 +1,11 @@
+#! /usr/bin/env node
+"use strict";
+
+/* eslint-disable no-console */
+
+let args = process.argv.slice(2);
+
+for (let arg of args) {
+ console.log(`dep:${arg}`);
+}
+
diff --git a/python/mozbuild/mozbuild/test/action/test_buildlist.py b/python/mozbuild/mozbuild/test/action/test_buildlist.py
new file mode 100644
index 0000000000..9a1d2738ed
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/action/test_buildlist.py
@@ -0,0 +1,96 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import os.path
+import unittest
+from shutil import rmtree
+from tempfile import mkdtemp
+
+import mozunit
+
+from mozbuild.action.buildlist import addEntriesToListFile
+
+
+class TestBuildList(unittest.TestCase):
+ """
+ Unit tests for buildlist.py
+ """
+
+ def setUp(self):
+ self.tmpdir = mkdtemp()
+
+ def tearDown(self):
+ rmtree(self.tmpdir)
+
+ # utility methods for tests
+ def touch(self, file, dir=None):
+ if dir is None:
+ dir = self.tmpdir
+ f = os.path.join(dir, file)
+ open(f, "w").close()
+ return f
+
+ def assertFileContains(self, filename, l):
+ """Assert that the lines in the file |filename| are equal
+ to the contents of the list |l|, in order."""
+ l = l[:]
+ f = open(filename, "r")
+ lines = [line.rstrip() for line in f.readlines()]
+ f.close()
+ for line in lines:
+ self.assertTrue(
+ len(l) > 0,
+ "ran out of expected lines! (expected '{0}', got '{1}')".format(
+ l, lines
+ ),
+ )
+ self.assertEqual(line, l.pop(0))
+ self.assertTrue(
+ len(l) == 0,
+ "not enough lines in file! (expected '{0}'," " got '{1}'".format(l, lines),
+ )
+
+ def test_basic(self):
+ "Test that addEntriesToListFile works when file doesn't exist."
+ testfile = os.path.join(self.tmpdir, "test.list")
+ l = ["a", "b", "c"]
+ addEntriesToListFile(testfile, l)
+ self.assertFileContains(testfile, l)
+ # ensure that attempting to add the same entries again doesn't change it
+ addEntriesToListFile(testfile, l)
+ self.assertFileContains(testfile, l)
+
+ def test_append(self):
+ "Test adding new entries."
+ testfile = os.path.join(self.tmpdir, "test.list")
+ l = ["a", "b", "c"]
+ addEntriesToListFile(testfile, l)
+ self.assertFileContains(testfile, l)
+ l2 = ["x", "y", "z"]
+ addEntriesToListFile(testfile, l2)
+ l.extend(l2)
+ self.assertFileContains(testfile, l)
+
+ def test_append_some(self):
+ "Test adding new entries mixed with existing entries."
+ testfile = os.path.join(self.tmpdir, "test.list")
+ l = ["a", "b", "c"]
+ addEntriesToListFile(testfile, l)
+ self.assertFileContains(testfile, l)
+ addEntriesToListFile(testfile, ["a", "x", "c", "z"])
+ self.assertFileContains(testfile, ["a", "b", "c", "x", "z"])
+
+ def test_add_multiple(self):
+ """Test that attempting to add the same entry multiple times results in
+ only one entry being added."""
+ testfile = os.path.join(self.tmpdir, "test.list")
+ addEntriesToListFile(testfile, ["a", "b", "a", "a", "b"])
+ self.assertFileContains(testfile, ["a", "b"])
+ addEntriesToListFile(testfile, ["c", "a", "c", "b", "c"])
+ self.assertFileContains(testfile, ["a", "b", "c"])
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozbuild/test/action/test_html_fragment_preprocessor.py b/python/mozbuild/mozbuild/test/action/test_html_fragment_preprocessor.py
new file mode 100644
index 0000000000..3cce1c5f94
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/action/test_html_fragment_preprocessor.py
@@ -0,0 +1,196 @@
+import os
+import unittest
+import xml.etree.ElementTree as ET
+
+import mozpack.path as mozpath
+import mozunit
+
+from mozbuild.action.html_fragment_preprocesor import (
+ fill_html_fragments_map,
+ generate,
+ get_fragment_key,
+ get_html_fragments_from_file,
+)
+
+test_data_path = mozpath.abspath(mozpath.dirname(__file__))
+test_data_path = mozpath.join(test_data_path, "data", "html_fragment_preprocesor")
+
+
+def data(name):
+ return os.path.join(test_data_path, name)
+
+
+TEST_PATH = "/some/path/somewhere/example.xml".replace("/", os.sep)
+EXAMPLE_BASIC = data("example_basic.xml")
+EXAMPLE_TEMPLATES = data("example_multiple_templates.xml")
+EXAMPLE_XUL = data("example_xul.xml")
+DUMMY_FILE = data("dummy.js")
+
+
+class TestNode(unittest.TestCase):
+ """
+ Tests for html_fragment_preprocesor.py.
+ """
+
+ maxDiff = None
+
+ def assertXMLEqual(self, a, b, message):
+ aRoot = ET.fromstring(a)
+ bRoot = ET.fromstring(b)
+ self.assertXMLNodesEqual(aRoot, bRoot, message)
+
+ def assertXMLNodesEqual(self, a, b, message, xpath=""):
+ xpath += "/" + a.tag
+ messageWithPath = message + " at " + xpath
+ self.assertEqual(a.tag, b.tag, messageWithPath + " tag name")
+ self.assertEqual(a.text, b.text, messageWithPath + " text")
+ self.assertEqual(
+ a.attrib.keys(), b.attrib.keys(), messageWithPath + " attribute names"
+ )
+ for aKey, aValue in a.attrib.items():
+ self.assertEqual(
+ aValue,
+ b.attrib[aKey],
+ messageWithPath + "[@" + aKey + "] attribute value",
+ )
+ for aChild, bChild in zip(a, b):
+ self.assertXMLNodesEqual(aChild, bChild, message, xpath)
+
+ def test_get_fragment_key_path(self):
+ key = get_fragment_key("/some/path/somewhere/example.xml")
+ self.assertEqual(key, "example")
+
+ def test_get_fragment_key_with_named_template(self):
+ key = get_fragment_key(TEST_PATH, "some-template")
+ self.assertEqual(key, "example/some-template")
+
+ def test_get_html_fragments_from_template_no_doctype_no_name(self):
+ key = "example"
+ fragment_map = {}
+ template = ET.Element("template")
+ p1 = ET.SubElement(template, "p")
+ p1.text = "Hello World"
+ p2 = ET.SubElement(template, "p")
+ p2.text = "Goodbye"
+ fill_html_fragments_map(fragment_map, TEST_PATH, template)
+ self.assertEqual(fragment_map[key], "<p>Hello World</p><p>Goodbye</p>")
+
+ def test_get_html_fragments_from_named_template_with_html_element(self):
+ key = "example/some-template"
+ fragment_map = {}
+ template = ET.Element("template")
+ template.attrib["name"] = "some-template"
+ p = ET.SubElement(template, "p")
+ p.text = "Hello World"
+ fill_html_fragments_map(fragment_map, TEST_PATH, template)
+ self.assertEqual(fragment_map[key], "<p>Hello World</p>")
+
+ def test_get_html_fragments_from_template_with_doctype(self):
+ key = "example"
+ doctype = "doctype definition goes here"
+ fragment_map = {}
+ template = ET.Element("template")
+ p = ET.SubElement(template, "p")
+ p.text = "Hello World"
+ fill_html_fragments_map(fragment_map, TEST_PATH, template, doctype)
+ self.assertEqual(
+ fragment_map[key], "doctype definition goes here\n<p>Hello World</p>"
+ )
+
+ def test_get_html_fragments_from_file_basic(self):
+ key = "example_basic"
+ fragment_map = {}
+ get_html_fragments_from_file(fragment_map, EXAMPLE_BASIC)
+ self.assertEqual(
+ fragment_map[key],
+ '<div xmlns="http://www.w3.org/1999/xhtml" class="main">'
+ + " <p>Hello World</p> </div>",
+ )
+
+ def test_get_html_fragments_from_file_multiple_templates(self):
+ key1 = "example_multiple_templates/alpha"
+ key2 = "example_multiple_templates/beta"
+ key3 = "example_multiple_templates/charlie"
+ fragment_map = {}
+ get_html_fragments_from_file(fragment_map, EXAMPLE_TEMPLATES)
+ self.assertIn("<p>Hello World</p>", fragment_map[key1], "Has HTML content")
+ self.assertIn(
+ '<!ENTITY % exampleDTD SYSTEM "chrome://global/locale/example.dtd">',
+ fragment_map[key1],
+ "Has doctype",
+ )
+ self.assertIn("<p>Lorem ipsum", fragment_map[key2], "Has HTML content")
+ self.assertIn(
+ '<!ENTITY % exampleDTD SYSTEM "chrome://global/locale/example.dtd">',
+ fragment_map[key2],
+ "Has doctype",
+ )
+ self.assertIn("<p>Goodbye</p>", fragment_map[key3], "Has HTML content")
+ self.assertIn(
+ '<!ENTITY % exampleDTD SYSTEM "chrome://global/locale/example.dtd">',
+ fragment_map[key3],
+ "Has doctype",
+ )
+
+ def test_get_html_fragments_from_file_with_xul(self):
+ key = "example_xul"
+ fragment_map = {}
+ get_html_fragments_from_file(fragment_map, EXAMPLE_XUL)
+ xml = "<root>" + fragment_map[key] + "</root>"
+ self.assertXMLEqual(
+ xml,
+ "<root>"
+ + '<html:link xmlns:html="http://www.w3.org/1999/xhtml" '
+ + 'href="chrome://global/skin/example.css" rel="stylesheet">'
+ + "</html:link> "
+ + '<hbox xmlns="http://www.mozilla.org/keymaster/'
+ + 'gatekeeper/there.is.only.xul" flex="1" id="label-box" '
+ + 'part="label-box" role="none"> '
+ + '<image part="icon" role="none"></image> '
+ + '<label crop="end" flex="1" id="label" part="label" '
+ + 'role="none"></label> '
+ + '<label crop="end" flex="1" id="highlightable-label" '
+ + 'part="label" role="none"></label> '
+ + "</hbox> "
+ + '<html:slot xmlns:html="http://www.w3.org/1999/xhtml">'
+ + "</html:slot></root>",
+ "XML values must match",
+ )
+
+ def test_generate(self):
+ with open(DUMMY_FILE, "w") as file:
+ deps = generate(
+ file,
+ EXAMPLE_BASIC,
+ EXAMPLE_TEMPLATES,
+ EXAMPLE_XUL,
+ )
+ with open(DUMMY_FILE, "r") as file:
+ contents = file.read()
+ self.assertIn(
+ "<!ENTITY % exampleDTD SYSTEM",
+ contents,
+ "Has doctype",
+ )
+ self.assertIn("<p>Lorem ipsum", contents, "Has HTML content")
+ self.assertIn('"example_basic"', contents, "Has basic fragment key")
+ self.assertIn(
+ '"example_multiple_templates/alpha"',
+ contents,
+ "Has multiple templates fragment key",
+ )
+ self.assertIn('"example_xul"', contents, "Has XUL fragment key")
+ self.assertIn(
+ "const getHTMLFragment =",
+ contents,
+ "Has fragment loader method declaration",
+ )
+ os.remove(DUMMY_FILE)
+ self.assertEqual(len(deps), 3, "deps are correct")
+ self.assertIn(EXAMPLE_BASIC, deps, "deps are correct")
+ self.assertIn(EXAMPLE_TEMPLATES, deps, "deps are correct")
+ self.assertIn(EXAMPLE_XUL, deps, "deps are correct")
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozbuild/test/action/test_langpack_manifest.py b/python/mozbuild/mozbuild/test/action/test_langpack_manifest.py
new file mode 100644
index 0000000000..29e8642fc7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/action/test_langpack_manifest.py
@@ -0,0 +1,269 @@
+# -*- coding: utf-8 -*-
+
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+import json
+import os
+import shutil
+import tempfile
+import unittest
+
+import mozunit
+
+from mozbuild.action import langpack_manifest
+
+
+class TestGenerateManifest(unittest.TestCase):
+ """
+ Unit tests for langpack_manifest.py.
+ """
+
+ def test_parse_flat_ftl(self):
+ src = """
+langpack-creator = bar {"bar"}
+langpack-contributors = { "" }
+"""
+ tmp = tempfile.NamedTemporaryFile(mode="wt", suffix=".ftl", delete=False)
+ try:
+ tmp.write(src)
+ tmp.close()
+ ftl = langpack_manifest.parse_flat_ftl(tmp.name)
+ self.assertEqual(ftl["langpack-creator"], "bar bar")
+ self.assertEqual(ftl["langpack-contributors"], "")
+ finally:
+ os.remove(tmp.name)
+
+ def test_parse_flat_ftl_missing(self):
+ ftl = langpack_manifest.parse_flat_ftl("./does-not-exist.ftl")
+ self.assertEqual(len(ftl), 0)
+
+ def test_manifest(self):
+ ctx = {
+ "langpack-creator": "Suomennosprojekti",
+ "langpack-contributors": "Joe Smith, Mary White",
+ }
+ os.environ["MOZ_BUILD_DATE"] = "20210928100000"
+ manifest = langpack_manifest.create_webmanifest(
+ "fi",
+ "57.0.1",
+ "57.0",
+ "57.0.*",
+ "Firefox",
+ "/var/vcs/l10n-central",
+ "langpack-fi@firefox.mozilla.og",
+ ctx,
+ {},
+ )
+
+ data = json.loads(manifest)
+ self.assertEqual(data["name"], "Language: Suomi (Finnish)")
+ self.assertEqual(
+ data["description"], "Firefox Language Pack for Suomi (fi) – Finnish"
+ )
+ self.assertEqual(
+ data["author"], "Suomennosprojekti (contributors: Joe Smith, Mary White)"
+ )
+ self.assertEqual(data["version"], "57.0.20210928.100000")
+
+ def test_manifest_truncated_name(self):
+ ctx = {
+ "langpack-creator": "Mozilla.org / Softcatalà",
+ "langpack-contributors": "Joe Smith, Mary White",
+ }
+ os.environ["MOZ_BUILD_DATE"] = "20210928100000"
+ manifest = langpack_manifest.create_webmanifest(
+ "ca-valencia",
+ "57.0.1",
+ "57.0",
+ "57.0.*",
+ "Firefox",
+ "/var/vcs/l10n-central",
+ "langpack-ca-valencia@firefox.mozilla.og",
+ ctx,
+ {},
+ )
+
+ data = json.loads(manifest)
+ self.assertEqual(data["name"], "Language: Català (Valencià)")
+ self.assertEqual(
+ data["description"],
+ "Firefox Language Pack for Català (Valencià) (ca-valencia) – Catalan, Valencian",
+ )
+
+ def test_manifest_name_untranslated(self):
+ ctx = {
+ "langpack-creator": "Mozilla.org",
+ "langpack-contributors": "Joe Smith, Mary White",
+ }
+ os.environ["MOZ_BUILD_DATE"] = "20210928100000"
+ manifest = langpack_manifest.create_webmanifest(
+ "en-US",
+ "57.0.1",
+ "57.0",
+ "57.0.*",
+ "Firefox",
+ "/var/vcs/l10n-central",
+ "langpack-ca-valencia@firefox.mozilla.og",
+ ctx,
+ {},
+ )
+
+ data = json.loads(manifest)
+ self.assertEqual(data["name"], "Language: English (US)")
+ self.assertEqual(
+ data["description"],
+ "Firefox Language Pack for English (US) (en-US)",
+ )
+
+ def test_manifest_without_contributors(self):
+ ctx = {
+ "langpack-creator": "Suomennosprojekti",
+ "langpack-contributors": "",
+ }
+ manifest = langpack_manifest.create_webmanifest(
+ "fi",
+ "57.0.1",
+ "57.0",
+ "57.0.*",
+ "Firefox",
+ "/var/vcs/l10n-central",
+ "langpack-fi@firefox.mozilla.og",
+ ctx,
+ {},
+ )
+
+ data = json.loads(manifest)
+ self.assertEqual(data["name"], "Language: Suomi (Finnish)")
+ self.assertEqual(
+ data["description"], "Firefox Language Pack for Suomi (fi) – Finnish"
+ )
+ self.assertEqual(data["author"], "Suomennosprojekti")
+
+ def test_manifest_truncation(self):
+ locale = (
+ "Long locale code that will be truncated and will cause both "
+ "the name and the description to exceed the maximum number of "
+ "characters allowed in manifest.json"
+ )
+ title, description = langpack_manifest.get_title_and_description(
+ "Firefox", locale
+ )
+
+ self.assertEqual(len(title), 45)
+ self.assertEqual(len(description), 132)
+
+ def test_get_version_maybe_buildid(self):
+ for (app_version, buildid, expected_version) in [
+ ("109", "", "109"),
+ ("109.0", "", "109.0"),
+ ("109.0.0", "", "109.0.0"),
+ ("109", "20210928", "109"), # buildid should be 14 chars
+ ("109", "20210928123456", "109.20210928.123456"),
+ ("109.0", "20210928123456", "109.0.20210928.123456"),
+ ("109.0.0", "20210928123456", "109.0.20210928.123456"),
+ ("109", "20230215023456", "109.20230215.23456"),
+ ("109.0", "20230215023456", "109.0.20230215.23456"),
+ ("109.0.0", "20230215023456", "109.0.20230215.23456"),
+ ("109", "20230215003456", "109.20230215.3456"),
+ ("109", "20230215000456", "109.20230215.456"),
+ ("109", "20230215000056", "109.20230215.56"),
+ ("109", "20230215000006", "109.20230215.6"),
+ ("109", "20230215000000", "109.20230215.0"),
+ ("109.1.2.3", "20230201000000", "109.1.20230201.0"),
+ ("109.0a1", "", "109.0"),
+ ("109a0.0b0", "", "109.0"),
+ ("109.0.0b1", "", "109.0.0"),
+ ("109.0.b1", "", "109.0.0"),
+ ("109..1", "", "109.0.1"),
+ ]:
+ os.environ["MOZ_BUILD_DATE"] = buildid
+ version = langpack_manifest.get_version_maybe_buildid(app_version)
+ self.assertEqual(version, expected_version)
+
+ def test_main(self):
+ # We set this env variable so that the manifest.json version string
+ # uses a "buildid", see: `get_version_maybe_buildid()` for more
+ # information.
+ os.environ["MOZ_BUILD_DATE"] = "20210928100000"
+
+ TEST_CASES = [
+ {
+ "app_version": "112.0.1",
+ "max_app_version": "112.*",
+ "expected_version": "112.0.20210928.100000",
+ "expected_min_version": "112.0",
+ "expected_max_version": "112.*",
+ },
+ {
+ "app_version": "112.1.0",
+ "max_app_version": "112.*",
+ "expected_version": "112.1.20210928.100000",
+ # We expect the second part to be "0" even if the app version
+ # has a minor part equal to "1".
+ "expected_min_version": "112.0",
+ "expected_max_version": "112.*",
+ },
+ {
+ "app_version": "114.0a1",
+ "max_app_version": "114.*",
+ "expected_version": "114.0.20210928.100000",
+ # We expect the min version to be equal to the app version
+ # because we don't change alpha versions.
+ "expected_min_version": "114.0a1",
+ "expected_max_version": "114.*",
+ },
+ ]
+
+ tmpdir = tempfile.mkdtemp()
+ try:
+ # These files are required by the `main()` function.
+ for file in ["chrome.manifest", "empty-metadata.ftl"]:
+ langpack_manifest.write_file(os.path.join(tmpdir, file), "")
+
+ for tc in TEST_CASES:
+ extension_id = "some@extension-id"
+ locale = "fr"
+
+ args = [
+ "--input",
+ tmpdir,
+ # This file has been created right above.
+ "--metadata",
+ "empty-metadata.ftl",
+ "--app-name",
+ "Firefox",
+ "--l10n-basedir",
+ "/var/vcs/l10n-central",
+ "--locales",
+ locale,
+ "--langpack-eid",
+ extension_id,
+ "--app-version",
+ tc["app_version"],
+ "--max-app-ver",
+ tc["max_app_version"],
+ ]
+ langpack_manifest.main(args)
+
+ with open(os.path.join(tmpdir, "manifest.json")) as manifest_file:
+ manifest = json.load(manifest_file)
+ self.assertEqual(manifest["version"], tc["expected_version"])
+ self.assertEqual(manifest["langpack_id"], locale)
+ self.assertEqual(
+ manifest["browser_specific_settings"],
+ {
+ "gecko": {
+ "id": extension_id,
+ "strict_min_version": tc["expected_min_version"],
+ "strict_max_version": tc["expected_max_version"],
+ }
+ },
+ )
+ finally:
+ shutil.rmtree(tmpdir, ignore_errors=True)
+ del os.environ["MOZ_BUILD_DATE"]
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozbuild/test/action/test_node.py b/python/mozbuild/mozbuild/test/action/test_node.py
new file mode 100644
index 0000000000..f1ab5afd17
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/action/test_node.py
@@ -0,0 +1,80 @@
+# -*- coding: utf-8 -*-
+
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+import os
+import unittest
+
+import buildconfig
+import mozpack.path as mozpath
+import mozunit
+
+from mozbuild.action.node import SCRIPT_ALLOWLIST, generate
+from mozbuild.nodeutil import find_node_executable
+
+test_data_path = mozpath.abspath(mozpath.dirname(__file__))
+test_data_path = mozpath.join(test_data_path, "data", "node")
+
+
+def data(name):
+ return os.path.join(test_data_path, name)
+
+
+TEST_SCRIPT = data("node-test-script.js")
+NONEXISTENT_TEST_SCRIPT = data("non-existent-test-script.js")
+
+
+class TestNode(unittest.TestCase):
+ """
+ Tests for node.py.
+ """
+
+ def setUp(self):
+ if not buildconfig.substs.get("NODEJS"):
+ buildconfig.substs["NODEJS"] = find_node_executable()[0]
+ SCRIPT_ALLOWLIST.append(TEST_SCRIPT)
+
+ def tearDown(self):
+ try:
+ SCRIPT_ALLOWLIST.remove(TEST_SCRIPT)
+ except Exception:
+ pass
+
+ def test_generate_no_returned_deps(self):
+ deps = generate("dummy_argument", TEST_SCRIPT)
+
+ self.assertSetEqual(deps, set([]))
+
+ def test_generate_returns_passed_deps(self):
+ deps = generate("dummy_argument", TEST_SCRIPT, "a", "b")
+
+ self.assertSetEqual(deps, set(["a", "b"]))
+
+ def test_called_process_error_handled(self):
+ SCRIPT_ALLOWLIST.append(NONEXISTENT_TEST_SCRIPT)
+
+ with self.assertRaises(SystemExit) as cm:
+ generate("dummy_arg", NONEXISTENT_TEST_SCRIPT)
+
+ self.assertEqual(cm.exception.code, 1)
+ SCRIPT_ALLOWLIST.remove(NONEXISTENT_TEST_SCRIPT)
+
+ def test_nodejs_not_set(self):
+ buildconfig.substs["NODEJS"] = None
+
+ with self.assertRaises(SystemExit) as cm:
+ generate("dummy_arg", TEST_SCRIPT)
+
+ self.assertEqual(cm.exception.code, 1)
+
+ def test_generate_missing_allowlist_entry_exit_code(self):
+ SCRIPT_ALLOWLIST.remove(TEST_SCRIPT)
+ with self.assertRaises(SystemExit) as cm:
+ generate("dummy_arg", TEST_SCRIPT)
+
+ self.assertEqual(cm.exception.code, 1)
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozbuild/test/action/test_process_install_manifest.py b/python/mozbuild/mozbuild/test/action/test_process_install_manifest.py
new file mode 100644
index 0000000000..3aea4bca73
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/action/test_process_install_manifest.py
@@ -0,0 +1,65 @@
+# -*- coding: utf-8 -*-
+
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+import os
+
+import mozunit
+from mozpack.manifests import InstallManifest
+from mozpack.test.test_files import TestWithTmpDir
+
+import mozbuild.action.process_install_manifest as process_install_manifest
+
+
+class TestGenerateManifest(TestWithTmpDir):
+ """
+ Unit tests for process_install_manifest.py.
+ """
+
+ def test_process_manifest(self):
+ source = self.tmppath("source")
+ os.mkdir(source)
+ os.mkdir("%s/base" % source)
+ os.mkdir("%s/base/foo" % source)
+ os.mkdir("%s/base2" % source)
+
+ with open("%s/base/foo/file1" % source, "a"):
+ pass
+
+ with open("%s/base/foo/file2" % source, "a"):
+ pass
+
+ with open("%s/base2/file3" % source, "a"):
+ pass
+
+ m = InstallManifest()
+ m.add_pattern_link("%s/base" % source, "**", "")
+ m.add_link("%s/base2/file3" % source, "foo/file3")
+
+ p = self.tmppath("m")
+ m.write(path=p)
+
+ dest = self.tmppath("dest")
+ track = self.tmppath("track")
+
+ for i in range(2):
+ process_install_manifest.process_manifest(dest, [p], track)
+
+ self.assertTrue(os.path.exists(self.tmppath("dest/foo/file1")))
+ self.assertTrue(os.path.exists(self.tmppath("dest/foo/file2")))
+ self.assertTrue(os.path.exists(self.tmppath("dest/foo/file3")))
+
+ m = InstallManifest()
+ m.write(path=p)
+
+ for i in range(2):
+ process_install_manifest.process_manifest(dest, [p], track)
+
+ self.assertFalse(os.path.exists(self.tmppath("dest/foo/file1")))
+ self.assertFalse(os.path.exists(self.tmppath("dest/foo/file2")))
+ self.assertFalse(os.path.exists(self.tmppath("dest/foo/file3")))
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozbuild/test/backend/__init__.py b/python/mozbuild/mozbuild/test/backend/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/__init__.py
diff --git a/python/mozbuild/mozbuild/test/backend/common.py b/python/mozbuild/mozbuild/test/backend/common.py
new file mode 100644
index 0000000000..07cfa7540f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/common.py
@@ -0,0 +1,253 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import unittest
+from collections import defaultdict
+from shutil import rmtree
+from tempfile import mkdtemp
+
+import mozpack.path as mozpath
+from mach.logging import LoggingManager
+
+from mozbuild.backend.configenvironment import ConfigEnvironment
+from mozbuild.frontend.emitter import TreeMetadataEmitter
+from mozbuild.frontend.reader import BuildReader
+
+log_manager = LoggingManager()
+log_manager.add_terminal_logging()
+
+
+test_data_path = mozpath.abspath(mozpath.dirname(__file__))
+test_data_path = mozpath.join(test_data_path, "data")
+
+
+CONFIGS = defaultdict(
+ lambda: {
+ "defines": {},
+ "substs": {"OS_TARGET": "WINNT"},
+ },
+ {
+ "binary-components": {
+ "defines": {},
+ "substs": {
+ "LIB_PREFIX": "lib",
+ "LIB_SUFFIX": "a",
+ "COMPILE_ENVIRONMENT": "1",
+ },
+ },
+ "database": {
+ "defines": {},
+ "substs": {
+ "CC": "clang",
+ "CXX": "clang++",
+ "LIB_PREFIX": "lib",
+ "LIB_SUFFIX": "a",
+ },
+ },
+ "rust-library": {
+ "defines": {},
+ "substs": {
+ "COMPILE_ENVIRONMENT": "1",
+ "RUST_TARGET": "x86_64-unknown-linux-gnu",
+ "LIB_PREFIX": "lib",
+ "LIB_SUFFIX": "a",
+ },
+ },
+ "host-rust-library": {
+ "defines": {},
+ "substs": {
+ "COMPILE_ENVIRONMENT": "1",
+ "RUST_HOST_TARGET": "x86_64-unknown-linux-gnu",
+ "RUST_TARGET": "armv7-linux-androideabi",
+ "LIB_PREFIX": "lib",
+ "LIB_SUFFIX": "a",
+ },
+ },
+ "host-rust-library-features": {
+ "defines": {},
+ "substs": {
+ "COMPILE_ENVIRONMENT": "1",
+ "RUST_HOST_TARGET": "x86_64-unknown-linux-gnu",
+ "RUST_TARGET": "armv7-linux-androideabi",
+ "LIB_PREFIX": "lib",
+ "LIB_SUFFIX": "a",
+ },
+ },
+ "rust-library-features": {
+ "defines": {},
+ "substs": {
+ "COMPILE_ENVIRONMENT": "1",
+ "RUST_TARGET": "x86_64-unknown-linux-gnu",
+ "LIB_PREFIX": "lib",
+ "LIB_SUFFIX": "a",
+ },
+ },
+ "rust-programs": {
+ "defines": {},
+ "substs": {
+ "COMPILE_ENVIRONMENT": "1",
+ "RUST_TARGET": "i686-pc-windows-msvc",
+ "RUST_HOST_TARGET": "i686-pc-windows-msvc",
+ "BIN_SUFFIX": ".exe",
+ "HOST_BIN_SUFFIX": ".exe",
+ },
+ },
+ "test-support-binaries-tracked": {
+ "defines": {},
+ "substs": {
+ "COMPILE_ENVIRONMENT": "1",
+ "LIB_SUFFIX": "dll",
+ "BIN_SUFFIX": ".exe",
+ },
+ },
+ "sources": {
+ "defines": {},
+ "substs": {
+ "LIB_PREFIX": "lib",
+ "LIB_SUFFIX": "a",
+ },
+ },
+ "stub0": {
+ "defines": {
+ "MOZ_TRUE_1": "1",
+ "MOZ_TRUE_2": "1",
+ },
+ "substs": {
+ "MOZ_FOO": "foo",
+ "MOZ_BAR": "bar",
+ },
+ },
+ "substitute_config_files": {
+ "defines": {},
+ "substs": {
+ "MOZ_FOO": "foo",
+ "MOZ_BAR": "bar",
+ },
+ },
+ "test_config": {
+ "defines": {
+ "foo": "baz qux",
+ "baz": 1,
+ },
+ "substs": {
+ "foo": "bar baz",
+ },
+ },
+ "visual-studio": {
+ "defines": {},
+ "substs": {
+ "MOZ_APP_NAME": "my_app",
+ },
+ },
+ "prog-lib-c-only": {
+ "defines": {},
+ "substs": {
+ "COMPILE_ENVIRONMENT": "1",
+ "LIB_SUFFIX": ".a",
+ "BIN_SUFFIX": "",
+ },
+ },
+ "gn-processor": {
+ "defines": {},
+ "substs": {
+ "BUILD_BACKENDS": [
+ "GnMozbuildWriter",
+ "RecursiveMake",
+ ],
+ "COMPILE_ENVIRONMENT": "1",
+ "STL_FLAGS": [],
+ "RUST_TARGET": "x86_64-unknown-linux-gnu",
+ "LIB_PREFIX": "lib",
+ "LIB_SUFFIX": "a",
+ "OS_TARGET": "Darwin",
+ },
+ },
+ "ipdl_sources": {
+ "defines": {},
+ "substs": {
+ "COMPILE_ENVIRONMENT": "1",
+ "LIB_SUFFIX": ".a",
+ "BIN_SUFFIX": "",
+ },
+ },
+ "program-paths": {
+ "defines": {},
+ "substs": {
+ "COMPILE_ENVIRONMENT": "1",
+ "BIN_SUFFIX": ".prog",
+ },
+ },
+ "linkage": {
+ "defines": {},
+ "substs": {
+ "CC_TYPE": "clang",
+ "COMPILE_ENVIRONMENT": "1",
+ "LIB_SUFFIX": "a",
+ "BIN_SUFFIX": ".exe",
+ "DLL_SUFFIX": ".so",
+ "OBJ_SUFFIX": "o",
+ "EXPAND_LIBS_LIST_STYLE": "list",
+ },
+ },
+ },
+)
+
+
+class BackendTester(unittest.TestCase):
+ def setUp(self):
+ self._old_env = dict(os.environ)
+ os.environ.pop("MOZ_OBJDIR", None)
+
+ def tearDown(self):
+ os.environ.clear()
+ os.environ.update(self._old_env)
+
+ def _get_environment(self, name):
+ """Obtain a new instance of a ConfigEnvironment for a known profile.
+
+ A new temporary object directory is created for the environment. The
+ environment is cleaned up automatically when the test finishes.
+ """
+ config = CONFIGS[name]
+ config["substs"]["MOZ_UI_LOCALE"] = "en-US"
+
+ srcdir = mozpath.join(test_data_path, name)
+ config["substs"]["top_srcdir"] = srcdir
+
+ # Create the objdir in the srcdir to ensure that they share the
+ # same drive on Windows.
+ objdir = mkdtemp(dir=srcdir)
+ self.addCleanup(rmtree, objdir)
+
+ return ConfigEnvironment(srcdir, objdir, **config)
+
+ def _emit(self, name, env=None):
+ env = env or self._get_environment(name)
+ reader = BuildReader(env)
+ emitter = TreeMetadataEmitter(env)
+
+ return env, emitter.emit(reader.read_topsrcdir())
+
+ def _consume(self, name, cls, env=None):
+ env, objs = self._emit(name, env=env)
+ backend = cls(env)
+ backend.consume(objs)
+
+ return env
+
+ def _tree_paths(self, topdir, filename):
+ for dirpath, dirnames, filenames in os.walk(topdir):
+ for f in filenames:
+ if f == filename:
+ yield mozpath.relpath(mozpath.join(dirpath, f), topdir)
+
+ def _mozbuild_paths(self, env):
+ return self._tree_paths(env.topsrcdir, "moz.build")
+
+ def _makefile_in_paths(self, env):
+ return self._tree_paths(env.topsrcdir, "Makefile.in")
+
+
+__all__ = ["BackendTester"]
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/app/moz.build b/python/mozbuild/mozbuild/test/backend/data/build/app/moz.build
new file mode 100644
index 0000000000..27641b2080
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/app/moz.build
@@ -0,0 +1,54 @@
+DIST_SUBDIR = "app"
+
+EXTRA_JS_MODULES += [
+ "../foo.jsm",
+]
+
+EXTRA_JS_MODULES.child += [
+ "../bar.jsm",
+]
+
+EXTRA_PP_JS_MODULES += [
+ "../baz.jsm",
+]
+
+EXTRA_PP_JS_MODULES.child2 += [
+ "../qux.jsm",
+]
+
+FINAL_TARGET_FILES += [
+ "../foo.ini",
+]
+
+FINAL_TARGET_FILES.child += [
+ "../bar.ini",
+]
+
+FINAL_TARGET_PP_FILES += [
+ "../baz.ini",
+ "../foo.css",
+]
+
+FINAL_TARGET_PP_FILES.child2 += [
+ "../qux.ini",
+]
+
+EXTRA_COMPONENTS += [
+ "../components.manifest",
+ "../foo.js",
+]
+
+EXTRA_PP_COMPONENTS += [
+ "../bar.js",
+]
+
+JS_PREFERENCE_FILES += [
+ "../prefs.js",
+]
+
+JAR_MANIFESTS += [
+ "../jar.mn",
+]
+
+DEFINES["FOO"] = "bar"
+DEFINES["BAR"] = True
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/bar.ini b/python/mozbuild/mozbuild/test/backend/data/build/bar.ini
new file mode 100644
index 0000000000..91dcbe1536
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/bar.ini
@@ -0,0 +1 @@
+bar.ini
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/bar.js b/python/mozbuild/mozbuild/test/backend/data/build/bar.js
new file mode 100644
index 0000000000..1a608e8a56
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/bar.js
@@ -0,0 +1,2 @@
+#filter substitution
+bar.js: FOO is @FOO@
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/bar.jsm b/python/mozbuild/mozbuild/test/backend/data/build/bar.jsm
new file mode 100644
index 0000000000..05db2e2f6a
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/bar.jsm
@@ -0,0 +1 @@
+bar.jsm
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/baz.ini b/python/mozbuild/mozbuild/test/backend/data/build/baz.ini
new file mode 100644
index 0000000000..975a1e437d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/baz.ini
@@ -0,0 +1,2 @@
+#filter substitution
+baz.ini: FOO is @FOO@
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/baz.jsm b/python/mozbuild/mozbuild/test/backend/data/build/baz.jsm
new file mode 100644
index 0000000000..f39ed02082
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/baz.jsm
@@ -0,0 +1,2 @@
+#filter substitution
+baz.jsm: FOO is @FOO@
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/components.manifest b/python/mozbuild/mozbuild/test/backend/data/build/components.manifest
new file mode 100644
index 0000000000..b5bb87254c
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/components.manifest
@@ -0,0 +1,2 @@
+component {foo} foo.js
+component {bar} bar.js
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/foo.css b/python/mozbuild/mozbuild/test/backend/data/build/foo.css
new file mode 100644
index 0000000000..1803d6c572
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/foo.css
@@ -0,0 +1,2 @@
+%filter substitution
+foo.css: FOO is @FOO@
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/foo.ini b/python/mozbuild/mozbuild/test/backend/data/build/foo.ini
new file mode 100644
index 0000000000..c93c9d7658
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/foo.ini
@@ -0,0 +1 @@
+foo.ini
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/foo.js b/python/mozbuild/mozbuild/test/backend/data/build/foo.js
new file mode 100644
index 0000000000..4fa71e2d27
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/foo.js
@@ -0,0 +1 @@
+foo.js
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/foo.jsm b/python/mozbuild/mozbuild/test/backend/data/build/foo.jsm
new file mode 100644
index 0000000000..d58fd61c16
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/foo.jsm
@@ -0,0 +1 @@
+foo.jsm
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/jar.mn b/python/mozbuild/mozbuild/test/backend/data/build/jar.mn
new file mode 100644
index 0000000000..393055c4ea
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/jar.mn
@@ -0,0 +1,11 @@
+foo.jar:
+% content bar %child/
+% content foo %
+ foo.js
+* foo.css
+ bar.js (subdir/bar.js)
+ qux.js (subdir/bar.js)
+* child/hoge.js (bar.js)
+* child/baz.jsm
+
+% override chrome://foo/bar.svg#hello chrome://bar/bar.svg#hello
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/moz.build b/python/mozbuild/mozbuild/test/backend/data/build/moz.build
new file mode 100644
index 0000000000..700516754d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/moz.build
@@ -0,0 +1,68 @@
+CONFIGURE_SUBST_FILES += [
+ "/config/autoconf.mk",
+ "/config/emptyvars.mk",
+]
+
+EXTRA_JS_MODULES += [
+ "foo.jsm",
+]
+
+EXTRA_JS_MODULES.child += [
+ "bar.jsm",
+]
+
+EXTRA_PP_JS_MODULES += [
+ "baz.jsm",
+]
+
+EXTRA_PP_JS_MODULES.child2 += [
+ "qux.jsm",
+]
+
+FINAL_TARGET_FILES += [
+ "foo.ini",
+]
+
+FINAL_TARGET_FILES.child += [
+ "bar.ini",
+]
+
+FINAL_TARGET_PP_FILES += [
+ "baz.ini",
+]
+
+FINAL_TARGET_PP_FILES.child2 += [
+ "foo.css",
+ "qux.ini",
+]
+
+EXTRA_COMPONENTS += [
+ "components.manifest",
+ "foo.js",
+]
+
+EXTRA_PP_COMPONENTS += [
+ "bar.js",
+]
+
+JS_PREFERENCE_FILES += [
+ "prefs.js",
+]
+
+RESOURCE_FILES += [
+ "resource",
+]
+
+RESOURCE_FILES.child += [
+ "resource2",
+]
+
+DEFINES["FOO"] = "foo"
+
+JAR_MANIFESTS += [
+ "jar.mn",
+]
+
+DIRS += [
+ "app",
+]
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/prefs.js b/python/mozbuild/mozbuild/test/backend/data/build/prefs.js
new file mode 100644
index 0000000000..a030da9fd7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/prefs.js
@@ -0,0 +1 @@
+prefs.js
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/qux.ini b/python/mozbuild/mozbuild/test/backend/data/build/qux.ini
new file mode 100644
index 0000000000..3ce157eb6d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/qux.ini
@@ -0,0 +1,5 @@
+#ifdef BAR
+qux.ini: BAR is defined
+#else
+qux.ini: BAR is not defined
+#endif
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/qux.jsm b/python/mozbuild/mozbuild/test/backend/data/build/qux.jsm
new file mode 100644
index 0000000000..9c5fe28d58
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/qux.jsm
@@ -0,0 +1,5 @@
+#ifdef BAR
+qux.jsm: BAR is defined
+#else
+qux.jsm: BAR is not defined
+#endif
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/resource b/python/mozbuild/mozbuild/test/backend/data/build/resource
new file mode 100644
index 0000000000..91e75c679e
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/resource
@@ -0,0 +1 @@
+resource
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/resource2 b/python/mozbuild/mozbuild/test/backend/data/build/resource2
new file mode 100644
index 0000000000..b7c2700964
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/resource2
@@ -0,0 +1 @@
+resource2
diff --git a/python/mozbuild/mozbuild/test/backend/data/build/subdir/bar.js b/python/mozbuild/mozbuild/test/backend/data/build/subdir/bar.js
new file mode 100644
index 0000000000..80c887a84a
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/build/subdir/bar.js
@@ -0,0 +1 @@
+bar.js
diff --git a/python/mozbuild/mozbuild/test/backend/data/database/bar.c b/python/mozbuild/mozbuild/test/backend/data/database/bar.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/database/bar.c
diff --git a/python/mozbuild/mozbuild/test/backend/data/database/baz.cpp b/python/mozbuild/mozbuild/test/backend/data/database/baz.cpp
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/database/baz.cpp
diff --git a/python/mozbuild/mozbuild/test/backend/data/database/build/non-unified-compat b/python/mozbuild/mozbuild/test/backend/data/database/build/non-unified-compat
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/database/build/non-unified-compat
diff --git a/python/mozbuild/mozbuild/test/backend/data/database/foo.c b/python/mozbuild/mozbuild/test/backend/data/database/foo.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/database/foo.c
diff --git a/python/mozbuild/mozbuild/test/backend/data/database/moz.build b/python/mozbuild/mozbuild/test/backend/data/database/moz.build
new file mode 100644
index 0000000000..ebc5d05b5c
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/database/moz.build
@@ -0,0 +1,14 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+Library("dummy")
+
+SOURCES = ["bar.c", "baz.cpp", "foo.c", "qux.cpp"]
diff --git a/python/mozbuild/mozbuild/test/backend/data/database/qux.cpp b/python/mozbuild/mozbuild/test/backend/data/database/qux.cpp
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/database/qux.cpp
diff --git a/python/mozbuild/mozbuild/test/backend/data/defines/moz.build b/python/mozbuild/mozbuild/test/backend/data/defines/moz.build
new file mode 100644
index 0000000000..b603cac3ff
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/defines/moz.build
@@ -0,0 +1,9 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+value = "xyz"
+DEFINES["FOO"] = True
+DEFINES["BAZ"] = '"ab\'cd"'
+DEFINES["QUX"] = False
+DEFINES["BAR"] = 7
+DEFINES["VALUE"] = value
diff --git a/python/mozbuild/mozbuild/test/backend/data/dist-files/install.rdf b/python/mozbuild/mozbuild/test/backend/data/dist-files/install.rdf
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/dist-files/install.rdf
diff --git a/python/mozbuild/mozbuild/test/backend/data/dist-files/main.js b/python/mozbuild/mozbuild/test/backend/data/dist-files/main.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/dist-files/main.js
diff --git a/python/mozbuild/mozbuild/test/backend/data/dist-files/moz.build b/python/mozbuild/mozbuild/test/backend/data/dist-files/moz.build
new file mode 100644
index 0000000000..25961f149f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/dist-files/moz.build
@@ -0,0 +1,8 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+FINAL_TARGET_PP_FILES += [
+ "install.rdf",
+ "main.js",
+]
diff --git a/python/mozbuild/mozbuild/test/backend/data/exports-generated/dom1.h b/python/mozbuild/mozbuild/test/backend/data/exports-generated/dom1.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/exports-generated/dom1.h
diff --git a/python/mozbuild/mozbuild/test/backend/data/exports-generated/foo.h b/python/mozbuild/mozbuild/test/backend/data/exports-generated/foo.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/exports-generated/foo.h
diff --git a/python/mozbuild/mozbuild/test/backend/data/exports-generated/gfx.h b/python/mozbuild/mozbuild/test/backend/data/exports-generated/gfx.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/exports-generated/gfx.h
diff --git a/python/mozbuild/mozbuild/test/backend/data/exports-generated/moz.build b/python/mozbuild/mozbuild/test/backend/data/exports-generated/moz.build
new file mode 100644
index 0000000000..44c31a3d9c
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/exports-generated/moz.build
@@ -0,0 +1,12 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+EXPORTS += ["!bar.h", "foo.h"]
+EXPORTS.mozilla += ["!mozilla2.h", "mozilla1.h"]
+EXPORTS.mozilla.dom += ["!dom2.h", "!dom3.h", "dom1.h"]
+EXPORTS.gfx += ["gfx.h"]
+
+GENERATED_FILES += ["bar.h"]
+GENERATED_FILES += ["mozilla2.h"]
+GENERATED_FILES += ["dom2.h"]
+GENERATED_FILES += ["dom3.h"]
diff --git a/python/mozbuild/mozbuild/test/backend/data/exports-generated/mozilla1.h b/python/mozbuild/mozbuild/test/backend/data/exports-generated/mozilla1.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/exports-generated/mozilla1.h
diff --git a/python/mozbuild/mozbuild/test/backend/data/exports/dom1.h b/python/mozbuild/mozbuild/test/backend/data/exports/dom1.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/exports/dom1.h
diff --git a/python/mozbuild/mozbuild/test/backend/data/exports/dom2.h b/python/mozbuild/mozbuild/test/backend/data/exports/dom2.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/exports/dom2.h
diff --git a/python/mozbuild/mozbuild/test/backend/data/exports/foo.h b/python/mozbuild/mozbuild/test/backend/data/exports/foo.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/exports/foo.h
diff --git a/python/mozbuild/mozbuild/test/backend/data/exports/gfx.h b/python/mozbuild/mozbuild/test/backend/data/exports/gfx.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/exports/gfx.h
diff --git a/python/mozbuild/mozbuild/test/backend/data/exports/moz.build b/python/mozbuild/mozbuild/test/backend/data/exports/moz.build
new file mode 100644
index 0000000000..371f26f572
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/exports/moz.build
@@ -0,0 +1,8 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+EXPORTS += ["foo.h"]
+EXPORTS.mozilla += ["mozilla1.h", "mozilla2.h"]
+EXPORTS.mozilla.dom += ["dom1.h", "dom2.h"]
+EXPORTS.mozilla.gfx += ["gfx.h"]
+EXPORTS.nspr.private += ["pprio.h"]
diff --git a/python/mozbuild/mozbuild/test/backend/data/exports/mozilla1.h b/python/mozbuild/mozbuild/test/backend/data/exports/mozilla1.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/exports/mozilla1.h
diff --git a/python/mozbuild/mozbuild/test/backend/data/exports/mozilla2.h b/python/mozbuild/mozbuild/test/backend/data/exports/mozilla2.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/exports/mozilla2.h
diff --git a/python/mozbuild/mozbuild/test/backend/data/exports/pprio.h b/python/mozbuild/mozbuild/test/backend/data/exports/pprio.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/exports/pprio.h
diff --git a/python/mozbuild/mozbuild/test/backend/data/final-target-files-wildcard/bar.xyz b/python/mozbuild/mozbuild/test/backend/data/final-target-files-wildcard/bar.xyz
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/final-target-files-wildcard/bar.xyz
diff --git a/python/mozbuild/mozbuild/test/backend/data/final-target-files-wildcard/foo.xyz b/python/mozbuild/mozbuild/test/backend/data/final-target-files-wildcard/foo.xyz
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/final-target-files-wildcard/foo.xyz
diff --git a/python/mozbuild/mozbuild/test/backend/data/final-target-files-wildcard/moz.build b/python/mozbuild/mozbuild/test/backend/data/final-target-files-wildcard/moz.build
new file mode 100644
index 0000000000..d665855234
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/final-target-files-wildcard/moz.build
@@ -0,0 +1,5 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+FINAL_TARGET_FILES.foo += ["*.xyz"]
diff --git a/python/mozbuild/mozbuild/test/backend/data/final_target/both/moz.build b/python/mozbuild/mozbuild/test/backend/data/final_target/both/moz.build
new file mode 100644
index 0000000000..dfbda9183b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/final_target/both/moz.build
@@ -0,0 +1,6 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+XPI_NAME = "mycrazyxpi"
+DIST_SUBDIR = "asubdir"
diff --git a/python/mozbuild/mozbuild/test/backend/data/final_target/dist-subdir/moz.build b/python/mozbuild/mozbuild/test/backend/data/final_target/dist-subdir/moz.build
new file mode 100644
index 0000000000..e44dd197ad
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/final_target/dist-subdir/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIST_SUBDIR = "asubdir"
diff --git a/python/mozbuild/mozbuild/test/backend/data/final_target/final-target/moz.build b/python/mozbuild/mozbuild/test/backend/data/final_target/final-target/moz.build
new file mode 100644
index 0000000000..e008f94478
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/final_target/final-target/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+FINAL_TARGET = "random-final-target"
diff --git a/python/mozbuild/mozbuild/test/backend/data/final_target/moz.build b/python/mozbuild/mozbuild/test/backend/data/final_target/moz.build
new file mode 100644
index 0000000000..319062b78f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/final_target/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS += ["xpi-name", "dist-subdir", "both", "final-target"]
diff --git a/python/mozbuild/mozbuild/test/backend/data/final_target/xpi-name/moz.build b/python/mozbuild/mozbuild/test/backend/data/final_target/xpi-name/moz.build
new file mode 100644
index 0000000000..980810caa3
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/final_target/xpi-name/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+XPI_NAME = "mycrazyxpi"
diff --git a/python/mozbuild/mozbuild/test/backend/data/generated-files-force/foo-data b/python/mozbuild/mozbuild/test/backend/data/generated-files-force/foo-data
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/generated-files-force/foo-data
diff --git a/python/mozbuild/mozbuild/test/backend/data/generated-files-force/generate-bar.py b/python/mozbuild/mozbuild/test/backend/data/generated-files-force/generate-bar.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/generated-files-force/generate-bar.py
diff --git a/python/mozbuild/mozbuild/test/backend/data/generated-files-force/generate-foo.py b/python/mozbuild/mozbuild/test/backend/data/generated-files-force/generate-foo.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/generated-files-force/generate-foo.py
diff --git a/python/mozbuild/mozbuild/test/backend/data/generated-files-force/moz.build b/python/mozbuild/mozbuild/test/backend/data/generated-files-force/moz.build
new file mode 100644
index 0000000000..d86b7b09ea
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/generated-files-force/moz.build
@@ -0,0 +1,14 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+GENERATED_FILES += ["bar.c", "foo.c", "quux.c"]
+
+bar = GENERATED_FILES["bar.c"]
+bar.script = "generate-bar.py:baz"
+bar.force = True
+
+foo = GENERATED_FILES["foo.c"]
+foo.script = "generate-foo.py"
+foo.inputs = ["foo-data"]
+foo.force = False
diff --git a/python/mozbuild/mozbuild/test/backend/data/generated-files/foo-data b/python/mozbuild/mozbuild/test/backend/data/generated-files/foo-data
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/generated-files/foo-data
diff --git a/python/mozbuild/mozbuild/test/backend/data/generated-files/generate-bar.py b/python/mozbuild/mozbuild/test/backend/data/generated-files/generate-bar.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/generated-files/generate-bar.py
diff --git a/python/mozbuild/mozbuild/test/backend/data/generated-files/generate-foo.py b/python/mozbuild/mozbuild/test/backend/data/generated-files/generate-foo.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/generated-files/generate-foo.py
diff --git a/python/mozbuild/mozbuild/test/backend/data/generated-files/moz.build b/python/mozbuild/mozbuild/test/backend/data/generated-files/moz.build
new file mode 100644
index 0000000000..01b444238e
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/generated-files/moz.build
@@ -0,0 +1,12 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+GENERATED_FILES += ["bar.c", "foo.h", "quux.c"]
+
+bar = GENERATED_FILES["bar.c"]
+bar.script = "generate-bar.py:baz"
+
+foo = GENERATED_FILES["foo.h"]
+foo.script = "generate-foo.py"
+foo.inputs = ["foo-data"]
diff --git a/python/mozbuild/mozbuild/test/backend/data/generated_includes/moz.build b/python/mozbuild/mozbuild/test/backend/data/generated_includes/moz.build
new file mode 100644
index 0000000000..31f9042c0a
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/generated_includes/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCAL_INCLUDES += ["!/bar/baz", "!foo"]
diff --git a/python/mozbuild/mozbuild/test/backend/data/host-defines/moz.build b/python/mozbuild/mozbuild/test/backend/data/host-defines/moz.build
new file mode 100644
index 0000000000..f1a632c841
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/host-defines/moz.build
@@ -0,0 +1,9 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+value = "xyz"
+HOST_DEFINES["FOO"] = True
+HOST_DEFINES["BAZ"] = '"ab\'cd"'
+HOST_DEFINES["BAR"] = 7
+HOST_DEFINES["VALUE"] = value
+HOST_DEFINES["QUX"] = False
diff --git a/python/mozbuild/mozbuild/test/backend/data/host-rust-library-features/Cargo.toml b/python/mozbuild/mozbuild/test/backend/data/host-rust-library-features/Cargo.toml
new file mode 100644
index 0000000000..147cb3acb3
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/host-rust-library-features/Cargo.toml
@@ -0,0 +1,13 @@
+[package]
+name = "hostrusttool"
+version = "0.1.0"
+authors = ["The Mozilla Project Developers"]
+
+[lib]
+crate-type = ["staticlib"]
+
+[profile.dev]
+panic = "abort"
+
+[profile.release]
+panic = "abort"
diff --git a/python/mozbuild/mozbuild/test/backend/data/host-rust-library-features/moz.build b/python/mozbuild/mozbuild/test/backend/data/host-rust-library-features/moz.build
new file mode 100644
index 0000000000..96fccf2063
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/host-rust-library-features/moz.build
@@ -0,0 +1,22 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def HostLibrary(name):
+ """Template for libraries."""
+ HOST_LIBRARY_NAME = name
+
+
+@template
+def HostRustLibrary(name, features=None):
+ """Template for Rust libraries."""
+ HostLibrary(name)
+
+ IS_RUST_LIBRARY = True
+
+ if features:
+ HOST_RUST_LIBRARY_FEATURES = features
+
+
+HostRustLibrary("hostrusttool", ["musthave", "cantlivewithout"])
diff --git a/python/mozbuild/mozbuild/test/backend/data/host-rust-library/Cargo.toml b/python/mozbuild/mozbuild/test/backend/data/host-rust-library/Cargo.toml
new file mode 100644
index 0000000000..349664c621
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/host-rust-library/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "hostrusttool"
+version = "0.1.0"
+authors = [
+ "The Mozilla Project Developers",
+]
+
+[lib]
+crate-type = ["staticlib"]
+
+[profile.dev]
+panic = "abort"
+
+[profile.release]
+panic = "abort"
diff --git a/python/mozbuild/mozbuild/test/backend/data/host-rust-library/moz.build b/python/mozbuild/mozbuild/test/backend/data/host-rust-library/moz.build
new file mode 100644
index 0000000000..515f5d1a9f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/host-rust-library/moz.build
@@ -0,0 +1,22 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def HostLibrary(name):
+ """Template for libraries."""
+ HOST_LIBRARY_NAME = name
+
+
+@template
+def HostRustLibrary(name, features=None):
+ """Template for Rust libraries."""
+ HostLibrary(name)
+
+ IS_RUST_LIBRARY = True
+
+ if features:
+ HOST_RUST_LIBRARY_FEATURES = features
+
+
+HostRustLibrary("hostrusttool")
diff --git a/python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/moz.build b/python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/moz.build
new file mode 100644
index 0000000000..c38b472911
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/moz.build
@@ -0,0 +1,6 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+# We want to test recursion into the subdir, so do the real work in 'sub'
+DIRS += ["sub"]
diff --git a/python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/sub/foo.h.in b/python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/sub/foo.h.in
new file mode 100644
index 0000000000..da287dfcaa
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/sub/foo.h.in
@@ -0,0 +1 @@
+#define MOZ_FOO @MOZ_FOO@
diff --git a/python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/sub/moz.build b/python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/sub/moz.build
new file mode 100644
index 0000000000..1420a99a8f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/install_substitute_config_files/sub/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+CONFIGURE_SUBST_FILES = ["foo.h"]
+
+EXPORTS.out += ["!foo.h"]
diff --git a/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/bar/moz.build b/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/bar/moz.build
new file mode 100644
index 0000000000..f7d1560af3
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/bar/moz.build
@@ -0,0 +1,16 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+PREPROCESSED_IPDL_SOURCES += [
+ "bar1.ipdl",
+]
+
+IPDL_SOURCES += [
+ "bar.ipdl",
+ "bar2.ipdlh",
+]
+
+FINAL_LIBRARY = "dummy"
diff --git a/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/foo/moz.build b/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/foo/moz.build
new file mode 100644
index 0000000000..02e9f78154
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/foo/moz.build
@@ -0,0 +1,16 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+PREPROCESSED_IPDL_SOURCES += [
+ "foo1.ipdl",
+]
+
+IPDL_SOURCES += [
+ "foo.ipdl",
+ "foo2.ipdlh",
+]
+
+FINAL_LIBRARY = "dummy"
diff --git a/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/ipdl/moz.build b/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/ipdl/moz.build
new file mode 100644
index 0000000000..066397cb84
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/ipdl/moz.build
@@ -0,0 +1,9 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This file just exists to establish a directory as the IPDL root directory.
+
+FINAL_LIBRARY = "dummy"
diff --git a/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/moz.build b/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/moz.build
new file mode 100644
index 0000000000..4f0ddaa10e
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/ipdl_sources/moz.build
@@ -0,0 +1,19 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+@template
+def Library(name):
+ LIBRARY_NAME = name
+
+
+Library("dummy")
+
+DIRS += [
+ "bar",
+ "foo",
+ "ipdl",
+]
diff --git a/python/mozbuild/mozbuild/test/backend/data/jar-manifests/moz.build b/python/mozbuild/mozbuild/test/backend/data/jar-manifests/moz.build
new file mode 100644
index 0000000000..d988c0ff9b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/jar-manifests/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+JAR_MANIFESTS += ["jar.mn"]
diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/moz.build b/python/mozbuild/mozbuild/test/backend/data/linkage/moz.build
new file mode 100644
index 0000000000..f01a012760
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/linkage/moz.build
@@ -0,0 +1,11 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include("templates.mozbuild")
+
+DIRS += [
+ "real",
+ "shared",
+ "prog",
+ "static",
+]
diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/prog/moz.build b/python/mozbuild/mozbuild/test/backend/data/linkage/prog/moz.build
new file mode 100644
index 0000000000..3741f4be09
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/linkage/prog/moz.build
@@ -0,0 +1,11 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS += ["qux"]
+
+Program("MyProgram")
+
+USE_LIBS += [
+ "bar",
+ "baz",
+]
diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/prog/qux/moz.build b/python/mozbuild/mozbuild/test/backend/data/linkage/prog/qux/moz.build
new file mode 100644
index 0000000000..3152de6211
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/linkage/prog/qux/moz.build
@@ -0,0 +1,6 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+SOURCES += ["qux1.c"]
+
+SharedLibrary("qux")
diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/prog/qux/qux1.c b/python/mozbuild/mozbuild/test/backend/data/linkage/prog/qux/qux1.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/linkage/prog/qux/qux1.c
diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/real/foo/foo1.c b/python/mozbuild/mozbuild/test/backend/data/linkage/real/foo/foo1.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/linkage/real/foo/foo1.c
diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/real/foo/foo2.c b/python/mozbuild/mozbuild/test/backend/data/linkage/real/foo/foo2.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/linkage/real/foo/foo2.c
diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/real/foo/moz.build b/python/mozbuild/mozbuild/test/backend/data/linkage/real/foo/moz.build
new file mode 100644
index 0000000000..a0bd7526e6
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/linkage/real/foo/moz.build
@@ -0,0 +1,6 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+SOURCES += ["foo1.c", "foo2.c"]
+
+FINAL_LIBRARY = "foo"
diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/real/moz.build b/python/mozbuild/mozbuild/test/backend/data/linkage/real/moz.build
new file mode 100644
index 0000000000..32f9c1d656
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/linkage/real/moz.build
@@ -0,0 +1,14 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS += [
+ "foo",
+]
+
+NO_EXPAND_LIBS = True
+
+OS_LIBS += ["-lbaz"]
+
+USE_LIBS += ["static:baz"]
+
+Library("foo")
diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/shared/baz/baz1.c b/python/mozbuild/mozbuild/test/backend/data/linkage/shared/baz/baz1.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/linkage/shared/baz/baz1.c
diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/shared/baz/moz.build b/python/mozbuild/mozbuild/test/backend/data/linkage/shared/baz/moz.build
new file mode 100644
index 0000000000..3299fa28f4
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/linkage/shared/baz/moz.build
@@ -0,0 +1,6 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+SOURCES += ["baz1.c"]
+
+FINAL_LIBRARY = "baz"
diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/shared/moz.build b/python/mozbuild/mozbuild/test/backend/data/linkage/shared/moz.build
new file mode 100644
index 0000000000..42d79fe1fd
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/linkage/shared/moz.build
@@ -0,0 +1,14 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS += [
+ "baz",
+]
+
+STATIC_LIBRARY_NAME = "baz_s"
+FORCE_STATIC_LIB = True
+
+OS_LIBS += ["-lfoo"]
+USE_LIBS += ["qux"]
+
+SharedLibrary("baz")
diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/bar1.cc b/python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/bar1.cc
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/bar1.cc
diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/bar2.cc b/python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/bar2.cc
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/bar2.cc
diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/bar_helper/bar_helper1.cpp b/python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/bar_helper/bar_helper1.cpp
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/bar_helper/bar_helper1.cpp
diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/bar_helper/moz.build b/python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/bar_helper/moz.build
new file mode 100644
index 0000000000..12d0fc83fb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/bar_helper/moz.build
@@ -0,0 +1,8 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+SOURCES += [
+ "bar_helper1.cpp",
+]
+
+FINAL_LIBRARY = "bar"
diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/moz.build b/python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/moz.build
new file mode 100644
index 0000000000..d9d75803ed
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/linkage/static/bar/moz.build
@@ -0,0 +1,13 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+SOURCES += [
+ "bar1.cc",
+ "bar2.cc",
+]
+
+DIRS += [
+ "bar_helper",
+]
+
+FINAL_LIBRARY = "bar"
diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/static/moz.build b/python/mozbuild/mozbuild/test/backend/data/linkage/static/moz.build
new file mode 100644
index 0000000000..37b3d96cc7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/linkage/static/moz.build
@@ -0,0 +1,12 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS += [
+ "bar",
+]
+
+USE_LIBS += ["foo"]
+
+OS_LIBS += ["-lbar"]
+
+Library("bar")
diff --git a/python/mozbuild/mozbuild/test/backend/data/linkage/templates.mozbuild b/python/mozbuild/mozbuild/test/backend/data/linkage/templates.mozbuild
new file mode 100644
index 0000000000..1f874060df
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/linkage/templates.mozbuild
@@ -0,0 +1,23 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+@template
+def Library(name):
+ LIBRARY_NAME = name
+
+@template
+def SharedLibrary(name):
+ FORCE_SHARED_LIB = True
+ LIBRARY_NAME = name
+
+@template
+def Binary():
+ # Add -lfoo for testing purposes.
+ OS_LIBS += ['foo']
+
+
+@template
+def Program(name):
+ PROGRAM = name
+
+ Binary() \ No newline at end of file
diff --git a/python/mozbuild/mozbuild/test/backend/data/local_includes/bar/baz/dummy_file_for_nonempty_directory b/python/mozbuild/mozbuild/test/backend/data/local_includes/bar/baz/dummy_file_for_nonempty_directory
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/local_includes/bar/baz/dummy_file_for_nonempty_directory
diff --git a/python/mozbuild/mozbuild/test/backend/data/local_includes/foo/dummy_file_for_nonempty_directory b/python/mozbuild/mozbuild/test/backend/data/local_includes/foo/dummy_file_for_nonempty_directory
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/local_includes/foo/dummy_file_for_nonempty_directory
diff --git a/python/mozbuild/mozbuild/test/backend/data/local_includes/moz.build b/python/mozbuild/mozbuild/test/backend/data/local_includes/moz.build
new file mode 100644
index 0000000000..1c29ac2ea2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/local_includes/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCAL_INCLUDES += ["/bar/baz", "foo"]
diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-files/en-US/bar.ini b/python/mozbuild/mozbuild/test/backend/data/localized-files/en-US/bar.ini
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/localized-files/en-US/bar.ini
diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-files/en-US/foo.js b/python/mozbuild/mozbuild/test/backend/data/localized-files/en-US/foo.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/localized-files/en-US/foo.js
diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-files/moz.build b/python/mozbuild/mozbuild/test/backend/data/localized-files/moz.build
new file mode 100644
index 0000000000..93a97c7b84
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/localized-files/moz.build
@@ -0,0 +1,9 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCALIZED_FILES += [
+ "en-US/abc/*.abc",
+ "en-US/bar.ini",
+ "en-US/foo.js",
+]
diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/en-US/localized-input b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/en-US/localized-input
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/en-US/localized-input
diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/foo-data b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/foo-data
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/foo-data
diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/generate-foo.py b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/generate-foo.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/generate-foo.py
diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/inner/locales/en-US/localized-input b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/inner/locales/en-US/localized-input
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/inner/locales/en-US/localized-input
diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/locales/en-US/localized-input b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/locales/en-US/localized-input
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/locales/en-US/localized-input
diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/moz.build b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/moz.build
new file mode 100644
index 0000000000..2b0cf472c9
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/moz.build
@@ -0,0 +1,32 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCALIZED_GENERATED_FILES += ["foo{AB_CD}.xyz"]
+
+foo = LOCALIZED_GENERATED_FILES["foo{AB_CD}.xyz"]
+foo.script = "generate-foo.py"
+foo.inputs = [
+ "en-US/localized-input",
+ "non-localized-input",
+]
+
+LOCALIZED_GENERATED_FILES += ["bar{AB_rCD}.xyz"]
+
+bar = LOCALIZED_GENERATED_FILES["bar{AB_rCD}.xyz"]
+bar.script = "generate-foo.py"
+bar.inputs = [
+ # Absolute source path.
+ "/inner/locales/en-US/localized-input",
+ "non-localized-input",
+]
+
+LOCALIZED_GENERATED_FILES += ["zot{AB_rCD}.xyz"]
+
+bar = LOCALIZED_GENERATED_FILES["zot{AB_rCD}.xyz"]
+bar.script = "generate-foo.py"
+bar.inputs = [
+ # Relative source path.
+ "locales/en-US/localized-input",
+ "non-localized-input",
+]
diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/non-localized-input b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/non-localized-input
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-AB_CD/non-localized-input
diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/en-US/localized-input b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/en-US/localized-input
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/en-US/localized-input
diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/foo-data b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/foo-data
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/foo-data
diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/generate-foo.py b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/generate-foo.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/generate-foo.py
diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/moz.build b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/moz.build
new file mode 100644
index 0000000000..26fb165e06
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/moz.build
@@ -0,0 +1,22 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCALIZED_GENERATED_FILES += ["foo.xyz"]
+
+foo = LOCALIZED_GENERATED_FILES["foo.xyz"]
+foo.script = "generate-foo.py"
+foo.inputs = [
+ "en-US/localized-input",
+ "non-localized-input",
+]
+
+LOCALIZED_GENERATED_FILES += ["abc.xyz"]
+
+abc = LOCALIZED_GENERATED_FILES["abc.xyz"]
+abc.script = "generate-foo.py"
+abc.inputs = [
+ "en-US/localized-input",
+ "non-localized-input",
+]
+abc.force = True
diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/non-localized-input b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/non-localized-input
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files-force/non-localized-input
diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files/en-US/localized-input b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files/en-US/localized-input
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files/en-US/localized-input
diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files/foo-data b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files/foo-data
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files/foo-data
diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files/generate-foo.py b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files/generate-foo.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files/generate-foo.py
diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files/moz.build b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files/moz.build
new file mode 100644
index 0000000000..f44325dfb1
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files/moz.build
@@ -0,0 +1,15 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCALIZED_GENERATED_FILES += ["foo.xyz"]
+
+foo = LOCALIZED_GENERATED_FILES["foo.xyz"]
+foo.script = "generate-foo.py"
+foo.inputs = [
+ "en-US/localized-input",
+ "non-localized-input",
+]
+
+# Also check that using it in LOCALIZED_FILES does the right thing.
+LOCALIZED_FILES += ["!foo.xyz"]
diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-generated-files/non-localized-input b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files/non-localized-input
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/localized-generated-files/non-localized-input
diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-pp-files/en-US/bar.ini b/python/mozbuild/mozbuild/test/backend/data/localized-pp-files/en-US/bar.ini
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/localized-pp-files/en-US/bar.ini
diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-pp-files/en-US/foo.js b/python/mozbuild/mozbuild/test/backend/data/localized-pp-files/en-US/foo.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/localized-pp-files/en-US/foo.js
diff --git a/python/mozbuild/mozbuild/test/backend/data/localized-pp-files/moz.build b/python/mozbuild/mozbuild/test/backend/data/localized-pp-files/moz.build
new file mode 100644
index 0000000000..8cec207128
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/localized-pp-files/moz.build
@@ -0,0 +1,8 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCALIZED_PP_FILES += [
+ "en-US/bar.ini",
+ "en-US/foo.js",
+]
diff --git a/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-library/c-library.c b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-library/c-library.c
new file mode 100644
index 0000000000..3b09e769db
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-library/c-library.c
@@ -0,0 +1,2 @@
+// Any copyright is dedicated to the Public Domain.
+// http://creativecommons.org/publicdomain/zero/1.0/
diff --git a/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-library/moz.build b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-library/moz.build
new file mode 100644
index 0000000000..8e15d10c43
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-library/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+SharedLibrary("c_library")
+
+SOURCES = ["c-library.c"]
diff --git a/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-program/c_test_program.c b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-program/c_test_program.c
new file mode 100644
index 0000000000..3b09e769db
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-program/c_test_program.c
@@ -0,0 +1,2 @@
+// Any copyright is dedicated to the Public Domain.
+// http://creativecommons.org/publicdomain/zero/1.0/
diff --git a/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-program/moz.build b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-program/moz.build
new file mode 100644
index 0000000000..27f2cd3d5d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-program/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+Program("c_test_program")
+
+SOURCES = ["c_test_program.c"]
diff --git a/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-simple-programs/c_simple_program.c b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-simple-programs/c_simple_program.c
new file mode 100644
index 0000000000..3b09e769db
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-simple-programs/c_simple_program.c
@@ -0,0 +1,2 @@
+// Any copyright is dedicated to the Public Domain.
+// http://creativecommons.org/publicdomain/zero/1.0/
diff --git a/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-simple-programs/moz.build b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-simple-programs/moz.build
new file mode 100644
index 0000000000..db958d1d1f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/c-simple-programs/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+SimplePrograms(["c_simple_program"], ext=".c")
diff --git a/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-library/c-source.c b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-library/c-source.c
new file mode 100644
index 0000000000..3b09e769db
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-library/c-source.c
@@ -0,0 +1,2 @@
+// Any copyright is dedicated to the Public Domain.
+// http://creativecommons.org/publicdomain/zero/1.0/
diff --git a/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-library/cxx-library.cpp b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-library/cxx-library.cpp
new file mode 100644
index 0000000000..3b09e769db
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-library/cxx-library.cpp
@@ -0,0 +1,2 @@
+// Any copyright is dedicated to the Public Domain.
+// http://creativecommons.org/publicdomain/zero/1.0/
diff --git a/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-library/moz.build b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-library/moz.build
new file mode 100644
index 0000000000..ee75ad0cb9
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-library/moz.build
@@ -0,0 +1,10 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+SharedLibrary("cxx-library")
+
+SOURCES = [
+ "c-source.c",
+ "cxx-library.cpp",
+]
diff --git a/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-program/cxx_test_program.cpp b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-program/cxx_test_program.cpp
new file mode 100644
index 0000000000..3b09e769db
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-program/cxx_test_program.cpp
@@ -0,0 +1,2 @@
+// Any copyright is dedicated to the Public Domain.
+// http://creativecommons.org/publicdomain/zero/1.0/
diff --git a/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-program/moz.build b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-program/moz.build
new file mode 100644
index 0000000000..175f18c88a
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-program/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+Program("cxx_test_program")
+
+SOURCES = ["cxx_test_program.cpp"]
diff --git a/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-simple-programs/cxx_simple_program.cpp b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-simple-programs/cxx_simple_program.cpp
new file mode 100644
index 0000000000..3b09e769db
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-simple-programs/cxx_simple_program.cpp
@@ -0,0 +1,2 @@
+// Any copyright is dedicated to the Public Domain.
+// http://creativecommons.org/publicdomain/zero/1.0/
diff --git a/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-simple-programs/moz.build b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-simple-programs/moz.build
new file mode 100644
index 0000000000..e055370900
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/cxx-simple-programs/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+SimplePrograms(["cxx_simple_program"])
diff --git a/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/moz.build b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/moz.build
new file mode 100644
index 0000000000..7f0a6b430b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/moz.build
@@ -0,0 +1,35 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS += [
+ "c-program",
+ "cxx-program",
+ "c-simple-programs",
+ "cxx-simple-programs",
+ "c-library",
+ "cxx-library",
+]
+
+
+@template
+def Program(name):
+ PROGRAM = name
+
+
+@template
+def SimplePrograms(names, ext=".cpp"):
+ SIMPLE_PROGRAMS += names
+ SOURCES += ["%s%s" % (name, ext) for name in names]
+
+
+@template
+def Library(name):
+ LIBRARY_NAME = name
+
+
+@template
+def SharedLibrary(name):
+ Library(name)
+
+ FORCE_SHARED_LIB = True
diff --git a/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/simple-programs/moz.build b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/simple-programs/moz.build
new file mode 100644
index 0000000000..62966a58e1
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/prog-lib-c-only/simple-programs/moz.build
@@ -0,0 +1,3 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
diff --git a/python/mozbuild/mozbuild/test/backend/data/program-paths/dist-bin/moz.build b/python/mozbuild/mozbuild/test/backend/data/program-paths/dist-bin/moz.build
new file mode 100644
index 0000000000..d8b952c014
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/program-paths/dist-bin/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+Program("dist-bin")
diff --git a/python/mozbuild/mozbuild/test/backend/data/program-paths/dist-subdir/moz.build b/python/mozbuild/mozbuild/test/backend/data/program-paths/dist-subdir/moz.build
new file mode 100644
index 0000000000..fc2f664c01
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/program-paths/dist-subdir/moz.build
@@ -0,0 +1,5 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIST_SUBDIR = "foo"
+Program("dist-subdir")
diff --git a/python/mozbuild/mozbuild/test/backend/data/program-paths/final-target/moz.build b/python/mozbuild/mozbuild/test/backend/data/program-paths/final-target/moz.build
new file mode 100644
index 0000000000..a0d5805262
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/program-paths/final-target/moz.build
@@ -0,0 +1,5 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+FINAL_TARGET = "final/target"
+Program("final-target")
diff --git a/python/mozbuild/mozbuild/test/backend/data/program-paths/moz.build b/python/mozbuild/mozbuild/test/backend/data/program-paths/moz.build
new file mode 100644
index 0000000000..d1d087fd45
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/program-paths/moz.build
@@ -0,0 +1,15 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Program(name):
+ PROGRAM = name
+
+
+DIRS += [
+ "dist-bin",
+ "dist-subdir",
+ "final-target",
+ "not-installed",
+]
diff --git a/python/mozbuild/mozbuild/test/backend/data/program-paths/not-installed/moz.build b/python/mozbuild/mozbuild/test/backend/data/program-paths/not-installed/moz.build
new file mode 100644
index 0000000000..c725ab7326
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/program-paths/not-installed/moz.build
@@ -0,0 +1,5 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIST_INSTALL = False
+Program("not-installed")
diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/bar.res.in b/python/mozbuild/mozbuild/test/backend/data/resources/bar.res.in
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/resources/bar.res.in
diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/cursor.cur b/python/mozbuild/mozbuild/test/backend/data/resources/cursor.cur
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/resources/cursor.cur
diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/desktop1.ttf b/python/mozbuild/mozbuild/test/backend/data/resources/desktop1.ttf
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/resources/desktop1.ttf
diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/desktop2.ttf b/python/mozbuild/mozbuild/test/backend/data/resources/desktop2.ttf
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/resources/desktop2.ttf
diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/extra.manifest b/python/mozbuild/mozbuild/test/backend/data/resources/extra.manifest
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/resources/extra.manifest
diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/font1.ttf b/python/mozbuild/mozbuild/test/backend/data/resources/font1.ttf
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/resources/font1.ttf
diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/font2.ttf b/python/mozbuild/mozbuild/test/backend/data/resources/font2.ttf
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/resources/font2.ttf
diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/foo.res b/python/mozbuild/mozbuild/test/backend/data/resources/foo.res
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/resources/foo.res
diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/mobile.ttf b/python/mozbuild/mozbuild/test/backend/data/resources/mobile.ttf
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/resources/mobile.ttf
diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/moz.build b/python/mozbuild/mozbuild/test/backend/data/resources/moz.build
new file mode 100644
index 0000000000..619af26e64
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/resources/moz.build
@@ -0,0 +1,9 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+RESOURCE_FILES += ["bar.res.in", "foo.res"]
+RESOURCE_FILES.cursors += ["cursor.cur"]
+RESOURCE_FILES.fonts += ["font1.ttf", "font2.ttf"]
+RESOURCE_FILES.fonts.desktop += ["desktop1.ttf", "desktop2.ttf"]
+RESOURCE_FILES.fonts.mobile += ["mobile.ttf"]
+RESOURCE_FILES.tests += ["extra.manifest", "test.manifest"]
diff --git a/python/mozbuild/mozbuild/test/backend/data/resources/test.manifest b/python/mozbuild/mozbuild/test/backend/data/resources/test.manifest
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/resources/test.manifest
diff --git a/python/mozbuild/mozbuild/test/backend/data/rust-library-features/Cargo.toml b/python/mozbuild/mozbuild/test/backend/data/rust-library-features/Cargo.toml
new file mode 100644
index 0000000000..0d778b2b0e
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/rust-library-features/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "feature-library"
+version = "0.1.0"
+authors = [
+ "The Mozilla Project Developers",
+]
+
+[lib]
+crate-type = ["staticlib"]
+
+[profile.dev]
+panic = "abort"
+
+[profile.release]
+panic = "abort"
diff --git a/python/mozbuild/mozbuild/test/backend/data/rust-library-features/moz.build b/python/mozbuild/mozbuild/test/backend/data/rust-library-features/moz.build
new file mode 100644
index 0000000000..f17f29b0e7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/rust-library-features/moz.build
@@ -0,0 +1,20 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+@template
+def RustLibrary(name, features):
+ """Template for Rust libraries."""
+ Library(name)
+
+ IS_RUST_LIBRARY = True
+ RUST_LIBRARY_FEATURES = features
+
+
+RustLibrary("feature-library", ["musthave", "cantlivewithout"])
diff --git a/python/mozbuild/mozbuild/test/backend/data/rust-library/Cargo.toml b/python/mozbuild/mozbuild/test/backend/data/rust-library/Cargo.toml
new file mode 100644
index 0000000000..5e9e44632f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/rust-library/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "test-library"
+version = "0.1.0"
+authors = [
+ "The Mozilla Project Developers",
+]
+
+[lib]
+crate-type = ["staticlib"]
+
+[profile.dev]
+panic = "abort"
+
+[profile.release]
+panic = "abort"
diff --git a/python/mozbuild/mozbuild/test/backend/data/rust-library/moz.build b/python/mozbuild/mozbuild/test/backend/data/rust-library/moz.build
new file mode 100644
index 0000000000..b0f29a1ef5
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/rust-library/moz.build
@@ -0,0 +1,19 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+@template
+def RustLibrary(name):
+ """Template for Rust libraries."""
+ Library(name)
+
+ IS_RUST_LIBRARY = True
+
+
+RustLibrary("test-library")
diff --git a/python/mozbuild/mozbuild/test/backend/data/rust-programs/code/Cargo.toml b/python/mozbuild/mozbuild/test/backend/data/rust-programs/code/Cargo.toml
new file mode 100644
index 0000000000..e0d400e070
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/rust-programs/code/Cargo.toml
@@ -0,0 +1,10 @@
+[package]
+authors = ["The Mozilla Project Developers"]
+name = "testing"
+version = "0.0.1"
+
+[[bin]]
+name = "target"
+
+[[bin]]
+name = "host"
diff --git a/python/mozbuild/mozbuild/test/backend/data/rust-programs/code/moz.build b/python/mozbuild/mozbuild/test/backend/data/rust-programs/code/moz.build
new file mode 100644
index 0000000000..f0efdb3799
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/rust-programs/code/moz.build
@@ -0,0 +1,6 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+RUST_PROGRAMS += ["target"]
+HOST_RUST_PROGRAMS += ["host"]
diff --git a/python/mozbuild/mozbuild/test/backend/data/rust-programs/moz.build b/python/mozbuild/mozbuild/test/backend/data/rust-programs/moz.build
new file mode 100644
index 0000000000..cb635f6adb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/rust-programs/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS += ["code"]
diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/bar.cpp b/python/mozbuild/mozbuild/test/backend/data/sources/bar.cpp
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/sources/bar.cpp
diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/bar.s b/python/mozbuild/mozbuild/test/backend/data/sources/bar.s
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/sources/bar.s
diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/baz.c b/python/mozbuild/mozbuild/test/backend/data/sources/baz.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/sources/baz.c
diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/foo.asm b/python/mozbuild/mozbuild/test/backend/data/sources/foo.asm
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/sources/foo.asm
diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/foo.cpp b/python/mozbuild/mozbuild/test/backend/data/sources/foo.cpp
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/sources/foo.cpp
diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/fuga.mm b/python/mozbuild/mozbuild/test/backend/data/sources/fuga.mm
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/sources/fuga.mm
diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/hoge.mm b/python/mozbuild/mozbuild/test/backend/data/sources/hoge.mm
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/sources/hoge.mm
diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/moz.build b/python/mozbuild/mozbuild/test/backend/data/sources/moz.build
new file mode 100644
index 0000000000..40d5a8d38d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/sources/moz.build
@@ -0,0 +1,26 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+Library("dummy")
+
+SOURCES += ["bar.s", "foo.asm"]
+
+HOST_SOURCES += ["bar.cpp", "foo.cpp"]
+HOST_SOURCES += ["baz.c", "qux.c"]
+
+SOURCES += ["baz.c", "qux.c"]
+
+SOURCES += ["fuga.mm", "hoge.mm"]
+
+SOURCES += ["titi.S", "toto.S"]
+
+WASM_SOURCES += ["bar.cpp"]
+WASM_SOURCES += ["baz.c"]
diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/qux.c b/python/mozbuild/mozbuild/test/backend/data/sources/qux.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/sources/qux.c
diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/titi.S b/python/mozbuild/mozbuild/test/backend/data/sources/titi.S
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/sources/titi.S
diff --git a/python/mozbuild/mozbuild/test/backend/data/sources/toto.S b/python/mozbuild/mozbuild/test/backend/data/sources/toto.S
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/sources/toto.S
diff --git a/python/mozbuild/mozbuild/test/backend/data/stub0/Makefile.in b/python/mozbuild/mozbuild/test/backend/data/stub0/Makefile.in
new file mode 100644
index 0000000000..02ff0a3f90
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/stub0/Makefile.in
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+FOO := foo
diff --git a/python/mozbuild/mozbuild/test/backend/data/stub0/dir1/Makefile.in b/python/mozbuild/mozbuild/test/backend/data/stub0/dir1/Makefile.in
new file mode 100644
index 0000000000..17c147d97a
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/stub0/dir1/Makefile.in
@@ -0,0 +1,7 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include $(DEPTH)/config/autoconf.mk
+
+include $(topsrcdir)/config/rules.mk
+
diff --git a/python/mozbuild/mozbuild/test/backend/data/stub0/dir1/moz.build b/python/mozbuild/mozbuild/test/backend/data/stub0/dir1/moz.build
new file mode 100644
index 0000000000..62966a58e1
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/stub0/dir1/moz.build
@@ -0,0 +1,3 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
diff --git a/python/mozbuild/mozbuild/test/backend/data/stub0/dir2/moz.build b/python/mozbuild/mozbuild/test/backend/data/stub0/dir2/moz.build
new file mode 100644
index 0000000000..62966a58e1
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/stub0/dir2/moz.build
@@ -0,0 +1,3 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
diff --git a/python/mozbuild/mozbuild/test/backend/data/stub0/dir3/Makefile.in b/python/mozbuild/mozbuild/test/backend/data/stub0/dir3/Makefile.in
new file mode 100644
index 0000000000..17c147d97a
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/stub0/dir3/Makefile.in
@@ -0,0 +1,7 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include $(DEPTH)/config/autoconf.mk
+
+include $(topsrcdir)/config/rules.mk
+
diff --git a/python/mozbuild/mozbuild/test/backend/data/stub0/dir3/moz.build b/python/mozbuild/mozbuild/test/backend/data/stub0/dir3/moz.build
new file mode 100644
index 0000000000..62966a58e1
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/stub0/dir3/moz.build
@@ -0,0 +1,3 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
diff --git a/python/mozbuild/mozbuild/test/backend/data/stub0/moz.build b/python/mozbuild/mozbuild/test/backend/data/stub0/moz.build
new file mode 100644
index 0000000000..4f6e7cb318
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/stub0/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS += ["dir1"]
+DIRS += ["dir2"]
+TEST_DIRS += ["dir3"]
diff --git a/python/mozbuild/mozbuild/test/backend/data/substitute_config_files/Makefile.in b/python/mozbuild/mozbuild/test/backend/data/substitute_config_files/Makefile.in
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/substitute_config_files/Makefile.in
diff --git a/python/mozbuild/mozbuild/test/backend/data/substitute_config_files/foo.in b/python/mozbuild/mozbuild/test/backend/data/substitute_config_files/foo.in
new file mode 100644
index 0000000000..5331f1f051
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/substitute_config_files/foo.in
@@ -0,0 +1 @@
+TEST = @MOZ_FOO@
diff --git a/python/mozbuild/mozbuild/test/backend/data/substitute_config_files/moz.build b/python/mozbuild/mozbuild/test/backend/data/substitute_config_files/moz.build
new file mode 100644
index 0000000000..bded13e07d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/substitute_config_files/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+CONFIGURE_SUBST_FILES = ["foo"]
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/another-file.sjs b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/another-file.sjs
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/another-file.sjs
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/browser.ini b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/browser.ini
new file mode 100644
index 0000000000..4f1335d6b1
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/browser.ini
@@ -0,0 +1,6 @@
+[DEFAULT]
+support-files =
+ another-file.sjs
+ data/**
+
+[test_sub.js] \ No newline at end of file
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/data/one.txt b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/data/one.txt
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/data/one.txt
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/data/two.txt b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/data/two.txt
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/data/two.txt
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/test_sub.js b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/test_sub.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/child/test_sub.js
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/mochitest.ini b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/mochitest.ini
new file mode 100644
index 0000000000..a9860f3de8
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/mochitest.ini
@@ -0,0 +1,8 @@
+[DEFAULT]
+support-files =
+ support-file.txt
+ !/child/test_sub.js
+ !/child/another-file.sjs
+ !/child/data/**
+
+[test_foo.js]
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/moz.build b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/moz.build
new file mode 100644
index 0000000000..9df54dbc99
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/moz.build
@@ -0,0 +1,5 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+MOCHITEST_MANIFESTS += ["mochitest.ini"]
+BROWSER_CHROME_MANIFESTS += ["child/browser.ini"]
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/support-file.txt b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/support-file.txt
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/support-file.txt
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/test_foo.js b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/test_foo.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifest-shared-support/test_foo.js
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/mochitest-common.ini b/python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/mochitest-common.ini
new file mode 100644
index 0000000000..31d07b5af3
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/mochitest-common.ini
@@ -0,0 +1 @@
+[test_bar.js]
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/mochitest.ini b/python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/mochitest.ini
new file mode 100644
index 0000000000..cf7a3c44bd
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/mochitest.ini
@@ -0,0 +1,2 @@
+[test_foo.js]
+[include:mochitest-common.ini]
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/moz.build b/python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/moz.build
new file mode 100644
index 0000000000..8058c0b836
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/moz.build
@@ -0,0 +1,6 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+MOCHITEST_MANIFESTS += [
+ "mochitest.ini",
+]
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/test_bar.js b/python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/test_bar.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/test_bar.js
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/test_foo.js b/python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/test_foo.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-backend-sources/test_foo.js
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/mochitest1.ini b/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/mochitest1.ini
new file mode 100644
index 0000000000..1f9816a899
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/mochitest1.ini
@@ -0,0 +1,4 @@
+[DEFAULT]
+support-files = support-file.txt
+
+[test_foo.js]
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/mochitest2.ini b/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/mochitest2.ini
new file mode 100644
index 0000000000..e2a2fc96a7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/mochitest2.ini
@@ -0,0 +1,4 @@
+[DEFAULT]
+support-files = support-file.txt
+
+[test_bar.js]
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/moz.build b/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/moz.build
new file mode 100644
index 0000000000..a86b934fa1
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/moz.build
@@ -0,0 +1,7 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+MOCHITEST_MANIFESTS += [
+ "mochitest1.ini",
+ "mochitest2.ini",
+]
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/test_bar.js b/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/test_bar.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/test_bar.js
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/test_foo.js b/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/test_foo.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-duplicate-support-files/test_foo.js
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/instrumentation.ini b/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/instrumentation.ini
new file mode 100644
index 0000000000..03d4f794e2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/instrumentation.ini
@@ -0,0 +1 @@
+[not_packaged.java]
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/mochitest.ini b/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/mochitest.ini
new file mode 100644
index 0000000000..009b2b2239
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/mochitest.ini
@@ -0,0 +1 @@
+[mochitest.js]
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/mochitest.js b/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/mochitest.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/mochitest.js
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/moz.build b/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/moz.build
new file mode 100644
index 0000000000..f0496e09d9
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/moz.build
@@ -0,0 +1,10 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+MOCHITEST_MANIFESTS += [
+ "mochitest.ini",
+]
+
+ANDROID_INSTRUMENTATION_MANIFESTS += [
+ "instrumentation.ini",
+]
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/not_packaged.java b/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/not_packaged.java
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-package-tests/not_packaged.java
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/dir1/test_bar.js b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/dir1/test_bar.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/dir1/test_bar.js
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/dir1/xpcshell.ini b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/dir1/xpcshell.ini
new file mode 100644
index 0000000000..0cddad8ba9
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/dir1/xpcshell.ini
@@ -0,0 +1,3 @@
+[DEFAULT]
+
+[test_bar.js]
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/mochitest.ini b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/mochitest.ini
new file mode 100644
index 0000000000..81869e1fa0
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/mochitest.ini
@@ -0,0 +1,3 @@
+[DEFAULT]
+
+[mochitest.js]
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/mochitest.js b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/mochitest.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/mochitest.js
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/moz.build b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/moz.build
new file mode 100644
index 0000000000..42462a3059
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/moz.build
@@ -0,0 +1,9 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+XPCSHELL_TESTS_MANIFESTS += [
+ "dir1/xpcshell.ini",
+ "xpcshell.ini",
+]
+
+MOCHITEST_MANIFESTS += ["mochitest.ini"]
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/xpcshell.ini b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/xpcshell.ini
new file mode 100644
index 0000000000..f6a5351e94
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/xpcshell.ini
@@ -0,0 +1,4 @@
+[DEFAULT]
+support-files = support/**
+
+[xpcshell.js]
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/xpcshell.js b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/xpcshell.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-manifests-written/xpcshell.js
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/moz.build b/python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/moz.build
new file mode 100644
index 0000000000..eb83fd1826
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS += ["test", "src"]
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/src/moz.build b/python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/src/moz.build
new file mode 100644
index 0000000000..69cde19c29
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/src/moz.build
@@ -0,0 +1,12 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ """Template for libraries"""
+ LIBRARY_NAME = name
+
+
+Library("foo")
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/test/moz.build b/python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/test/moz.build
new file mode 100644
index 0000000000..a43f4083b3
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/test/moz.build
@@ -0,0 +1,32 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+FINAL_TARGET = "_tests/xpcshell/tests/mozbuildtest"
+
+
+@template
+def Library(name):
+ """Template for libraries"""
+ LIBRARY_NAME = name
+
+
+@template
+def SimplePrograms(names, ext=".cpp"):
+ """Template for simple program executables.
+
+ Those have a single source with the same base name as the executable.
+ """
+ SIMPLE_PROGRAMS += names
+ SOURCES += ["%s%s" % (name, ext) for name in names]
+
+
+@template
+def HostLibrary(name):
+ """Template for build tools libraries."""
+ HOST_LIBRARY_NAME = name
+
+
+Library("test-library")
+HostLibrary("host-test-library")
+SimplePrograms(["test-one", "test-two"])
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/test/test-one.cpp b/python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/test/test-one.cpp
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/test/test-one.cpp
diff --git a/python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/test/test-two.cpp b/python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/test/test-two.cpp
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test-support-binaries-tracked/test/test-two.cpp
diff --git a/python/mozbuild/mozbuild/test/backend/data/test_config/file.in b/python/mozbuild/mozbuild/test/backend/data/test_config/file.in
new file mode 100644
index 0000000000..07aa30deb6
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test_config/file.in
@@ -0,0 +1,3 @@
+#ifdef foo
+@foo@
+@bar@
diff --git a/python/mozbuild/mozbuild/test/backend/data/test_config/moz.build b/python/mozbuild/mozbuild/test/backend/data/test_config/moz.build
new file mode 100644
index 0000000000..5cf4c78f90
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/test_config/moz.build
@@ -0,0 +1,3 @@
+CONFIGURE_SUBST_FILES = [
+ "file",
+]
diff --git a/python/mozbuild/mozbuild/test/backend/data/variable_passthru/Makefile.in b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/Makefile.in
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/Makefile.in
diff --git a/python/mozbuild/mozbuild/test/backend/data/variable_passthru/baz.def b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/baz.def
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/baz.def
diff --git a/python/mozbuild/mozbuild/test/backend/data/variable_passthru/moz.build b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/moz.build
new file mode 100644
index 0000000000..81595d2db3
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/moz.build
@@ -0,0 +1,11 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DELAYLOAD_DLLS = ["foo.dll", "bar.dll"]
+
+RCFILE = "foo.rc"
+RCINCLUDE = "bar.rc"
+DEFFILE = "baz.def"
+
+WIN32_EXE_LDFLAGS += ["-subsystem:console"]
diff --git a/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.c b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.c
diff --git a/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.cpp b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.cpp
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.cpp
diff --git a/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.mm b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.mm
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test1.mm
diff --git a/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.c b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.c
diff --git a/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.cpp b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.cpp
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.cpp
diff --git a/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.mm b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.mm
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/variable_passthru/test2.mm
diff --git a/python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/bar.cpp b/python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/bar.cpp
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/bar.cpp
diff --git a/python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/foo.cpp b/python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/foo.cpp
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/foo.cpp
diff --git a/python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/moz.build b/python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/moz.build
new file mode 100644
index 0000000000..ae1fc0c370
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/visual-studio/dir1/moz.build
@@ -0,0 +1,9 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+FINAL_LIBRARY = "test"
+SOURCES += ["bar.cpp", "foo.cpp"]
+LOCAL_INCLUDES += ["/includeA/foo"]
+DEFINES["DEFINEFOO"] = True
+DEFINES["DEFINEBAR"] = "bar"
diff --git a/python/mozbuild/mozbuild/test/backend/data/visual-studio/moz.build b/python/mozbuild/mozbuild/test/backend/data/visual-studio/moz.build
new file mode 100644
index 0000000000..a0a888fa01
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/visual-studio/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS += ["dir1"]
+
+Library("test")
diff --git a/python/mozbuild/mozbuild/test/backend/data/xpidl/bar.idl b/python/mozbuild/mozbuild/test/backend/data/xpidl/bar.idl
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/xpidl/bar.idl
diff --git a/python/mozbuild/mozbuild/test/backend/data/xpidl/config/makefiles/xpidl/Makefile.in b/python/mozbuild/mozbuild/test/backend/data/xpidl/config/makefiles/xpidl/Makefile.in
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/xpidl/config/makefiles/xpidl/Makefile.in
diff --git a/python/mozbuild/mozbuild/test/backend/data/xpidl/foo.idl b/python/mozbuild/mozbuild/test/backend/data/xpidl/foo.idl
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/xpidl/foo.idl
diff --git a/python/mozbuild/mozbuild/test/backend/data/xpidl/moz.build b/python/mozbuild/mozbuild/test/backend/data/xpidl/moz.build
new file mode 100644
index 0000000000..df521ac7c5
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/data/xpidl/moz.build
@@ -0,0 +1,6 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+XPIDL_MODULE = "my_module"
+XPIDL_SOURCES = ["bar.idl", "foo.idl"]
diff --git a/python/mozbuild/mozbuild/test/backend/test_build.py b/python/mozbuild/mozbuild/test/backend/test_build.py
new file mode 100644
index 0000000000..3287ba5e57
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/test_build.py
@@ -0,0 +1,265 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import shutil
+import sys
+import unittest
+from contextlib import contextmanager
+from tempfile import mkdtemp
+
+import buildconfig
+import mozpack.path as mozpath
+import six
+from mozfile import which
+from mozpack.files import FileFinder
+from mozunit import main
+
+from mozbuild.backend import get_backend_class
+from mozbuild.backend.configenvironment import ConfigEnvironment
+from mozbuild.backend.fastermake import FasterMakeBackend
+from mozbuild.backend.recursivemake import RecursiveMakeBackend
+from mozbuild.base import MozbuildObject
+from mozbuild.frontend.emitter import TreeMetadataEmitter
+from mozbuild.frontend.reader import BuildReader
+from mozbuild.util import ensureParentDir
+
+
+def make_path():
+ try:
+ return buildconfig.substs["GMAKE"]
+ except KeyError:
+ fetches_dir = os.environ.get("MOZ_FETCHES_DIR")
+ extra_search_dirs = ()
+ if fetches_dir:
+ extra_search_dirs = (os.path.join(fetches_dir, "mozmake"),)
+ # Fallback for when running the test without an objdir.
+ for name in ("gmake", "make", "mozmake", "gnumake", "mingw32-make"):
+ path = which(name, extra_search_dirs=extra_search_dirs)
+ if path:
+ return path
+
+
+BASE_SUBSTS = [
+ ("PYTHON", mozpath.normsep(sys.executable)),
+ ("PYTHON3", mozpath.normsep(sys.executable)),
+ ("MOZ_UI_LOCALE", "en-US"),
+ ("GMAKE", make_path()),
+]
+
+
+class TestBuild(unittest.TestCase):
+ def setUp(self):
+ self._old_env = dict(os.environ)
+ os.environ.pop("MOZCONFIG", None)
+ os.environ.pop("MOZ_OBJDIR", None)
+ os.environ.pop("MOZ_PGO", None)
+
+ def tearDown(self):
+ os.environ.clear()
+ os.environ.update(self._old_env)
+
+ @contextmanager
+ def do_test_backend(self, *backends, **kwargs):
+ # Create the objdir in the srcdir to ensure that they share
+ # the same drive on Windows.
+ topobjdir = mkdtemp(dir=buildconfig.topsrcdir)
+ try:
+ config = ConfigEnvironment(buildconfig.topsrcdir, topobjdir, **kwargs)
+ reader = BuildReader(config)
+ emitter = TreeMetadataEmitter(config)
+ moz_build = mozpath.join(config.topsrcdir, "test.mozbuild")
+ definitions = list(emitter.emit(reader.read_mozbuild(moz_build, config)))
+ for backend in backends:
+ backend(config).consume(definitions)
+
+ yield config
+ except Exception:
+ raise
+ finally:
+ if not os.environ.get("MOZ_NO_CLEANUP"):
+ shutil.rmtree(topobjdir)
+
+ @contextmanager
+ def line_handler(self):
+ lines = []
+
+ def handle_make_line(line):
+ lines.append(line)
+
+ try:
+ yield handle_make_line
+ except Exception:
+ print("\n".join(lines))
+ raise
+
+ if os.environ.get("MOZ_VERBOSE_MAKE"):
+ print("\n".join(lines))
+
+ def test_recursive_make(self):
+ substs = list(BASE_SUBSTS)
+ with self.do_test_backend(RecursiveMakeBackend, substs=substs) as config:
+ build = MozbuildObject(config.topsrcdir, None, None, config.topobjdir)
+ build._config_environment = config
+ overrides = [
+ "install_manifest_depends=",
+ "MOZ_JAR_MAKER_FILE_FORMAT=flat",
+ "TEST_MOZBUILD=1",
+ ]
+ with self.line_handler() as handle_make_line:
+ build._run_make(
+ directory=config.topobjdir,
+ target=overrides,
+ silent=False,
+ line_handler=handle_make_line,
+ )
+
+ self.validate(config)
+
+ def test_faster_recursive_make(self):
+ substs = list(BASE_SUBSTS) + [
+ ("BUILD_BACKENDS", "FasterMake+RecursiveMake"),
+ ]
+ with self.do_test_backend(
+ get_backend_class("FasterMake+RecursiveMake"), substs=substs
+ ) as config:
+ buildid = mozpath.join(config.topobjdir, "config", "buildid")
+ ensureParentDir(buildid)
+ with open(buildid, "w") as fh:
+ fh.write("20100101012345\n")
+
+ build = MozbuildObject(config.topsrcdir, None, None, config.topobjdir)
+ build._config_environment = config
+ overrides = [
+ "install_manifest_depends=",
+ "MOZ_JAR_MAKER_FILE_FORMAT=flat",
+ "TEST_MOZBUILD=1",
+ ]
+ with self.line_handler() as handle_make_line:
+ build._run_make(
+ directory=config.topobjdir,
+ target=overrides,
+ silent=False,
+ line_handler=handle_make_line,
+ )
+
+ self.validate(config)
+
+ def test_faster_make(self):
+ substs = list(BASE_SUBSTS) + [
+ ("MOZ_BUILD_APP", "dummy_app"),
+ ("MOZ_WIDGET_TOOLKIT", "dummy_widget"),
+ ]
+ with self.do_test_backend(
+ RecursiveMakeBackend, FasterMakeBackend, substs=substs
+ ) as config:
+ buildid = mozpath.join(config.topobjdir, "config", "buildid")
+ ensureParentDir(buildid)
+ with open(buildid, "w") as fh:
+ fh.write("20100101012345\n")
+
+ build = MozbuildObject(config.topsrcdir, None, None, config.topobjdir)
+ build._config_environment = config
+ overrides = [
+ "TEST_MOZBUILD=1",
+ ]
+ with self.line_handler() as handle_make_line:
+ build._run_make(
+ directory=mozpath.join(config.topobjdir, "faster"),
+ target=overrides,
+ silent=False,
+ line_handler=handle_make_line,
+ )
+
+ self.validate(config)
+
+ def validate(self, config):
+ self.maxDiff = None
+ test_path = mozpath.join(
+ "$SRCDIR",
+ "python",
+ "mozbuild",
+ "mozbuild",
+ "test",
+ "backend",
+ "data",
+ "build",
+ )
+
+ result = {
+ p: six.ensure_text(f.open().read())
+ for p, f in FileFinder(mozpath.join(config.topobjdir, "dist"))
+ }
+ self.assertTrue(len(result))
+ self.assertEqual(
+ result,
+ {
+ "bin/baz.ini": "baz.ini: FOO is foo\n",
+ "bin/child/bar.ini": "bar.ini\n",
+ "bin/child2/foo.css": "foo.css: FOO is foo\n",
+ "bin/child2/qux.ini": "qux.ini: BAR is not defined\n",
+ "bin/chrome.manifest": "manifest chrome/foo.manifest\n"
+ "manifest components/components.manifest\n",
+ "bin/chrome/foo.manifest": "content bar foo/child/\n"
+ "content foo foo/\n"
+ "override chrome://foo/bar.svg#hello "
+ "chrome://bar/bar.svg#hello\n",
+ "bin/chrome/foo/bar.js": "bar.js\n",
+ "bin/chrome/foo/child/baz.jsm": '//@line 2 "%s/baz.jsm"\nbaz.jsm: FOO is foo\n'
+ % (test_path),
+ "bin/chrome/foo/child/hoge.js": '//@line 2 "%s/bar.js"\nbar.js: FOO is foo\n'
+ % (test_path),
+ "bin/chrome/foo/foo.css": "foo.css: FOO is foo\n",
+ "bin/chrome/foo/foo.js": "foo.js\n",
+ "bin/chrome/foo/qux.js": "bar.js\n",
+ "bin/components/bar.js": '//@line 2 "%s/bar.js"\nbar.js: FOO is foo\n'
+ % (test_path),
+ "bin/components/components.manifest": "component {foo} foo.js\ncomponent {bar} bar.js\n", # NOQA: E501
+ "bin/components/foo.js": "foo.js\n",
+ "bin/defaults/pref/prefs.js": "prefs.js\n",
+ "bin/foo.ini": "foo.ini\n",
+ "bin/modules/baz.jsm": '//@line 2 "%s/baz.jsm"\nbaz.jsm: FOO is foo\n'
+ % (test_path),
+ "bin/modules/child/bar.jsm": "bar.jsm\n",
+ "bin/modules/child2/qux.jsm": '//@line 4 "%s/qux.jsm"\nqux.jsm: BAR is not defined\n' # NOQA: E501
+ % (test_path),
+ "bin/modules/foo.jsm": "foo.jsm\n",
+ "bin/res/resource": "resource\n",
+ "bin/res/child/resource2": "resource2\n",
+ "bin/app/baz.ini": "baz.ini: FOO is bar\n",
+ "bin/app/child/bar.ini": "bar.ini\n",
+ "bin/app/child2/qux.ini": "qux.ini: BAR is defined\n",
+ "bin/app/chrome.manifest": "manifest chrome/foo.manifest\n"
+ "manifest components/components.manifest\n",
+ "bin/app/chrome/foo.manifest": "content bar foo/child/\n"
+ "content foo foo/\n"
+ "override chrome://foo/bar.svg#hello "
+ "chrome://bar/bar.svg#hello\n",
+ "bin/app/chrome/foo/bar.js": "bar.js\n",
+ "bin/app/chrome/foo/child/baz.jsm": '//@line 2 "%s/baz.jsm"\nbaz.jsm: FOO is bar\n'
+ % (test_path),
+ "bin/app/chrome/foo/child/hoge.js": '//@line 2 "%s/bar.js"\nbar.js: FOO is bar\n'
+ % (test_path),
+ "bin/app/chrome/foo/foo.css": "foo.css: FOO is bar\n",
+ "bin/app/chrome/foo/foo.js": "foo.js\n",
+ "bin/app/chrome/foo/qux.js": "bar.js\n",
+ "bin/app/components/bar.js": '//@line 2 "%s/bar.js"\nbar.js: FOO is bar\n'
+ % (test_path),
+ "bin/app/components/components.manifest": "component {foo} foo.js\ncomponent {bar} bar.js\n", # NOQA: E501
+ "bin/app/components/foo.js": "foo.js\n",
+ "bin/app/defaults/preferences/prefs.js": "prefs.js\n",
+ "bin/app/foo.css": "foo.css: FOO is bar\n",
+ "bin/app/foo.ini": "foo.ini\n",
+ "bin/app/modules/baz.jsm": '//@line 2 "%s/baz.jsm"\nbaz.jsm: FOO is bar\n'
+ % (test_path),
+ "bin/app/modules/child/bar.jsm": "bar.jsm\n",
+ "bin/app/modules/child2/qux.jsm": '//@line 2 "%s/qux.jsm"\nqux.jsm: BAR is defined\n' # NOQA: E501
+ % (test_path),
+ "bin/app/modules/foo.jsm": "foo.jsm\n",
+ },
+ )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/backend/test_configenvironment.py b/python/mozbuild/mozbuild/test/backend/test_configenvironment.py
new file mode 100644
index 0000000000..7900cdd737
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/test_configenvironment.py
@@ -0,0 +1,73 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import unittest
+
+import mozpack.path as mozpath
+from mozunit import main
+
+import mozbuild.backend.configenvironment as ConfigStatus
+from mozbuild.util import ReadOnlyDict
+
+
+class ConfigEnvironment(ConfigStatus.ConfigEnvironment):
+ def __init__(self, *args, **kwargs):
+ ConfigStatus.ConfigEnvironment.__init__(self, *args, **kwargs)
+ # Be helpful to unit tests
+ if "top_srcdir" not in self.substs:
+ if os.path.isabs(self.topsrcdir):
+ top_srcdir = self.topsrcdir.replace(os.sep, "/")
+ else:
+ top_srcdir = mozpath.relpath(self.topsrcdir, self.topobjdir).replace(
+ os.sep, "/"
+ )
+
+ d = dict(self.substs)
+ d["top_srcdir"] = top_srcdir
+ self.substs = ReadOnlyDict(d)
+
+
+class TestEnvironment(unittest.TestCase):
+ def test_auto_substs(self):
+ """Test the automatically set values of ACDEFINES, ALLSUBSTS
+ and ALLEMPTYSUBSTS.
+ """
+ env = ConfigEnvironment(
+ ".",
+ ".",
+ defines={"foo": "bar", "baz": "qux 42", "abc": "d'e'f"},
+ substs={
+ "FOO": "bar",
+ "FOOBAR": "",
+ "ABC": "def",
+ "bar": "baz qux",
+ "zzz": '"abc def"',
+ "qux": "",
+ },
+ )
+ # Original order of the defines need to be respected in ACDEFINES
+ self.assertEqual(
+ env.substs["ACDEFINES"],
+ """-Dabc='d'\\''e'\\''f' -Dbaz='qux 42' -Dfoo=bar""",
+ )
+ # Likewise for ALLSUBSTS, which also must contain ACDEFINES
+ self.assertEqual(
+ env.substs["ALLSUBSTS"],
+ '''ABC = def
+ACDEFINES = -Dabc='d'\\''e'\\''f' -Dbaz='qux 42' -Dfoo=bar
+FOO = bar
+bar = baz qux
+zzz = "abc def"''',
+ )
+ # ALLEMPTYSUBSTS contains all substs with no value.
+ self.assertEqual(
+ env.substs["ALLEMPTYSUBSTS"],
+ """FOOBAR =
+qux =""",
+ )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/backend/test_database.py b/python/mozbuild/mozbuild/test/backend/test_database.py
new file mode 100644
index 0000000000..3bc0dfefb1
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/test_database.py
@@ -0,0 +1,91 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+import os
+
+import six
+from mozunit import main
+
+from mozbuild.backend.clangd import ClangdBackend
+from mozbuild.backend.static_analysis import StaticAnalysisBackend
+from mozbuild.compilation.database import CompileDBBackend
+from mozbuild.test.backend.common import BackendTester
+
+
+class TestCompileDBBackends(BackendTester):
+ def perform_check(self, compile_commands_path, topsrcdir, topobjdir):
+ self.assertTrue(os.path.exists(compile_commands_path))
+ compile_db = json.loads(open(compile_commands_path, "r").read())
+
+ # Verify that we have the same number of items
+ self.assertEqual(len(compile_db), 4)
+
+ expected_db = [
+ {
+ "directory": topobjdir,
+ "command": "clang -o /dev/null -c -ferror-limit=0 {}/bar.c".format(
+ topsrcdir
+ ),
+ "file": "{}/bar.c".format(topsrcdir),
+ },
+ {
+ "directory": topobjdir,
+ "command": "clang -o /dev/null -c -ferror-limit=0 {}/foo.c".format(
+ topsrcdir
+ ),
+ "file": "{}/foo.c".format(topsrcdir),
+ },
+ {
+ "directory": topobjdir,
+ "command": "clang++ -o /dev/null -c -ferror-limit=0 {}/baz.cpp".format(
+ topsrcdir
+ ),
+ "file": "{}/baz.cpp".format(topsrcdir),
+ },
+ {
+ "directory": topobjdir,
+ "command": "clang++ -o /dev/null -c -ferror-limit=0 {}/qux.cpp".format(
+ topsrcdir
+ ),
+ "file": "{}/qux.cpp".format(topsrcdir),
+ },
+ ]
+
+ # Verify item consistency against `expected_db`
+ six.assertCountEqual(self, compile_db, expected_db)
+
+ def test_database(self):
+ """Ensure we can generate a `compile_commands.json` and that it is correct."""
+
+ env = self._consume("database", CompileDBBackend)
+ compile_commands_path = os.path.join(env.topobjdir, "compile_commands.json")
+
+ self.perform_check(compile_commands_path, env.topsrcdir, env.topobjdir)
+
+ def test_clangd(self):
+ """Ensure we can generate a `compile_commands.json` and that it is correct,
+ in order to be used by ClangdBackend"""
+
+ env = self._consume("database", ClangdBackend)
+ compile_commands_path = os.path.join(
+ env.topobjdir, "clangd", "compile_commands.json"
+ )
+
+ self.perform_check(compile_commands_path, env.topsrcdir, env.topobjdir)
+
+ def test_static_analysis(self):
+ """Ensure we can generate a `compile_commands.json` and that it is correct,
+ in order to be used by StaticAnalysisBackend"""
+
+ env = self._consume("database", StaticAnalysisBackend)
+ compile_commands_path = os.path.join(
+ env.topobjdir, "static-analysis", "compile_commands.json"
+ )
+
+ self.perform_check(compile_commands_path, env.topsrcdir, env.topobjdir)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/backend/test_fastermake.py b/python/mozbuild/mozbuild/test/backend/test_fastermake.py
new file mode 100644
index 0000000000..1c9670b091
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/test_fastermake.py
@@ -0,0 +1,42 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+
+import mozpack.path as mozpath
+from mozpack.copier import FileRegistry
+from mozpack.manifests import InstallManifest
+from mozunit import main
+
+from mozbuild.backend.fastermake import FasterMakeBackend
+from mozbuild.test.backend.common import BackendTester
+
+
+class TestFasterMakeBackend(BackendTester):
+ def test_basic(self):
+ """Ensure the FasterMakeBackend works without error."""
+ env = self._consume("stub0", FasterMakeBackend)
+ self.assertTrue(
+ os.path.exists(mozpath.join(env.topobjdir, "backend.FasterMakeBackend"))
+ )
+ self.assertTrue(
+ os.path.exists(mozpath.join(env.topobjdir, "backend.FasterMakeBackend.in"))
+ )
+
+ def test_final_target_files_wildcard(self):
+ """Ensure that wildcards in FINAL_TARGET_FILES work properly."""
+ env = self._consume("final-target-files-wildcard", FasterMakeBackend)
+ m = InstallManifest(
+ path=mozpath.join(env.topobjdir, "faster", "install_dist_bin")
+ )
+ self.assertEqual(len(m), 1)
+ reg = FileRegistry()
+ m.populate_registry(reg)
+ expected = [("foo/bar.xyz", "bar.xyz"), ("foo/foo.xyz", "foo.xyz")]
+ actual = [(path, mozpath.relpath(f.path, env.topsrcdir)) for (path, f) in reg]
+ self.assertEqual(expected, actual)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/backend/test_partialconfigenvironment.py b/python/mozbuild/mozbuild/test/backend/test_partialconfigenvironment.py
new file mode 100644
index 0000000000..13b1656981
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/test_partialconfigenvironment.py
@@ -0,0 +1,173 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import unittest
+from shutil import rmtree
+from tempfile import mkdtemp
+
+import buildconfig
+import mozpack.path as mozpath
+from mozunit import main
+
+from mozbuild.backend.configenvironment import PartialConfigEnvironment
+
+config = {
+ "defines": {
+ "MOZ_FOO": "1",
+ "MOZ_BAR": "2",
+ },
+ "substs": {
+ "MOZ_SUBST_1": "1",
+ "MOZ_SUBST_2": "2",
+ "CPP": "cpp",
+ },
+}
+
+
+class TestPartial(unittest.TestCase):
+ def setUp(self):
+ self._old_env = dict(os.environ)
+
+ def tearDown(self):
+ os.environ.clear()
+ os.environ.update(self._old_env)
+
+ def _objdir(self):
+ objdir = mkdtemp(dir=buildconfig.topsrcdir)
+ self.addCleanup(rmtree, objdir)
+ return objdir
+
+ def test_auto_substs(self):
+ """Test the automatically set values of ACDEFINES, and ALLDEFINES"""
+ env = PartialConfigEnvironment(self._objdir())
+ env.write_vars(config)
+ self.assertEqual(env.substs["ACDEFINES"], "-DMOZ_BAR=2 -DMOZ_FOO=1")
+ self.assertEqual(
+ env.defines["ALLDEFINES"],
+ {
+ "MOZ_BAR": "2",
+ "MOZ_FOO": "1",
+ },
+ )
+
+ def test_remove_subst(self):
+ """Test removing a subst from the config. The file should be overwritten with 'None'"""
+ env = PartialConfigEnvironment(self._objdir())
+ path = mozpath.join(env.topobjdir, "config.statusd", "substs", "MYSUBST")
+ myconfig = config.copy()
+ env.write_vars(myconfig)
+ with self.assertRaises(KeyError):
+ _ = env.substs["MYSUBST"]
+ self.assertFalse(os.path.exists(path))
+
+ myconfig["substs"]["MYSUBST"] = "new"
+ env.write_vars(myconfig)
+
+ self.assertEqual(env.substs["MYSUBST"], "new")
+ self.assertTrue(os.path.exists(path))
+
+ del myconfig["substs"]["MYSUBST"]
+ env.write_vars(myconfig)
+ with self.assertRaises(KeyError):
+ _ = env.substs["MYSUBST"]
+ # Now that the subst is gone, the file still needs to be present so that
+ # make can update dependencies correctly. Overwriting the file with
+ # 'None' is the same as deleting it as far as the
+ # PartialConfigEnvironment is concerned, but make can't track a
+ # dependency on a file that doesn't exist.
+ self.assertTrue(os.path.exists(path))
+
+ def _assert_deps(self, env, deps):
+ deps = sorted(
+ [
+ "$(wildcard %s)" % (mozpath.join(env.topobjdir, "config.statusd", d))
+ for d in deps
+ ]
+ )
+ self.assertEqual(sorted(env.get_dependencies()), deps)
+
+ def test_dependencies(self):
+ """Test getting dependencies on defines and substs."""
+ env = PartialConfigEnvironment(self._objdir())
+ env.write_vars(config)
+ self._assert_deps(env, [])
+
+ self.assertEqual(env.defines["MOZ_FOO"], "1")
+ self._assert_deps(env, ["defines/MOZ_FOO"])
+
+ self.assertEqual(env.defines["MOZ_BAR"], "2")
+ self._assert_deps(env, ["defines/MOZ_FOO", "defines/MOZ_BAR"])
+
+ # Getting a define again shouldn't add a redundant dependency
+ self.assertEqual(env.defines["MOZ_FOO"], "1")
+ self._assert_deps(env, ["defines/MOZ_FOO", "defines/MOZ_BAR"])
+
+ self.assertEqual(env.substs["MOZ_SUBST_1"], "1")
+ self._assert_deps(
+ env, ["defines/MOZ_FOO", "defines/MOZ_BAR", "substs/MOZ_SUBST_1"]
+ )
+
+ with self.assertRaises(KeyError):
+ _ = env.substs["NON_EXISTENT"]
+ self._assert_deps(
+ env,
+ [
+ "defines/MOZ_FOO",
+ "defines/MOZ_BAR",
+ "substs/MOZ_SUBST_1",
+ "substs/NON_EXISTENT",
+ ],
+ )
+ self.assertEqual(env.substs.get("NON_EXISTENT"), None)
+
+ def test_set_subst(self):
+ """Test setting a subst"""
+ env = PartialConfigEnvironment(self._objdir())
+ env.write_vars(config)
+
+ self.assertEqual(env.substs["MOZ_SUBST_1"], "1")
+ env.substs["MOZ_SUBST_1"] = "updated"
+ self.assertEqual(env.substs["MOZ_SUBST_1"], "updated")
+
+ # A new environment should pull the result from the file again.
+ newenv = PartialConfigEnvironment(env.topobjdir)
+ self.assertEqual(newenv.substs["MOZ_SUBST_1"], "1")
+
+ def test_env_override(self):
+ """Test overriding a subst with an environment variable"""
+ env = PartialConfigEnvironment(self._objdir())
+ env.write_vars(config)
+
+ self.assertEqual(env.substs["MOZ_SUBST_1"], "1")
+ self.assertEqual(env.substs["CPP"], "cpp")
+
+ # Reset the environment and set some environment variables.
+ env = PartialConfigEnvironment(env.topobjdir)
+ os.environ["MOZ_SUBST_1"] = "subst 1 environ"
+ os.environ["CPP"] = "cpp environ"
+
+ # The MOZ_SUBST_1 should be overridden by the environment, while CPP is
+ # a special variable and should not.
+ self.assertEqual(env.substs["MOZ_SUBST_1"], "subst 1 environ")
+ self.assertEqual(env.substs["CPP"], "cpp")
+
+ def test_update(self):
+ """Test calling update on the substs or defines pseudo dicts"""
+ env = PartialConfigEnvironment(self._objdir())
+ env.write_vars(config)
+
+ mysubsts = {"NEW": "new"}
+ mysubsts.update(env.substs.iteritems())
+ self.assertEqual(mysubsts["NEW"], "new")
+ self.assertEqual(mysubsts["CPP"], "cpp")
+
+ mydefines = {"DEBUG": "1"}
+ mydefines.update(env.defines.iteritems())
+ self.assertEqual(mydefines["DEBUG"], "1")
+ self.assertEqual(mydefines["MOZ_FOO"], "1")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/backend/test_recursivemake.py b/python/mozbuild/mozbuild/test/backend/test_recursivemake.py
new file mode 100644
index 0000000000..acbada060b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/test_recursivemake.py
@@ -0,0 +1,1307 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import io
+import os
+import unittest
+
+import mozpack.path as mozpath
+import six
+import six.moves.cPickle as pickle
+from mozpack.manifests import InstallManifest
+from mozunit import main
+
+from mozbuild.backend.recursivemake import RecursiveMakeBackend, RecursiveMakeTraversal
+from mozbuild.backend.test_manifest import TestManifestBackend
+from mozbuild.frontend.emitter import TreeMetadataEmitter
+from mozbuild.frontend.reader import BuildReader
+from mozbuild.test.backend.common import BackendTester
+
+
+class TestRecursiveMakeTraversal(unittest.TestCase):
+ def test_traversal(self):
+ traversal = RecursiveMakeTraversal()
+ traversal.add("", dirs=["A", "B", "C"])
+ traversal.add("", dirs=["D"])
+ traversal.add("A")
+ traversal.add("B", dirs=["E", "F"])
+ traversal.add("C", dirs=["G", "H"])
+ traversal.add("D", dirs=["I", "K"])
+ traversal.add("D", dirs=["J", "L"])
+ traversal.add("E")
+ traversal.add("F")
+ traversal.add("G")
+ traversal.add("H")
+ traversal.add("I", dirs=["M", "N"])
+ traversal.add("J", dirs=["O", "P"])
+ traversal.add("K", dirs=["Q", "R"])
+ traversal.add("L", dirs=["S"])
+ traversal.add("M")
+ traversal.add("N", dirs=["T"])
+ traversal.add("O")
+ traversal.add("P", dirs=["U"])
+ traversal.add("Q")
+ traversal.add("R", dirs=["V"])
+ traversal.add("S", dirs=["W"])
+ traversal.add("T")
+ traversal.add("U")
+ traversal.add("V")
+ traversal.add("W", dirs=["X"])
+ traversal.add("X")
+
+ parallels = set(("G", "H", "I", "J", "O", "P", "Q", "R", "U"))
+
+ def filter(current, subdirs):
+ return (
+ current,
+ [d for d in subdirs.dirs if d in parallels],
+ [d for d in subdirs.dirs if d not in parallels],
+ )
+
+ start, deps = traversal.compute_dependencies(filter)
+ self.assertEqual(start, ("X",))
+ self.maxDiff = None
+ self.assertEqual(
+ deps,
+ {
+ "A": ("",),
+ "B": ("A",),
+ "C": ("F",),
+ "D": ("G", "H"),
+ "E": ("B",),
+ "F": ("E",),
+ "G": ("C",),
+ "H": ("C",),
+ "I": ("D",),
+ "J": ("D",),
+ "K": ("T", "O", "U"),
+ "L": ("Q", "V"),
+ "M": ("I",),
+ "N": ("M",),
+ "O": ("J",),
+ "P": ("J",),
+ "Q": ("K",),
+ "R": ("K",),
+ "S": ("L",),
+ "T": ("N",),
+ "U": ("P",),
+ "V": ("R",),
+ "W": ("S",),
+ "X": ("W",),
+ },
+ )
+
+ self.assertEqual(
+ list(traversal.traverse("", filter)),
+ [
+ "",
+ "A",
+ "B",
+ "E",
+ "F",
+ "C",
+ "G",
+ "H",
+ "D",
+ "I",
+ "M",
+ "N",
+ "T",
+ "J",
+ "O",
+ "P",
+ "U",
+ "K",
+ "Q",
+ "R",
+ "V",
+ "L",
+ "S",
+ "W",
+ "X",
+ ],
+ )
+
+ self.assertEqual(list(traversal.traverse("C", filter)), ["C", "G", "H"])
+
+ def test_traversal_2(self):
+ traversal = RecursiveMakeTraversal()
+ traversal.add("", dirs=["A", "B", "C"])
+ traversal.add("A")
+ traversal.add("B", dirs=["D", "E", "F"])
+ traversal.add("C", dirs=["G", "H", "I"])
+ traversal.add("D")
+ traversal.add("E")
+ traversal.add("F")
+ traversal.add("G")
+ traversal.add("H")
+ traversal.add("I")
+
+ start, deps = traversal.compute_dependencies()
+ self.assertEqual(start, ("I",))
+ self.assertEqual(
+ deps,
+ {
+ "A": ("",),
+ "B": ("A",),
+ "C": ("F",),
+ "D": ("B",),
+ "E": ("D",),
+ "F": ("E",),
+ "G": ("C",),
+ "H": ("G",),
+ "I": ("H",),
+ },
+ )
+
+ def test_traversal_filter(self):
+ traversal = RecursiveMakeTraversal()
+ traversal.add("", dirs=["A", "B", "C"])
+ traversal.add("A")
+ traversal.add("B", dirs=["D", "E", "F"])
+ traversal.add("C", dirs=["G", "H", "I"])
+ traversal.add("D")
+ traversal.add("E")
+ traversal.add("F")
+ traversal.add("G")
+ traversal.add("H")
+ traversal.add("I")
+
+ def filter(current, subdirs):
+ if current == "B":
+ current = None
+ return current, [], subdirs.dirs
+
+ start, deps = traversal.compute_dependencies(filter)
+ self.assertEqual(start, ("I",))
+ self.assertEqual(
+ deps,
+ {
+ "A": ("",),
+ "C": ("F",),
+ "D": ("A",),
+ "E": ("D",),
+ "F": ("E",),
+ "G": ("C",),
+ "H": ("G",),
+ "I": ("H",),
+ },
+ )
+
+ def test_traversal_parallel(self):
+ traversal = RecursiveMakeTraversal()
+ traversal.add("", dirs=["A", "B", "C"])
+ traversal.add("A")
+ traversal.add("B", dirs=["D", "E", "F"])
+ traversal.add("C", dirs=["G", "H", "I"])
+ traversal.add("D")
+ traversal.add("E")
+ traversal.add("F")
+ traversal.add("G")
+ traversal.add("H")
+ traversal.add("I")
+ traversal.add("J")
+
+ def filter(current, subdirs):
+ return current, subdirs.dirs, []
+
+ start, deps = traversal.compute_dependencies(filter)
+ self.assertEqual(start, ("A", "D", "E", "F", "G", "H", "I", "J"))
+ self.assertEqual(
+ deps,
+ {
+ "A": ("",),
+ "B": ("",),
+ "C": ("",),
+ "D": ("B",),
+ "E": ("B",),
+ "F": ("B",),
+ "G": ("C",),
+ "H": ("C",),
+ "I": ("C",),
+ "J": ("",),
+ },
+ )
+
+
+class TestRecursiveMakeBackend(BackendTester):
+ def test_basic(self):
+ """Ensure the RecursiveMakeBackend works without error."""
+ env = self._consume("stub0", RecursiveMakeBackend)
+ self.assertTrue(
+ os.path.exists(mozpath.join(env.topobjdir, "backend.RecursiveMakeBackend"))
+ )
+ self.assertTrue(
+ os.path.exists(
+ mozpath.join(env.topobjdir, "backend.RecursiveMakeBackend.in")
+ )
+ )
+
+ def test_output_files(self):
+ """Ensure proper files are generated."""
+ env = self._consume("stub0", RecursiveMakeBackend)
+
+ expected = ["", "dir1", "dir2"]
+
+ for d in expected:
+ out_makefile = mozpath.join(env.topobjdir, d, "Makefile")
+ out_backend = mozpath.join(env.topobjdir, d, "backend.mk")
+
+ self.assertTrue(os.path.exists(out_makefile))
+ self.assertTrue(os.path.exists(out_backend))
+
+ def test_makefile_conversion(self):
+ """Ensure Makefile.in is converted properly."""
+ env = self._consume("stub0", RecursiveMakeBackend)
+
+ p = mozpath.join(env.topobjdir, "Makefile")
+
+ lines = [
+ l.strip() for l in open(p, "rt").readlines()[1:] if not l.startswith("#")
+ ]
+ self.assertEqual(
+ lines,
+ [
+ "DEPTH := .",
+ "topobjdir := %s" % env.topobjdir,
+ "topsrcdir := %s" % env.topsrcdir,
+ "srcdir := %s" % env.topsrcdir,
+ "srcdir_rel := %s" % mozpath.relpath(env.topsrcdir, env.topobjdir),
+ "relativesrcdir := .",
+ "include $(DEPTH)/config/autoconf.mk",
+ "",
+ "FOO := foo",
+ "",
+ "include $(topsrcdir)/config/recurse.mk",
+ ],
+ )
+
+ def test_missing_makefile_in(self):
+ """Ensure missing Makefile.in results in Makefile creation."""
+ env = self._consume("stub0", RecursiveMakeBackend)
+
+ p = mozpath.join(env.topobjdir, "dir2", "Makefile")
+ self.assertTrue(os.path.exists(p))
+
+ lines = [l.strip() for l in open(p, "rt").readlines()]
+ self.assertEqual(len(lines), 10)
+
+ self.assertTrue(lines[0].startswith("# THIS FILE WAS AUTOMATICALLY"))
+
+ def test_backend_mk(self):
+ """Ensure backend.mk file is written out properly."""
+ env = self._consume("stub0", RecursiveMakeBackend)
+
+ p = mozpath.join(env.topobjdir, "backend.mk")
+
+ lines = [l.strip() for l in open(p, "rt").readlines()[2:]]
+ self.assertEqual(lines, ["DIRS := dir1 dir2"])
+
+ # Make env.substs writable to add ENABLE_TESTS
+ env.substs = dict(env.substs)
+ env.substs["ENABLE_TESTS"] = "1"
+ self._consume("stub0", RecursiveMakeBackend, env=env)
+ p = mozpath.join(env.topobjdir, "backend.mk")
+
+ lines = [l.strip() for l in open(p, "rt").readlines()[2:]]
+ self.assertEqual(lines, ["DIRS := dir1 dir2 dir3"])
+
+ def test_mtime_no_change(self):
+ """Ensure mtime is not updated if file content does not change."""
+
+ env = self._consume("stub0", RecursiveMakeBackend)
+
+ makefile_path = mozpath.join(env.topobjdir, "Makefile")
+ backend_path = mozpath.join(env.topobjdir, "backend.mk")
+ makefile_mtime = os.path.getmtime(makefile_path)
+ backend_mtime = os.path.getmtime(backend_path)
+
+ reader = BuildReader(env)
+ emitter = TreeMetadataEmitter(env)
+ backend = RecursiveMakeBackend(env)
+ backend.consume(emitter.emit(reader.read_topsrcdir()))
+
+ self.assertEqual(os.path.getmtime(makefile_path), makefile_mtime)
+ self.assertEqual(os.path.getmtime(backend_path), backend_mtime)
+
+ def test_substitute_config_files(self):
+ """Ensure substituted config files are produced."""
+ env = self._consume("substitute_config_files", RecursiveMakeBackend)
+
+ p = mozpath.join(env.topobjdir, "foo")
+ self.assertTrue(os.path.exists(p))
+ lines = [l.strip() for l in open(p, "rt").readlines()]
+ self.assertEqual(lines, ["TEST = foo"])
+
+ def test_install_substitute_config_files(self):
+ """Ensure we recurse into the dirs that install substituted config files."""
+ env = self._consume("install_substitute_config_files", RecursiveMakeBackend)
+
+ root_deps_path = mozpath.join(env.topobjdir, "root-deps.mk")
+ lines = [l.strip() for l in open(root_deps_path, "rt").readlines()]
+
+ # Make sure we actually recurse into the sub directory during export to
+ # install the subst file.
+ self.assertTrue(any(l == "recurse_export: sub/export" for l in lines))
+
+ def test_variable_passthru(self):
+ """Ensure variable passthru is written out correctly."""
+ env = self._consume("variable_passthru", RecursiveMakeBackend)
+
+ backend_path = mozpath.join(env.topobjdir, "backend.mk")
+ lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]]
+
+ expected = {
+ "RCFILE": ["RCFILE := $(srcdir)/foo.rc"],
+ "RCINCLUDE": ["RCINCLUDE := $(srcdir)/bar.rc"],
+ "WIN32_EXE_LDFLAGS": ["WIN32_EXE_LDFLAGS += -subsystem:console"],
+ }
+
+ for var, val in expected.items():
+ # print("test_variable_passthru[%s]" % (var))
+ found = [str for str in lines if str.startswith(var)]
+ self.assertEqual(found, val)
+
+ def test_sources(self):
+ """Ensure SOURCES, HOST_SOURCES and WASM_SOURCES are handled properly."""
+ env = self._consume("sources", RecursiveMakeBackend)
+
+ backend_path = mozpath.join(env.topobjdir, "backend.mk")
+ lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]]
+
+ expected = {
+ "ASFILES": ["ASFILES += $(srcdir)/bar.s", "ASFILES += $(srcdir)/foo.asm"],
+ "CMMSRCS": ["CMMSRCS += $(srcdir)/fuga.mm", "CMMSRCS += $(srcdir)/hoge.mm"],
+ "CSRCS": ["CSRCS += $(srcdir)/baz.c", "CSRCS += $(srcdir)/qux.c"],
+ "HOST_CPPSRCS": [
+ "HOST_CPPSRCS += $(srcdir)/bar.cpp",
+ "HOST_CPPSRCS += $(srcdir)/foo.cpp",
+ ],
+ "HOST_CSRCS": [
+ "HOST_CSRCS += $(srcdir)/baz.c",
+ "HOST_CSRCS += $(srcdir)/qux.c",
+ ],
+ "SSRCS": ["SSRCS += $(srcdir)/titi.S", "SSRCS += $(srcdir)/toto.S"],
+ "WASM_CSRCS": ["WASM_CSRCS += $(srcdir)/baz.c"],
+ "WASM_CPPSRCS": ["WASM_CPPSRCS += $(srcdir)/bar.cpp"],
+ }
+
+ for var, val in expected.items():
+ found = [str for str in lines if str.startswith(var)]
+ self.assertEqual(found, val)
+
+ def test_exports(self):
+ """Ensure EXPORTS is handled properly."""
+ env = self._consume("exports", RecursiveMakeBackend)
+
+ # EXPORTS files should appear in the dist_include install manifest.
+ m = InstallManifest(
+ path=mozpath.join(
+ env.topobjdir, "_build_manifests", "install", "dist_include"
+ )
+ )
+ self.assertEqual(len(m), 7)
+ self.assertIn("foo.h", m)
+ self.assertIn("mozilla/mozilla1.h", m)
+ self.assertIn("mozilla/dom/dom2.h", m)
+
+ def test_generated_files(self):
+ """Ensure GENERATED_FILES is handled properly."""
+ env = self._consume("generated-files", RecursiveMakeBackend)
+
+ backend_path = mozpath.join(env.topobjdir, "backend.mk")
+ lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]]
+
+ expected = [
+ "include $(topsrcdir)/config/AB_rCD.mk",
+ "PRE_COMPILE_TARGETS += $(MDDEPDIR)/bar.c.stub",
+ "bar.c: $(MDDEPDIR)/bar.c.stub ;",
+ "EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/bar.c.pp",
+ "$(MDDEPDIR)/bar.c.stub: %s/generate-bar.py" % env.topsrcdir,
+ "$(REPORT_BUILD)",
+ "$(call py_action,file_generate,%s/generate-bar.py baz bar.c $(MDDEPDIR)/bar.c.pp $(MDDEPDIR)/bar.c.stub)" # noqa
+ % env.topsrcdir,
+ "@$(TOUCH) $@",
+ "",
+ "EXPORT_TARGETS += $(MDDEPDIR)/foo.h.stub",
+ "foo.h: $(MDDEPDIR)/foo.h.stub ;",
+ "EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/foo.h.pp",
+ "$(MDDEPDIR)/foo.h.stub: %s/generate-foo.py $(srcdir)/foo-data"
+ % (env.topsrcdir),
+ "$(REPORT_BUILD)",
+ "$(call py_action,file_generate,%s/generate-foo.py main foo.h $(MDDEPDIR)/foo.h.pp $(MDDEPDIR)/foo.h.stub $(srcdir)/foo-data)" # noqa
+ % (env.topsrcdir),
+ "@$(TOUCH) $@",
+ "",
+ ]
+
+ self.maxDiff = None
+ self.assertEqual(lines, expected)
+
+ def test_generated_files_force(self):
+ """Ensure GENERATED_FILES with .force is handled properly."""
+ env = self._consume("generated-files-force", RecursiveMakeBackend)
+
+ backend_path = mozpath.join(env.topobjdir, "backend.mk")
+ lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]]
+
+ expected = [
+ "include $(topsrcdir)/config/AB_rCD.mk",
+ "PRE_COMPILE_TARGETS += $(MDDEPDIR)/bar.c.stub",
+ "bar.c: $(MDDEPDIR)/bar.c.stub ;",
+ "EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/bar.c.pp",
+ "$(MDDEPDIR)/bar.c.stub: %s/generate-bar.py FORCE" % env.topsrcdir,
+ "$(REPORT_BUILD)",
+ "$(call py_action,file_generate,%s/generate-bar.py baz bar.c $(MDDEPDIR)/bar.c.pp $(MDDEPDIR)/bar.c.stub)" # noqa
+ % env.topsrcdir,
+ "@$(TOUCH) $@",
+ "",
+ "PRE_COMPILE_TARGETS += $(MDDEPDIR)/foo.c.stub",
+ "foo.c: $(MDDEPDIR)/foo.c.stub ;",
+ "EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/foo.c.pp",
+ "$(MDDEPDIR)/foo.c.stub: %s/generate-foo.py $(srcdir)/foo-data"
+ % (env.topsrcdir),
+ "$(REPORT_BUILD)",
+ "$(call py_action,file_generate,%s/generate-foo.py main foo.c $(MDDEPDIR)/foo.c.pp $(MDDEPDIR)/foo.c.stub $(srcdir)/foo-data)" # noqa
+ % (env.topsrcdir),
+ "@$(TOUCH) $@",
+ "",
+ ]
+
+ self.maxDiff = None
+ self.assertEqual(lines, expected)
+
+ def test_localized_generated_files(self):
+ """Ensure LOCALIZED_GENERATED_FILES is handled properly."""
+ env = self._consume("localized-generated-files", RecursiveMakeBackend)
+
+ backend_path = mozpath.join(env.topobjdir, "backend.mk")
+ lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]]
+
+ expected = [
+ "include $(topsrcdir)/config/AB_rCD.mk",
+ "MISC_TARGETS += $(MDDEPDIR)/foo.xyz.stub",
+ "foo.xyz: $(MDDEPDIR)/foo.xyz.stub ;",
+ "EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/foo.xyz.pp",
+ "$(MDDEPDIR)/foo.xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)" # noqa
+ % env.topsrcdir,
+ "$(REPORT_BUILD)",
+ "$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main foo.xyz $(MDDEPDIR)/foo.xyz.pp $(MDDEPDIR)/foo.xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)" # noqa
+ % env.topsrcdir,
+ "@$(TOUCH) $@",
+ "",
+ "LOCALIZED_FILES_0_FILES += foo.xyz",
+ "LOCALIZED_FILES_0_DEST = $(FINAL_TARGET)/",
+ "LOCALIZED_FILES_0_TARGET := misc",
+ "INSTALL_TARGETS += LOCALIZED_FILES_0",
+ ]
+
+ self.maxDiff = None
+ self.assertEqual(lines, expected)
+
+ def test_localized_generated_files_force(self):
+ """Ensure LOCALIZED_GENERATED_FILES with .force is handled properly."""
+ env = self._consume("localized-generated-files-force", RecursiveMakeBackend)
+
+ backend_path = mozpath.join(env.topobjdir, "backend.mk")
+ lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]]
+
+ expected = [
+ "include $(topsrcdir)/config/AB_rCD.mk",
+ "MISC_TARGETS += $(MDDEPDIR)/foo.xyz.stub",
+ "foo.xyz: $(MDDEPDIR)/foo.xyz.stub ;",
+ "EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/foo.xyz.pp",
+ "$(MDDEPDIR)/foo.xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)" # noqa
+ % env.topsrcdir,
+ "$(REPORT_BUILD)",
+ "$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main foo.xyz $(MDDEPDIR)/foo.xyz.pp $(MDDEPDIR)/foo.xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)" # noqa
+ % env.topsrcdir,
+ "@$(TOUCH) $@",
+ "",
+ "MISC_TARGETS += $(MDDEPDIR)/abc.xyz.stub",
+ "abc.xyz: $(MDDEPDIR)/abc.xyz.stub ;",
+ "EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/abc.xyz.pp",
+ "$(MDDEPDIR)/abc.xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input FORCE" # noqa
+ % env.topsrcdir,
+ "$(REPORT_BUILD)",
+ "$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main abc.xyz $(MDDEPDIR)/abc.xyz.pp $(MDDEPDIR)/abc.xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)" # noqa
+ % env.topsrcdir,
+ "@$(TOUCH) $@",
+ "",
+ ]
+
+ self.maxDiff = None
+ self.assertEqual(lines, expected)
+
+ def test_localized_generated_files_AB_CD(self):
+ """Ensure LOCALIZED_GENERATED_FILES is handled properly
+ when {AB_CD} and {AB_rCD} are used."""
+ env = self._consume("localized-generated-files-AB_CD", RecursiveMakeBackend)
+
+ backend_path = mozpath.join(env.topobjdir, "backend.mk")
+ lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]]
+
+ expected = [
+ "include $(topsrcdir)/config/AB_rCD.mk",
+ "MISC_TARGETS += $(MDDEPDIR)/foo$(AB_CD).xyz.stub",
+ "foo$(AB_CD).xyz: $(MDDEPDIR)/foo$(AB_CD).xyz.stub ;",
+ "EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/foo$(AB_CD).xyz.pp",
+ "$(MDDEPDIR)/foo$(AB_CD).xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)" # noqa
+ % env.topsrcdir,
+ "$(REPORT_BUILD)",
+ "$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main foo$(AB_CD).xyz $(MDDEPDIR)/foo$(AB_CD).xyz.pp $(MDDEPDIR)/foo$(AB_CD).xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)" # noqa
+ % env.topsrcdir,
+ "@$(TOUCH) $@",
+ "",
+ "bar$(AB_rCD).xyz: $(MDDEPDIR)/bar$(AB_rCD).xyz.stub ;",
+ "EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/bar$(AB_rCD).xyz.pp",
+ "$(MDDEPDIR)/bar$(AB_rCD).xyz.stub: %s/generate-foo.py $(call MERGE_RELATIVE_FILE,localized-input,inner/locales) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)" # noqa
+ % env.topsrcdir,
+ "$(REPORT_BUILD)",
+ "$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main bar$(AB_rCD).xyz $(MDDEPDIR)/bar$(AB_rCD).xyz.pp $(MDDEPDIR)/bar$(AB_rCD).xyz.stub $(call MERGE_RELATIVE_FILE,localized-input,inner/locales) $(srcdir)/non-localized-input)" # noqa
+ % env.topsrcdir,
+ "@$(TOUCH) $@",
+ "",
+ "zot$(AB_rCD).xyz: $(MDDEPDIR)/zot$(AB_rCD).xyz.stub ;",
+ "EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/zot$(AB_rCD).xyz.pp",
+ "$(MDDEPDIR)/zot$(AB_rCD).xyz.stub: %s/generate-foo.py $(call MERGE_RELATIVE_FILE,localized-input,locales) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)" # noqa
+ % env.topsrcdir,
+ "$(REPORT_BUILD)",
+ "$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main zot$(AB_rCD).xyz $(MDDEPDIR)/zot$(AB_rCD).xyz.pp $(MDDEPDIR)/zot$(AB_rCD).xyz.stub $(call MERGE_RELATIVE_FILE,localized-input,locales) $(srcdir)/non-localized-input)" # noqa
+ % env.topsrcdir,
+ "@$(TOUCH) $@",
+ "",
+ ]
+
+ self.maxDiff = None
+ self.assertEqual(lines, expected)
+
+ def test_exports_generated(self):
+ """Ensure EXPORTS that are listed in GENERATED_FILES
+ are handled properly."""
+ env = self._consume("exports-generated", RecursiveMakeBackend)
+
+ # EXPORTS files should appear in the dist_include install manifest.
+ m = InstallManifest(
+ path=mozpath.join(
+ env.topobjdir, "_build_manifests", "install", "dist_include"
+ )
+ )
+ self.assertEqual(len(m), 8)
+ self.assertIn("foo.h", m)
+ self.assertIn("mozilla/mozilla1.h", m)
+ self.assertIn("mozilla/dom/dom1.h", m)
+ self.assertIn("gfx/gfx.h", m)
+ self.assertIn("bar.h", m)
+ self.assertIn("mozilla/mozilla2.h", m)
+ self.assertIn("mozilla/dom/dom2.h", m)
+ self.assertIn("mozilla/dom/dom3.h", m)
+ # EXPORTS files that are also GENERATED_FILES should be handled as
+ # INSTALL_TARGETS.
+ backend_path = mozpath.join(env.topobjdir, "backend.mk")
+ lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]]
+ expected = [
+ "include $(topsrcdir)/config/AB_rCD.mk",
+ "dist_include_FILES += bar.h",
+ "dist_include_DEST := $(DEPTH)/dist/include/",
+ "dist_include_TARGET := export",
+ "INSTALL_TARGETS += dist_include",
+ "dist_include_mozilla_FILES += mozilla2.h",
+ "dist_include_mozilla_DEST := $(DEPTH)/dist/include/mozilla",
+ "dist_include_mozilla_TARGET := export",
+ "INSTALL_TARGETS += dist_include_mozilla",
+ "dist_include_mozilla_dom_FILES += dom2.h",
+ "dist_include_mozilla_dom_FILES += dom3.h",
+ "dist_include_mozilla_dom_DEST := $(DEPTH)/dist/include/mozilla/dom",
+ "dist_include_mozilla_dom_TARGET := export",
+ "INSTALL_TARGETS += dist_include_mozilla_dom",
+ ]
+ self.maxDiff = None
+ self.assertEqual(lines, expected)
+
+ def test_resources(self):
+ """Ensure RESOURCE_FILES is handled properly."""
+ env = self._consume("resources", RecursiveMakeBackend)
+
+ # RESOURCE_FILES should appear in the dist_bin install manifest.
+ m = InstallManifest(
+ path=os.path.join(env.topobjdir, "_build_manifests", "install", "dist_bin")
+ )
+ self.assertEqual(len(m), 10)
+ self.assertIn("res/foo.res", m)
+ self.assertIn("res/fonts/font1.ttf", m)
+ self.assertIn("res/fonts/desktop/desktop2.ttf", m)
+
+ self.assertIn("res/bar.res.in", m)
+ self.assertIn("res/tests/test.manifest", m)
+ self.assertIn("res/tests/extra.manifest", m)
+
+ def test_test_manifests_files_written(self):
+ """Ensure test manifests get turned into files."""
+ env = self._consume("test-manifests-written", RecursiveMakeBackend)
+
+ tests_dir = mozpath.join(env.topobjdir, "_tests")
+ m_master = mozpath.join(
+ tests_dir, "testing", "mochitest", "tests", "mochitest.ini"
+ )
+ x_master = mozpath.join(tests_dir, "xpcshell", "xpcshell.ini")
+ self.assertTrue(os.path.exists(m_master))
+ self.assertTrue(os.path.exists(x_master))
+
+ lines = [l.strip() for l in open(x_master, "rt").readlines()]
+ self.assertEqual(
+ lines,
+ [
+ "# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT MODIFY BY HAND.",
+ "",
+ "[include:dir1/xpcshell.ini]",
+ "[include:xpcshell.ini]",
+ ],
+ )
+
+ def test_test_manifest_pattern_matches_recorded(self):
+ """Pattern matches in test manifests' support-files should be recorded."""
+ env = self._consume("test-manifests-written", RecursiveMakeBackend)
+ m = InstallManifest(
+ path=mozpath.join(
+ env.topobjdir, "_build_manifests", "install", "_test_files"
+ )
+ )
+
+ # This is not the most robust test in the world, but it gets the job
+ # done.
+ entries = [e for e in m._dests.keys() if "**" in e]
+ self.assertEqual(len(entries), 1)
+ self.assertIn("support/**", entries[0])
+
+ def test_test_manifest_deffered_installs_written(self):
+ """Shared support files are written to their own data file by the backend."""
+ env = self._consume("test-manifest-shared-support", RecursiveMakeBackend)
+
+        # First, read the generated install manifest contents for the ini manifest.
+ test_files_manifest = mozpath.join(
+ env.topobjdir, "_build_manifests", "install", "_test_files"
+ )
+ m = InstallManifest(path=test_files_manifest)
+
+ # Then, synthesize one from the test-installs.pkl file. This should
+ # allow us to re-create a subset of the above.
+ env = self._consume("test-manifest-shared-support", TestManifestBackend)
+ test_installs_path = mozpath.join(env.topobjdir, "test-installs.pkl")
+
+ with open(test_installs_path, "rb") as fh:
+ test_installs = pickle.load(fh)
+
+ self.assertEqual(
+ set(test_installs.keys()),
+ set(["child/test_sub.js", "child/data/**", "child/another-file.sjs"]),
+ )
+ for key in test_installs.keys():
+ self.assertIn(key, test_installs)
+
+ synthesized_manifest = InstallManifest()
+ for item, installs in test_installs.items():
+ for install_info in installs:
+ if len(install_info) == 3:
+ synthesized_manifest.add_pattern_link(*install_info)
+ if len(install_info) == 2:
+ synthesized_manifest.add_link(*install_info)
+
+ self.assertEqual(len(synthesized_manifest), 3)
+ for item, info in synthesized_manifest._dests.items():
+ self.assertIn(item, m)
+ self.assertEqual(info, m._dests[item])
+
+ def test_xpidl_generation(self):
+ """Ensure xpidl files and directories are written out."""
+ env = self._consume("xpidl", RecursiveMakeBackend)
+
+ # Install manifests should contain entries.
+ install_dir = mozpath.join(env.topobjdir, "_build_manifests", "install")
+ self.assertTrue(os.path.isfile(mozpath.join(install_dir, "xpidl")))
+
+ m = InstallManifest(path=mozpath.join(install_dir, "xpidl"))
+ self.assertIn(".deps/my_module.pp", m)
+
+ m = InstallManifest(path=mozpath.join(install_dir, "xpidl"))
+ self.assertIn("my_module.xpt", m)
+
+ m = InstallManifest(path=mozpath.join(install_dir, "dist_include"))
+ self.assertIn("foo.h", m)
+
+ p = mozpath.join(env.topobjdir, "config/makefiles/xpidl")
+ self.assertTrue(os.path.isdir(p))
+
+ self.assertTrue(os.path.isfile(mozpath.join(p, "Makefile")))
+
+ def test_test_support_files_tracked(self):
+ env = self._consume("test-support-binaries-tracked", RecursiveMakeBackend)
+ m = InstallManifest(
+ path=mozpath.join(env.topobjdir, "_build_manifests", "install", "_tests")
+ )
+ self.assertEqual(len(m), 4)
+ self.assertIn("xpcshell/tests/mozbuildtest/test-library.dll", m)
+ self.assertIn("xpcshell/tests/mozbuildtest/test-one.exe", m)
+ self.assertIn("xpcshell/tests/mozbuildtest/test-two.exe", m)
+ self.assertIn("xpcshell/tests/mozbuildtest/host-test-library.dll", m)
+
+ def test_old_install_manifest_deleted(self):
+ # Simulate an install manifest from a previous backend version. Ensure
+ # it is deleted.
+ env = self._get_environment("stub0")
+ purge_dir = mozpath.join(env.topobjdir, "_build_manifests", "install")
+ manifest_path = mozpath.join(purge_dir, "old_manifest")
+ os.makedirs(purge_dir)
+ m = InstallManifest()
+ m.write(path=manifest_path)
+ with open(
+ mozpath.join(env.topobjdir, "backend.RecursiveMakeBackend"), "w"
+ ) as f:
+ f.write("%s\n" % manifest_path)
+
+ self.assertTrue(os.path.exists(manifest_path))
+ self._consume("stub0", RecursiveMakeBackend, env)
+ self.assertFalse(os.path.exists(manifest_path))
+
+ def test_install_manifests_written(self):
+ env, objs = self._emit("stub0")
+ backend = RecursiveMakeBackend(env)
+
+ m = InstallManifest()
+ backend._install_manifests["testing"] = m
+ m.add_link(__file__, "self")
+ backend.consume(objs)
+
+ man_dir = mozpath.join(env.topobjdir, "_build_manifests", "install")
+ self.assertTrue(os.path.isdir(man_dir))
+
+ expected = ["testing"]
+ for e in expected:
+ full = mozpath.join(man_dir, e)
+ self.assertTrue(os.path.exists(full))
+
+ m2 = InstallManifest(path=full)
+ self.assertEqual(m, m2)
+
+ def test_ipdl_sources(self):
+ """Test that PREPROCESSED_IPDL_SOURCES and IPDL_SOURCES are written to
+ ipdlsrcs.mk correctly."""
+ env = self._get_environment("ipdl_sources")
+
+ # Use the ipdl directory as the IPDL root for testing.
+ ipdl_root = mozpath.join(env.topobjdir, "ipdl")
+
+ # Make substs writable so we can set the value of IPDL_ROOT to reflect
+ # the correct objdir.
+ env.substs = dict(env.substs)
+ env.substs["IPDL_ROOT"] = ipdl_root
+
+ self._consume("ipdl_sources", RecursiveMakeBackend, env)
+
+ manifest_path = mozpath.join(ipdl_root, "ipdlsrcs.mk")
+ lines = [l.strip() for l in open(manifest_path, "rt").readlines()]
+
+ # Handle Windows paths correctly
+ topsrcdir = mozpath.normsep(env.topsrcdir)
+
+ expected = [
+ "ALL_IPDLSRCS := bar1.ipdl foo1.ipdl %s/bar/bar.ipdl %s/bar/bar2.ipdlh %s/foo/foo.ipdl %s/foo/foo2.ipdlh" # noqa
+ % tuple([topsrcdir] * 4),
+ "IPDLDIRS := %s %s/bar %s/foo" % (ipdl_root, topsrcdir, topsrcdir),
+ ]
+
+ found = [str for str in lines if str.startswith(("ALL_IPDLSRCS", "IPDLDIRS"))]
+ self.assertEqual(found, expected)
+
+ # Check that each directory declares the generated relevant .cpp files
+ # to be built in CPPSRCS.
+ # ENABLE_UNIFIED_BUILD defaults to False without mozilla-central's
+ # moz.configure so we don't see unified sources here.
+ for dir, expected in (
+ (".", []),
+ ("ipdl", []),
+ (
+ "bar",
+ [
+ "CPPSRCS += "
+ + " ".join(
+ f"{ipdl_root}/{f}"
+ for f in [
+ "bar.cpp",
+ "bar1.cpp",
+ "bar1Child.cpp",
+ "bar1Parent.cpp",
+ "bar2.cpp",
+ "barChild.cpp",
+ "barParent.cpp",
+ ]
+ )
+ ],
+ ),
+ (
+ "foo",
+ [
+ "CPPSRCS += "
+ + " ".join(
+ f"{ipdl_root}/{f}"
+ for f in [
+ "foo.cpp",
+ "foo1.cpp",
+ "foo1Child.cpp",
+ "foo1Parent.cpp",
+ "foo2.cpp",
+ "fooChild.cpp",
+ "fooParent.cpp",
+ ]
+ )
+ ],
+ ),
+ ):
+ backend_path = mozpath.join(env.topobjdir, dir, "backend.mk")
+ lines = [l.strip() for l in open(backend_path, "rt").readlines()]
+
+ found = [str for str in lines if str.startswith("CPPSRCS")]
+ self.assertEqual(found, expected)
+
+ def test_defines(self):
+ """Test that DEFINES are written to backend.mk correctly."""
+ env = self._consume("defines", RecursiveMakeBackend)
+
+ backend_path = mozpath.join(env.topobjdir, "backend.mk")
+ lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]]
+
+ var = "DEFINES"
+ defines = [val for val in lines if val.startswith(var)]
+
+ expected = ["DEFINES += -DFOO '-DBAZ=\"ab'\\''cd\"' -UQUX -DBAR=7 -DVALUE=xyz"]
+ self.assertEqual(defines, expected)
+
+ def test_local_includes(self):
+ """Test that LOCAL_INCLUDES are written to backend.mk correctly."""
+ env = self._consume("local_includes", RecursiveMakeBackend)
+
+ backend_path = mozpath.join(env.topobjdir, "backend.mk")
+ lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]]
+
+ expected = [
+ "LOCAL_INCLUDES += -I$(srcdir)/bar/baz",
+ "LOCAL_INCLUDES += -I$(srcdir)/foo",
+ ]
+
+ found = [str for str in lines if str.startswith("LOCAL_INCLUDES")]
+ self.assertEqual(found, expected)
+
+ def test_generated_includes(self):
+ """Test that GENERATED_INCLUDES are written to backend.mk correctly."""
+ env = self._consume("generated_includes", RecursiveMakeBackend)
+
+ backend_path = mozpath.join(env.topobjdir, "backend.mk")
+ lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]]
+
+ expected = [
+ "LOCAL_INCLUDES += -I$(CURDIR)/bar/baz",
+ "LOCAL_INCLUDES += -I$(CURDIR)/foo",
+ ]
+
+ found = [str for str in lines if str.startswith("LOCAL_INCLUDES")]
+ self.assertEqual(found, expected)
+
+ def test_rust_library(self):
+ """Test that a Rust library is written to backend.mk correctly."""
+ env = self._consume("rust-library", RecursiveMakeBackend)
+
+ backend_path = mozpath.join(env.topobjdir, "backend.mk")
+ lines = [
+ l.strip()
+ for l in open(backend_path, "rt").readlines()[2:]
+ # Strip out computed flags, they're a PITA to test.
+ if not l.startswith("COMPUTED_")
+ ]
+
+ expected = [
+ "RUST_LIBRARY_FILE := %s/x86_64-unknown-linux-gnu/release/libtest_library.a"
+ % env.topobjdir, # noqa
+ "CARGO_FILE := $(srcdir)/Cargo.toml",
+ "CARGO_TARGET_DIR := %s" % env.topobjdir,
+ ]
+
+ self.assertEqual(lines, expected)
+
+ def test_host_rust_library(self):
+        """Test that a host Rust library is written to backend.mk correctly."""
+ env = self._consume("host-rust-library", RecursiveMakeBackend)
+
+ backend_path = mozpath.join(env.topobjdir, "backend.mk")
+ lines = [
+ l.strip()
+ for l in open(backend_path, "rt").readlines()[2:]
+ # Strip out computed flags, they're a PITA to test.
+ if not l.startswith("COMPUTED_")
+ ]
+
+ expected = [
+ "HOST_RUST_LIBRARY_FILE := %s/x86_64-unknown-linux-gnu/release/libhostrusttool.a"
+ % env.topobjdir, # noqa
+ "CARGO_FILE := $(srcdir)/Cargo.toml",
+ "CARGO_TARGET_DIR := %s" % env.topobjdir,
+ ]
+
+ self.assertEqual(lines, expected)
+
+ def test_host_rust_library_with_features(self):
+ """Test that a host Rust library with features is written to backend.mk correctly."""
+ env = self._consume("host-rust-library-features", RecursiveMakeBackend)
+
+ backend_path = mozpath.join(env.topobjdir, "backend.mk")
+ lines = [
+ l.strip()
+ for l in open(backend_path, "rt").readlines()[2:]
+ # Strip out computed flags, they're a PITA to test.
+ if not l.startswith("COMPUTED_")
+ ]
+
+ expected = [
+ "HOST_RUST_LIBRARY_FILE := %s/x86_64-unknown-linux-gnu/release/libhostrusttool.a"
+ % env.topobjdir, # noqa
+ "CARGO_FILE := $(srcdir)/Cargo.toml",
+ "CARGO_TARGET_DIR := %s" % env.topobjdir,
+ "HOST_RUST_LIBRARY_FEATURES := musthave cantlivewithout",
+ ]
+
+ self.assertEqual(lines, expected)
+
+ def test_rust_library_with_features(self):
+ """Test that a Rust library with features is written to backend.mk correctly."""
+ env = self._consume("rust-library-features", RecursiveMakeBackend)
+
+ backend_path = mozpath.join(env.topobjdir, "backend.mk")
+ lines = [
+ l.strip()
+ for l in open(backend_path, "rt").readlines()[2:]
+ # Strip out computed flags, they're a PITA to test.
+ if not l.startswith("COMPUTED_")
+ ]
+
+ expected = [
+ "RUST_LIBRARY_FILE := %s/x86_64-unknown-linux-gnu/release/libfeature_library.a"
+ % env.topobjdir, # noqa
+ "CARGO_FILE := $(srcdir)/Cargo.toml",
+ "CARGO_TARGET_DIR := %s" % env.topobjdir,
+ "RUST_LIBRARY_FEATURES := musthave cantlivewithout",
+ ]
+
+ self.assertEqual(lines, expected)
+
+ def test_rust_programs(self):
+ """Test that `{HOST_,}RUST_PROGRAMS` are written to backend.mk correctly."""
+ env = self._consume("rust-programs", RecursiveMakeBackend)
+
+ backend_path = mozpath.join(env.topobjdir, "code/backend.mk")
+ lines = [
+ l.strip()
+ for l in open(backend_path, "rt").readlines()[2:]
+ # Strip out computed flags, they're a PITA to test.
+ if not l.startswith("COMPUTED_")
+ ]
+
+ expected = [
+ "CARGO_FILE := %s/code/Cargo.toml" % env.topsrcdir,
+ "CARGO_TARGET_DIR := %s" % env.topobjdir,
+ "RUST_PROGRAMS += $(DEPTH)/i686-pc-windows-msvc/release/target.exe",
+ "RUST_CARGO_PROGRAMS += target",
+ "HOST_RUST_PROGRAMS += $(DEPTH)/i686-pc-windows-msvc/release/host.exe",
+ "HOST_RUST_CARGO_PROGRAMS += host",
+ ]
+
+ self.assertEqual(lines, expected)
+
+ root_deps_path = mozpath.join(env.topobjdir, "root-deps.mk")
+ lines = [l.strip() for l in open(root_deps_path, "rt").readlines()]
+
+ self.assertTrue(
+ any(l == "recurse_compile: code/host code/target" for l in lines)
+ )
+
+ def test_final_target(self):
+ """Test that FINAL_TARGET is written to backend.mk correctly."""
+ env = self._consume("final_target", RecursiveMakeBackend)
+
+ final_target_rule = "FINAL_TARGET = $(if $(XPI_NAME),$(DIST)/xpi-stage/$(XPI_NAME),$(DIST)/bin)$(DIST_SUBDIR:%=/%)" # noqa
+ expected = dict()
+ expected[env.topobjdir] = []
+ expected[mozpath.join(env.topobjdir, "both")] = [
+ "XPI_NAME = mycrazyxpi",
+ "DIST_SUBDIR = asubdir",
+ final_target_rule,
+ ]
+ expected[mozpath.join(env.topobjdir, "dist-subdir")] = [
+ "DIST_SUBDIR = asubdir",
+ final_target_rule,
+ ]
+ expected[mozpath.join(env.topobjdir, "xpi-name")] = [
+ "XPI_NAME = mycrazyxpi",
+ final_target_rule,
+ ]
+ expected[mozpath.join(env.topobjdir, "final-target")] = [
+ "FINAL_TARGET = $(DEPTH)/random-final-target"
+ ]
+ for key, expected_rules in six.iteritems(expected):
+ backend_path = mozpath.join(key, "backend.mk")
+ lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]]
+ found = [
+ str
+ for str in lines
+ if str.startswith("FINAL_TARGET")
+ or str.startswith("XPI_NAME")
+ or str.startswith("DIST_SUBDIR")
+ ]
+ self.assertEqual(found, expected_rules)
+
+ def test_final_target_pp_files(self):
+ """Test that FINAL_TARGET_PP_FILES is written to backend.mk correctly."""
+ env = self._consume("dist-files", RecursiveMakeBackend)
+
+ backend_path = mozpath.join(env.topobjdir, "backend.mk")
+ lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]]
+
+ expected = [
+ "DIST_FILES_0 += $(srcdir)/install.rdf",
+ "DIST_FILES_0 += $(srcdir)/main.js",
+ "DIST_FILES_0_PATH := $(DEPTH)/dist/bin/",
+ "DIST_FILES_0_TARGET := misc",
+ "PP_TARGETS += DIST_FILES_0",
+ ]
+
+ found = [str for str in lines if "DIST_FILES" in str]
+ self.assertEqual(found, expected)
+
+ def test_localized_files(self):
+ """Test that LOCALIZED_FILES is written to backend.mk correctly."""
+ env = self._consume("localized-files", RecursiveMakeBackend)
+
+ backend_path = mozpath.join(env.topobjdir, "backend.mk")
+ lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]]
+
+ expected = [
+ "LOCALIZED_FILES_0_FILES += $(wildcard $(LOCALE_SRCDIR)/abc/*.abc)",
+ "LOCALIZED_FILES_0_FILES += $(call MERGE_FILE,bar.ini)",
+ "LOCALIZED_FILES_0_FILES += $(call MERGE_FILE,foo.js)",
+ "LOCALIZED_FILES_0_DEST = $(FINAL_TARGET)/",
+ "LOCALIZED_FILES_0_TARGET := misc",
+ "INSTALL_TARGETS += LOCALIZED_FILES_0",
+ ]
+
+ found = [str for str in lines if "LOCALIZED_FILES" in str]
+ self.assertEqual(found, expected)
+
+ def test_localized_pp_files(self):
+ """Test that LOCALIZED_PP_FILES is written to backend.mk correctly."""
+ env = self._consume("localized-pp-files", RecursiveMakeBackend)
+
+ backend_path = mozpath.join(env.topobjdir, "backend.mk")
+ lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]]
+
+ expected = [
+ "LOCALIZED_PP_FILES_0 += $(call MERGE_FILE,bar.ini)",
+ "LOCALIZED_PP_FILES_0 += $(call MERGE_FILE,foo.js)",
+ "LOCALIZED_PP_FILES_0_PATH = $(FINAL_TARGET)/",
+ "LOCALIZED_PP_FILES_0_TARGET := misc",
+ "LOCALIZED_PP_FILES_0_FLAGS := --silence-missing-directive-warnings",
+ "PP_TARGETS += LOCALIZED_PP_FILES_0",
+ ]
+
+ found = [str for str in lines if "LOCALIZED_PP_FILES" in str]
+ self.assertEqual(found, expected)
+
+ def test_config(self):
+ """Test that CONFIGURE_SUBST_FILES are properly handled."""
+ env = self._consume("test_config", RecursiveMakeBackend)
+
+ self.assertEqual(
+ open(os.path.join(env.topobjdir, "file"), "r").readlines(),
+ ["#ifdef foo\n", "bar baz\n", "@bar@\n"],
+ )
+
+ def test_prog_lib_c_only(self):
+ """Test that C-only binary artifacts are marked as such."""
+ env = self._consume("prog-lib-c-only", RecursiveMakeBackend)
+
+ # PROGRAM C-onlyness.
+ with open(os.path.join(env.topobjdir, "c-program", "backend.mk"), "r") as fh:
+ lines = fh.readlines()
+ lines = [line.rstrip() for line in lines]
+
+ self.assertIn("PROG_IS_C_ONLY_c_test_program := 1", lines)
+
+ with open(os.path.join(env.topobjdir, "cxx-program", "backend.mk"), "r") as fh:
+ lines = fh.readlines()
+ lines = [line.rstrip() for line in lines]
+
+ # Test for only the absence of the variable, not the precise
+ # form of the variable assignment.
+ for line in lines:
+ self.assertNotIn("PROG_IS_C_ONLY_cxx_test_program", line)
+
+ # SIMPLE_PROGRAMS C-onlyness.
+ with open(
+ os.path.join(env.topobjdir, "c-simple-programs", "backend.mk"), "r"
+ ) as fh:
+ lines = fh.readlines()
+ lines = [line.rstrip() for line in lines]
+
+ self.assertIn("PROG_IS_C_ONLY_c_simple_program := 1", lines)
+
+ with open(
+ os.path.join(env.topobjdir, "cxx-simple-programs", "backend.mk"), "r"
+ ) as fh:
+ lines = fh.readlines()
+ lines = [line.rstrip() for line in lines]
+
+ for line in lines:
+ self.assertNotIn("PROG_IS_C_ONLY_cxx_simple_program", line)
+
+ # Libraries C-onlyness.
+ with open(os.path.join(env.topobjdir, "c-library", "backend.mk"), "r") as fh:
+ lines = fh.readlines()
+ lines = [line.rstrip() for line in lines]
+
+ self.assertIn("LIB_IS_C_ONLY := 1", lines)
+
+ with open(os.path.join(env.topobjdir, "cxx-library", "backend.mk"), "r") as fh:
+ lines = fh.readlines()
+ lines = [line.rstrip() for line in lines]
+
+ for line in lines:
+ self.assertNotIn("LIB_IS_C_ONLY", line)
+
+ def test_linkage(self):
+ env = self._consume("linkage", RecursiveMakeBackend)
+ expected_linkage = {
+ "prog": {
+ "SHARED_LIBS": ["qux/qux.so", "../shared/baz.so"],
+ "STATIC_LIBS": ["../real/foo.a"],
+ "OS_LIBS": ["-lfoo", "-lbaz", "-lbar"],
+ },
+ "shared": {
+ "OS_LIBS": ["-lfoo"],
+ "SHARED_LIBS": ["../prog/qux/qux.so"],
+ "STATIC_LIBS": [],
+ },
+ "static": {
+ "STATIC_LIBS": ["../real/foo.a"],
+ "OS_LIBS": ["-lbar"],
+ "SHARED_LIBS": ["../prog/qux/qux.so"],
+ },
+ "real": {
+ "STATIC_LIBS": [],
+ "SHARED_LIBS": ["../prog/qux/qux.so"],
+ "OS_LIBS": ["-lbaz"],
+ },
+ }
+ actual_linkage = {}
+ for name in expected_linkage.keys():
+ with open(os.path.join(env.topobjdir, name, "backend.mk"), "r") as fh:
+ actual_linkage[name] = [line.rstrip() for line in fh.readlines()]
+ for name in expected_linkage:
+ for var in expected_linkage[name]:
+ for val in expected_linkage[name][var]:
+ val = os.path.normpath(val)
+ line = "%s += %s" % (var, val)
+ self.assertIn(line, actual_linkage[name])
+ actual_linkage[name].remove(line)
+ for line in actual_linkage[name]:
+ self.assertNotIn("%s +=" % var, line)
+
+ def test_list_files(self):
+ env = self._consume("linkage", RecursiveMakeBackend)
+ expected_list_files = {
+ "prog/MyProgram_exe.list": [
+ "../static/bar/bar1.o",
+ "../static/bar/bar2.o",
+ "../static/bar/bar_helper/bar_helper1.o",
+ ],
+ "shared/baz_so.list": ["baz/baz1.o"],
+ }
+ actual_list_files = {}
+ for name in expected_list_files.keys():
+ with open(os.path.join(env.topobjdir, name), "r") as fh:
+ actual_list_files[name] = [line.rstrip() for line in fh.readlines()]
+ for name in expected_list_files:
+ self.assertEqual(
+ actual_list_files[name],
+ [os.path.normpath(f) for f in expected_list_files[name]],
+ )
+
+ # We don't produce a list file for a shared library composed only of
+ # object files in its directory, but instead list them in a variable.
+ with open(os.path.join(env.topobjdir, "prog", "qux", "backend.mk"), "r") as fh:
+ lines = [line.rstrip() for line in fh.readlines()]
+
+ self.assertIn("qux.so_OBJS := qux1.o", lines)
+
+ def test_jar_manifests(self):
+ env = self._consume("jar-manifests", RecursiveMakeBackend)
+
+ with open(os.path.join(env.topobjdir, "backend.mk"), "r") as fh:
+ lines = fh.readlines()
+
+ lines = [line.rstrip() for line in lines]
+
+ self.assertIn("JAR_MANIFEST := %s/jar.mn" % env.topsrcdir, lines)
+
+ def test_test_manifests_duplicate_support_files(self):
+ """Ensure duplicate support-files in test manifests work."""
+ env = self._consume(
+ "test-manifests-duplicate-support-files", RecursiveMakeBackend
+ )
+
+ p = os.path.join(env.topobjdir, "_build_manifests", "install", "_test_files")
+ m = InstallManifest(p)
+ self.assertIn("testing/mochitest/tests/support-file.txt", m)
+
+ def test_install_manifests_package_tests(self):
+ """Ensure test suites honor package_tests=False."""
+ env = self._consume("test-manifests-package-tests", RecursiveMakeBackend)
+
+ man_dir = mozpath.join(env.topobjdir, "_build_manifests", "install")
+ self.assertTrue(os.path.isdir(man_dir))
+
+ full = mozpath.join(man_dir, "_test_files")
+ self.assertTrue(os.path.exists(full))
+
+ m = InstallManifest(path=full)
+
+ # Only mochitest.js should be in the install manifest.
+ self.assertTrue("testing/mochitest/tests/mochitest.js" in m)
+
+ # The path is odd here because we do not normalize at test manifest
+ # processing time. This is a fragile test because there's currently no
+ # way to iterate the manifest.
+ self.assertFalse("instrumentation/./not_packaged.java" in m)
+
+ def test_program_paths(self):
+ """PROGRAMs with various moz.build settings that change the destination should produce
+ the expected paths in backend.mk."""
+ env = self._consume("program-paths", RecursiveMakeBackend)
+
+ expected = [
+ ("dist-bin", "$(DEPTH)/dist/bin/dist-bin.prog"),
+ ("dist-subdir", "$(DEPTH)/dist/bin/foo/dist-subdir.prog"),
+ ("final-target", "$(DEPTH)/final/target/final-target.prog"),
+ ("not-installed", "not-installed.prog"),
+ ]
+ prefix = "PROGRAM = "
+ for (subdir, expected_program) in expected:
+ with io.open(os.path.join(env.topobjdir, subdir, "backend.mk"), "r") as fh:
+ lines = fh.readlines()
+ program = [
+ line.rstrip().split(prefix, 1)[1]
+ for line in lines
+ if line.startswith(prefix)
+ ][0]
+ self.assertEqual(program, expected_program)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/backend/test_test_manifest.py b/python/mozbuild/mozbuild/test/backend/test_test_manifest.py
new file mode 100644
index 0000000000..fadf65e447
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/test_test_manifest.py
@@ -0,0 +1,94 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+
+import mozpack.path as mozpath
+import six.moves.cPickle as pickle
+from mozunit import main
+
+from mozbuild.backend.test_manifest import TestManifestBackend
+from mozbuild.test.backend.common import BackendTester
+
+
+class TestTestManifestBackend(BackendTester):
+ def test_all_tests_metadata_file_written(self):
+ """Ensure all-tests.pkl is generated."""
+ env = self._consume("test-manifests-written", TestManifestBackend)
+
+ all_tests_path = mozpath.join(env.topobjdir, "all-tests.pkl")
+ self.assertTrue(os.path.exists(all_tests_path))
+
+ with open(all_tests_path, "rb") as fh:
+ o = pickle.load(fh)
+
+ self.assertIn("xpcshell.js", o)
+ self.assertIn("dir1/test_bar.js", o)
+
+ self.assertEqual(len(o["xpcshell.js"]), 1)
+
+ def test_test_installs_metadata_file_written(self):
+ """Ensure test-installs.pkl is generated."""
+ env = self._consume("test-manifest-shared-support", TestManifestBackend)
+ all_tests_path = mozpath.join(env.topobjdir, "all-tests.pkl")
+ self.assertTrue(os.path.exists(all_tests_path))
+ test_installs_path = mozpath.join(env.topobjdir, "test-installs.pkl")
+
+ with open(test_installs_path, "rb") as fh:
+ test_installs = pickle.load(fh)
+
+ self.assertEqual(
+ set(test_installs.keys()),
+ set(["child/test_sub.js", "child/data/**", "child/another-file.sjs"]),
+ )
+
+ for key in test_installs.keys():
+ self.assertIn(key, test_installs)
+
+ def test_test_defaults_metadata_file_written(self):
+ """Ensure test-defaults.pkl is generated."""
+ env = self._consume("test-manifests-written", TestManifestBackend)
+
+ test_defaults_path = mozpath.join(env.topobjdir, "test-defaults.pkl")
+ self.assertTrue(os.path.exists(test_defaults_path))
+
+ with open(test_defaults_path, "rb") as fh:
+ o = {mozpath.normpath(k): v for k, v in pickle.load(fh).items()}
+
+ self.assertEqual(
+ set(mozpath.relpath(k, env.topsrcdir) for k in o.keys()),
+ set(["dir1/xpcshell.ini", "xpcshell.ini", "mochitest.ini"]),
+ )
+
+ manifest_path = mozpath.join(env.topsrcdir, "xpcshell.ini")
+ self.assertIn("here", o[manifest_path])
+ self.assertIn("support-files", o[manifest_path])
+
+ def test_test_manifest_sources(self):
+ """Ensure that backend sources are generated correctly."""
+ env = self._consume("test-manifests-backend-sources", TestManifestBackend)
+
+ backend_path = mozpath.join(env.topobjdir, "backend.TestManifestBackend.in")
+ self.assertTrue(os.path.exists(backend_path))
+
+ status_path = mozpath.join(env.topobjdir, "config.status")
+
+ with open(backend_path, "r") as fh:
+ sources = set(source.strip() for source in fh)
+
+ self.assertEqual(
+ sources,
+ set(
+ [
+ mozpath.join(env.topsrcdir, "mochitest.ini"),
+ mozpath.join(env.topsrcdir, "mochitest-common.ini"),
+ mozpath.join(env.topsrcdir, "moz.build"),
+ status_path,
+ ]
+ ),
+ )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/backend/test_visualstudio.py b/python/mozbuild/mozbuild/test/backend/test_visualstudio.py
new file mode 100644
index 0000000000..14cccb484b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/backend/test_visualstudio.py
@@ -0,0 +1,63 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import unittest
+from xml.dom.minidom import parse
+
+from mozunit import main
+
+from mozbuild.backend.visualstudio import VisualStudioBackend
+from mozbuild.test.backend.common import BackendTester
+
+
+class TestVisualStudioBackend(BackendTester):
+ @unittest.skip("Failing inconsistently in automation.")
+ def test_basic(self):
+ """Ensure we can consume our stub project."""
+
+ env = self._consume("visual-studio", VisualStudioBackend)
+
+ msvc = os.path.join(env.topobjdir, "msvc")
+ self.assertTrue(os.path.isdir(msvc))
+
+ self.assertTrue(os.path.isfile(os.path.join(msvc, "mozilla.sln")))
+ self.assertTrue(os.path.isfile(os.path.join(msvc, "mozilla.props")))
+ self.assertTrue(os.path.isfile(os.path.join(msvc, "mach.bat")))
+ self.assertTrue(os.path.isfile(os.path.join(msvc, "binary_my_app.vcxproj")))
+ self.assertTrue(os.path.isfile(os.path.join(msvc, "target_full.vcxproj")))
+ self.assertTrue(os.path.isfile(os.path.join(msvc, "library_dir1.vcxproj")))
+ self.assertTrue(os.path.isfile(os.path.join(msvc, "library_dir1.vcxproj.user")))
+
+ d = parse(os.path.join(msvc, "library_dir1.vcxproj"))
+ self.assertEqual(d.documentElement.tagName, "Project")
+ els = d.getElementsByTagName("ClCompile")
+ self.assertEqual(len(els), 2)
+
+ # mozilla-config.h should be explicitly listed as an include.
+ els = d.getElementsByTagName("NMakeForcedIncludes")
+ self.assertEqual(len(els), 1)
+ self.assertEqual(
+ els[0].firstChild.nodeValue, "$(TopObjDir)\\dist\\include\\mozilla-config.h"
+ )
+
+ # LOCAL_INCLUDES get added to the include search path.
+ els = d.getElementsByTagName("NMakeIncludeSearchPath")
+ self.assertEqual(len(els), 1)
+ includes = els[0].firstChild.nodeValue.split(";")
+ self.assertIn(os.path.normpath("$(TopSrcDir)/includeA/foo"), includes)
+ self.assertIn(os.path.normpath("$(TopSrcDir)/dir1"), includes)
+ self.assertIn(os.path.normpath("$(TopObjDir)/dir1"), includes)
+ self.assertIn(os.path.normpath("$(TopObjDir)\\dist\\include"), includes)
+
+ # DEFINES get added to the project.
+ els = d.getElementsByTagName("NMakePreprocessorDefinitions")
+ self.assertEqual(len(els), 1)
+ defines = els[0].firstChild.nodeValue.split(";")
+ self.assertIn("DEFINEFOO", defines)
+ self.assertIn("DEFINEBAR=bar", defines)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/code_analysis/test_mach_commands.py b/python/mozbuild/mozbuild/test/code_analysis/test_mach_commands.py
new file mode 100644
index 0000000000..774688c62f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/code_analysis/test_mach_commands.py
@@ -0,0 +1,90 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import os
+import unittest
+from unittest import mock
+
+import mozpack.path as mozpath
+from mach.registrar import Registrar
+from mozunit import main
+
+from mozbuild.base import MozbuildObject
+
+
+class TestStaticAnalysis(unittest.TestCase):
+ def setUp(self):
+ self.remove_cats = []
+ for cat in ("build", "post-build", "misc", "testing", "devenv"):
+ if cat in Registrar.categories:
+ continue
+ Registrar.register_category(cat, cat, cat)
+ self.remove_cats.append(cat)
+
+ def tearDown(self):
+ for cat in self.remove_cats:
+ del Registrar.categories[cat]
+ del Registrar.commands_by_category[cat]
+
+ def test_bug_1615884(self):
+ # TODO: cleaner test
+ # we're testing the `_is_ignored_path` but in an ideal
+ # world we should test the clang_analysis mach command
+ # since that small function is an internal detail.
+ # But there is zero test infra for that mach command
+ from mozbuild.code_analysis.mach_commands import _is_ignored_path
+
+ config = MozbuildObject.from_environment()
+ context = mock.MagicMock()
+ context.cwd = config.topsrcdir
+
+ command_context = mock.MagicMock()
+ command_context.topsrcdir = os.path.join("/root", "dir")
+ path = os.path.join("/root", "dir", "path1")
+
+ ignored_dirs_re = r"path1|path2/here|path3\there"
+ self.assertTrue(
+ _is_ignored_path(command_context, ignored_dirs_re, path) is not None
+ )
+
+ # simulating a win32 env
+ win32_path = "\\root\\dir\\path1"
+ command_context.topsrcdir = "\\root\\dir"
+ old_sep = os.sep
+ os.sep = "\\"
+ try:
+ self.assertTrue(
+ _is_ignored_path(command_context, ignored_dirs_re, win32_path)
+ is not None
+ )
+ finally:
+ os.sep = old_sep
+
+ self.assertTrue(
+ _is_ignored_path(command_context, ignored_dirs_re, "path2") is None
+ )
+
+ def test_get_files(self):
+ from mozbuild.code_analysis.mach_commands import get_abspath_files
+
+ config = MozbuildObject.from_environment()
+ context = mock.MagicMock()
+ context.cwd = config.topsrcdir
+
+ command_context = mock.MagicMock()
+ command_context.topsrcdir = mozpath.join("/root", "dir")
+ source = get_abspath_files(
+ command_context, ["file1", mozpath.join("directory", "file2")]
+ )
+
+ self.assertTrue(
+ source
+ == [
+ mozpath.join(command_context.topsrcdir, "file1"),
+ mozpath.join(command_context.topsrcdir, "directory", "file2"),
+ ]
+ )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/codecoverage/sample_lcov.info b/python/mozbuild/mozbuild/test/codecoverage/sample_lcov.info
new file mode 100644
index 0000000000..996ccac215
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/codecoverage/sample_lcov.info
@@ -0,0 +1,1895 @@
+SF:lcov_test_newTab.js
+FN:1,top-level
+FN:31,top-level
+FN:232,Transformation_rearrangeSites/</<
+FN:259,Transformation_whenTransitionEnded
+FN:262,onEnd
+FN:275,Transformation_getNodeOpacity
+FN:287,Transformation_setNodeOpacity
+FN:307,Transformation_moveSite
+FN:317,Transformation_isFrozen
+FN:334,Page_init
+FN:363,Page_observe
+FN:399,update
+FN:417,update/this._scheduleUpdateTimeout<
+FN:431,Page_init
+FN:443,Page_init/<
+FN:459,Page_updateAttributes
+FN:482,Page_handleUnloadEvent
+FN:499,Page_handleEvent
+FN:538,Page_handleEvent/<
+FN:544,gPage.onPageFirstVisible
+FN:567,onPageVisibleAndLoaded
+FN:575,reportLastVisibleTileIndex
+FN:619,gGrid.node
+FN:630,gGrid.cells
+FN:635,gGrid.sites
+FN:638,gGrid.ready
+FN:641,gGrid.isDocumentLoaded
+FN:647,Grid_init
+FN:651,Grid_init/<
+FN:676,Grid_createSite
+FN:685,Grid_handleEvent
+FN:697,Grid_lock
+FN:704,Grid_unlock
+FN:714,refresh
+FN:722,_refreshGrid
+FN:755,Grid_computeHeight
+FN:764,Grid_createSiteFragment
+FN:791,Grid_isHistoricalTile
+FN:799,Grid_resizeGrid
+FN:870,Cell
+FN:876,Cell/<
+FN:890,Cell.prototype.node
+FN:895,Cell.prototype.index
+FN:907,Cell.prototype.previousSibling
+FN:920,Cell.prototype.nextSibling
+FN:933,Cell.prototype.site
+FN:942,Cell_containsPinnedSite
+FN:951,Cell_isEmpty
+FN:958,Cell_handleEvent
+FN:991,Site
+FN:1005,Site.prototype.node
+FN:1010,Site.prototype.link
+FN:1015,Site.prototype.url
+FN:1020,Site.prototype.title
+FN:1025,Site.prototype.cell
+FN:1035,Site_pin
+FN:1051,Site_unpin
+FN:1063,Site_isPinned
+FN:1071,Site_block
+FN:1084,Site_querySelector
+FN:1093,Site.prototype._updateAttributes
+FN:1105,Site.prototype._newTabString
+FN:1116,Site.prototype._getSuggestedTileExplanation
+FN:1128,Site_checkLinkEndTime
+FN:1147,Site_render
+FN:1199,Site_onFirstVisible
+FN:1213,Site_captureIfMissing
+FN:1222,Site_refreshThumbnail
+FN:1246,Site.prototype._ignoreHoverEvents
+FN:1247,Site.prototype._ignoreHoverEvents/<
+FN:1250,Site.prototype._ignoreHoverEvents/<
+FN:1258,Site_addEventHandlers
+FN:1275,Site_speculativeConnect
+FN:1284,Site_recordSiteClicked
+FN:1296,Site.prototype._toggleLegalText
+FN:1324,Site_onClick
+FN:1386,Site_handleEvent
+FN:1417,gDrag.draggedSite
+FN:1424,gDrag.cellWidth
+FN:1425,gDrag.cellHeight
+FN:1432,Drag_start
+FN:1465,Drag_drag
+FN:1489,Drag_end
+FN:1505,Drag_isValid
+FN:1524,Drag_setDragData
+FN:1545,Drag_setDragData/<
+FN:1551,gDragDataHelper.mimeType
+FN:1555,DragDataHelper_getLinkFromDragEvent
+FN:1585,Drop_enter
+FN:1594,Drop_exit
+FN:1609,Drop_drop
+FN:1628,Drop_repinSitesAfterDrop
+FN:1632,Drop_repinSitesAfterDrop/pinnedSites<
+FN:1637,Drop_repinSitesAfterDrop/<
+FN:1645,Drop_pinDraggedSite
+FN:1669,Drop_delayedRearrange
+FN:1676,callback
+FN:1691,Drop_cancelDelayedArrange
+FN:1702,Drop_rearrange
+FN:1733,gDropTargetShim.init
+FN:1740,gDropTargetShim._addEventListeners
+FN:1752,gDropTargetShim._removeEventListeners
+FN:1764,gDropTargetShim.handleEvent
+FN:1788,gDropTargetShim._dragstart
+FN:1799,gDropTargetShim._dragover
+FN:1819,gDropTargetShim._drop
+FN:1839,gDropTargetShim._dragend
+FN:1862,gDropTargetShim._updateDropTarget
+FN:1888,gDropTargetShim._findDropTarget
+FN:1914,DropTargetShim_getCellPositions
+FN:1918,DropTargetShim_getCellPositions/this._cellPositions<
+FN:1929,gDropTargetShim._dispatchEvent
+FN:1954,DropPreview_rearrange
+FN:1972,DropPreview_insertDraggedSite
+FN:1999,DropPreview_repositionPinnedSites
+FN:2005,DropPreview_repositionPinnedSites/<
+FN:2023,DropPreview_filterPinnedSites
+FN:2030,DropPreview_filterPinnedSites/<
+FN:2047,DropPreview_getPinnedRange
+FN:2077,DropPreview_hasOverflowedPinnedSite
+FN:2104,DropPreview_repositionOverflowedPinnedSite
+FN:2135,DropPreview_indexOfLowerPrioritySite
+FN:2170,Updater_updateGrid
+FN:2177,Updater_updateGrid/<
+FN:2189,Updater_updateGrid/</<
+FN:2206,Updater_findRemainingSites
+FN:2210,Updater_findRemainingSites/<
+FN:2216,Updater_findRemainingSites/<
+FN:2225,Updater_freezeSitePositions
+FN:2226,Updater_freezeSitePositions/<
+FN:2236,Updater_moveSiteNodes
+FN:2244,Updater_moveSiteNodes/<
+FN:2268,Updater_rearrangeSites
+FN:2279,Updater_removeLegacySites
+FN:2283,Updater_removeLegacySites/<
+FN:2288,Updater_removeLegacySites/</<
+FN:2290,Updater_removeLegacySites/</</<
+FN:2308,Updater_fillEmptyCells
+FN:2312,Updater_fillEmptyCells/<
+FN:2316,Updater_fillEmptyCells/</<
+FN:2351,UndoDialog_init
+FN:2363,UndoDialog_show
+FN:2383,UndoDialog_hide
+FN:2399,UndoDialog_handleEvent
+FN:2416,UndoDialog_undo
+FN:2434,UndoDialog_undoAll
+FN:2435,UndoDialog_undoAll/<
+FN:2446,gSearch.init
+FN:2448,gSearch.init/<
+FN:2469,gCustomize.init
+FN:2474,gCustomize.init/<
+FN:2483,gCustomize.hidePanel
+FN:2484,onTransitionEnd
+FN:2495,gCustomize.showPanel
+FN:2504,gCustomize.showPanel/<
+FN:2517,gCustomize.handleEvent
+FN:2528,gCustomize.onClick
+FN:2554,gCustomize.onKeyDown
+FN:2560,gCustomize.showLearn
+FN:2565,gCustomize.updateSelected
+FN:2568,gCustomize.updateSelected/<
+FN:2602,gIntro.init
+FN:2608,gIntro._showMessage
+FN:2612,gIntro._showMessage/<
+FN:2621,gIntro._bold
+FN:2625,gIntro._link
+FN:2629,gIntro._exitIntro
+FN:2631,gIntro._exitIntro/<
+FN:2636,gIntro._generateParagraphs
+FN:2646,gIntro.showIfNecessary
+FN:2656,gIntro.showPanel
+FN:36,newTabString
+FN:44,inPrivateBrowsingMode
+FN:67,gTransformation._cellBorderWidths
+FN:86,Transformation_getNodePosition
+FN:96,Transformation_fadeNodeIn
+FN:97,Transformation_fadeNodeIn/<
+FN:111,Transformation_fadeNodeOut
+FN:120,Transformation_showSite
+FN:129,Transformation_hideSite
+FN:138,Transformation_setSitePosition
+FN:150,Transformation_freezeSitePosition
+FN:167,Transformation_unfreezeSitePosition
+FN:184,Transformation_slideSiteTo
+FN:191,finish
+FN:221,Transformation_rearrangeSites
+FN:227,Transformation_rearrangeSites/<
+FNDA:1,top-level
+FNDA:1,top-level
+FNDA:1,Page_init
+FNDA:2,update
+FNDA:1,Page_init
+FNDA:1,Page_init/<
+FNDA:1,Page_updateAttributes
+FNDA:1,Page_handleUnloadEvent
+FNDA:2,Page_handleEvent
+FNDA:1,gPage.onPageFirstVisible
+FNDA:1,onPageVisibleAndLoaded
+FNDA:1,reportLastVisibleTileIndex
+FNDA:2,gGrid.node
+FNDA:13,gGrid.cells
+FNDA:12,gGrid.sites
+FNDA:1,gGrid.ready
+FNDA:4,gGrid.isDocumentLoaded
+FNDA:1,Grid_init
+FNDA:1,Grid_init/<
+FNDA:2,Grid_createSite
+FNDA:1,Grid_handleEvent
+FNDA:1,refresh
+FNDA:2,_refreshGrid
+FNDA:4,Grid_computeHeight
+FNDA:1,Grid_createSiteFragment
+FNDA:8,Grid_isHistoricalTile
+FNDA:3,Grid_resizeGrid
+FNDA:30,Cell
+FNDA:120,Cell/<
+FNDA:183,Cell.prototype.node
+FNDA:180,Cell.prototype.site
+FNDA:2,Site
+FNDA:16,Site.prototype.node
+FNDA:33,Site.prototype.link
+FNDA:7,Site.prototype.url
+FNDA:4,Site.prototype.title
+FNDA:2,Site_isPinned
+FNDA:12,Site_querySelector
+FNDA:2,Site_checkLinkEndTime
+FNDA:2,Site_render
+FNDA:1,Site_onFirstVisible
+FNDA:3,Site_captureIfMissing
+FNDA:2,Site_refreshThumbnail
+FNDA:4,Site.prototype._ignoreHoverEvents
+FNDA:2,Site_addEventHandlers
+FNDA:1,gDropTargetShim.init
+FNDA:1,UndoDialog_init
+FNDA:1,gSearch.init
+FNDA:1,gCustomize.init
+FNDA:1,gCustomize.updateSelected
+FNDA:3,gCustomize.updateSelected/<
+FNDA:1,gIntro.init
+FNDA:1,gIntro.showIfNecessary
+FNDA:3,newTabString
+FNF:187
+FNH:54
+BRDA:233,0,0,-
+BRDA:233,0,1,-
+BRDA:236,1,0,-
+BRDA:236,1,1,-
+BRDA:263,0,0,-
+BRDA:263,0,1,-
+BRDA:289,0,0,-
+BRDA:289,0,1,-
+BRDA:290,1,0,-
+BRDA:290,1,1,-
+BRDA:293,2,0,-
+BRDA:293,2,1,-
+BRDA:348,0,0,-
+BRDA:348,0,1,1
+BRDA:364,0,0,-
+BRDA:364,0,1,-
+BRDA:371,1,0,-
+BRDA:371,1,1,-
+BRDA:377,2,0,-
+BRDA:377,2,1,-
+BRDA:382,3,0,-
+BRDA:382,3,1,-
+BRDA:382,4,0,-
+BRDA:382,4,1,-
+BRDA:384,5,0,-
+BRDA:384,5,1,-
+BRDA:384,6,0,-
+BRDA:384,6,1,-
+BRDA:383,7,0,-
+BRDA:383,7,1,-
+BRDA:399,0,0,2
+BRDA:399,0,1,-
+BRDA:401,1,0,-
+BRDA:401,1,1,2
+BRDA:405,2,0,1
+BRDA:405,2,1,1
+BRDA:405,3,0,1
+BRDA:405,3,1,1
+BRDA:413,4,0,-
+BRDA:413,4,1,-
+BRDA:419,0,0,-
+BRDA:419,0,1,-
+BRDA:432,0,0,1
+BRDA:432,0,1,-
+BRDA:440,1,0,1
+BRDA:440,1,1,-
+BRDA:463,0,0,-
+BRDA:463,0,1,2
+BRDA:462,1,0,2
+BRDA:462,1,1,1
+BRDA:472,2,0,-
+BRDA:472,2,1,-
+BRDA:471,3,0,-
+BRDA:471,3,1,1
+BRDA:488,0,0,1
+BRDA:488,0,1,-
+BRDA:500,0,0,1
+BRDA:500,0,1,1
+BRDA:500,1,0,1
+BRDA:500,1,1,-
+BRDA:500,2,0,-
+BRDA:500,2,1,-
+BRDA:500,3,0,-
+BRDA:500,3,1,-
+BRDA:500,4,0,-
+BRDA:500,4,1,-
+BRDA:500,5,0,-
+BRDA:500,5,1,-
+BRDA:511,6,0,-
+BRDA:511,6,1,-
+BRDA:510,7,0,-
+BRDA:510,7,1,-
+BRDA:519,8,0,-
+BRDA:519,8,1,-
+BRDA:519,9,0,-
+BRDA:519,9,1,-
+BRDA:523,10,0,-
+BRDA:523,10,1,-
+BRDA:523,11,0,-
+BRDA:523,11,1,-
+BRDA:530,12,0,-
+BRDA:530,12,1,-
+BRDA:549,0,0,14
+BRDA:549,0,1,1
+BRDA:548,1,0,16
+BRDA:548,1,1,1
+BRDA:560,2,0,1
+BRDA:560,2,1,-
+BRDA:588,0,0,1
+BRDA:588,0,1,22
+BRDA:588,1,0,15
+BRDA:588,1,1,8
+BRDA:589,2,0,7
+BRDA:589,2,1,1
+BRDA:591,3,0,1
+BRDA:591,3,1,-
+BRDA:587,4,0,24
+BRDA:587,4,1,1
+BRDA:635,0,0,181
+BRDA:635,0,1,12
+BRDA:665,0,0,-
+BRDA:665,0,1,1
+BRDA:686,0,0,1
+BRDA:686,0,1,-
+BRDA:686,1,0,-
+BRDA:686,1,1,-
+BRDA:728,0,0,31
+BRDA:728,0,1,2
+BRDA:733,1,0,30
+BRDA:733,1,1,2
+BRDA:741,2,0,-
+BRDA:741,2,1,2
+BRDA:740,3,0,2
+BRDA:740,3,1,2
+BRDA:757,0,0,2
+BRDA:757,0,1,2
+BRDA:793,0,0,8
+BRDA:793,0,1,-
+BRDA:793,1,0,-
+BRDA:793,1,1,-
+BRDA:793,2,0,-
+BRDA:793,2,1,-
+BRDA:804,0,0,1
+BRDA:804,0,1,2
+BRDA:804,1,0,2
+BRDA:804,1,1,1
+BRDA:809,2,0,1
+BRDA:809,2,1,1
+BRDA:819,3,0,1
+BRDA:819,3,1,1
+BRDA:839,4,0,8
+BRDA:839,4,1,-
+BRDA:838,5,0,9
+BRDA:838,5,1,2
+BRDA:909,0,0,-
+BRDA:909,0,1,-
+BRDA:922,0,0,-
+BRDA:922,0,1,-
+BRDA:935,0,0,168
+BRDA:935,0,1,12
+BRDA:944,0,0,-
+BRDA:944,0,1,-
+BRDA:961,0,0,-
+BRDA:961,0,1,-
+BRDA:961,1,0,-
+BRDA:961,1,1,-
+BRDA:964,2,0,-
+BRDA:964,2,1,-
+BRDA:964,3,0,-
+BRDA:964,3,1,-
+BRDA:967,4,0,-
+BRDA:967,4,1,-
+BRDA:967,5,0,-
+BRDA:967,5,1,-
+BRDA:967,6,0,-
+BRDA:967,6,1,-
+BRDA:967,7,0,-
+BRDA:967,7,1,-
+BRDA:1020,0,0,4
+BRDA:1020,0,1,-
+BRDA:1027,0,0,-
+BRDA:1027,0,1,-
+BRDA:1036,0,0,-
+BRDA:1036,0,1,-
+BRDA:1041,1,0,-
+BRDA:1041,1,1,-
+BRDA:1052,0,0,-
+BRDA:1052,0,1,-
+BRDA:1072,0,0,-
+BRDA:1072,0,1,-
+BRDA:1096,0,0,-
+BRDA:1096,0,1,-
+BRDA:1108,0,0,-
+BRDA:1108,0,1,-
+BRDA:1119,0,0,-
+BRDA:1119,0,1,-
+BRDA:1129,0,0,2
+BRDA:1129,0,1,-
+BRDA:1129,1,0,2
+BRDA:1129,1,1,-
+BRDA:1151,0,0,-
+BRDA:1151,0,1,2
+BRDA:1153,1,0,-
+BRDA:1153,1,1,2
+BRDA:1153,2,0,-
+BRDA:1153,2,1,2
+BRDA:1154,3,0,-
+BRDA:1154,3,1,-
+BRDA:1156,4,0,2
+BRDA:1156,4,1,-
+BRDA:1165,5,0,-
+BRDA:1165,5,1,2
+BRDA:1173,6,0,2
+BRDA:1173,6,1,-
+BRDA:1174,7,0,-
+BRDA:1174,7,1,-
+BRDA:1185,8,0,2
+BRDA:1185,8,1,-
+BRDA:1200,0,0,1
+BRDA:1200,0,1,-
+BRDA:1200,1,0,1
+BRDA:1200,1,1,-
+BRDA:1214,0,0,-
+BRDA:1214,0,1,3
+BRDA:1214,1,0,-
+BRDA:1214,1,1,3
+BRDA:1224,0,0,-
+BRDA:1224,0,1,2
+BRDA:1224,1,0,2
+BRDA:1224,1,1,-
+BRDA:1228,2,0,2
+BRDA:1228,2,1,-
+BRDA:1232,3,0,-
+BRDA:1232,3,1,2
+BRDA:1235,4,0,-
+BRDA:1235,4,1,2
+BRDA:1239,5,0,2
+BRDA:1239,5,1,-
+BRDA:1285,0,0,-
+BRDA:1285,0,1,-
+BRDA:1286,1,0,-
+BRDA:1286,1,1,-
+BRDA:1287,2,0,-
+BRDA:1287,2,1,-
+BRDA:1298,0,0,-
+BRDA:1298,0,1,-
+BRDA:1311,1,0,-
+BRDA:1311,1,1,-
+BRDA:1311,2,0,-
+BRDA:1311,2,1,-
+BRDA:1314,3,0,-
+BRDA:1314,3,1,-
+BRDA:1315,4,0,-
+BRDA:1315,4,1,-
+BRDA:1331,0,0,-
+BRDA:1331,0,1,-
+BRDA:1332,1,0,-
+BRDA:1332,1,1,-
+BRDA:1334,2,0,-
+BRDA:1334,2,1,-
+BRDA:1334,3,0,-
+BRDA:1334,3,1,-
+BRDA:1340,4,0,-
+BRDA:1340,4,1,-
+BRDA:1343,5,0,-
+BRDA:1343,5,1,-
+BRDA:1347,6,0,-
+BRDA:1347,6,1,-
+BRDA:1349,7,0,-
+BRDA:1349,7,1,-
+BRDA:1353,8,0,-
+BRDA:1353,8,1,-
+BRDA:1359,9,0,-
+BRDA:1359,9,1,-
+BRDA:1360,10,0,-
+BRDA:1360,10,1,-
+BRDA:1364,11,0,-
+BRDA:1364,11,1,-
+BRDA:1364,12,0,-
+BRDA:1364,12,1,-
+BRDA:1368,13,0,-
+BRDA:1368,13,1,-
+BRDA:1368,14,0,-
+BRDA:1368,14,1,-
+BRDA:1369,15,0,-
+BRDA:1369,15,1,-
+BRDA:1378,16,0,-
+BRDA:1378,16,1,-
+BRDA:1387,0,0,-
+BRDA:1387,0,1,-
+BRDA:1387,1,0,-
+BRDA:1387,1,1,-
+BRDA:1387,2,0,-
+BRDA:1387,2,1,-
+BRDA:1439,0,0,-
+BRDA:1439,0,1,-
+BRDA:1491,0,0,-
+BRDA:1491,0,1,-
+BRDA:1510,0,0,-
+BRDA:1510,0,1,-
+BRDA:1510,1,0,-
+BRDA:1510,1,1,-
+BRDA:1557,0,0,-
+BRDA:1557,0,1,-
+BRDA:1557,1,0,-
+BRDA:1557,1,1,-
+BRDA:1561,2,0,-
+BRDA:1561,2,1,-
+BRDA:1562,3,0,-
+BRDA:1562,3,1,-
+BRDA:1562,4,0,-
+BRDA:1562,4,1,-
+BRDA:1595,0,0,-
+BRDA:1595,0,1,-
+BRDA:1595,1,0,-
+BRDA:1595,1,1,-
+BRDA:1612,0,0,-
+BRDA:1612,0,1,-
+BRDA:1633,0,0,-
+BRDA:1633,0,1,-
+BRDA:1649,0,0,-
+BRDA:1649,0,1,-
+BRDA:1651,1,0,-
+BRDA:1651,1,1,-
+BRDA:1655,2,0,-
+BRDA:1655,2,1,-
+BRDA:1671,0,0,-
+BRDA:1671,0,1,-
+BRDA:1692,0,0,-
+BRDA:1692,0,1,-
+BRDA:1706,0,0,-
+BRDA:1706,0,1,-
+BRDA:1765,0,0,-
+BRDA:1765,0,1,-
+BRDA:1765,1,0,-
+BRDA:1765,1,1,-
+BRDA:1765,2,0,-
+BRDA:1765,2,1,-
+BRDA:1765,3,0,-
+BRDA:1765,3,1,-
+BRDA:1765,4,0,-
+BRDA:1765,4,1,-
+BRDA:1789,0,0,-
+BRDA:1789,0,1,-
+BRDA:1810,0,0,-
+BRDA:1810,0,1,-
+BRDA:1840,0,0,-
+BRDA:1840,0,1,-
+BRDA:1841,1,0,-
+BRDA:1841,1,1,-
+BRDA:1841,2,0,-
+BRDA:1841,2,1,-
+BRDA:1866,0,0,-
+BRDA:1866,0,1,-
+BRDA:1867,1,0,-
+BRDA:1867,1,1,-
+BRDA:1871,2,0,-
+BRDA:1871,2,1,-
+BRDA:1875,3,0,-
+BRDA:1875,3,1,-
+BRDA:1902,0,0,-
+BRDA:1902,0,1,-
+BRDA:1902,1,0,-
+BRDA:1902,1,1,-
+BRDA:1898,2,0,-
+BRDA:1898,2,1,-
+BRDA:1915,0,0,-
+BRDA:1915,0,1,-
+BRDA:1977,0,0,-
+BRDA:1977,0,1,-
+BRDA:1982,1,0,-
+BRDA:1982,1,1,-
+BRDA:2012,0,0,-
+BRDA:2012,0,1,-
+BRDA:2032,0,0,-
+BRDA:2032,0,1,-
+BRDA:2032,1,0,-
+BRDA:2032,1,1,-
+BRDA:2032,2,0,-
+BRDA:2032,2,1,-
+BRDA:2038,3,0,-
+BRDA:2038,3,1,-
+BRDA:2052,0,0,-
+BRDA:2052,0,1,-
+BRDA:2056,1,0,-
+BRDA:2056,1,1,-
+BRDA:2056,2,0,-
+BRDA:2056,2,1,-
+BRDA:2062,3,0,-
+BRDA:2062,3,1,-
+BRDA:2062,4,0,-
+BRDA:2062,4,1,-
+BRDA:2081,0,0,-
+BRDA:2081,0,1,-
+BRDA:2087,1,0,-
+BRDA:2087,1,1,-
+BRDA:2093,2,0,-
+BRDA:2093,2,1,-
+BRDA:2109,0,0,-
+BRDA:2109,0,1,-
+BRDA:2116,1,0,-
+BRDA:2116,1,1,-
+BRDA:2115,2,0,-
+BRDA:2115,2,1,-
+BRDA:2145,0,0,-
+BRDA:2145,0,1,-
+BRDA:2151,1,0,-
+BRDA:2151,1,1,-
+BRDA:2151,2,0,-
+BRDA:2151,2,1,-
+BRDA:2143,3,0,-
+BRDA:2143,3,1,-
+BRDA:2211,0,0,-
+BRDA:2211,0,1,-
+BRDA:2217,0,0,-
+BRDA:2217,0,1,-
+BRDA:2217,1,0,-
+BRDA:2217,1,1,-
+BRDA:2227,0,0,-
+BRDA:2227,0,1,-
+BRDA:2249,0,0,-
+BRDA:2249,0,1,-
+BRDA:2249,1,0,-
+BRDA:2249,1,1,-
+BRDA:2253,2,0,-
+BRDA:2253,2,1,-
+BRDA:2257,3,0,-
+BRDA:2257,3,1,-
+BRDA:2285,0,0,-
+BRDA:2285,0,1,-
+BRDA:2285,1,0,-
+BRDA:2285,1,1,-
+BRDA:2313,0,0,-
+BRDA:2313,0,1,-
+BRDA:2313,1,0,-
+BRDA:2313,1,1,-
+BRDA:2364,0,0,-
+BRDA:2364,0,1,-
+BRDA:2384,0,0,-
+BRDA:2384,0,1,-
+BRDA:2400,0,0,-
+BRDA:2400,0,1,-
+BRDA:2400,1,0,-
+BRDA:2400,1,1,-
+BRDA:2400,2,0,-
+BRDA:2400,2,1,-
+BRDA:2417,0,0,-
+BRDA:2417,0,1,-
+BRDA:2423,1,0,-
+BRDA:2423,1,1,-
+BRDA:2470,0,0,7
+BRDA:2470,0,1,1
+BRDA:2496,0,0,-
+BRDA:2496,0,1,-
+BRDA:2518,0,0,-
+BRDA:2518,0,1,-
+BRDA:2518,1,0,-
+BRDA:2518,1,1,-
+BRDA:2529,0,0,-
+BRDA:2529,0,1,-
+BRDA:2530,1,0,-
+BRDA:2530,1,1,-
+BRDA:2534,2,0,-
+BRDA:2534,2,1,-
+BRDA:2534,3,0,-
+BRDA:2534,3,1,-
+BRDA:2534,4,0,-
+BRDA:2534,4,1,-
+BRDA:2534,5,0,-
+BRDA:2534,5,1,-
+BRDA:2539,6,0,-
+BRDA:2539,6,1,-
+BRDA:2555,0,0,-
+BRDA:2555,0,1,-
+BRDA:2567,0,0,-
+BRDA:2567,0,1,1
+BRDA:2567,1,0,-
+BRDA:2567,1,1,1
+BRDA:2577,2,0,-
+BRDA:2577,2,1,1
+BRDA:2570,0,0,2
+BRDA:2570,0,1,1
+BRDA:2603,0,0,6
+BRDA:2603,0,1,1
+BRDA:2647,0,0,1
+BRDA:2647,0,1,-
+BRDA:2650,1,0,1
+BRDA:2650,1,1,-
+BRDA:2660,0,0,-
+BRDA:2660,0,1,-
+BRDA:38,0,0,-
+BRDA:38,0,1,3
+BRDA:101,0,0,-
+BRDA:101,0,1,-
+BRDA:151,0,0,-
+BRDA:151,0,1,-
+BRDA:168,0,0,-
+BRDA:168,0,1,-
+BRDA:187,0,0,-
+BRDA:187,0,1,-
+BRDA:204,1,0,-
+BRDA:204,1,1,-
+BRDA:205,2,0,-
+BRDA:205,2,1,-
+BRDA:192,0,0,-
+BRDA:192,0,1,-
+BRDA:192,1,0,-
+BRDA:192,1,1,-
+BRDA:195,2,0,-
+BRDA:195,2,1,-
+BRDA:224,0,0,-
+BRDA:224,0,1,-
+BRDA:225,1,0,-
+BRDA:225,1,1,-
+BRDA:246,2,0,-
+BRDA:246,2,1,-
+BRDA:229,0,0,-
+BRDA:229,0,1,-
+BRDA:229,1,0,-
+BRDA:229,1,1,-
+BRF:500
+BRH:90
+DA:7,1
+DA:8,1
+DA:23,1
+DA:24,1
+DA:25,1
+DA:26,1
+DA:27,1
+DA:28,1
+DA:36,1
+DA:48,1
+DA:49,1
+DA:51,1
+DA:52,1
+DA:53,1
+DA:62,1
+DA:324,1
+DA:330,1
+DA:607,1
+DA:608,1
+DA:609,1
+DA:614,1
+DA:870,1
+DA:1406,1
+DA:1550,1
+DA:1570,1
+DA:1575,1
+DA:1719,1
+DA:1947,1
+DA:2164,1
+DA:2337,1
+DA:2445,1
+DA:2456,1
+DA:2585,1
+DA:2586,1
+DA:2588,1
+DA:7,1
+DA:8,1
+DA:10,1
+DA:11,1
+DA:12,1
+DA:13,1
+DA:14,1
+DA:15,1
+DA:17,1
+DA:18,1
+DA:17,1
+DA:19,1
+DA:20,1
+DA:19,1
+DA:29,1
+DA:31,1
+DA:48,1
+DA:49,1
+DA:51,1
+DA:52,1
+DA:53,1
+DA:62,1
+DA:67,1
+DA:86,1
+DA:96,1
+DA:111,1
+DA:120,1
+DA:129,1
+DA:138,1
+DA:150,1
+DA:167,1
+DA:184,1
+DA:221,1
+DA:259,1
+DA:275,1
+DA:287,1
+DA:307,1
+DA:317,1
+DA:324,1
+DA:330,1
+DA:334,1
+DA:363,1
+DA:399,1
+DA:431,1
+DA:459,1
+DA:482,1
+DA:499,1
+DA:544,1
+DA:567,1
+DA:575,1
+DA:607,1
+DA:608,1
+DA:609,1
+DA:614,1
+DA:618,1
+DA:619,1
+DA:624,1
+DA:629,1
+DA:630,1
+DA:635,1
+DA:638,1
+DA:641,1
+DA:647,1
+DA:676,1
+DA:685,1
+DA:697,1
+DA:704,1
+DA:714,1
+DA:722,1
+DA:755,1
+DA:764,1
+DA:791,1
+DA:799,1
+DA:881,1
+DA:885,1
+DA:890,1
+DA:895,1
+DA:907,1
+DA:920,1
+DA:933,1
+DA:942,1
+DA:951,1
+DA:958,1
+DA:1001,1
+DA:1005,1
+DA:1010,1
+DA:1015,1
+DA:1020,1
+DA:1025,1
+DA:1035,1
+DA:1051,1
+DA:1063,1
+DA:1071,1
+DA:1084,1
+DA:1093,1
+DA:1105,1
+DA:1116,1
+DA:1128,1
+DA:1147,1
+DA:1199,1
+DA:1213,1
+DA:1222,1
+DA:1246,1
+DA:1258,1
+DA:1275,1
+DA:1284,1
+DA:1296,1
+DA:1324,1
+DA:1386,1
+DA:1406,1
+DA:1410,1
+DA:1411,1
+DA:1416,1
+DA:1417,1
+DA:1422,1
+DA:1423,1
+DA:1424,1
+DA:1425,1
+DA:1432,1
+DA:1465,1
+DA:1489,1
+DA:1505,1
+DA:1524,1
+DA:1550,1
+DA:1551,1
+DA:1555,1
+DA:1570,1
+DA:1575,1
+DA:1579,1
+DA:1585,1
+DA:1594,1
+DA:1609,1
+DA:1628,1
+DA:1645,1
+DA:1669,1
+DA:1691,1
+DA:1702,1
+DA:1719,1
+DA:1723,1
+DA:1728,1
+DA:1733,1
+DA:1740,1
+DA:1752,1
+DA:1764,1
+DA:1788,1
+DA:1799,1
+DA:1819,1
+DA:1839,1
+DA:1862,1
+DA:1888,1
+DA:1914,1
+DA:1929,1
+DA:1947,1
+DA:1954,1
+DA:1972,1
+DA:1999,1
+DA:2023,1
+DA:2047,1
+DA:2077,1
+DA:2104,1
+DA:2135,1
+DA:2164,1
+DA:2170,1
+DA:2206,1
+DA:2225,1
+DA:2236,1
+DA:2268,1
+DA:2279,1
+DA:2308,1
+DA:2337,1
+DA:2341,1
+DA:2346,1
+DA:2351,1
+DA:2363,1
+DA:2383,1
+DA:2399,1
+DA:2416,1
+DA:2434,1
+DA:2442,1
+DA:2445,1
+DA:2446,1
+DA:2456,1
+DA:2457,1
+DA:2467,1
+DA:2469,1
+DA:2483,1
+DA:2495,1
+DA:2517,1
+DA:2528,1
+DA:2554,1
+DA:2560,1
+DA:2565,1
+DA:2585,1
+DA:2586,1
+DA:2588,1
+DA:2589,1
+DA:2598,1
+DA:2600,1
+DA:2602,1
+DA:2608,1
+DA:2621,1
+DA:2625,1
+DA:2629,1
+DA:2636,1
+DA:2646,1
+DA:2656,1
+DA:2677,1
+DA:32,1
+DA:33,1
+DA:32,1
+DA:233,0
+DA:235,0
+DA:236,0
+DA:238,0
+DA:241,0
+DA:261,0
+DA:262,0
+DA:263,0
+DA:264,0
+DA:265,0
+DA:276,0
+DA:277,0
+DA:289,0
+DA:290,0
+DA:291,0
+DA:293,0
+DA:294,0
+DA:297,0
+DA:308,0
+DA:309,0
+DA:318,0
+DA:336,1
+DA:339,1
+DA:344,1
+DA:347,1
+DA:348,1
+DA:349,1
+DA:351,1
+DA:354,1
+DA:357,1
+DA:364,0
+DA:365,0
+DA:367,0
+DA:368,0
+DA:371,0
+DA:372,0
+DA:373,0
+DA:377,0
+DA:378,0
+DA:380,0
+DA:382,0
+DA:383,0
+DA:384,0
+DA:385,0
+DA:383,0
+DA:401,2
+DA:405,2
+DA:406,1
+DA:409,2
+DA:413,0
+DA:414,0
+DA:417,0
+DA:424,0
+DA:417,0
+DA:419,0
+DA:420,0
+DA:423,0
+DA:432,1
+DA:433,0
+DA:435,1
+DA:438,1
+DA:440,1
+DA:441,0
+DA:443,1
+DA:447,1
+DA:450,1
+DA:461,1
+DA:462,1
+DA:463,2
+DA:464,2
+DA:466,0
+DA:462,3
+DA:470,1
+DA:471,1
+DA:472,0
+DA:473,0
+DA:475,0
+DA:471,1
+DA:483,1
+DA:487,1
+DA:488,1
+DA:489,0
+DA:492,1
+DA:500,2
+DA:502,1
+DA:505,1
+DA:508,0
+DA:511,0
+DA:512,0
+DA:515,0
+DA:510,0
+DA:519,0
+DA:520,0
+DA:523,0
+DA:524,0
+DA:525,0
+DA:530,0
+DA:531,0
+DA:532,0
+DA:535,0
+DA:538,0
+DA:539,0
+DA:546,1
+DA:548,1
+DA:549,15
+DA:553,1
+DA:548,17
+DA:558,1
+DA:560,1
+DA:561,0
+DA:563,1
+DA:569,1
+DA:572,1
+DA:576,1
+DA:577,1
+DA:576,1
+DA:579,1
+DA:580,1
+DA:581,1
+DA:580,1
+DA:583,1
+DA:584,1
+DA:585,1
+DA:587,1
+DA:588,23
+DA:589,8
+DA:590,1
+DA:591,1
+DA:593,0
+DA:587,25
+DA:599,1
+DA:648,1
+DA:649,1
+DA:651,1
+DA:665,1
+DA:666,1
+DA:652,1
+DA:653,1
+DA:659,1
+DA:661,1
+DA:677,2
+DA:678,2
+DA:679,2
+DA:686,1
+DA:689,1
+DA:698,0
+DA:705,0
+DA:715,1
+DA:716,1
+DA:723,2
+DA:724,2
+DA:727,2
+DA:728,2
+DA:729,30
+DA:728,30
+DA:733,2
+DA:736,2
+DA:739,2
+DA:740,2
+DA:741,2
+DA:742,2
+DA:740,2
+DA:746,2
+DA:747,2
+DA:748,2
+DA:756,4
+DA:757,4
+DA:758,4
+DA:765,1
+DA:766,1
+DA:767,1
+DA:770,1
+DA:771,1
+DA:777,1
+DA:779,1
+DA:783,1
+DA:784,1
+DA:792,8
+DA:793,8
+DA:804,3
+DA:805,1
+DA:809,2
+DA:810,1
+DA:811,1
+DA:812,1
+DA:813,1
+DA:814,1
+DA:817,2
+DA:819,2
+DA:820,1
+DA:821,1
+DA:825,2
+DA:826,2
+DA:827,2
+DA:830,2
+DA:832,2
+DA:833,2
+DA:832,2
+DA:835,2
+DA:837,2
+DA:839,8
+DA:842,8
+DA:838,11
+DA:849,2
+DA:856,2
+DA:857,2
+DA:858,2
+DA:859,2
+DA:860,2
+DA:871,30
+DA:872,30
+DA:873,30
+DA:876,30
+DA:878,30
+DA:876,30
+DA:877,120
+DA:896,0
+DA:899,0
+DA:901,0
+DA:908,0
+DA:909,0
+DA:912,0
+DA:914,0
+DA:921,0
+DA:922,0
+DA:925,0
+DA:927,0
+DA:934,180
+DA:935,180
+DA:943,0
+DA:944,0
+DA:952,0
+DA:961,0
+DA:962,0
+DA:964,0
+DA:965,0
+DA:967,0
+DA:969,0
+DA:970,0
+DA:973,0
+DA:976,0
+DA:979,0
+DA:980,0
+DA:992,2
+DA:993,2
+DA:995,2
+DA:997,2
+DA:998,2
+DA:1026,0
+DA:1027,0
+DA:1036,0
+DA:1037,0
+DA:1039,0
+DA:1040,0
+DA:1041,0
+DA:1043,0
+DA:1045,0
+DA:1052,0
+DA:1053,0
+DA:1054,0
+DA:1055,0
+DA:1064,2
+DA:1072,0
+DA:1073,0
+DA:1074,0
+DA:1075,0
+DA:1085,12
+DA:1094,0
+DA:1096,0
+DA:1097,0
+DA:1098,0
+DA:1100,0
+DA:1101,0
+DA:1106,0
+DA:1107,0
+DA:1109,0
+DA:1110,0
+DA:1111,0
+DA:1108,0
+DA:1113,0
+DA:1117,0
+DA:1118,0
+DA:1119,0
+DA:1120,0
+DA:1122,0
+DA:1129,2
+DA:1130,0
+DA:1132,0
+DA:1134,0
+DA:1135,0
+DA:1137,0
+DA:1139,0
+DA:1140,0
+DA:1149,2
+DA:1151,2
+DA:1152,2
+DA:1153,2
+DA:1154,0
+DA:1155,0
+DA:1156,2
+DA:1158,2
+DA:1159,2
+DA:1160,2
+DA:1161,2
+DA:1163,2
+DA:1164,2
+DA:1165,2
+DA:1166,2
+DA:1171,2
+DA:1173,2
+DA:1174,0
+DA:1175,0
+DA:1176,0
+DA:1179,0
+DA:1180,0
+DA:1181,0
+DA:1182,0
+DA:1185,2
+DA:1186,0
+DA:1189,2
+DA:1191,2
+DA:1200,1
+DA:1202,0
+DA:1205,1
+DA:1214,3
+DA:1215,3
+DA:1224,2
+DA:1225,0
+DA:1227,2
+DA:1228,2
+DA:1229,0
+DA:1232,2
+DA:1233,2
+DA:1235,2
+DA:1236,2
+DA:1237,2
+DA:1239,2
+DA:1240,0
+DA:1241,0
+DA:1247,4
+DA:1250,4
+DA:1248,0
+DA:1251,0
+DA:1260,2
+DA:1261,2
+DA:1262,2
+DA:1266,2
+DA:1267,2
+DA:1268,2
+DA:1269,2
+DA:1276,0
+DA:1277,0
+DA:1278,0
+DA:1285,0
+DA:1286,0
+DA:1287,0
+DA:1290,0
+DA:1292,0
+DA:1293,0
+DA:1292,0
+DA:1297,0
+DA:1298,0
+DA:1299,0
+DA:1300,0
+DA:1302,0
+DA:1304,0
+DA:1305,0
+DA:1306,0
+DA:1307,0
+DA:1309,0
+DA:1310,0
+DA:1311,0
+DA:1312,0
+DA:1313,0
+DA:1314,0
+DA:1315,0
+DA:1317,0
+DA:1325,0
+DA:1326,0
+DA:1327,0
+DA:1328,0
+DA:1331,0
+DA:1332,0
+DA:1334,0
+DA:1335,0
+DA:1336,0
+DA:1340,0
+DA:1341,0
+DA:1343,0
+DA:1344,0
+DA:1347,0
+DA:1348,0
+DA:1349,0
+DA:1353,0
+DA:1354,0
+DA:1356,0
+DA:1357,0
+DA:1359,0
+DA:1360,0
+DA:1361,0
+DA:1362,0
+DA:1364,0
+DA:1365,0
+DA:1366,0
+DA:1368,0
+DA:1369,0
+DA:1371,0
+DA:1373,0
+DA:1378,0
+DA:1379,0
+DA:1387,0
+DA:1389,0
+DA:1390,0
+DA:1393,0
+DA:1396,0
+DA:1433,0
+DA:1436,0
+DA:1437,0
+DA:1438,0
+DA:1439,0
+DA:1440,0
+DA:1439,0
+DA:1442,0
+DA:1444,0
+DA:1447,0
+DA:1448,0
+DA:1449,0
+DA:1450,0
+DA:1453,0
+DA:1454,0
+DA:1455,0
+DA:1457,0
+DA:1467,0
+DA:1470,0
+DA:1473,0
+DA:1474,0
+DA:1477,0
+DA:1478,0
+DA:1481,0
+DA:1490,0
+DA:1491,0
+DA:1492,0
+DA:1491,0
+DA:1495,0
+DA:1497,0
+DA:1506,0
+DA:1510,0
+DA:1511,0
+DA:1516,0
+DA:1525,0
+DA:1527,0
+DA:1528,0
+DA:1529,0
+DA:1530,0
+DA:1531,0
+DA:1532,0
+DA:1533,0
+DA:1537,0
+DA:1538,0
+DA:1539,0
+DA:1540,0
+DA:1541,0
+DA:1545,0
+DA:1552,0
+DA:1556,0
+DA:1557,0
+DA:1558,0
+DA:1561,0
+DA:1562,0
+DA:1563,0
+DA:1586,0
+DA:1595,0
+DA:1596,0
+DA:1599,0
+DA:1600,0
+DA:1612,0
+DA:1613,0
+DA:1616,0
+DA:1618,0
+DA:1621,0
+DA:1629,0
+DA:1632,0
+DA:1637,0
+DA:1633,0
+DA:1646,0
+DA:1647,0
+DA:1649,0
+DA:1651,0
+DA:1652,0
+DA:1653,0
+DA:1654,0
+DA:1655,0
+DA:1657,0
+DA:1660,0
+DA:1676,0
+DA:1671,0
+DA:1672,0
+DA:1674,0
+DA:1681,0
+DA:1682,0
+DA:1685,0
+DA:1677,0
+DA:1678,0
+DA:1692,0
+DA:1693,0
+DA:1694,0
+DA:1703,0
+DA:1706,0
+DA:1707,0
+DA:1709,0
+DA:1734,1
+DA:1741,0
+DA:1743,0
+DA:1744,0
+DA:1745,0
+DA:1746,0
+DA:1753,0
+DA:1755,0
+DA:1756,0
+DA:1757,0
+DA:1758,0
+DA:1765,0
+DA:1767,0
+DA:1770,0
+DA:1773,0
+DA:1776,0
+DA:1779,0
+DA:1789,0
+DA:1790,0
+DA:1791,0
+DA:1802,0
+DA:1803,0
+DA:1806,0
+DA:1810,0
+DA:1811,0
+DA:1821,0
+DA:1825,0
+DA:1829,0
+DA:1832,0
+DA:1840,0
+DA:1841,0
+DA:1843,0
+DA:1844,0
+DA:1848,0
+DA:1849,0
+DA:1852,0
+DA:1853,0
+DA:1854,0
+DA:1864,0
+DA:1866,0
+DA:1867,0
+DA:1869,0
+DA:1871,0
+DA:1873,0
+DA:1875,0
+DA:1877,0
+DA:1879,0
+DA:1891,0
+DA:1892,0
+DA:1894,0
+DA:1895,0
+DA:1898,0
+DA:1899,0
+DA:1902,0
+DA:1903,0
+DA:1898,0
+DA:1907,0
+DA:1915,0
+DA:1916,0
+DA:1918,0
+DA:1919,0
+DA:1930,0
+DA:1931,0
+DA:1934,0
+DA:1935,0
+DA:1934,0
+DA:1937,0
+DA:1955,0
+DA:1958,0
+DA:1962,0
+DA:1964,0
+DA:1973,0
+DA:1974,0
+DA:1977,0
+DA:1978,0
+DA:1979,0
+DA:1982,0
+DA:1983,0
+DA:1984,0
+DA:1988,0
+DA:2002,0
+DA:2005,0
+DA:2008,0
+DA:2005,0
+DA:2012,0
+DA:2013,0
+DA:2006,0
+DA:2007,0
+DA:2024,0
+DA:2028,0
+DA:2030,0
+DA:2032,0
+DA:2033,0
+DA:2035,0
+DA:2038,0
+DA:2048,0
+DA:2049,0
+DA:2052,0
+DA:2053,0
+DA:2057,0
+DA:2056,0
+DA:2059,0
+DA:2063,0
+DA:2062,0
+DA:2066,0
+DA:2081,0
+DA:2082,0
+DA:2084,0
+DA:2087,0
+DA:2088,0
+DA:2090,0
+DA:2093,0
+DA:2107,0
+DA:2109,0
+DA:2110,0
+DA:2111,0
+DA:2115,0
+DA:2116,0
+DA:2117,0
+DA:2118,0
+DA:2115,0
+DA:2123,0
+DA:2137,0
+DA:2138,0
+DA:2143,0
+DA:2145,0
+DA:2146,0
+DA:2148,0
+DA:2151,0
+DA:2152,0
+DA:2143,0
+DA:2155,0
+DA:2171,0
+DA:2174,0
+DA:2177,0
+DA:2180,0
+DA:2185,0
+DA:2189,0
+DA:2191,0
+DA:2194,0
+DA:2207,0
+DA:2210,0
+DA:2216,0
+DA:2211,0
+DA:2212,0
+DA:2217,0
+DA:2226,0
+DA:2227,0
+DA:2228,0
+DA:2237,0
+DA:2242,0
+DA:2244,0
+DA:2260,0
+DA:2244,0
+DA:2245,0
+DA:2246,0
+DA:2249,0
+DA:2250,0
+DA:2253,0
+DA:2254,0
+DA:2257,0
+DA:2258,0
+DA:2269,0
+DA:2270,0
+DA:2280,0
+DA:2283,0
+DA:2300,0
+DA:2285,0
+DA:2286,0
+DA:2288,0
+DA:2290,0
+DA:2291,0
+DA:2294,0
+DA:2295,0
+DA:2309,0
+DA:2312,0
+DA:2328,0
+DA:2312,0
+DA:2328,0
+DA:2312,0
+DA:2313,0
+DA:2314,0
+DA:2316,0
+DA:2318,0
+DA:2321,0
+DA:2325,0
+DA:2326,0
+DA:2352,1
+DA:2353,1
+DA:2354,1
+DA:2355,1
+DA:2356,1
+DA:2364,0
+DA:2365,0
+DA:2367,0
+DA:2368,0
+DA:2369,0
+DA:2370,0
+DA:2371,0
+DA:2374,0
+DA:2375,0
+DA:2376,0
+DA:2377,0
+DA:2384,0
+DA:2385,0
+DA:2387,0
+DA:2388,0
+DA:2389,0
+DA:2390,0
+DA:2391,0
+DA:2392,0
+DA:2400,0
+DA:2402,0
+DA:2405,0
+DA:2408,0
+DA:2417,0
+DA:2418,0
+DA:2420,0
+DA:2421,0
+DA:2423,0
+DA:2424,0
+DA:2427,0
+DA:2428,0
+DA:2435,0
+DA:2438,0
+DA:2435,0
+DA:2436,0
+DA:2437,0
+DA:2447,1
+DA:2448,1
+DA:2447,1
+DA:2449,1
+DA:2450,1
+DA:2451,1
+DA:2470,1
+DA:2471,7
+DA:2470,8
+DA:2474,1
+DA:2475,1
+DA:2476,1
+DA:2477,1
+DA:2478,1
+DA:2480,1
+DA:2484,0
+DA:2488,0
+DA:2489,0
+DA:2490,0
+DA:2491,0
+DA:2492,0
+DA:2485,0
+DA:2486,0
+DA:2496,0
+DA:2497,0
+DA:2500,0
+DA:2501,0
+DA:2502,0
+DA:2503,0
+DA:2504,0
+DA:2507,0
+DA:2504,0
+DA:2509,0
+DA:2510,0
+DA:2514,0
+DA:2506,0
+DA:2518,0
+DA:2520,0
+DA:2523,0
+DA:2529,0
+DA:2530,0
+DA:2531,0
+DA:2534,0
+DA:2536,0
+DA:2539,0
+DA:2540,0
+DA:2542,0
+DA:2546,0
+DA:2549,0
+DA:2555,0
+DA:2556,0
+DA:2561,0
+DA:2562,0
+DA:2566,1
+DA:2567,1
+DA:2568,1
+DA:2577,1
+DA:2579,1
+DA:2569,3
+DA:2570,3
+DA:2571,1
+DA:2574,2
+DA:2603,1
+DA:2604,6
+DA:2603,7
+DA:2610,0
+DA:2612,0
+DA:2617,0
+DA:2618,0
+DA:2617,0
+DA:2613,0
+DA:2622,0
+DA:2626,0
+DA:2630,0
+DA:2631,0
+DA:2632,0
+DA:2637,0
+DA:2638,0
+DA:2639,0
+DA:2640,0
+DA:2641,0
+DA:2642,0
+DA:2639,0
+DA:2647,1
+DA:2648,0
+DA:2650,1
+DA:2651,0
+DA:2652,0
+DA:2657,0
+DA:2658,0
+DA:2660,0
+DA:2662,0
+DA:2664,0
+DA:2667,0
+DA:2670,0
+DA:2671,0
+DA:37,3
+DA:38,3
+DA:39,3
+DA:41,0
+DA:45,0
+DA:68,0
+DA:69,0
+DA:70,0
+DA:71,0
+DA:75,0
+DA:76,0
+DA:75,0
+DA:78,0
+DA:87,0
+DA:88,0
+DA:97,0
+DA:99,0
+DA:101,0
+DA:102,0
+DA:112,0
+DA:121,0
+DA:130,0
+DA:139,0
+DA:140,0
+DA:142,0
+DA:143,0
+DA:151,0
+DA:152,0
+DA:154,0
+DA:155,0
+DA:156,0
+DA:157,0
+DA:159,0
+DA:160,0
+DA:168,0
+DA:169,0
+DA:171,0
+DA:172,0
+DA:173,0
+DA:191,0
+DA:185,0
+DA:186,0
+DA:187,0
+DA:189,0
+DA:200,0
+DA:201,0
+DA:204,0
+DA:205,0
+DA:206,0
+DA:208,0
+DA:209,0
+DA:192,0
+DA:193,0
+DA:195,0
+DA:196,0
+DA:222,0
+DA:223,0
+DA:224,0
+DA:225,0
+DA:227,0
+DA:244,0
+DA:227,0
+DA:246,0
+DA:247,0
+DA:229,0
+DA:230,0
+DA:232,0
+LF:1146
+LH:478
+end_of_record
diff --git a/python/mozbuild/mozbuild/test/codecoverage/test_lcov_rewrite.py b/python/mozbuild/mozbuild/test/codecoverage/test_lcov_rewrite.py
new file mode 100644
index 0000000000..9009300951
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/codecoverage/test_lcov_rewrite.py
@@ -0,0 +1,444 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+import os
+import shutil
+import unittest
+from io import StringIO
+from tempfile import NamedTemporaryFile
+
+import buildconfig
+import mozunit
+
+from mozbuild.codecoverage import chrome_map, lcov_rewriter
+
+here = os.path.dirname(__file__)
+
+BUILDCONFIG = {
+ "topobjdir": buildconfig.topobjdir,
+ "MOZ_APP_NAME": buildconfig.substs.get("MOZ_APP_NAME", "nightly"),
+ "OMNIJAR_NAME": buildconfig.substs.get("OMNIJAR_NAME", "omni.ja"),
+ "MOZ_MACBUNDLE_NAME": buildconfig.substs.get("MOZ_MACBUNDLE_NAME", "Nightly.app"),
+}
+
+basic_file = """TN:Compartment_5f7f5c30251800
+SF:resource://gre/modules/osfile.jsm
+FN:1,top-level
+FNDA:1,top-level
+FNF:1
+FNH:1
+BRDA:9,0,61,1
+BRF:1
+BRH:1
+DA:9,1
+DA:24,1
+LF:2
+LH:2
+end_of_record
+"""
+
+# These line numbers are (synthetically) sorted.
+multiple_records = """SF:resource://gre/modules/workers/require.js
+FN:1,top-level
+FN:80,.get
+FN:95,require
+FNDA:1,top-level
+FNF:3
+FNH:1
+BRDA:46,0,16,-
+BRDA:135,225,446,-
+BRF:2
+BRH:0
+DA:43,1
+DA:46,1
+DA:152,0
+DA:163,1
+LF:4
+LH:3
+end_of_record
+SF:resource://gre/modules/osfile/osfile_async_worker.js
+FN:12,top-level
+FN:30,worker.dispatch
+FN:34,worker.postMessage
+FN:387,do_close
+FN:392,exists
+FN:394,do_exists
+FN:400,unixSymLink
+FNDA:1,do_exists
+FNDA:1,exists
+FNDA:1,top-level
+FNDA:594,worker.dispatch
+FNF:7
+FNH:4
+BRDA:6,0,30,1
+BRDA:365,0,103,-
+BRF:2
+BRH:1
+DA:6,1
+DA:7,0
+DA:12,1
+DA:18,1
+DA:19,1
+DA:20,1
+DA:23,1
+DA:25,1
+DA:401,0
+DA:407,1
+LF:10
+LH:8
+end_of_record
+"""
+
+fn_with_multiple_commas = """TN:Compartment_5f7f5c30251800
+SF:resource://gre/modules/osfile.jsm
+FN:1,function,name,with,commas
+FNDA:1,function,name,with,commas
+FNF:1
+FNH:1
+BRDA:9,0,61,1
+BRF:1
+BRH:1
+DA:9,1
+DA:24,1
+LF:2
+LH:2
+end_of_record
+"""
+
+
+class TempFile:
+ def __init__(self, content):
+ self.file = NamedTemporaryFile(mode="w", delete=False, encoding="utf-8")
+ self.file.write(content)
+ self.file.close()
+
+ def __enter__(self):
+ return self.file.name
+
+ def __exit__(self, *args):
+ os.remove(self.file.name)
+
+
+class TestLcovParser(unittest.TestCase):
+ def parser_roundtrip(self, lcov_string):
+ with TempFile(lcov_string) as fname:
+ file_obj = lcov_rewriter.LcovFile([fname])
+ out = StringIO()
+ file_obj.print_file(out, lambda s: (s, None), lambda x, pp: x)
+
+ return out.getvalue()
+
+ def test_basic_parse(self):
+ output = self.parser_roundtrip(basic_file)
+ self.assertEqual(basic_file, output)
+
+ output = self.parser_roundtrip(multiple_records)
+ self.assertEqual(multiple_records, output)
+
+ def test_multiple_commas(self):
+ output = self.parser_roundtrip(fn_with_multiple_commas)
+ self.assertEqual(fn_with_multiple_commas, output)
+
+
+multiple_included_files = """//@line 1 "/src/dir/foo.js"
+bazfoobar
+//@line 2 "/src/dir/path/bar.js"
+@foo@
+//@line 3 "/src/dir/foo.js"
+bazbarfoo
+//@line 2 "/src/dir/path/bar.js"
+foobarbaz
+//@line 3 "/src/dir/path2/test.js"
+barfoobaz
+//@line 1 "/src/dir/path/baz.js"
+baz
+//@line 6 "/src/dir/f.js"
+fin
+"""
+
+srcdir_prefix_files = """//@line 1 "/src/dir/foo.js"
+bazfoobar
+//@line 2 "$SRCDIR/path/file.js"
+@foo@
+//@line 3 "/src/dir/foo.js"
+bazbarfoo
+"""
+
+
+class TestLineRemapping(unittest.TestCase):
+ def setUp(self):
+ chrome_map_file = os.path.join(buildconfig.topobjdir, "chrome-map.json")
+ self._old_chrome_info_file = None
+ if os.path.isfile(chrome_map_file):
+ backup_file = os.path.join(buildconfig.topobjdir, "chrome-map-backup.json")
+ self._old_chrome_info_file = backup_file
+ self._chrome_map_file = chrome_map_file
+ shutil.move(chrome_map_file, backup_file)
+
+ empty_chrome_info = [
+ {},
+ {},
+ {},
+ BUILDCONFIG,
+ ]
+ with open(chrome_map_file, "w") as fh:
+ json.dump(empty_chrome_info, fh)
+
+ self.lcov_rewriter = lcov_rewriter.LcovFileRewriter(chrome_map_file, "", "", [])
+ self.pp_rewriter = self.lcov_rewriter.pp_rewriter
+
+ def tearDown(self):
+ if self._old_chrome_info_file:
+ shutil.move(self._old_chrome_info_file, self._chrome_map_file)
+
+ def test_map_multiple_included(self):
+ with TempFile(multiple_included_files) as fname:
+ actual = chrome_map.generate_pp_info(fname, "/src/dir")
+ expected = {
+ "2,3": ("foo.js", 1),
+ "4,5": ("path/bar.js", 2),
+ "6,7": ("foo.js", 3),
+ "8,9": ("path/bar.js", 2),
+ "10,11": ("path2/test.js", 3),
+ "12,13": ("path/baz.js", 1),
+ "14,15": ("f.js", 6),
+ }
+
+ self.assertEqual(actual, expected)
+
+ def test_map_srcdir_prefix(self):
+ with TempFile(srcdir_prefix_files) as fname:
+ actual = chrome_map.generate_pp_info(fname, "/src/dir")
+ expected = {
+ "2,3": ("foo.js", 1),
+ "4,5": ("path/file.js", 2),
+ "6,7": ("foo.js", 3),
+ }
+
+ self.assertEqual(actual, expected)
+
+ def test_remap_lcov(self):
+ pp_remap = {
+ "1941,2158": ("dropPreview.js", 6),
+ "2159,2331": ("updater.js", 6),
+ "2584,2674": ("intro.js", 6),
+ "2332,2443": ("undo.js", 6),
+ "864,985": ("cells.js", 6),
+ "2444,2454": ("search.js", 6),
+ "1567,1712": ("drop.js", 6),
+ "2455,2583": ("customize.js", 6),
+ "1713,1940": ("dropTargetShim.js", 6),
+ "1402,1548": ("drag.js", 6),
+ "1549,1566": ("dragDataHelper.js", 6),
+ "453,602": ("page.js", 141),
+ "2675,2678": ("newTab.js", 70),
+ "56,321": ("transformations.js", 6),
+ "603,863": ("grid.js", 6),
+ "322,452": ("page.js", 6),
+ "986,1401": ("sites.js", 6),
+ }
+
+ fpath = os.path.join(here, "sample_lcov.info")
+
+ # Read original records
+ lcov_file = lcov_rewriter.LcovFile([fpath])
+ records = [lcov_file.parse_record(r) for _, _, r in lcov_file.iterate_records()]
+
+ # This summarization changes values due multiple reports per line coming
+ # from the JS engine (bug 1198356).
+ for r in records:
+ r.resummarize()
+ original_line_count = r.line_count
+ original_covered_line_count = r.covered_line_count
+ original_function_count = r.function_count
+ original_covered_function_count = r.covered_function_count
+
+ self.assertEqual(len(records), 1)
+
+ # Rewrite preprocessed entries.
+ lcov_file = lcov_rewriter.LcovFile([fpath])
+ r_num = []
+
+ def rewrite_source(s):
+ r_num.append(1)
+ return s, pp_remap
+
+ out = StringIO()
+ lcov_file.print_file(out, rewrite_source, self.pp_rewriter.rewrite_record)
+ self.assertEqual(len(r_num), 1)
+
+ # Read rewritten lcov.
+ with TempFile(out.getvalue()) as fname:
+ lcov_file = lcov_rewriter.LcovFile([fname])
+ records = [
+ lcov_file.parse_record(r) for _, _, r in lcov_file.iterate_records()
+ ]
+
+ self.assertEqual(len(records), 17)
+
+ # Lines/functions are only "moved" between records, not duplicated or omited.
+ self.assertEqual(original_line_count, sum(r.line_count for r in records))
+ self.assertEqual(
+ original_covered_line_count, sum(r.covered_line_count for r in records)
+ )
+ self.assertEqual(
+ original_function_count, sum(r.function_count for r in records)
+ )
+ self.assertEqual(
+ original_covered_function_count,
+ sum(r.covered_function_count for r in records),
+ )
+
+
+class TestUrlFinder(unittest.TestCase):
+ def setUp(self):
+ chrome_map_file = os.path.join(buildconfig.topobjdir, "chrome-map.json")
+ self._old_chrome_info_file = None
+ if os.path.isfile(chrome_map_file):
+ backup_file = os.path.join(buildconfig.topobjdir, "chrome-map-backup.json")
+ self._old_chrome_info_file = backup_file
+ self._chrome_map_file = chrome_map_file
+ shutil.move(chrome_map_file, backup_file)
+
+ dummy_chrome_info = [
+ {
+ "resource://activity-stream/": [
+ "dist/bin/browser/chrome/browser/res/activity-stream",
+ ],
+ "chrome://browser/content/": [
+ "dist/bin/browser/chrome/browser/content/browser",
+ ],
+ },
+ {
+ "chrome://global/content/license.html": "chrome://browser/content/license.html",
+ },
+ {
+ "dist/bin/components/MainProcessSingleton.js": ["path1", None],
+ "dist/bin/browser/features/firefox@getpocket.com/bootstrap.js": [
+ "path4",
+ None,
+ ],
+ "dist/bin/modules/osfile/osfile_async_worker.js": [
+ "toolkit/components/osfile/modules/osfile_async_worker.js",
+ None,
+ ],
+ "dist/bin/browser/chrome/browser/res/activity-stream/lib/": [
+ "browser/components/newtab/lib/*",
+ None,
+ ],
+ "dist/bin/browser/chrome/browser/content/browser/license.html": [
+ "browser/base/content/license.html",
+ None,
+ ],
+ "dist/bin/modules/AppConstants.sys.mjs": [
+ "toolkit/modules/AppConstants.sys.mjs",
+ {
+ "101,102": ["toolkit/modules/AppConstants.sys.mjs", 135],
+ },
+ ],
+ },
+ BUILDCONFIG,
+ ]
+ with open(chrome_map_file, "w") as fh:
+ json.dump(dummy_chrome_info, fh)
+
+ def tearDown(self):
+ if self._old_chrome_info_file:
+ shutil.move(self._old_chrome_info_file, self._chrome_map_file)
+
+ def test_jar_paths(self):
+ app_name = BUILDCONFIG["MOZ_APP_NAME"]
+ omnijar_name = BUILDCONFIG["OMNIJAR_NAME"]
+
+ paths = [
+ (
+ "jar:file:///home/worker/workspace/build/application/"
+ + app_name
+ + "/"
+ + omnijar_name
+ + "!/components/MainProcessSingleton.js",
+ "path1",
+ ),
+ (
+ "jar:file:///home/worker/workspace/build/application/"
+ + app_name
+ + "/browser/features/firefox@getpocket.com.xpi!/bootstrap.js",
+ "path4",
+ ),
+ ]
+
+ url_finder = lcov_rewriter.UrlFinder(self._chrome_map_file, "", "", [])
+ for path, expected in paths:
+ self.assertEqual(url_finder.rewrite_url(path)[0], expected)
+
+ def test_wrong_scheme_paths(self):
+ paths = [
+ "http://www.mozilla.org/aFile.js",
+ "https://www.mozilla.org/aFile.js",
+ "data:something",
+ "about:newtab",
+ "javascript:something",
+ ]
+
+ url_finder = lcov_rewriter.UrlFinder(self._chrome_map_file, "", "", [])
+ for path in paths:
+ self.assertIsNone(url_finder.rewrite_url(path))
+
+ def test_chrome_resource_paths(self):
+ paths = [
+ # Path with default url prefix
+ (
+ "resource://gre/modules/osfile/osfile_async_worker.js",
+ ("toolkit/components/osfile/modules/osfile_async_worker.js", None),
+ ),
+ # Path with url prefix that is in chrome map
+ (
+ "resource://activity-stream/lib/PrefsFeed.jsm",
+ ("browser/components/newtab/lib/PrefsFeed.jsm", None),
+ ),
+ # Path which is in url overrides
+ (
+ "chrome://global/content/license.html",
+ ("browser/base/content/license.html", None),
+ ),
+ # Path which ends with > eval
+ (
+ "resource://gre/modules/osfile/osfile_async_worker.js line 3 > eval",
+ None,
+ ),
+ # Path which ends with > Function
+ (
+ "resource://gre/modules/osfile/osfile_async_worker.js line 3 > Function",
+ None,
+ ),
+ # Path which contains "->"
+ (
+ "resource://gre/modules/addons/XPIProvider.jsm -> resource://gre/modules/osfile/osfile_async_worker.js", # noqa
+ ("toolkit/components/osfile/modules/osfile_async_worker.js", None),
+ ),
+ # Path with pp_info
+ (
+ "resource://gre/modules/AppConstants.sys.mjs",
+ (
+ "toolkit/modules/AppConstants.sys.mjs",
+ {
+ "101,102": ["toolkit/modules/AppConstants.sys.mjs", 135],
+ },
+ ),
+ ),
+ # Path with query
+ (
+ "resource://activity-stream/lib/PrefsFeed.jsm?q=0.9098419174803978",
+ ("browser/components/newtab/lib/PrefsFeed.jsm", None),
+ ),
+ ]
+
+ url_finder = lcov_rewriter.UrlFinder(self._chrome_map_file, "", "dist/bin/", [])
+ for path, expected in paths:
+ self.assertEqual(url_finder.rewrite_url(path), expected)
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozbuild/test/common.py b/python/mozbuild/mozbuild/test/common.py
new file mode 100644
index 0000000000..47f04a8dd3
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/common.py
@@ -0,0 +1,69 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import errno
+import os
+import shutil
+
+import mozpack.path as mozpath
+from buildconfig import topsrcdir
+from mach.logging import LoggingManager
+
+from mozbuild.util import ReadOnlyDict
+
+# By including this module, tests get structured logging.
+log_manager = LoggingManager()
+log_manager.add_terminal_logging()
+
+
+def prepare_tmp_topsrcdir(path):
+ for p in (
+ "build/autoconf/config.guess",
+ "build/autoconf/config.sub",
+ "build/moz.configure/checks.configure",
+ "build/moz.configure/init.configure",
+ "build/moz.configure/util.configure",
+ ):
+ file_path = os.path.join(path, p)
+ try:
+ os.makedirs(os.path.dirname(file_path))
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+ shutil.copy(os.path.join(topsrcdir, p), file_path)
+
+
+# mozconfig is not a reusable type (it's actually a module) so, we
+# have to mock it.
+class MockConfig(object):
+ def __init__(
+ self,
+ topsrcdir="/path/to/topsrcdir",
+ extra_substs={},
+ error_is_fatal=True,
+ ):
+ self.topsrcdir = mozpath.abspath(topsrcdir)
+ self.topobjdir = mozpath.abspath("/path/to/topobjdir")
+
+ self.substs = ReadOnlyDict(
+ {
+ "MOZ_FOO": "foo",
+ "MOZ_BAR": "bar",
+ "MOZ_TRUE": "1",
+ "MOZ_FALSE": "",
+ "DLL_PREFIX": "lib",
+ "DLL_SUFFIX": ".so",
+ },
+ **extra_substs
+ )
+
+ self.defines = self.substs
+
+ self.lib_prefix = "lib"
+ self.lib_suffix = ".a"
+ self.import_prefix = "lib"
+ self.import_suffix = ".so"
+ self.dll_prefix = "lib"
+ self.dll_suffix = ".so"
+ self.error_is_fatal = error_is_fatal
diff --git a/python/mozbuild/mozbuild/test/compilation/__init__.py b/python/mozbuild/mozbuild/test/compilation/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/compilation/__init__.py
diff --git a/python/mozbuild/mozbuild/test/compilation/test_warnings.py b/python/mozbuild/mozbuild/test/compilation/test_warnings.py
new file mode 100644
index 0000000000..1769e2e333
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/compilation/test_warnings.py
@@ -0,0 +1,240 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import unittest
+
+from mozfile.mozfile import NamedTemporaryFile
+from mozunit import main
+
+from mozbuild.compilation.warnings import (
+ CompilerWarning,
+ WarningsCollector,
+ WarningsDatabase,
+)
+
# Sample clang output lines and the fields WarningsCollector is expected
# to extract from each:
# (raw line, filename, line number, column, diagnostic type, message, flag)
CLANG_TESTS = [
    (
        "foobar.cpp:123:10: warning: you messed up [-Wfoo]",
        "foobar.cpp",
        123,
        10,
        "warning",
        "you messed up",
        "-Wfoo",
    ),
    (
        "c_locale_dummy.c:457:1: error: (near initialization for "
        "'full_wmonthname[0]') [clang-diagnostic-error]",
        "c_locale_dummy.c",
        457,
        1,
        "error",
        "(near initialization for 'full_wmonthname[0]')",
        "clang-diagnostic-error",
    ),
]
+
# Monotonically increasing line number, so each generated warning is
# distinct from all previous ones.
CURRENT_LINE = 1


def get_warning():
    """Return a fresh CompilerWarning with a unique line number."""
    global CURRENT_LINE

    warning = CompilerWarning()
    warning["filename"] = "/foo/bar/baz.cpp"
    warning["line"] = CURRENT_LINE
    warning["column"] = 12
    warning["message"] = "This is irrelevant"

    CURRENT_LINE += 1

    return warning
+
+
class TestCompilerWarning(unittest.TestCase):
    """Equality, hashing and ordering semantics of CompilerWarning."""

    def test_equivalence(self):
        first = CompilerWarning()
        second = CompilerWarning()

        # Two freshly constructed warnings compare equal and collapse to
        # a single set entry.
        self.assertEqual(first, second)
        self.assertEqual(len({first, second}), 1)

        # Differing filenames make them unequal and distinct in a set.
        first["filename"] = "/foo.c"
        second["filename"] = "/bar.c"

        self.assertNotEqual(first, second)
        self.assertEqual(len({first, second}), 2)

        # Matching filename/line/column restores equality.
        first["filename"] = "/foo.c"
        second["filename"] = "/foo.c"
        first["line"] = 5
        second["line"] = 5
        first["column"] = 3
        second["column"] = 3

        self.assertEqual(first, second)

    def test_comparison(self):
        def make(filename, line, column):
            # Local factory; ordering is (filename, line, column).
            w = CompilerWarning()
            w["filename"] = filename
            w["line"] = line
            w["column"] = column
            return w

        base = make("/aaa.c", 5, 5)

        # Filename dominates the ordering.
        other = make("/bbb.c", 5, 5)
        self.assertLess(base, other)
        self.assertGreater(other, base)
        self.assertGreaterEqual(other, base)

        # Then line number, even when the column is larger.
        other = make("/aaa.c", 4, 6)
        self.assertLess(other, base)
        self.assertGreater(base, other)
        self.assertGreaterEqual(base, other)

        # Then column.
        other = make("/aaa.c", 5, 10)
        self.assertLess(base, other)
        self.assertGreater(other, base)
        self.assertGreaterEqual(other, base)

        # Fully equal warnings satisfy both <= and >= in both directions.
        other = make("/aaa.c", 5, 5)
        self.assertLessEqual(base, other)
        self.assertLessEqual(other, base)
        self.assertGreaterEqual(other, base)
        self.assertGreaterEqual(base, other)
+
+
class TestWarningsAndErrorsParsing(unittest.TestCase):
    """WarningsCollector parsing of clang warning/error lines."""

    def test_clang_parsing(self):
        for case in CLANG_TESTS:
            source, filename, line, column, diag_type, message, flag = case

            collector = WarningsCollector(lambda w: None)
            warning = collector.process_line(source)

            self.assertIsNotNone(warning)

            # Every extracted field must match the expected value.
            expected = {
                "filename": filename,
                "line": line,
                "column": column,
                "type": diag_type,
                "message": message,
                "flag": flag,
            }
            for key, value in expected.items():
                self.assertEqual(warning[key], value)
+
+
class TestWarningsDatabase(unittest.TestCase):
    """Insertion, file hashing, and pruning behavior of WarningsDatabase."""

    def test_basic(self):
        # Inserting without hashing simply accumulates entries.
        db = WarningsDatabase()

        self.assertEqual(len(db), 0)

        for i in range(10):
            db.insert(get_warning(), compute_hash=False)

        self.assertEqual(len(db), 10)

        # Iterating the database yields every stored warning.
        warnings = list(db)
        self.assertEqual(len(warnings), 10)

    def test_hashing(self):
        """Ensure that hashing files on insert works."""
        db = WarningsDatabase()

        temp = NamedTemporaryFile(mode="wt")
        temp.write("x" * 100)
        temp.flush()

        w = CompilerWarning()
        w["filename"] = temp.name
        w["line"] = 1
        w["column"] = 4
        w["message"] = "foo bar"

        # Should not throw.
        db.insert(w)

        # Hashing a missing file is expected to fail loudly.
        w["filename"] = "DOES_NOT_EXIST"

        with self.assertRaises(Exception):
            db.insert(w)

    def test_pruning(self):
        """Ensure old warnings are removed from database appropriately."""
        db = WarningsDatabase()

        source_files = []
        for i in range(1, 21):
            temp = NamedTemporaryFile(mode="wt")
            temp.write("x" * (100 * i))
            temp.flush()

            # Keep reference so it doesn't get GC'd and deleted.
            source_files.append(temp)

            w = CompilerWarning()
            w["filename"] = temp.name
            w["line"] = 1
            w["column"] = i * 10
            w["message"] = "irrelevant"

            db.insert(w)

        self.assertEqual(len(db), 20)

        # If we change a source file, inserting a new warning should nuke the
        # old one.
        source_files[0].write("extra")
        source_files[0].flush()

        w = CompilerWarning()
        w["filename"] = source_files[0].name
        w["line"] = 1
        w["column"] = 50
        w["message"] = "replaced"

        db.insert(w)

        self.assertEqual(len(db), 20)

        warnings = list(db.warnings_for_file(source_files[0].name))
        self.assertEqual(len(warnings), 1)
        self.assertEqual(warnings[0]["column"], w["column"])

        # If we delete the source file, calling prune should cause the warnings
        # to go away.
        # NOTE(review): dropping the NamedTemporaryFile reference deletes
        # the file on disk; the immediate assertFalse below relies on
        # CPython's refcount-based finalization.
        old_filename = source_files[0].name
        del source_files[0]

        self.assertFalse(os.path.exists(old_filename))

        db.prune()
        self.assertEqual(len(db), 19)
+
+
# Run the tests when this file is executed directly.
if __name__ == "__main__":
    main()
diff --git a/python/mozbuild/mozbuild/test/configure/common.py b/python/mozbuild/mozbuild/test/configure/common.py
new file mode 100644
index 0000000000..7dc1b85b22
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/common.py
@@ -0,0 +1,307 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import copy
+import errno
+import os
+import subprocess
+import sys
+import tempfile
+import unittest
+
+import six
+from buildconfig import topobjdir, topsrcdir
+from mozpack import path as mozpath
+from six import StringIO, string_types
+
+from mozbuild.configure import ConfigureSandbox
+from mozbuild.util import ReadOnlyNamespace, memoized_property
+
+
def fake_short_path(path):
    """Simulate Windows 8.3 "short" path names for tests.

    On Windows, each path component containing a space is truncated at
    the first space and suffixed with "~1"; on other platforms the path
    is returned unchanged.
    """
    if not sys.platform.startswith("win"):
        return path
    components = []
    for part in mozpath.split(path):
        if " " in part:
            part = part.split(" ", 1)[0] + "~1"
        components.append(part)
    return "/".join(components)
+
+
def ensure_exe_extension(path):
    """Return *path* with ".exe" appended on Windows, unchanged elsewhere."""
    suffix = ".exe" if sys.platform.startswith("win") else ""
    return path + suffix
+
+
class ConfigureTestVFS(object):
    """Virtual filesystem overlay used by configure tests.

    Paths listed in *paths* are reported as existing files. Anything
    under topsrcdir, topobjdir or the system temp directory falls
    through to the real filesystem; everything else does not exist.
    """

    def __init__(self, paths):
        # Known virtual files, normalized to absolute paths.
        self._paths = set(mozpath.abspath(p) for p in paths)

    def _real_file(self, path):
        # Return the containing base dir when *path* may be consulted on
        # the real filesystem, None otherwise.
        return mozpath.basedir(path, [topsrcdir, topobjdir, tempfile.gettempdir()])

    def exists(self, path):
        # NOTE(review): unlike isfile/isdir below, this does not
        # abspath() *path* before the membership test — presumably
        # callers always pass absolute paths here; confirm.
        if path in self._paths:
            return True
        if self._real_file(path):
            return os.path.exists(path)
        return False

    def isfile(self, path):
        path = mozpath.abspath(path)
        if path in self._paths:
            return True
        if self._real_file(path):
            return os.path.isfile(path)
        return False

    def expanduser(self, path):
        # Delegated straight to the real implementation.
        return os.path.expanduser(path)

    def isdir(self, path):
        path = mozpath.abspath(path)
        # A virtual directory "exists" when some virtual file lives
        # below it.
        if any(mozpath.basedir(mozpath.dirname(p), [path]) for p in self._paths):
            return True
        if self._real_file(path):
            return os.path.isdir(path)
        return False

    def getsize(self, path):
        # Virtual files have no contents, so only real files can be
        # sized.
        if not self._real_file(path):
            raise FileNotFoundError(path)
        return os.path.getsize(path)
+
+
class ConfigureTestSandbox(ConfigureSandbox):
    """Wrapper around the ConfigureSandbox for testing purposes.

    Its arguments are the same as ConfigureSandbox, except for the additional
    `paths` argument, which is a dict where the keys are file paths and the
    values are either None or a function that will be called when the sandbox
    calls an implemented function from subprocess with the key as command.
    When the command is CONFIG_SHELL, the function for the path of the script
    that follows will be called.

    The API for those functions is:
    retcode, stdout, stderr = func(stdin, args)

    This class is only meant to implement the minimal things to make
    moz.configure testing possible. As such, it takes shortcuts.
    """

    def __init__(self, paths, config, environ, *args, **kwargs):
        self._search_path = environ.get("PATH", "").split(os.pathsep)

        # Commands (by absolute path) with a fake implementation.
        self._subprocess_paths = {
            mozpath.abspath(k): v for k, v in six.iteritems(paths) if v
        }

        paths = list(paths)

        environ = copy.copy(environ)
        if "CONFIG_SHELL" not in environ:
            # Provide a fake shell when the caller did not supply one.
            environ["CONFIG_SHELL"] = mozpath.abspath("/bin/sh")
            self._subprocess_paths[environ["CONFIG_SHELL"]] = self.shell
            paths.append(environ["CONFIG_SHELL"])
        # vswhere is always faked (reports no Visual Studio installs).
        self._subprocess_paths[
            mozpath.join(topsrcdir, "build/win32/vswhere.exe")
        ] = self.vswhere

        vfs = ConfigureTestVFS(paths)

        # Expose the VFS's public methods as os.path replacements,
        # overlaid with the sandbox's own OS.path wrappers.
        os_path = {k: getattr(vfs, k) for k in dir(vfs) if not k.startswith("_")}

        os_path.update(self.OS.path.__dict__)

        # Build a fake `os` module namespace from the real one, with
        # `path` and `environ` swapped for the test doubles.
        os_contents = {}
        exec("from os import *", {}, os_contents)
        os_contents["path"] = ReadOnlyNamespace(**os_path)
        os_contents["environ"] = dict(environ)
        self.imported_os = ReadOnlyNamespace(**os_contents)

        super(ConfigureTestSandbox, self).__init__(config, environ, *args, **kwargs)

    @memoized_property
    def _wrapped_mozfile(self):
        # Sandboxed `mozfile` exposing only `which`.
        return ReadOnlyNamespace(which=self.which)

    @memoized_property
    def _wrapped_os(self):
        # Sandboxed `os` is the faux module built in __init__.
        return self.imported_os

    @memoized_property
    def _wrapped_subprocess(self):
        # Sandboxed `subprocess` routes process creation through the fake
        # implementations registered in _subprocess_paths.
        return ReadOnlyNamespace(
            CalledProcessError=subprocess.CalledProcessError,
            check_output=self.check_output,
            PIPE=subprocess.PIPE,
            STDOUT=subprocess.STDOUT,
            Popen=self.Popen,
        )

    @memoized_property
    def _wrapped_ctypes(self):
        # Minimal ctypes stand-in: only what GetShortPathNameW needs.
        class CTypesFunc(object):
            def __init__(self, func):
                self._func = func

            def __call__(self, *args, **kwargs):
                return self._func(*args, **kwargs)

        return ReadOnlyNamespace(
            create_unicode_buffer=self.create_unicode_buffer,
            windll=ReadOnlyNamespace(
                kernel32=ReadOnlyNamespace(
                    GetShortPathNameW=CTypesFunc(self.GetShortPathNameW)
                )
            ),
            wintypes=ReadOnlyNamespace(LPCWSTR=0, LPWSTR=1, DWORD=2),
        )

    @memoized_property
    def _wrapped__winreg(self):
        def OpenKey(*args, **kwargs):
            # Pretend no registry key ever exists.
            # NOTE(review): WindowsError is a builtin only on Windows;
            # presumably this path is only reached by Windows-target
            # tests — confirm.
            raise WindowsError()

        return ReadOnlyNamespace(HKEY_LOCAL_MACHINE=0, OpenKey=OpenKey)

    def create_unicode_buffer(self, *args, **kwargs):
        # Fake ctypes.create_unicode_buffer: a bare object with `value`.
        class Buffer(object):
            def __init__(self):
                self.value = ""

        return Buffer()

    def GetShortPathNameW(self, path_in, path_out, length):
        # Fake Windows 8.3 short path conversion.
        path_out.value = fake_short_path(path_in)
        return length

    def which(self, command, mode=None, path=None, exts=None):
        # Minimal `which` resolving against the virtual filesystem and
        # the sandbox's PATH.
        if isinstance(path, string_types):
            path = path.split(os.pathsep)

        for parent in path or self._search_path:
            c = mozpath.abspath(mozpath.join(parent, command))
            for candidate in (c, ensure_exe_extension(c)):
                if self.imported_os.path.exists(candidate):
                    return candidate
        return None

    def Popen(self, args, stdin=None, stdout=None, stderr=None, **kargs):
        # Run the registered fake for args[0] and wrap its result in a
        # minimal Popen-like object.
        program = self.which(args[0])
        if not program:
            raise OSError(errno.ENOENT, "File not found")

        func = self._subprocess_paths.get(program)
        retcode, stdout, stderr = func(stdin, args[1:])

        class Process(object):
            def communicate(self, stdin=None):
                return stdout, stderr

            def wait(self):
                return retcode

        return Process()

    def check_output(self, args, **kwargs):
        # subprocess.check_output semantics on top of the fake Popen.
        proc = self.Popen(args, **kwargs)
        stdout, stderr = proc.communicate()
        retcode = proc.wait()
        if retcode:
            raise subprocess.CalledProcessError(retcode, args, stdout)
        return stdout

    def shell(self, stdin, args):
        # Fake CONFIG_SHELL: dispatch to the registered fake for the
        # script being run, 127 (command not found) otherwise.
        script = mozpath.abspath(args[0])
        if script in self._subprocess_paths:
            return self._subprocess_paths[script](stdin, args[1:])
        return 127, "", "File not found"

    def vswhere(self, stdin, args):
        # No Visual Studio installations.
        return 0, "[]", ""

    def get_config(self, name):
        # Like the loop in ConfigureSandbox.run, but only execute the code
        # associated with the given config item.
        for func, args in self._execution_queue:
            if (
                func == self._resolve_and_set
                and args[0] is self._config
                and args[1] == name
            ):
                func(*args)
                return self._config.get(name)
+
+
class BaseConfigureTest(unittest.TestCase):
    """Base class for tests that run moz.configure in a test sandbox."""

    # Host triplet reported by the fake config.guess.
    HOST = "x86_64-pc-linux-gnu"

    def setUp(self):
        # configure expects to run from the object directory.
        self._cwd = os.getcwd()
        os.chdir(topobjdir)

    def tearDown(self):
        os.chdir(self._cwd)

    def config_guess(self, stdin, args):
        # Fake config.guess: always report self.HOST.
        return 0, self.HOST, ""

    def config_sub(self, stdin, args):
        # Fake config.sub: echo the requested triplet back.
        return 0, args[0], ""

    def get_sandbox(
        self,
        paths,
        config,
        args=None,
        environ=None,
        mozconfig="",
        out=None,
        logger=None,
        cls=ConfigureTestSandbox,
    ):
        """Return a sandbox of class *cls* prepared to run moz.configure.

        Args:
            paths: virtual paths dict (see ConfigureTestSandbox).
            config: dict the sandbox stores resulting configuration into.
            args: extra configure command line arguments.
            environ: extra environment variables.
            mozconfig: contents for a temporary mozconfig; an empty
                mozconfig is used when not provided.
            out: stream capturing stdout/stderr (ignored when *logger*
                is given).
            logger: logger passed through to the sandbox.
            cls: sandbox class to instantiate.
        """
        # None sentinels instead of mutable `[]`/`{}` defaults, which
        # would be shared across calls; copy so caller arguments are
        # never mutated.
        args = list(args) if args else []
        environ = dict(environ) if environ else {}

        kwargs = {}
        if logger:
            kwargs["logger"] = logger
        else:
            if not out:
                out = StringIO()
            kwargs["stdout"] = out
            kwargs["stderr"] = out

        if hasattr(self, "TARGET"):
            target = ["--target=%s" % self.TARGET]
        else:
            target = []

        if mozconfig:
            fh, mozconfig_path = tempfile.mkstemp(text=True)
            os.write(fh, six.ensure_binary(mozconfig))
            os.close(fh)
        else:
            mozconfig_path = os.path.join(
                os.path.dirname(__file__), "data", "empty_mozconfig"
            )

        try:
            environ = dict(
                environ,
                OLD_CONFIGURE=os.path.join(topsrcdir, "old-configure"),
                MOZCONFIG=mozconfig_path,
            )

            # Route config.guess/config.sub to the fake implementations.
            paths = dict(paths)
            autoconf_dir = mozpath.join(topsrcdir, "build", "autoconf")
            paths[mozpath.join(autoconf_dir, "config.guess")] = self.config_guess
            paths[mozpath.join(autoconf_dir, "config.sub")] = self.config_sub

            sandbox = cls(
                paths, config, environ, ["configure"] + target + args, **kwargs
            )
            sandbox.include_file(os.path.join(topsrcdir, "moz.configure"))

            return sandbox
        finally:
            # Only remove the temp file we created ourselves.
            if mozconfig:
                os.remove(mozconfig_path)
diff --git a/python/mozbuild/mozbuild/test/configure/data/decorators.configure b/python/mozbuild/mozbuild/test/configure/data/decorators.configure
new file mode 100644
index 0000000000..b98eb26f3f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/data/decorators.configure
@@ -0,0 +1,53 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+@template
+def simple_decorator(func):
+ return func
+
+
+@template
+def wrapper_decorator(func):
+ def wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+
+ return wrapper
+
+
+@template
+def function_decorator(*args, **kwargs):
+ # We could return wrapper_decorator from above here, but then we wouldn't
+ # know if this works as expected because wrapper_decorator itself was
+ # modified or because the right thing happened here.
+ def wrapper_decorator(func):
+ def wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+
+ return wrapper
+
+ return wrapper_decorator
+
+
+@depends("--help")
+@simple_decorator
+def foo(help):
+ global FOO
+ FOO = 1
+
+
+@depends("--help")
+@wrapper_decorator
+def bar(help):
+ global BAR
+ BAR = 1
+
+
+@depends("--help")
+@function_decorator("a", "b", "c")
+def qux(help):
+ global QUX
+ QUX = 1
diff --git a/python/mozbuild/mozbuild/test/configure/data/empty_mozconfig b/python/mozbuild/mozbuild/test/configure/data/empty_mozconfig
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/data/empty_mozconfig
diff --git a/python/mozbuild/mozbuild/test/configure/data/extra.configure b/python/mozbuild/mozbuild/test/configure/data/extra.configure
new file mode 100644
index 0000000000..e54a93dbc3
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/data/extra.configure
@@ -0,0 +1,15 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+option("--extra", help="Extra")
+
+
+@depends("--extra")
+def extra(extra):
+ return extra
+
+
+set_config("EXTRA", extra)
diff --git a/python/mozbuild/mozbuild/test/configure/data/imply_option/imm.configure b/python/mozbuild/mozbuild/test/configure/data/imply_option/imm.configure
new file mode 100644
index 0000000000..f20a4a7149
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/data/imply_option/imm.configure
@@ -0,0 +1,37 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+imply_option("--enable-foo", True)
+
+option("--enable-foo", help="enable foo")
+
+
+@depends("--enable-foo", "--help")
+def foo(value, help):
+ if value:
+ return True
+
+
+imply_option("--enable-bar", ("foo", "bar"))
+
+option("--enable-bar", nargs="*", help="enable bar")
+
+
+@depends("--enable-bar")
+def bar(value):
+ if value:
+ return value
+
+
+imply_option("--enable-baz", "BAZ")
+
+option("--enable-baz", nargs=1, help="enable baz")
+
+
+@depends("--enable-baz")
+def bar(value):
+ if value:
+ return value
diff --git a/python/mozbuild/mozbuild/test/configure/data/imply_option/infer.configure b/python/mozbuild/mozbuild/test/configure/data/imply_option/infer.configure
new file mode 100644
index 0000000000..b73be9a720
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/data/imply_option/infer.configure
@@ -0,0 +1,28 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+option("--enable-foo", help="enable foo")
+
+
+@depends("--enable-foo", "--help")
+def foo(value, help):
+ if value:
+ return True
+
+
+imply_option("--enable-bar", foo)
+
+
+option("--enable-bar", help="enable bar")
+
+
+@depends("--enable-bar")
+def bar(value):
+ if value:
+ return value
+
+
+set_config("BAR", bar)
diff --git a/python/mozbuild/mozbuild/test/configure/data/imply_option/infer_ko.configure b/python/mozbuild/mozbuild/test/configure/data/imply_option/infer_ko.configure
new file mode 100644
index 0000000000..9b3761c3c3
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/data/imply_option/infer_ko.configure
@@ -0,0 +1,36 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+option("--enable-hoge", help="enable hoge")
+
+
+@depends("--enable-hoge")
+def hoge(value):
+ return value
+
+
+option("--enable-foo", help="enable foo")
+
+
+@depends("--enable-foo", hoge)
+def foo(value, hoge):
+ if value:
+ return True
+
+
+imply_option("--enable-bar", foo)
+
+
+option("--enable-bar", help="enable bar")
+
+
+@depends("--enable-bar")
+def bar(value):
+ if value:
+ return value
+
+
+set_config("BAR", bar)
diff --git a/python/mozbuild/mozbuild/test/configure/data/imply_option/negative.configure b/python/mozbuild/mozbuild/test/configure/data/imply_option/negative.configure
new file mode 100644
index 0000000000..e953231f5e
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/data/imply_option/negative.configure
@@ -0,0 +1,40 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+option("--enable-foo", help="enable foo")
+
+
+@depends("--enable-foo")
+def foo(value):
+ if value:
+ return False
+
+
+imply_option("--enable-bar", foo)
+
+
+option("--disable-hoge", help="enable hoge")
+
+
+@depends("--disable-hoge")
+def hoge(value):
+ if not value:
+ return False
+
+
+imply_option("--enable-bar", hoge)
+
+
+option("--enable-bar", default=True, help="enable bar")
+
+
+@depends("--enable-bar")
+def bar(value):
+ if not value:
+ return value
+
+
+set_config("BAR", bar)
diff --git a/python/mozbuild/mozbuild/test/configure/data/imply_option/simple.configure b/python/mozbuild/mozbuild/test/configure/data/imply_option/simple.configure
new file mode 100644
index 0000000000..6aa225cc45
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/data/imply_option/simple.configure
@@ -0,0 +1,28 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+option("--enable-foo", help="enable foo")
+
+
+@depends("--enable-foo")
+def foo(value):
+ if value:
+ return True
+
+
+imply_option("--enable-bar", foo)
+
+
+option("--enable-bar", help="enable bar")
+
+
+@depends("--enable-bar")
+def bar(value):
+ if value:
+ return value
+
+
+set_config("BAR", bar)
diff --git a/python/mozbuild/mozbuild/test/configure/data/imply_option/values.configure b/python/mozbuild/mozbuild/test/configure/data/imply_option/values.configure
new file mode 100644
index 0000000000..93198a8295
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/data/imply_option/values.configure
@@ -0,0 +1,28 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+option("--enable-foo", nargs="*", help="enable foo")
+
+
+@depends("--enable-foo")
+def foo(value):
+ if value:
+ return value
+
+
+imply_option("--enable-bar", foo)
+
+
+option("--enable-bar", nargs="*", help="enable bar")
+
+
+@depends("--enable-bar")
+def bar(value):
+ if value:
+ return value
+
+
+set_config("BAR", bar)
diff --git a/python/mozbuild/mozbuild/test/configure/data/included.configure b/python/mozbuild/mozbuild/test/configure/data/included.configure
new file mode 100644
index 0000000000..97166618ec
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/data/included.configure
@@ -0,0 +1,68 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# For more complex and repetitive things, we can create templates
+@template
+def check_compiler_flag(flag):
+ @depends(is_gcc)
+ def check(value):
+ if value:
+ return [flag]
+
+ set_config("CFLAGS", check)
+ return check
+
+
+check_compiler_flag("-Werror=foobar")
+
+# Normal functions can be used in @depends functions.
+def fortytwo():
+ return 42
+
+
+def twentyone():
+ yield 21
+
+
+@depends(is_gcc)
+def check(value):
+ if value:
+ return fortytwo()
+
+
+set_config("TEMPLATE_VALUE", check)
+
+
+@depends(is_gcc)
+def check(value):
+ if value:
+ for val in twentyone():
+ return val
+
+
+set_config("TEMPLATE_VALUE_2", check)
+
+# Normal functions can use @imports too to import modules.
+@imports("sys")
+def platform():
+ return sys.platform
+
+
+option("--enable-imports-in-template", help="Imports in template")
+
+
+@depends("--enable-imports-in-template")
+def check(value):
+ if value:
+ return platform()
+
+
+set_config("PLATFORM", check)
+
+
+@template
+def indirectly_define_option(*args, **kwargs):
+ option(*args, **kwargs)
diff --git a/python/mozbuild/mozbuild/test/configure/data/moz.configure b/python/mozbuild/mozbuild/test/configure/data/moz.configure
new file mode 100644
index 0000000000..4d57eabbb9
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/data/moz.configure
@@ -0,0 +1,205 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+option("--enable-simple", help="Enable simple")
+
+# Setting MOZ_WITH_ENV in the environment has the same effect as passing
+# --enable-with-env.
+option("--enable-with-env", env="MOZ_WITH_ENV", help="Enable with env")
+
+# Optional values
+option("--enable-values", nargs="*", help="Enable values")
+
+# Everything supported in the Option class is supported in option(). Assume
+# the tests of the Option class are extensive about this.
+
+# Alternatively to --enable/--disable, there also is --with/--without. The
+# difference is semantic only. Behavior is the same as --enable/--disable.
+
+# When the option name starts with --disable/--without, the default is for
+# the option to be enabled.
+option("--without-thing", help="Build without thing")
+
+# A --enable/--with option with a default of False is equivalent to a
+# --disable/--without option. This can be used to change the defaults
+# depending on e.g. the target or the built application.
+option("--with-stuff", default=False, help="Build with stuff")
+
+# Other kinds of arbitrary options are also allowed. This is effectively
+# equivalent to --enable/--with, with no possibility of --disable/--without.
+option("--option", env="MOZ_OPTION", help="Option")
+
+# It is also possible to pass options through the environment only.
+option(env="CC", nargs=1, help="C Compiler")
+
+# Call the function when the --enable-simple option is processed, with its
+# OptionValue as argument.
+@depends("--enable-simple")
+def simple(simple):
+ if simple:
+ return simple
+
+
+set_config("ENABLED_SIMPLE", simple)
+
+# There can be multiple functions depending on the same option.
+@depends("--enable-simple")
+def simple(simple):
+ return simple
+
+
+set_config("SIMPLE", simple)
+
+
+@depends("--enable-with-env")
+def with_env(with_env):
+ return with_env
+
+
+set_config("WITH_ENV", with_env)
+
+# It doesn't matter if the dependency is on --enable or --disable
+@depends("--disable-values")
+def with_env2(values):
+ return values
+
+
+set_config("VALUES", with_env2)
+
+# It is possible to @depends on environment-only options.
+@depends("CC")
+def is_gcc(cc):
+ return cc and "gcc" in cc[0]
+
+
+set_config("IS_GCC", is_gcc)
+
+# It is possible to depend on the result from another function.
+@depends(with_env2)
+def with_env3(values):
+ return values
+
+
+set_config("VALUES2", with_env3)
+
+# @depends functions can also return results for use as input to another
+# @depends.
+@depends(with_env3)
+def with_env4(values):
+ return values
+
+
+@depends(with_env4)
+def with_env5(values):
+ return values
+
+
+set_config("VALUES3", with_env5)
+
+# The result from @depends functions can also be used as input to options.
+# The result must be returned, not implied.
+@depends("--enable-simple")
+def simple(simple):
+ return "simple" if simple else "not-simple"
+
+
+option("--with-returned-default", default=simple, help="Returned default")
+
+
+@depends("--with-returned-default")
+def default(value):
+ return value
+
+
+set_config("DEFAULTED", default)
+
+
+@depends("--enable-values")
+def choices(values):
+ if len(values):
+ return {
+ "alpha": ("a", "b", "c"),
+ "numeric": ("0", "1", "2"),
+ }.get(values[0])
+
+
+option("--returned-choices", choices=choices, help="Choices")
+
+
+@depends("--returned-choices")
+def returned_choices(values):
+ return values
+
+
+set_config("CHOICES", returned_choices)
+
+# All options must be referenced by some @depends function.
+# It is possible to depend on multiple options/functions
+@depends("--without-thing", "--with-stuff", with_env4, "--option")
+def remainder(*args):
+ return args
+
+
+set_config("REMAINDER", remainder)
+
+# It is possible to include other files to extend the configuration script.
+include("included.configure")
+
+# It is also possible for the include file path to come from the result of a
+# @depends function.
+option("--enable-include", nargs=1, help="Include")
+
+
+@depends("--enable-include")
+def include_path(path):
+ return path[0] if path else None
+
+
+include(include_path)
+
+# Sandboxed functions can import from modules through the use of the @imports
+# decorator.
+# The order of the decorators matter: @imports needs to appear after other
+# decorators.
+option("--with-imports", nargs="?", help="Imports")
+
+# A limited set of functions from os.path are exposed by default.
+@depends("--with-imports")
+def with_imports(value):
+ if len(value):
+ return hasattr(os.path, "abspath")
+
+
+set_config("HAS_ABSPATH", with_imports)
+
+# It is still possible to import the full set from os.path.
+# It is also possible to cherry-pick builtins.
+@depends("--with-imports")
+@imports("os.path")
+def with_imports(value):
+ if len(value):
+ return hasattr(os.path, "getatime")
+
+
+set_config("HAS_GETATIME", with_imports)
+
+
+@depends("--with-imports")
+def with_imports(value):
+ if len(value):
+ return hasattr(os.path, "getatime")
+
+
+set_config("HAS_GETATIME2", with_imports)
+
+# This option should be attributed to this file in the --help output even though
+# included.configure is the actual file that defines the option.
+indirectly_define_option("--indirect-option", help="Indirectly defined option")
+
+
+@depends("--indirect-option")
+def indirect_option(option):
+ return option
diff --git a/python/mozbuild/mozbuild/test/configure/data/set_config.configure b/python/mozbuild/mozbuild/test/configure/data/set_config.configure
new file mode 100644
index 0000000000..0ae5fef6d6
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/data/set_config.configure
@@ -0,0 +1,51 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+option("--set-foo", help="set foo")
+
+
+@depends("--set-foo")
+def foo(value):
+ if value:
+ return True
+
+
+set_config("FOO", foo)
+
+
+option("--set-bar", help="set bar")
+
+
+@depends("--set-bar")
+def bar(value):
+ return bool(value)
+
+
+set_config("BAR", bar)
+
+
+option("--set-value", nargs=1, help="set value")
+
+
+@depends("--set-value")
+def set_value(value):
+ if value:
+ return value[0]
+
+
+set_config("VALUE", set_value)
+
+
+option("--set-name", nargs=1, help="set name")
+
+
+@depends("--set-name")
+def set_name(value):
+ if value:
+ return value[0]
+
+
+set_config(set_name, True)
diff --git a/python/mozbuild/mozbuild/test/configure/data/set_define.configure b/python/mozbuild/mozbuild/test/configure/data/set_define.configure
new file mode 100644
index 0000000000..ce9a60d7f1
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/data/set_define.configure
@@ -0,0 +1,51 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+option("--set-foo", help="set foo")
+
+
+@depends("--set-foo")
+def foo(value):
+ if value:
+ return True
+
+
+set_define("FOO", foo)
+
+
+option("--set-bar", help="set bar")
+
+
+@depends("--set-bar")
+def bar(value):
+ return bool(value)
+
+
+set_define("BAR", bar)
+
+
+option("--set-value", nargs=1, help="set value")
+
+
+@depends("--set-value")
+def set_value(value):
+ if value:
+ return value[0]
+
+
+set_define("VALUE", set_value)
+
+
+option("--set-name", nargs=1, help="set name")
+
+
+@depends("--set-name")
+def set_name(value):
+ if value:
+ return value[0]
+
+
+set_define(set_name, True)
diff --git a/python/mozbuild/mozbuild/test/configure/data/subprocess.configure b/python/mozbuild/mozbuild/test/configure/data/subprocess.configure
new file mode 100644
index 0000000000..3316fee087
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/data/subprocess.configure
@@ -0,0 +1,24 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+@depends("--help")
+@imports("codecs")
+@imports(_from="mozbuild.configure.util", _import="getpreferredencoding")
+@imports("os")
+@imports(_from="__builtin__", _import="open")
+def dies_when_logging(_):
+ test_file = "test.txt"
+ quote_char = "'"
+ if getpreferredencoding().lower() == "utf-8":
+ quote_char = "\u00B4"
+ try:
+ with open(test_file, "w+") as fh:
+ fh.write(quote_char)
+ out = check_cmd_output("cat", "test.txt")
+ log.info(out)
+ finally:
+ os.remove(test_file)
diff --git a/python/mozbuild/mozbuild/test/configure/lint.py b/python/mozbuild/mozbuild/test/configure/lint.py
new file mode 100644
index 0000000000..59d41da264
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/lint.py
@@ -0,0 +1,62 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import unittest
+
+import six
+from buildconfig import topobjdir, topsrcdir
+from mozunit import main
+
+from mozbuild.configure.lint import LintSandbox
+
+test_path = os.path.abspath(__file__)
+
+
+class LintMeta(type):
+ def __new__(mcs, name, bases, attrs):
+ def create_test(project, func):
+ def test(self):
+ return func(self, project)
+
+ return test
+
+ for project in (
+ "browser",
+ "js",
+ "memory",
+ "mobile/android",
+ ):
+ attrs["test_%s" % project.replace("/", "_")] = create_test(
+ project, attrs["lint"]
+ )
+
+ return type.__new__(mcs, name, bases, attrs)
+
+
+# We don't actually need python2 compat, but this makes flake8 happy.
+@six.add_metaclass(LintMeta)
+class Lint(unittest.TestCase):
+ def setUp(self):
+ self._curdir = os.getcwd()
+ os.chdir(topobjdir)
+
+ def tearDown(self):
+ os.chdir(self._curdir)
+
+ def lint(self, project):
+ sandbox = LintSandbox(
+ {
+ "OLD_CONFIGURE": os.path.join(topsrcdir, "old-configure"),
+ "MOZCONFIG": os.path.join(
+ os.path.dirname(test_path), "data", "empty_mozconfig"
+ ),
+ },
+ ["configure", "--enable-project=%s" % project, "--help"],
+ )
+ sandbox.run(os.path.join(topsrcdir, "moz.configure"))
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/configure/macos_fake_sdk/SDKSettings.plist b/python/mozbuild/mozbuild/test/configure/macos_fake_sdk/SDKSettings.plist
new file mode 100644
index 0000000000..f0d6e1949f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/macos_fake_sdk/SDKSettings.plist
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>Version</key>
+ <string>13.3</string>
+</dict>
+</plist>
diff --git a/python/mozbuild/mozbuild/test/configure/test_bootstrap.py b/python/mozbuild/mozbuild/test/configure/test_bootstrap.py
new file mode 100644
index 0000000000..eaa417d566
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/test_bootstrap.py
@@ -0,0 +1,43 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozunit import main
+
+from common import BaseConfigureTest
+
+
+class TestBootstrap(BaseConfigureTest):
+ def test_bootstrap(self):
+ def get_value_for(arg):
+ sandbox = self.get_sandbox({}, {}, [arg], {})
+ return sandbox._value_for(sandbox["enable_bootstrap"])
+
+ self.assertEqual(None, get_value_for("--disable-bootstrap"))
+
+ # With `--enable-bootstrap`, anything is bootstrappable
+ bootstrap = get_value_for("--enable-bootstrap")
+ self.assertTrue(bootstrap("foo"))
+ self.assertTrue(bootstrap("bar"))
+
+ # With `--enable-bootstrap=foo,bar`, only foo and bar are bootstrappable
+ bootstrap = get_value_for("--enable-bootstrap=foo,bar")
+ self.assertTrue(bootstrap("foo"))
+ self.assertTrue(bootstrap("bar"))
+ self.assertFalse(bootstrap("qux"))
+
+ # With `--enable-bootstrap=-foo`, anything is bootstrappable, except foo
+ bootstrap = get_value_for("--enable-bootstrap=-foo")
+ self.assertFalse(bootstrap("foo"))
+ self.assertTrue(bootstrap("bar"))
+ self.assertTrue(bootstrap("qux"))
+
+ # Corner case.
+ bootstrap = get_value_for("--enable-bootstrap=-foo,foo,bar")
+ self.assertFalse(bootstrap("foo"))
+ self.assertTrue(bootstrap("bar"))
+ self.assertFalse(bootstrap("qux"))
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/configure/test_checks_configure.py b/python/mozbuild/mozbuild/test/configure/test_checks_configure.py
new file mode 100644
index 0000000000..53361ff199
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/test_checks_configure.py
@@ -0,0 +1,1169 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+import textwrap
+import unittest
+
+from buildconfig import topsrcdir
+from mozpack import path as mozpath
+from mozunit import MockedOpen, main
+from six import StringIO
+
+from common import ConfigureTestSandbox, ensure_exe_extension, fake_short_path
+from mozbuild.configure import ConfigureError, ConfigureSandbox
+from mozbuild.shellutil import quote as shell_quote
+from mozbuild.util import exec_
+
+
+class TestChecksConfigure(unittest.TestCase):
+ def test_checking(self):
+ def make_test(to_exec):
+ def test(val, msg):
+ out = StringIO()
+ sandbox = ConfigureSandbox({}, stdout=out, stderr=out)
+ base_dir = os.path.join(topsrcdir, "build", "moz.configure")
+ sandbox.include_file(os.path.join(base_dir, "checks.configure"))
+ exec_(to_exec, sandbox)
+ sandbox["foo"](val)
+ self.assertEqual(out.getvalue(), msg)
+
+ return test
+
+ test = make_test(
+ textwrap.dedent(
+ """
+ @checking('for a thing')
+ def foo(value):
+ return value
+ """
+ )
+ )
+ test(True, "checking for a thing... yes\n")
+ test(False, "checking for a thing... no\n")
+ test(42, "checking for a thing... 42\n")
+ test("foo", "checking for a thing... foo\n")
+ data = ["foo", "bar"]
+ test(data, "checking for a thing... %r\n" % data)
+
+ # When the function given to checking does nothing interesting, the
+ # behavior is not altered
+ test = make_test(
+ textwrap.dedent(
+ """
+ @checking('for a thing', lambda x: x)
+ def foo(value):
+ return value
+ """
+ )
+ )
+ test(True, "checking for a thing... yes\n")
+ test(False, "checking for a thing... no\n")
+ test(42, "checking for a thing... 42\n")
+ test("foo", "checking for a thing... foo\n")
+ data = ["foo", "bar"]
+ test(data, "checking for a thing... %r\n" % data)
+
+ test = make_test(
+ textwrap.dedent(
+ """
+ def munge(x):
+ if not x:
+ return 'not found'
+ if isinstance(x, (str, bool, int)):
+ return x
+ return ' '.join(x)
+
+ @checking('for a thing', munge)
+ def foo(value):
+ return value
+ """
+ )
+ )
+ test(True, "checking for a thing... yes\n")
+ test(False, "checking for a thing... not found\n")
+ test(42, "checking for a thing... 42\n")
+ test("foo", "checking for a thing... foo\n")
+ data = ["foo", "bar"]
+ test(data, "checking for a thing... foo bar\n")
+
+ KNOWN_A = ensure_exe_extension(mozpath.abspath("/usr/bin/known-a"))
+ KNOWN_B = ensure_exe_extension(mozpath.abspath("/usr/local/bin/known-b"))
+ KNOWN_C = ensure_exe_extension(mozpath.abspath("/home/user/bin/known c"))
+ OTHER_A = ensure_exe_extension(mozpath.abspath("/lib/other/known-a"))
+
+ def get_result(
+ self,
+ command="",
+ args=[],
+ environ={},
+ prog="/bin/configure",
+ extra_paths=None,
+ includes=("util.configure", "checks.configure"),
+ ):
+ config = {}
+ out = StringIO()
+ paths = {self.KNOWN_A: None, self.KNOWN_B: None, self.KNOWN_C: None}
+ if extra_paths:
+ paths.update(extra_paths)
+ environ = dict(environ)
+ if "PATH" not in environ:
+ environ["PATH"] = os.pathsep.join(os.path.dirname(p) for p in paths)
+ paths[self.OTHER_A] = None
+ sandbox = ConfigureTestSandbox(paths, config, environ, [prog] + args, out, out)
+ base_dir = os.path.join(topsrcdir, "build", "moz.configure")
+ for f in includes:
+ sandbox.include_file(os.path.join(base_dir, f))
+
+ status = 0
+ try:
+ exec_(command, sandbox)
+ sandbox.run()
+ except SystemExit as e:
+ status = e.code
+
+ return config, out.getvalue(), status
+
+ def test_check_prog(self):
+ config, out, status = self.get_result('check_prog("FOO", ("known-a",))')
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {"FOO": self.KNOWN_A})
+ self.assertEqual(out, "checking for foo... %s\n" % self.KNOWN_A)
+
+ config, out, status = self.get_result(
+ 'check_prog("FOO", ("unknown", "known-b", "known c"))'
+ )
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {"FOO": self.KNOWN_B})
+ self.assertEqual(out, "checking for foo... %s\n" % self.KNOWN_B)
+
+ config, out, status = self.get_result(
+ 'check_prog("FOO", ("unknown", "unknown-2", "known c"))'
+ )
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {"FOO": fake_short_path(self.KNOWN_C)})
+ self.assertEqual(
+ out, "checking for foo... %s\n" % shell_quote(fake_short_path(self.KNOWN_C))
+ )
+
+ config, out, status = self.get_result('check_prog("FOO", ("unknown",))')
+ self.assertEqual(status, 1)
+ self.assertEqual(config, {})
+ self.assertEqual(
+ out,
+ textwrap.dedent(
+ """\
+ checking for foo... not found
+ DEBUG: foo: Looking for unknown
+ ERROR: Cannot find foo
+ """
+ ),
+ )
+
+ config, out, status = self.get_result(
+ 'check_prog("FOO", ("unknown", "unknown-2", "unknown 3"))'
+ )
+ self.assertEqual(status, 1)
+ self.assertEqual(config, {})
+ self.assertEqual(
+ out,
+ textwrap.dedent(
+ """\
+ checking for foo... not found
+ DEBUG: foo: Looking for unknown
+ DEBUG: foo: Looking for unknown-2
+ DEBUG: foo: Looking for 'unknown 3'
+ ERROR: Cannot find foo
+ """
+ ),
+ )
+
+ config, out, status = self.get_result(
+ 'check_prog("FOO", ("unknown", "unknown-2", "unknown 3"), '
+ "allow_missing=True)"
+ )
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {})
+ self.assertEqual(out, "checking for foo... not found\n")
+
+ @unittest.skipIf(not sys.platform.startswith("win"), "Windows-only test")
+ def test_check_prog_exe(self):
+ config, out, status = self.get_result(
+ 'check_prog("FOO", ("unknown", "known-b", "known c"))', ["FOO=known-a.exe"]
+ )
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {"FOO": self.KNOWN_A})
+ self.assertEqual(out, "checking for foo... %s\n" % self.KNOWN_A)
+
+ config, out, status = self.get_result(
+ 'check_prog("FOO", ("unknown", "known-b", "known c"))',
+ ["FOO=%s" % os.path.splitext(self.KNOWN_A)[0]],
+ )
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {"FOO": self.KNOWN_A})
+ self.assertEqual(out, "checking for foo... %s\n" % self.KNOWN_A)
+
+ def test_check_prog_with_args(self):
+ config, out, status = self.get_result(
+ 'check_prog("FOO", ("unknown", "known-b", "known c"))', ["FOO=known-a"]
+ )
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {"FOO": self.KNOWN_A})
+ self.assertEqual(out, "checking for foo... %s\n" % self.KNOWN_A)
+
+ config, out, status = self.get_result(
+ 'check_prog("FOO", ("unknown", "known-b", "known c"))',
+ ["FOO=%s" % self.KNOWN_A],
+ )
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {"FOO": self.KNOWN_A})
+ self.assertEqual(out, "checking for foo... %s\n" % self.KNOWN_A)
+
+ path = self.KNOWN_B.replace("known-b", "known-a")
+ config, out, status = self.get_result(
+ 'check_prog("FOO", ("unknown", "known-b", "known c"))', ["FOO=%s" % path]
+ )
+ self.assertEqual(status, 1)
+ self.assertEqual(config, {})
+ self.assertEqual(
+ out,
+ textwrap.dedent(
+ """\
+ checking for foo... not found
+ DEBUG: foo: Looking for %s
+ ERROR: Cannot find foo
+ """
+ )
+ % path,
+ )
+
+ config, out, status = self.get_result(
+ 'check_prog("FOO", ("unknown",))', ["FOO=known c"]
+ )
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {"FOO": fake_short_path(self.KNOWN_C)})
+ self.assertEqual(
+ out, "checking for foo... %s\n" % shell_quote(fake_short_path(self.KNOWN_C))
+ )
+
+ config, out, status = self.get_result(
+ 'check_prog("FOO", ("unknown", "unknown-2", "unknown 3"), '
+ "allow_missing=True)",
+ ["FOO=unknown"],
+ )
+ self.assertEqual(status, 1)
+ self.assertEqual(config, {})
+ self.assertEqual(
+ out,
+ textwrap.dedent(
+ """\
+ checking for foo... not found
+ DEBUG: foo: Looking for unknown
+ ERROR: Cannot find foo
+ """
+ ),
+ )
+
+ def test_check_prog_what(self):
+ config, out, status = self.get_result(
+ 'check_prog("CC", ("known-a",), what="the target C compiler")'
+ )
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {"CC": self.KNOWN_A})
+ self.assertEqual(
+ out, "checking for the target C compiler... %s\n" % self.KNOWN_A
+ )
+
+ config, out, status = self.get_result(
+ 'check_prog("CC", ("unknown", "unknown-2", "unknown 3"),'
+ ' what="the target C compiler")'
+ )
+ self.assertEqual(status, 1)
+ self.assertEqual(config, {})
+ self.assertEqual(
+ out,
+ textwrap.dedent(
+ """\
+ checking for the target C compiler... not found
+ DEBUG: cc: Looking for unknown
+ DEBUG: cc: Looking for unknown-2
+ DEBUG: cc: Looking for 'unknown 3'
+ ERROR: Cannot find the target C compiler
+ """
+ ),
+ )
+
+ def test_check_prog_input(self):
+ config, out, status = self.get_result(
+ textwrap.dedent(
+ """
+ option("--with-ccache", nargs=1, help="ccache")
+ check_prog("CCACHE", ("known-a",), input="--with-ccache")
+ """
+ ),
+ ["--with-ccache=known-b"],
+ )
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {"CCACHE": self.KNOWN_B})
+ self.assertEqual(out, "checking for ccache... %s\n" % self.KNOWN_B)
+
+ script = textwrap.dedent(
+ """
+ option(env="CC", nargs=1, help="compiler")
+ @depends("CC")
+ def compiler(value):
+ return value[0].split()[0] if value else None
+ check_prog("CC", ("known-a",), input=compiler)
+ """
+ )
+ config, out, status = self.get_result(script)
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {"CC": self.KNOWN_A})
+ self.assertEqual(out, "checking for cc... %s\n" % self.KNOWN_A)
+
+ config, out, status = self.get_result(script, ["CC=known-b"])
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {"CC": self.KNOWN_B})
+ self.assertEqual(out, "checking for cc... %s\n" % self.KNOWN_B)
+
+ config, out, status = self.get_result(script, ["CC=known-b -m32"])
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {"CC": self.KNOWN_B})
+ self.assertEqual(out, "checking for cc... %s\n" % self.KNOWN_B)
+
+ def test_check_prog_progs(self):
+ config, out, status = self.get_result('check_prog("FOO", ())')
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {})
+ self.assertEqual(out, "")
+
+ config, out, status = self.get_result('check_prog("FOO", ())', ["FOO=known-a"])
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {"FOO": self.KNOWN_A})
+ self.assertEqual(out, "checking for foo... %s\n" % self.KNOWN_A)
+
+ script = textwrap.dedent(
+ """
+ option(env="TARGET", nargs=1, default="linux", help="target")
+ @depends("TARGET")
+ def compiler(value):
+ if value:
+ if value[0] == "linux":
+ return ("gcc", "clang")
+ if value[0] == "winnt":
+ return ("cl", "clang-cl")
+ check_prog("CC", compiler)
+ """
+ )
+ config, out, status = self.get_result(script)
+ self.assertEqual(status, 1)
+ self.assertEqual(config, {})
+ self.assertEqual(
+ out,
+ textwrap.dedent(
+ """\
+ checking for cc... not found
+ DEBUG: cc: Looking for gcc
+ DEBUG: cc: Looking for clang
+ ERROR: Cannot find cc
+ """
+ ),
+ )
+
+ config, out, status = self.get_result(script, ["TARGET=linux"])
+ self.assertEqual(status, 1)
+ self.assertEqual(config, {})
+ self.assertEqual(
+ out,
+ textwrap.dedent(
+ """\
+ checking for cc... not found
+ DEBUG: cc: Looking for gcc
+ DEBUG: cc: Looking for clang
+ ERROR: Cannot find cc
+ """
+ ),
+ )
+
+ config, out, status = self.get_result(script, ["TARGET=winnt"])
+ self.assertEqual(status, 1)
+ self.assertEqual(config, {})
+ self.assertEqual(
+ out,
+ textwrap.dedent(
+ """\
+ checking for cc... not found
+ DEBUG: cc: Looking for cl
+ DEBUG: cc: Looking for clang-cl
+ ERROR: Cannot find cc
+ """
+ ),
+ )
+
+ config, out, status = self.get_result(script, ["TARGET=none"])
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {})
+ self.assertEqual(out, "")
+
+ config, out, status = self.get_result(script, ["TARGET=winnt", "CC=known-a"])
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {"CC": self.KNOWN_A})
+ self.assertEqual(out, "checking for cc... %s\n" % self.KNOWN_A)
+
+ config, out, status = self.get_result(script, ["TARGET=none", "CC=known-a"])
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {"CC": self.KNOWN_A})
+ self.assertEqual(out, "checking for cc... %s\n" % self.KNOWN_A)
+
+ def test_check_prog_configure_error(self):
+ with self.assertRaises(ConfigureError) as e:
+ self.get_result('check_prog("FOO", "foo")')
+
+ self.assertEqual(str(e.exception), "progs must resolve to a list or tuple!")
+
+ with self.assertRaises(ConfigureError) as e:
+ self.get_result(
+ 'foo = depends(when=True)(lambda: ("a", "b"))\n'
+ 'check_prog("FOO", ("known-a",), input=foo)'
+ )
+
+ self.assertEqual(
+ str(e.exception),
+ "input must resolve to a tuple or a list with a "
+ "single element, or a string",
+ )
+
+ with self.assertRaises(ConfigureError) as e:
+ self.get_result(
+ 'foo = depends(when=True)(lambda: {"a": "b"})\n'
+ 'check_prog("FOO", ("known-a",), input=foo)'
+ )
+
+ self.assertEqual(
+ str(e.exception),
+ "input must resolve to a tuple or a list with a "
+ "single element, or a string",
+ )
+
+ def test_check_prog_with_path(self):
+ config, out, status = self.get_result(
+ 'check_prog("A", ("known-a",), paths=["/some/path"])'
+ )
+ self.assertEqual(status, 1)
+ self.assertEqual(config, {})
+ self.assertEqual(
+ out,
+ textwrap.dedent(
+ """\
+ checking for a... not found
+ DEBUG: a: Looking for known-a
+ ERROR: Cannot find a
+ """
+ ),
+ )
+
+ config, out, status = self.get_result(
+ 'check_prog("A", ("known-a",), paths=["%s"])'
+ % os.path.dirname(self.OTHER_A)
+ )
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {"A": self.OTHER_A})
+ self.assertEqual(
+ out,
+ textwrap.dedent(
+ """\
+ checking for a... %s
+ """
+ % self.OTHER_A
+ ),
+ )
+
+ dirs = map(mozpath.dirname, (self.OTHER_A, self.KNOWN_A))
+ config, out, status = self.get_result(
+ textwrap.dedent(
+ """\
+ check_prog("A", ("known-a",), paths=["%s"])
+ """
+ % os.pathsep.join(dirs)
+ )
+ )
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {"A": self.OTHER_A})
+ self.assertEqual(
+ out,
+ textwrap.dedent(
+ """\
+ checking for a... %s
+ """
+ % self.OTHER_A
+ ),
+ )
+
+ dirs = map(mozpath.dirname, (self.KNOWN_A, self.KNOWN_B))
+ config, out, status = self.get_result(
+ textwrap.dedent(
+ """\
+ check_prog("A", ("known-a",), paths=["%s", "%s"])
+ """
+ % (os.pathsep.join(dirs), self.OTHER_A)
+ )
+ )
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {"A": self.KNOWN_A})
+ self.assertEqual(
+ out,
+ textwrap.dedent(
+ """\
+ checking for a... %s
+ """
+ % self.KNOWN_A
+ ),
+ )
+
+ config, out, status = self.get_result(
+ 'check_prog("A", ("known-a",), paths="%s")' % os.path.dirname(self.OTHER_A)
+ )
+
+ self.assertEqual(status, 1)
+ self.assertEqual(config, {})
+ self.assertEqual(
+ out,
+ textwrap.dedent(
+ """\
+ checking for a... """ # noqa # trailing whitespace...
+ """
+ DEBUG: a: Looking for known-a
+ ERROR: Paths provided to find_program must be a list of strings, not %r
+ """
+ % mozpath.dirname(self.OTHER_A)
+ ),
+ )
+
+ @unittest.skipIf(
+ not sys.platform.startswith("linux"),
+ "Linux-only test, assumes Java is located from a $PATH",
+ )
+ def test_java_tool_checks_linux(self):
+ def run_configure_java(
+ mock_fs_paths, mock_java_home=None, mock_path=None, args=[]
+ ):
+ script = textwrap.dedent(
+ """\
+ @depends('--help')
+ def host(_):
+ return namespace(os='unknown', kernel='unknown')
+ toolchains_base_dir = depends(when=True)(lambda: '/mozbuild')
+ include('%(topsrcdir)s/build/moz.configure/java.configure')
+ """
+ % {"topsrcdir": topsrcdir}
+ )
+
+ # Don't let system JAVA_HOME influence the test
+ original_java_home = os.environ.pop("JAVA_HOME", None)
+ configure_environ = {}
+
+ if mock_java_home:
+ os.environ["JAVA_HOME"] = mock_java_home
+ configure_environ["JAVA_HOME"] = mock_java_home
+
+ if mock_path:
+ configure_environ["PATH"] = mock_path
+
+            # * Even if the real file system has a symlink at the mocked path, don't let
+ # realpath follow it, as it may influence the test.
+ # * When finding a binary, check the mock paths rather than the real filesystem.
+ # Note: Python doesn't allow the different "with" bits to be put in parenthesis,
+ # because then it thinks it's an un-with-able tuple. Additionally, if this is cleanly
+ # lined up with "\", black removes them and autoformats them to the block that is
+ # below.
+ result = self.get_result(
+ args=args,
+ command=script,
+ extra_paths=paths,
+ environ=configure_environ,
+ )
+
+ if original_java_home:
+ os.environ["JAVA_HOME"] = original_java_home
+ return result
+
+ java = mozpath.abspath("/usr/bin/java")
+ javac = mozpath.abspath("/usr/bin/javac")
+ paths = {java: None, javac: None}
+ expected_error_message = (
+ "ERROR: Could not locate Java at /mozbuild/jdk/jdk-17.0.7+7/bin, "
+ "please run ./mach bootstrap --no-system-changes\n"
+ )
+
+ config, out, status = run_configure_java(paths)
+ self.assertEqual(status, 1)
+ self.assertEqual(config, {})
+ self.assertEqual(out, expected_error_message)
+
+ # An alternative valid set of tools referred to by JAVA_HOME.
+ alt_java = mozpath.abspath("/usr/local/bin/java")
+ alt_javac = mozpath.abspath("/usr/local/bin/javac")
+ alt_java_home = mozpath.dirname(mozpath.dirname(alt_java))
+ paths = {alt_java: None, alt_javac: None, java: None, javac: None}
+
+ alt_path = mozpath.dirname(java)
+ config, out, status = run_configure_java(paths, alt_java_home, alt_path)
+ self.assertEqual(status, 1)
+ self.assertEqual(config, {})
+ self.assertEqual(out, expected_error_message)
+
+ # We can use --with-java-bin-path instead of JAVA_HOME to similar
+ # effect.
+ config, out, status = run_configure_java(
+ paths,
+ mock_path=mozpath.dirname(java),
+ args=["--with-java-bin-path=%s" % mozpath.dirname(alt_java)],
+ )
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {"JAVA": alt_java, "MOZ_JAVA_CODE_COVERAGE": False})
+ self.assertEqual(
+ out,
+ textwrap.dedent(
+ """\
+ checking for java... %s
+ """
+ % alt_java
+ ),
+ )
+
+ # If --with-java-bin-path and JAVA_HOME are both set,
+ # --with-java-bin-path takes precedence.
+ config, out, status = run_configure_java(
+ paths,
+ mock_java_home=mozpath.dirname(mozpath.dirname(java)),
+ mock_path=mozpath.dirname(java),
+ args=["--with-java-bin-path=%s" % mozpath.dirname(alt_java)],
+ )
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {"JAVA": alt_java, "MOZ_JAVA_CODE_COVERAGE": False})
+ self.assertEqual(
+ out,
+ textwrap.dedent(
+ """\
+ checking for java... %s
+ """
+ % alt_java
+ ),
+ )
+
+ # --enable-java-coverage should set MOZ_JAVA_CODE_COVERAGE.
+ alt_java_home = mozpath.dirname(mozpath.dirname(java))
+ config, out, status = run_configure_java(
+ paths,
+ mock_java_home=alt_java_home,
+ mock_path=mozpath.dirname(java),
+ args=["--enable-java-coverage"],
+ )
+ self.assertEqual(status, 1)
+ self.assertEqual(config, {})
+
+ # Any missing tool is fatal when these checks run.
+ paths = {}
+ config, out, status = run_configure_java(
+ mock_fs_paths={},
+ mock_path=mozpath.dirname(java),
+ args=["--enable-java-coverage"],
+ )
+ self.assertEqual(status, 1)
+ self.assertEqual(config, {})
+ self.assertEqual(out, expected_error_message)
+
+ def test_pkg_check_modules(self):
+ mock_pkg_config_version = "0.10.0"
+ mock_pkg_config_path = mozpath.abspath("/usr/bin/pkg-config")
+
+ seen_flags = set()
+
+ def mock_pkg_config(_, args):
+ if "--dont-define-prefix" in args:
+ args = list(args)
+ seen_flags.add(args.pop(args.index("--dont-define-prefix")))
+ args = tuple(args)
+ if args[0:2] == ("--errors-to-stdout", "--print-errors"):
+ assert len(args) == 3
+ package = args[2]
+ if package == "unknown":
+ return (
+ 1,
+ "Package unknown was not found in the pkg-config search path.\n"
+ "Perhaps you should add the directory containing `unknown.pc'\n"
+ "to the PKG_CONFIG_PATH environment variable\n"
+ "No package 'unknown' found",
+ "",
+ )
+ if package == "valid":
+ return 0, "", ""
+ if package == "new > 1.1":
+ return 1, "Requested 'new > 1.1' but version of new is 1.1", ""
+ if args[0] == "--cflags":
+ assert len(args) == 2
+ return 0, "-I/usr/include/%s" % args[1], ""
+ if args[0] == "--libs":
+ assert len(args) == 2
+ return 0, "-l%s" % args[1], ""
+ if args[0] == "--version":
+ return 0, mock_pkg_config_version, ""
+ if args[0] == "--about":
+ return 1, "Unknown option --about", ""
+ self.fail("Unexpected arguments to mock_pkg_config: %s" % (args,))
+
+ def mock_pkgconf(_, args):
+ if args[0] == "--shared":
+ seen_flags.add(args[0])
+ args = args[1:]
+ if args[0] == "--about":
+ return 0, "pkgconf {}".format(mock_pkg_config_version), ""
+ return mock_pkg_config(_, args)
+
+ def get_result(cmd, args=[], bootstrapped_sysroot=False, extra_paths=None):
+ return self.get_result(
+ textwrap.dedent(
+ """\
+ option('--disable-compile-environment', help='compile env')
+ compile_environment = depends(when='--enable-compile-environment')(lambda: True)
+ toolchain_prefix = depends(when=True)(lambda: None)
+ target_multiarch_dir = depends(when=True)(lambda: None)
+ target_sysroot = depends(when=True)(lambda: %(sysroot)s)
+ target = depends(when=True)(lambda: None)
+ include('%(topsrcdir)s/build/moz.configure/util.configure')
+ include('%(topsrcdir)s/build/moz.configure/checks.configure')
+ # Skip bootstrapping.
+ @template
+ def check_prog(*args, **kwargs):
+ del kwargs["bootstrap"]
+ return check_prog(*args, **kwargs)
+ include('%(topsrcdir)s/build/moz.configure/pkg.configure')
+ """
+ % {
+ "topsrcdir": topsrcdir,
+ "sysroot": "namespace(bootstrapped=True)"
+ if bootstrapped_sysroot
+ else "None",
+ }
+ )
+ + cmd,
+ args=args,
+ extra_paths=extra_paths,
+ includes=(),
+ )
+
+ extra_paths = {mock_pkg_config_path: mock_pkg_config}
+
+ config, output, status = get_result("pkg_check_modules('MOZ_VALID', 'valid')")
+ self.assertEqual(status, 1)
+ self.assertEqual(
+ output,
+ textwrap.dedent(
+ """\
+ checking for pkg_config... not found
+ ERROR: *** The pkg-config script could not be found. Make sure it is
+ *** in your path, or set the PKG_CONFIG environment variable
+ *** to the full path to pkg-config.
+ """
+ ),
+ )
+
+ for pkg_config, version, bootstrapped_sysroot, is_pkgconf in (
+ (mock_pkg_config, "0.10.0", False, False),
+ (mock_pkg_config, "0.30.0", False, False),
+ (mock_pkg_config, "0.30.0", True, False),
+ (mock_pkgconf, "1.1.0", True, True),
+ (mock_pkgconf, "1.6.0", False, True),
+ (mock_pkgconf, "1.8.0", False, True),
+ (mock_pkgconf, "1.8.0", True, True),
+ ):
+ seen_flags = set()
+ mock_pkg_config_version = version
+ config, output, status = get_result(
+ "pkg_check_modules('MOZ_VALID', 'valid')",
+ bootstrapped_sysroot=bootstrapped_sysroot,
+ extra_paths={mock_pkg_config_path: pkg_config},
+ )
+ self.assertEqual(status, 0)
+ self.assertEqual(
+ output,
+ textwrap.dedent(
+ """\
+ checking for pkg_config... %s
+ checking for pkg-config version... %s
+ checking whether pkg-config is pkgconf... %s
+ checking for valid... yes
+ checking MOZ_VALID_CFLAGS... -I/usr/include/valid
+ checking MOZ_VALID_LIBS... -lvalid
+ """
+ % (
+ mock_pkg_config_path,
+ mock_pkg_config_version,
+ "yes" if is_pkgconf else "no",
+ )
+ ),
+ )
+ self.assertEqual(
+ config,
+ {
+ "PKG_CONFIG": mock_pkg_config_path,
+ "MOZ_VALID_CFLAGS": ("-I/usr/include/valid",),
+ "MOZ_VALID_LIBS": ("-lvalid",),
+ },
+ )
+ if version == "1.8.0" and bootstrapped_sysroot:
+ self.assertEqual(seen_flags, set(["--shared", "--dont-define-prefix"]))
+ elif version == "1.8.0":
+ self.assertEqual(seen_flags, set(["--shared"]))
+ elif version in ("1.6.0", "0.30.0") and bootstrapped_sysroot:
+ self.assertEqual(seen_flags, set(["--dont-define-prefix"]))
+ else:
+ self.assertEqual(seen_flags, set())
+
+ config, output, status = get_result(
+ "pkg_check_modules('MOZ_UKNOWN', 'unknown')", extra_paths=extra_paths
+ )
+ self.assertEqual(status, 1)
+ self.assertEqual(
+ output,
+ textwrap.dedent(
+ """\
+ checking for pkg_config... %s
+ checking for pkg-config version... %s
+ checking whether pkg-config is pkgconf... no
+ checking for unknown... no
+ ERROR: Package unknown was not found in the pkg-config search path.
+ ERROR: Perhaps you should add the directory containing `unknown.pc'
+ ERROR: to the PKG_CONFIG_PATH environment variable
+ ERROR: No package 'unknown' found
+ """
+ % (mock_pkg_config_path, mock_pkg_config_version)
+ ),
+ )
+ self.assertEqual(config, {"PKG_CONFIG": mock_pkg_config_path})
+
+ config, output, status = get_result(
+ "pkg_check_modules('MOZ_NEW', 'new > 1.1')", extra_paths=extra_paths
+ )
+ self.assertEqual(status, 1)
+ self.assertEqual(
+ output,
+ textwrap.dedent(
+ """\
+ checking for pkg_config... %s
+ checking for pkg-config version... %s
+ checking whether pkg-config is pkgconf... no
+ checking for new > 1.1... no
+ ERROR: Requested 'new > 1.1' but version of new is 1.1
+ """
+ % (mock_pkg_config_path, mock_pkg_config_version)
+ ),
+ )
+ self.assertEqual(config, {"PKG_CONFIG": mock_pkg_config_path})
+
+ # allow_missing makes missing packages non-fatal.
+ cmd = textwrap.dedent(
+ """\
+ have_new_module = pkg_check_modules('MOZ_NEW', 'new > 1.1', allow_missing=True)
+ @depends(have_new_module)
+ def log_new_module_error(mod):
+ if mod is not True:
+ log.info('Module not found.')
+ """
+ )
+
+ config, output, status = get_result(cmd, extra_paths=extra_paths)
+ self.assertEqual(status, 0)
+ self.assertEqual(
+ output,
+ textwrap.dedent(
+ """\
+ checking for pkg_config... %s
+ checking for pkg-config version... %s
+ checking whether pkg-config is pkgconf... no
+ checking for new > 1.1... no
+ WARNING: Requested 'new > 1.1' but version of new is 1.1
+ Module not found.
+ """
+ % (mock_pkg_config_path, mock_pkg_config_version)
+ ),
+ )
+ self.assertEqual(config, {"PKG_CONFIG": mock_pkg_config_path})
+
+ config, output, status = get_result(
+ cmd, args=["--disable-compile-environment"], extra_paths=extra_paths
+ )
+ self.assertEqual(status, 0)
+ self.assertEqual(output, "Module not found.\n")
+ self.assertEqual(config, {})
+
+ def mock_old_pkg_config(_, args):
+ if args[0] == "--version":
+ return 0, "0.8.10", ""
+ if args[0] == "--about":
+ return 1, "Unknown option --about", ""
+ self.fail("Unexpected arguments to mock_old_pkg_config: %s" % args)
+
+ extra_paths = {mock_pkg_config_path: mock_old_pkg_config}
+
+ config, output, status = get_result(
+ "pkg_check_modules('MOZ_VALID', 'valid')", extra_paths=extra_paths
+ )
+ self.assertEqual(status, 1)
+ self.assertEqual(
+ output,
+ textwrap.dedent(
+ """\
+ checking for pkg_config... %s
+ checking for pkg-config version... 0.8.10
+ checking whether pkg-config is pkgconf... no
+ ERROR: *** Your version of pkg-config is too old. You need version 0.9.0 or newer.
+ """
+ % mock_pkg_config_path
+ ),
+ )
+
+ def test_simple_keyfile(self):
+ includes = ("util.configure", "checks.configure", "keyfiles.configure")
+
+ config, output, status = self.get_result(
+ "simple_keyfile('Mozilla API')", includes=includes
+ )
+ self.assertEqual(status, 0)
+ self.assertEqual(
+ output,
+ textwrap.dedent(
+ """\
+ checking for the Mozilla API key... no
+ """
+ ),
+ )
+ self.assertEqual(config, {"MOZ_MOZILLA_API_KEY": "no-mozilla-api-key"})
+
+ config, output, status = self.get_result(
+ "simple_keyfile('Mozilla API')",
+ args=["--with-mozilla-api-keyfile=/foo/bar/does/not/exist"],
+ includes=includes,
+ )
+ self.assertEqual(status, 1)
+ self.assertEqual(
+ output,
+ textwrap.dedent(
+ """\
+ checking for the Mozilla API key... no
+ ERROR: '/foo/bar/does/not/exist': No such file or directory.
+ """
+ ),
+ )
+ self.assertEqual(config, {})
+
+ with MockedOpen({"key": ""}):
+ config, output, status = self.get_result(
+ "simple_keyfile('Mozilla API')",
+ args=["--with-mozilla-api-keyfile=key"],
+ includes=includes,
+ )
+ self.assertEqual(status, 1)
+ self.assertEqual(
+ output,
+ textwrap.dedent(
+ """\
+ checking for the Mozilla API key... no
+ ERROR: 'key' is empty.
+ """
+ ),
+ )
+ self.assertEqual(config, {})
+
+ with MockedOpen({"key": "fake-key\n"}):
+ config, output, status = self.get_result(
+ "simple_keyfile('Mozilla API')",
+ args=["--with-mozilla-api-keyfile=key"],
+ includes=includes,
+ )
+ self.assertEqual(status, 0)
+ self.assertEqual(
+ output,
+ textwrap.dedent(
+ """\
+ checking for the Mozilla API key... yes
+ """
+ ),
+ )
+ self.assertEqual(config, {"MOZ_MOZILLA_API_KEY": "fake-key"})
+
+ with MockedOpen({"default": "default-key\n"}):
+ config, output, status = self.get_result(
+ "simple_keyfile('Mozilla API', default='default')", includes=includes
+ )
+ self.assertEqual(status, 0)
+ self.assertEqual(
+ output,
+ textwrap.dedent(
+ """\
+ checking for the Mozilla API key... yes
+ """
+ ),
+ )
+ self.assertEqual(config, {"MOZ_MOZILLA_API_KEY": "default-key"})
+
+ with MockedOpen({"default": "default-key\n", "key": "fake-key\n"}):
+ config, output, status = self.get_result(
+ "simple_keyfile('Mozilla API', default='key')", includes=includes
+ )
+ self.assertEqual(status, 0)
+ self.assertEqual(
+ output,
+ textwrap.dedent(
+ """\
+ checking for the Mozilla API key... yes
+ """
+ ),
+ )
+ self.assertEqual(config, {"MOZ_MOZILLA_API_KEY": "fake-key"})
+
+ def test_id_and_secret_keyfile(self):
+ includes = ("util.configure", "checks.configure", "keyfiles.configure")
+
+ config, output, status = self.get_result(
+ "id_and_secret_keyfile('Bing API')", includes=includes
+ )
+ self.assertEqual(status, 0)
+ self.assertEqual(
+ output,
+ textwrap.dedent(
+ """\
+ checking for the Bing API key... no
+ """
+ ),
+ )
+ self.assertEqual(
+ config,
+ {
+ "MOZ_BING_API_CLIENTID": "no-bing-api-clientid",
+ "MOZ_BING_API_KEY": "no-bing-api-key",
+ },
+ )
+
+ config, output, status = self.get_result(
+ "id_and_secret_keyfile('Bing API')",
+ args=["--with-bing-api-keyfile=/foo/bar/does/not/exist"],
+ includes=includes,
+ )
+ self.assertEqual(status, 1)
+ self.assertEqual(
+ output,
+ textwrap.dedent(
+ """\
+ checking for the Bing API key... no
+ ERROR: '/foo/bar/does/not/exist': No such file or directory.
+ """
+ ),
+ )
+ self.assertEqual(config, {})
+
+ with MockedOpen({"key": ""}):
+ config, output, status = self.get_result(
+ "id_and_secret_keyfile('Bing API')",
+ args=["--with-bing-api-keyfile=key"],
+ includes=includes,
+ )
+ self.assertEqual(status, 1)
+ self.assertEqual(
+ output,
+ textwrap.dedent(
+ """\
+ checking for the Bing API key... no
+ ERROR: 'key' is empty.
+ """
+ ),
+ )
+ self.assertEqual(config, {})
+
+ with MockedOpen({"key": "fake-id fake-key\n"}):
+ config, output, status = self.get_result(
+ "id_and_secret_keyfile('Bing API')",
+ args=["--with-bing-api-keyfile=key"],
+ includes=includes,
+ )
+ self.assertEqual(status, 0)
+ self.assertEqual(
+ output,
+ textwrap.dedent(
+ """\
+ checking for the Bing API key... yes
+ """
+ ),
+ )
+ self.assertEqual(
+ config,
+ {"MOZ_BING_API_CLIENTID": "fake-id", "MOZ_BING_API_KEY": "fake-key"},
+ )
+
+ with MockedOpen({"key": "fake-key\n"}):
+ config, output, status = self.get_result(
+ "id_and_secret_keyfile('Bing API')",
+ args=["--with-bing-api-keyfile=key"],
+ includes=includes,
+ )
+ self.assertEqual(status, 1)
+ self.assertEqual(
+ output,
+ textwrap.dedent(
+ """\
+ checking for the Bing API key... no
+ ERROR: Bing API key file has an invalid format.
+ """
+ ),
+ )
+ self.assertEqual(config, {})
+
+ with MockedOpen({"default-key": "default-id default-key\n"}):
+ config, output, status = self.get_result(
+ "id_and_secret_keyfile('Bing API', default='default-key')",
+ includes=includes,
+ )
+ self.assertEqual(status, 0)
+ self.assertEqual(
+ output,
+ textwrap.dedent(
+ """\
+ checking for the Bing API key... yes
+ """
+ ),
+ )
+ self.assertEqual(
+ config,
+ {
+ "MOZ_BING_API_CLIENTID": "default-id",
+ "MOZ_BING_API_KEY": "default-key",
+ },
+ )
+
+ with MockedOpen(
+ {"default-key": "default-id default-key\n", "key": "fake-id fake-key\n"}
+ ):
+ config, output, status = self.get_result(
+ "id_and_secret_keyfile('Bing API', default='default-key')",
+ args=["--with-bing-api-keyfile=key"],
+ includes=includes,
+ )
+ self.assertEqual(status, 0)
+ self.assertEqual(
+ output,
+ textwrap.dedent(
+ """\
+ checking for the Bing API key... yes
+ """
+ ),
+ )
+ self.assertEqual(
+ config,
+ {"MOZ_BING_API_CLIENTID": "fake-id", "MOZ_BING_API_KEY": "fake-key"},
+ )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/configure/test_compile_checks.py b/python/mozbuild/mozbuild/test/configure/test_compile_checks.py
new file mode 100644
index 0000000000..37988d535f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/test_compile_checks.py
@@ -0,0 +1,599 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import textwrap
+import unittest
+
+import mozpack.path as mozpath
+from buildconfig import topsrcdir
+from mozunit import main
+from six import StringIO
+from test_toolchain_helpers import FakeCompiler
+
+from common import ConfigureTestSandbox
+from mozbuild.util import exec_
+
+
+class BaseCompileChecks(unittest.TestCase):
+ def get_mock_compiler(self, expected_test_content=None, expected_flags=None):
+ expected_flags = expected_flags or []
+
+ def mock_compiler(stdin, args):
+ if args != ["--version"]:
+ test_file = [a for a in args if not a.startswith("-")]
+ self.assertEqual(len(test_file), 1)
+ test_file = test_file[0]
+ args = [a for a in args if a.startswith("-")]
+ self.assertIn("-c", args)
+ for flag in expected_flags:
+ self.assertIn(flag, args)
+
+ if expected_test_content:
+ with open(test_file) as fh:
+ test_content = fh.read()
+ self.assertEqual(test_content, expected_test_content)
+
+ return FakeCompiler()(None, args)
+
+ return mock_compiler
+
+ def do_compile_test(self, command, expected_test_content=None, expected_flags=None):
+
+ paths = {
+ os.path.abspath("/usr/bin/mockcc"): self.get_mock_compiler(
+ expected_test_content=expected_test_content,
+ expected_flags=expected_flags,
+ ),
+ }
+
+ base_dir = os.path.join(topsrcdir, "build", "moz.configure")
+
+ mock_compiler_defs = textwrap.dedent(
+ """\
+ @depends(when=True)
+ def extra_toolchain_flags():
+ return []
+
+ @depends(when=True)
+ def linker_ldflags():
+ return []
+
+ target = depends(when=True)(lambda: True)
+
+ @depends(when=True)
+ def configure_cache():
+
+ class ConfigureCache(dict):
+ pass
+
+ cache_data = {}
+
+ cache = ConfigureCache(cache_data)
+ cache.version_checked_compilers = set()
+
+ return cache
+
+ include('%s/compilers-util.configure')
+
+ @template
+ def wrap_compiler(compiler):
+ return compiler_class(compiler, False)
+
+ @wrap_compiler
+ @depends(when=True)
+ def c_compiler():
+ return namespace(
+ flags=[],
+ type='gcc',
+ compiler=os.path.abspath('/usr/bin/mockcc'),
+ wrapper=[],
+ language='C',
+ )
+
+ @wrap_compiler
+ @depends(when=True)
+ def host_c_compiler():
+ return namespace(
+ flags=[],
+ type='gcc',
+ compiler=os.path.abspath('/usr/bin/mockcc'),
+ wrapper=[],
+ language='C',
+ )
+
+ @wrap_compiler
+ @depends(when=True)
+ def cxx_compiler():
+ return namespace(
+ flags=[],
+ type='gcc',
+ compiler=os.path.abspath('/usr/bin/mockcc'),
+ wrapper=[],
+ language='C++',
+ )
+
+ @wrap_compiler
+ @depends(when=True)
+ def host_cxx_compiler():
+ return namespace(
+ flags=[],
+ type='gcc',
+ compiler=os.path.abspath('/usr/bin/mockcc'),
+ wrapper=[],
+ language='C++',
+ )
+ """
+ % mozpath.normsep(base_dir)
+ )
+
+ config = {}
+ out = StringIO()
+ sandbox = ConfigureTestSandbox(paths, config, {}, ["/bin/configure"], out, out)
+ sandbox.include_file(os.path.join(base_dir, "util.configure"))
+ sandbox.include_file(os.path.join(base_dir, "checks.configure"))
+ exec_(mock_compiler_defs, sandbox)
+ sandbox.include_file(os.path.join(base_dir, "compile-checks.configure"))
+
+ status = 0
+ try:
+ exec_(command, sandbox)
+ sandbox.run()
+ except SystemExit as e:
+ status = e.code
+
+ return config, out.getvalue(), status
+
+
+class TestHeaderChecks(BaseCompileChecks):
+ def test_try_compile_include(self):
+ expected_test_content = textwrap.dedent(
+ """\
+ #include <foo.h>
+ #include <bar.h>
+ int
+ main(void)
+ {
+
+ ;
+ return 0;
+ }
+ """
+ )
+
+ cmd = textwrap.dedent(
+ """\
+ try_compile(['foo.h', 'bar.h'], language='C')
+ """
+ )
+
+ config, out, status = self.do_compile_test(cmd, expected_test_content)
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {})
+
+ def test_try_compile_flags(self):
+ expected_flags = ["--extra", "--flags"]
+
+ cmd = textwrap.dedent(
+ """\
+ try_compile(language='C++', flags=['--flags', '--extra'])
+ """
+ )
+
+ config, out, status = self.do_compile_test(cmd, expected_flags=expected_flags)
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {})
+
+ def test_try_compile_failure(self):
+ cmd = textwrap.dedent(
+ """\
+ have_fn = try_compile(body='somefn();', flags=['-funknown-flag'])
+ set_config('HAVE_SOMEFN', have_fn)
+
+ have_another = try_compile(body='anotherfn();', language='C')
+ set_config('HAVE_ANOTHERFN', have_another)
+ """
+ )
+
+ config, out, status = self.do_compile_test(cmd)
+ self.assertEqual(status, 0)
+ self.assertEqual(
+ config,
+ {
+ "HAVE_ANOTHERFN": True,
+ },
+ )
+
+ def test_try_compile_msg(self):
+ cmd = textwrap.dedent(
+ """\
+ known_flag = try_compile(language='C++', flags=['-fknown-flag'],
+ check_msg='whether -fknown-flag works')
+ set_config('HAVE_KNOWN_FLAG', known_flag)
+ """
+ )
+ config, out, status = self.do_compile_test(cmd)
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {"HAVE_KNOWN_FLAG": True})
+ self.assertEqual(
+ out,
+ textwrap.dedent(
+ """\
+ checking whether -fknown-flag works... yes
+ """
+ ),
+ )
+
+ def test_check_header(self):
+ expected_test_content = textwrap.dedent(
+ """\
+ #include <foo.h>
+ int
+ main(void)
+ {
+
+ ;
+ return 0;
+ }
+ """
+ )
+
+ cmd = textwrap.dedent(
+ """\
+ check_header('foo.h')
+ """
+ )
+
+ config, out, status = self.do_compile_test(
+ cmd, expected_test_content=expected_test_content
+ )
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {"DEFINES": {"HAVE_FOO_H": True}})
+ self.assertEqual(
+ out,
+ textwrap.dedent(
+ """\
+ checking for foo.h... yes
+ """
+ ),
+ )
+
+ def test_check_header_conditional(self):
+ cmd = textwrap.dedent(
+ """\
+ check_headers('foo.h', 'bar.h', when=never)
+ """
+ )
+
+ config, out, status = self.do_compile_test(cmd)
+ self.assertEqual(status, 0)
+ self.assertEqual(out, "")
+ self.assertEqual(config, {"DEFINES": {}})
+
+ def test_check_header_include(self):
+ expected_test_content = textwrap.dedent(
+ """\
+ #include <std.h>
+ #include <bar.h>
+ #include <foo.h>
+ int
+ main(void)
+ {
+
+ ;
+ return 0;
+ }
+ """
+ )
+
+ cmd = textwrap.dedent(
+ """\
+ have_foo = check_header('foo.h', includes=['std.h', 'bar.h'])
+ set_config('HAVE_FOO_H', have_foo)
+ """
+ )
+
+ config, out, status = self.do_compile_test(
+ cmd, expected_test_content=expected_test_content
+ )
+
+ self.assertEqual(status, 0)
+ self.assertEqual(
+ config,
+ {
+ "HAVE_FOO_H": True,
+ "DEFINES": {
+ "HAVE_FOO_H": True,
+ },
+ },
+ )
+ self.assertEqual(
+ out,
+ textwrap.dedent(
+ """\
+ checking for foo.h... yes
+ """
+ ),
+ )
+
+ def test_check_headers_multiple(self):
+ cmd = textwrap.dedent(
+ """\
+ baz_bar, quux_bar = check_headers('baz/foo-bar.h', 'baz-quux/foo-bar.h')
+ set_config('HAVE_BAZ_BAR', baz_bar)
+ set_config('HAVE_QUUX_BAR', quux_bar)
+ """
+ )
+
+ config, out, status = self.do_compile_test(cmd)
+ self.assertEqual(status, 0)
+ self.assertEqual(
+ config,
+ {
+ "HAVE_BAZ_BAR": True,
+ "HAVE_QUUX_BAR": True,
+ "DEFINES": {
+ "HAVE_BAZ_FOO_BAR_H": True,
+ "HAVE_BAZ_QUUX_FOO_BAR_H": True,
+ },
+ },
+ )
+ self.assertEqual(
+ out,
+ textwrap.dedent(
+ """\
+ checking for baz/foo-bar.h... yes
+ checking for baz-quux/foo-bar.h... yes
+ """
+ ),
+ )
+
+ def test_check_headers_not_found(self):
+
+ cmd = textwrap.dedent(
+ """\
+ baz_bar, quux_bar = check_headers('baz/foo-bar.h', 'baz-quux/foo-bar.h',
+ flags=['-funknown-flag'])
+ set_config('HAVE_BAZ_BAR', baz_bar)
+ set_config('HAVE_QUUX_BAR', quux_bar)
+ """
+ )
+
+ config, out, status = self.do_compile_test(cmd)
+ self.assertEqual(status, 0)
+ self.assertEqual(config, {"DEFINES": {}})
+ self.assertEqual(
+ out,
+ textwrap.dedent(
+ """\
+ checking for baz/foo-bar.h... no
+ checking for baz-quux/foo-bar.h... no
+ """
+ ),
+ )
+
+
+class TestWarningChecks(BaseCompileChecks):
+ def get_warnings(self):
+ return textwrap.dedent(
+ """\
+ set_config('_WARNINGS_CFLAGS', warnings_flags.cflags)
+ set_config('_WARNINGS_CXXFLAGS', warnings_flags.cxxflags)
+ """
+ )
+
+ def test_check_and_add_warning(self):
+ for flag, expected_flags in (
+ ("-Wfoo", ["-Werror", "-Wfoo"]),
+ ("-Wno-foo", ["-Werror", "-Wfoo"]),
+ ("-Werror=foo", ["-Werror=foo"]),
+ ("-Wno-error=foo", ["-Wno-error=foo"]),
+ ):
+ cmd = (
+ textwrap.dedent(
+ """\
+ check_and_add_warning('%s')
+ """
+ % flag
+ )
+ + self.get_warnings()
+ )
+
+ config, out, status = self.do_compile_test(
+ cmd, expected_flags=expected_flags
+ )
+ self.assertEqual(status, 0)
+ self.assertEqual(
+ config,
+ {
+ "_WARNINGS_CFLAGS": [flag],
+ "_WARNINGS_CXXFLAGS": [flag],
+ },
+ )
+ self.assertEqual(
+ out,
+ textwrap.dedent(
+ """\
+ checking whether the C compiler supports {flag}... yes
+ checking whether the C++ compiler supports {flag}... yes
+ """.format(
+ flag=flag
+ )
+ ),
+ )
+
+ def test_check_and_add_warning_one(self):
+ cmd = (
+ textwrap.dedent(
+ """\
+ check_and_add_warning('-Wfoo', cxx_compiler)
+ """
+ )
+ + self.get_warnings()
+ )
+
+ config, out, status = self.do_compile_test(cmd)
+ self.assertEqual(status, 0)
+ self.assertEqual(
+ config,
+ {
+ "_WARNINGS_CFLAGS": [],
+ "_WARNINGS_CXXFLAGS": ["-Wfoo"],
+ },
+ )
+ self.assertEqual(
+ out,
+ textwrap.dedent(
+ """\
+ checking whether the C++ compiler supports -Wfoo... yes
+ """
+ ),
+ )
+
+ def test_check_and_add_warning_when(self):
+ cmd = (
+ textwrap.dedent(
+ """\
+ @depends(when=True)
+ def never():
+ return False
+ check_and_add_warning('-Wfoo', cxx_compiler, when=never)
+ """
+ )
+ + self.get_warnings()
+ )
+
+ config, out, status = self.do_compile_test(cmd)
+ self.assertEqual(status, 0)
+ self.assertEqual(
+ config,
+ {
+ "_WARNINGS_CFLAGS": [],
+ "_WARNINGS_CXXFLAGS": [],
+ },
+ )
+ self.assertEqual(out, "")
+
+ cmd = (
+ textwrap.dedent(
+ """\
+ @depends(when=True)
+ def always():
+ return True
+ check_and_add_warning('-Wfoo', cxx_compiler, when=always)
+ """
+ )
+ + self.get_warnings()
+ )
+
+ config, out, status = self.do_compile_test(cmd)
+ self.assertEqual(status, 0)
+ self.assertEqual(
+ config,
+ {
+ "_WARNINGS_CFLAGS": [],
+ "_WARNINGS_CXXFLAGS": ["-Wfoo"],
+ },
+ )
+ self.assertEqual(
+ out,
+ textwrap.dedent(
+ """\
+ checking whether the C++ compiler supports -Wfoo... yes
+ """
+ ),
+ )
+
+ def test_add_warning(self):
+ cmd = (
+ textwrap.dedent(
+ """\
+ add_warning('-Wfoo')
+ """
+ )
+ + self.get_warnings()
+ )
+
+ config, out, status = self.do_compile_test(cmd)
+ self.assertEqual(status, 0)
+ self.assertEqual(
+ config,
+ {
+ "_WARNINGS_CFLAGS": ["-Wfoo"],
+ "_WARNINGS_CXXFLAGS": ["-Wfoo"],
+ },
+ )
+ self.assertEqual(out, "")
+
+ def test_add_warning_one(self):
+ cmd = (
+ textwrap.dedent(
+ """\
+ add_warning('-Wfoo', c_compiler)
+ """
+ )
+ + self.get_warnings()
+ )
+
+ config, out, status = self.do_compile_test(cmd)
+ self.assertEqual(status, 0)
+ self.assertEqual(
+ config,
+ {
+ "_WARNINGS_CFLAGS": ["-Wfoo"],
+ "_WARNINGS_CXXFLAGS": [],
+ },
+ )
+ self.assertEqual(out, "")
+
+ def test_add_warning_when(self):
+ cmd = (
+ textwrap.dedent(
+ """\
+ @depends(when=True)
+ def never():
+ return False
+ add_warning('-Wfoo', c_compiler, when=never)
+ """
+ )
+ + self.get_warnings()
+ )
+
+ config, out, status = self.do_compile_test(cmd)
+ self.assertEqual(status, 0)
+ self.assertEqual(
+ config,
+ {
+ "_WARNINGS_CFLAGS": [],
+ "_WARNINGS_CXXFLAGS": [],
+ },
+ )
+ self.assertEqual(out, "")
+
+ cmd = (
+ textwrap.dedent(
+ """\
+ @depends(when=True)
+ def always():
+ return True
+ add_warning('-Wfoo', c_compiler, when=always)
+ """
+ )
+ + self.get_warnings()
+ )
+
+ config, out, status = self.do_compile_test(cmd)
+ self.assertEqual(status, 0)
+ self.assertEqual(
+ config,
+ {
+ "_WARNINGS_CFLAGS": ["-Wfoo"],
+ "_WARNINGS_CXXFLAGS": [],
+ },
+ )
+ self.assertEqual(out, "")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/configure/test_configure.py b/python/mozbuild/mozbuild/test/configure/test_configure.py
new file mode 100644
index 0000000000..a5e42faae3
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/test_configure.py
@@ -0,0 +1,1986 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+import textwrap
+import unittest
+
+import mozpack.path as mozpath
+import six
+from mozunit import MockedOpen, main
+from six import StringIO
+
+from mozbuild.configure import ConfigureError, ConfigureSandbox
+from mozbuild.configure.options import (
+ InvalidOptionError,
+ NegativeOptionValue,
+ PositiveOptionValue,
+)
+from mozbuild.util import ReadOnlyNamespace, exec_, memoized_property
+
+test_data_path = mozpath.abspath(mozpath.dirname(__file__))
+test_data_path = mozpath.join(test_data_path, "data")
+
+
+class TestConfigure(unittest.TestCase):
+ def get_config(
+ self, options=[], env={}, configure="moz.configure", prog="/bin/configure"
+ ):
+ config = {}
+ out = StringIO()
+ sandbox = ConfigureSandbox(config, env, [prog] + options, out, out)
+
+ sandbox.run(mozpath.join(test_data_path, configure))
+
+ if "--help" in options:
+ return six.ensure_text(out.getvalue()), config
+ self.assertEqual("", out.getvalue())
+ return config
+
+ def moz_configure(self, source):
+ return MockedOpen(
+ {os.path.join(test_data_path, "moz.configure"): textwrap.dedent(source)}
+ )
+
+ def test_defaults(self):
+ config = self.get_config()
+ self.maxDiff = None
+ self.assertEqual(
+ {
+ "CHOICES": NegativeOptionValue(),
+ "DEFAULTED": PositiveOptionValue(("not-simple",)),
+ "IS_GCC": NegativeOptionValue(),
+ "REMAINDER": (
+ PositiveOptionValue(),
+ NegativeOptionValue(),
+ NegativeOptionValue(),
+ NegativeOptionValue(),
+ ),
+ "SIMPLE": NegativeOptionValue(),
+ "VALUES": NegativeOptionValue(),
+ "VALUES2": NegativeOptionValue(),
+ "VALUES3": NegativeOptionValue(),
+ "WITH_ENV": NegativeOptionValue(),
+ },
+ config,
+ )
+
+ def test_help(self):
+ help, config = self.get_config(["--help"], prog="configure")
+
+ self.assertEqual({}, config)
+ self.maxDiff = None
+ self.assertEqual(
+ "Usage: configure [options]\n"
+ "\n"
+ "Options: [defaults in brackets after descriptions]\n"
+ " Help options:\n"
+ " --help print this message\n"
+ "\n"
+ " Options from python/mozbuild/mozbuild/test/configure/data/included.configure:\n"
+ " --enable-imports-in-template\n Imports in template\n"
+ "\n"
+ " Options from python/mozbuild/mozbuild/test/configure/data/moz.configure:\n"
+ " --enable-include Include\n"
+ " --enable-simple Enable simple\n"
+ " --enable-values Enable values\n"
+ " --enable-with-env Enable with env\n"
+ " --indirect-option Indirectly defined option\n"
+ " --option Option\n"
+ " --returned-choices Choices\n"
+ " --with-imports Imports\n"
+ " --with-returned-default Returned default [not-simple]\n"
+ " --with-stuff Build with stuff\n"
+ " --without-thing Build without thing\n"
+ "\n"
+ "\n"
+ "Environment variables:\n"
+ " Options from python/mozbuild/mozbuild/test/configure/data/moz.configure:\n"
+ " CC C Compiler\n"
+ "\n",
+ help.replace("\\", "/"),
+ )
+
+ def test_unknown(self):
+ with self.assertRaises(InvalidOptionError):
+ self.get_config(["--unknown"])
+
+ def test_simple(self):
+ for config in (
+ self.get_config(),
+ self.get_config(["--disable-simple"]),
+ # Last option wins.
+ self.get_config(["--enable-simple", "--disable-simple"]),
+ ):
+ self.assertNotIn("ENABLED_SIMPLE", config)
+ self.assertIn("SIMPLE", config)
+ self.assertEqual(NegativeOptionValue(), config["SIMPLE"])
+
+ for config in (
+ self.get_config(["--enable-simple"]),
+ self.get_config(["--disable-simple", "--enable-simple"]),
+ ):
+ self.assertIn("ENABLED_SIMPLE", config)
+ self.assertIn("SIMPLE", config)
+ self.assertEqual(PositiveOptionValue(), config["SIMPLE"])
+ self.assertIs(config["SIMPLE"], config["ENABLED_SIMPLE"])
+
+ # --enable-simple doesn't take values.
+ with self.assertRaises(InvalidOptionError):
+ self.get_config(["--enable-simple=value"])
+
+ def test_with_env(self):
+ for config in (
+ self.get_config(),
+ self.get_config(["--disable-with-env"]),
+ self.get_config(["--enable-with-env", "--disable-with-env"]),
+ self.get_config(env={"MOZ_WITH_ENV": ""}),
+ # Options win over environment
+ self.get_config(["--disable-with-env"], env={"MOZ_WITH_ENV": "1"}),
+ ):
+ self.assertIn("WITH_ENV", config)
+ self.assertEqual(NegativeOptionValue(), config["WITH_ENV"])
+
+ for config in (
+ self.get_config(["--enable-with-env"]),
+ self.get_config(["--disable-with-env", "--enable-with-env"]),
+ self.get_config(env={"MOZ_WITH_ENV": "1"}),
+ self.get_config(["--enable-with-env"], env={"MOZ_WITH_ENV": ""}),
+ ):
+ self.assertIn("WITH_ENV", config)
+ self.assertEqual(PositiveOptionValue(), config["WITH_ENV"])
+
+ with self.assertRaises(InvalidOptionError):
+ self.get_config(["--enable-with-env=value"])
+
+ with self.assertRaises(InvalidOptionError):
+ self.get_config(env={"MOZ_WITH_ENV": "value"})
+
+ def test_values(self, name="VALUES"):
+ for config in (
+ self.get_config(),
+ self.get_config(["--disable-values"]),
+ self.get_config(["--enable-values", "--disable-values"]),
+ ):
+ self.assertIn(name, config)
+ self.assertEqual(NegativeOptionValue(), config[name])
+
+ for config in (
+ self.get_config(["--enable-values"]),
+ self.get_config(["--disable-values", "--enable-values"]),
+ ):
+ self.assertIn(name, config)
+ self.assertEqual(PositiveOptionValue(), config[name])
+
+ config = self.get_config(["--enable-values=foo"])
+ self.assertIn(name, config)
+ self.assertEqual(PositiveOptionValue(("foo",)), config[name])
+
+ config = self.get_config(["--enable-values=foo,bar"])
+ self.assertIn(name, config)
+ self.assertTrue(config[name])
+ self.assertEqual(PositiveOptionValue(("foo", "bar")), config[name])
+
+ def test_values2(self):
+ self.test_values("VALUES2")
+
+ def test_values3(self):
+ self.test_values("VALUES3")
+
+ def test_returned_default(self):
+ config = self.get_config(["--enable-simple"])
+ self.assertIn("DEFAULTED", config)
+ self.assertEqual(PositiveOptionValue(("simple",)), config["DEFAULTED"])
+
+ config = self.get_config(["--disable-simple"])
+ self.assertIn("DEFAULTED", config)
+ self.assertEqual(PositiveOptionValue(("not-simple",)), config["DEFAULTED"])
+
+ def test_returned_choices(self):
+ for val in ("a", "b", "c"):
+ config = self.get_config(
+ ["--enable-values=alpha", "--returned-choices=%s" % val]
+ )
+ self.assertIn("CHOICES", config)
+ self.assertEqual(PositiveOptionValue((val,)), config["CHOICES"])
+
+ for val in ("0", "1", "2"):
+ config = self.get_config(
+ ["--enable-values=numeric", "--returned-choices=%s" % val]
+ )
+ self.assertIn("CHOICES", config)
+ self.assertEqual(PositiveOptionValue((val,)), config["CHOICES"])
+
+ with self.assertRaises(InvalidOptionError):
+ self.get_config(["--enable-values=numeric", "--returned-choices=a"])
+
+ with self.assertRaises(InvalidOptionError):
+ self.get_config(["--enable-values=alpha", "--returned-choices=0"])
+
+ def test_included(self):
+ config = self.get_config(env={"CC": "gcc"})
+ self.assertIn("IS_GCC", config)
+ self.assertEqual(config["IS_GCC"], True)
+
+ config = self.get_config(["--enable-include=extra.configure", "--extra"])
+ self.assertIn("EXTRA", config)
+ self.assertEqual(PositiveOptionValue(), config["EXTRA"])
+
+ with self.assertRaises(InvalidOptionError):
+ self.get_config(["--extra"])
+
+ def test_template(self):
+ config = self.get_config(env={"CC": "gcc"})
+ self.assertIn("CFLAGS", config)
+ self.assertEqual(config["CFLAGS"], ["-Werror=foobar"])
+
+ config = self.get_config(env={"CC": "clang"})
+ self.assertNotIn("CFLAGS", config)
+
+ def test_imports(self):
+ config = {}
+ out = StringIO()
+ sandbox = ConfigureSandbox(config, {}, ["configure"], out, out)
+
+ with self.assertRaises(ImportError):
+ exec_(
+ textwrap.dedent(
+ """
+ @template
+ def foo():
+ import sys
+ foo()"""
+ ),
+ sandbox,
+ )
+
+ exec_(
+ textwrap.dedent(
+ """
+ @template
+ @imports('sys')
+ def foo():
+ return sys"""
+ ),
+ sandbox,
+ )
+
+ self.assertIs(sandbox["foo"](), sys)
+
+ # os.path after an import is a mix of vanilla os.path and sandbox os.path.
+ os_path = {}
+ exec_("from os.path import *", {}, os_path)
+ os_path.update(sandbox.OS.path.__dict__)
+ os_path = ReadOnlyNamespace(**os_path)
+
+ exec_(
+ textwrap.dedent(
+ """
+ @template
+ @imports(_from='os', _import='path')
+ def foo():
+ return path"""
+ ),
+ sandbox,
+ )
+
+ self.assertEqual(sandbox["foo"](), os_path)
+
+ exec_(
+ textwrap.dedent(
+ """
+ @template
+ @imports(_from='os', _import='path', _as='os_path')
+ def foo():
+ return os_path"""
+ ),
+ sandbox,
+ )
+
+ self.assertEqual(sandbox["foo"](), os_path)
+
+ exec_(
+ textwrap.dedent(
+ """
+ @template
+ @imports('__builtin__')
+ def foo():
+ return __builtin__"""
+ ),
+ sandbox,
+ )
+
+ self.assertIs(sandbox["foo"](), six.moves.builtins)
+
+ exec_(
+ textwrap.dedent(
+ """
+ @template
+ @imports(_from='__builtin__', _import='open')
+ def foo():
+ return open('%s')"""
+ % os.devnull
+ ),
+ sandbox,
+ )
+
+ f = sandbox["foo"]()
+ self.assertEqual(f.name, os.devnull)
+ f.close()
+
+ # This unlocks the sandbox
+ exec_(
+ textwrap.dedent(
+ """
+ @template
+ @imports(_import='__builtin__', _as='__builtins__')
+ def foo():
+ import sys
+ return sys"""
+ ),
+ sandbox,
+ )
+
+ self.assertIs(sandbox["foo"](), sys)
+
+ exec_(
+ textwrap.dedent(
+ """
+ @template
+ @imports('__sandbox__')
+ def foo():
+ return __sandbox__"""
+ ),
+ sandbox,
+ )
+
+ self.assertIs(sandbox["foo"](), sandbox)
+
+ exec_(
+ textwrap.dedent(
+ """
+ @template
+ @imports(_import='__sandbox__', _as='s')
+ def foo():
+ return s"""
+ ),
+ sandbox,
+ )
+
+ self.assertIs(sandbox["foo"](), sandbox)
+
+ # Nothing leaked from the function being executed
+ self.assertEqual(list(sandbox), ["__builtins__", "foo"])
+ self.assertEqual(sandbox["__builtins__"], ConfigureSandbox.BUILTINS)
+
+ exec_(
+ textwrap.dedent(
+ """
+ @template
+ @imports('sys')
+ def foo():
+ @depends(when=True)
+ def bar():
+ return sys
+ return bar
+ bar = foo()"""
+ ),
+ sandbox,
+ )
+
+ with self.assertRaises(NameError) as e:
+ sandbox._depends[sandbox["bar"]].result()
+
+ self.assertIn("name 'sys' is not defined", str(e.exception))
+
+ def test_apply_imports(self):
+ imports = []
+
+ class CountApplyImportsSandbox(ConfigureSandbox):
+ def _apply_imports(self, *args, **kwargs):
+ imports.append((args, kwargs))
+ super(CountApplyImportsSandbox, self)._apply_imports(*args, **kwargs)
+
+ config = {}
+ out = StringIO()
+ sandbox = CountApplyImportsSandbox(config, {}, ["configure"], out, out)
+
+ exec_(
+ textwrap.dedent(
+ """
+ @template
+ @imports('sys')
+ def foo():
+ return sys
+ foo()
+ foo()"""
+ ),
+ sandbox,
+ )
+
+ self.assertEqual(len(imports), 1)
+
+ def test_import_wrapping(self):
+ bar = object()
+ foo = ReadOnlyNamespace(bar=bar)
+
+ class BasicWrappingSandbox(ConfigureSandbox):
+ @memoized_property
+ def _wrapped_foo(self):
+ return foo
+
+ config = {}
+ out = StringIO()
+ sandbox = BasicWrappingSandbox(config, {}, ["configure"], out, out)
+
+ exec_(
+ textwrap.dedent(
+ """
+ @template
+ @imports('foo')
+ def toplevel():
+ return foo
+ @template
+ @imports('foo.bar')
+ def bar():
+ return foo.bar
+ @template
+ @imports('foo.bar')
+ def bar_upper():
+ return foo
+ @template
+ @imports(_from='foo', _import='bar')
+ def from_import():
+ return bar
+ @template
+ @imports(_from='foo', _import='bar', _as='custom_name')
+ def from_import_as():
+ return custom_name
+ @template
+ @imports(_import='foo', _as='custom_name')
+ def import_as():
+ return custom_name
+ """
+ ),
+ sandbox,
+ )
+ self.assertIs(sandbox["toplevel"](), foo)
+ self.assertIs(sandbox["bar"](), bar)
+ self.assertIs(sandbox["bar_upper"](), foo)
+ self.assertIs(sandbox["from_import"](), bar)
+ self.assertIs(sandbox["from_import_as"](), bar)
+ self.assertIs(sandbox["import_as"](), foo)
+
+ def test_os_path(self):
+ config = self.get_config(["--with-imports=%s" % __file__])
+ self.assertIn("HAS_ABSPATH", config)
+ self.assertEqual(config["HAS_ABSPATH"], True)
+ self.assertIn("HAS_GETATIME", config)
+ self.assertEqual(config["HAS_GETATIME"], True)
+ self.assertIn("HAS_GETATIME2", config)
+ self.assertEqual(config["HAS_GETATIME2"], False)
+
+ def test_template_call(self):
+ config = self.get_config(env={"CC": "gcc"})
+ self.assertIn("TEMPLATE_VALUE", config)
+ self.assertEqual(config["TEMPLATE_VALUE"], 42)
+ self.assertIn("TEMPLATE_VALUE_2", config)
+ self.assertEqual(config["TEMPLATE_VALUE_2"], 21)
+
+ def test_template_imports(self):
+ config = self.get_config(["--enable-imports-in-template"])
+ self.assertIn("PLATFORM", config)
+ self.assertEqual(config["PLATFORM"], sys.platform)
+
+ def test_decorators(self):
+ config = {}
+ out = StringIO()
+ sandbox = ConfigureSandbox(config, {}, ["configure"], out, out)
+
+ sandbox.include_file(mozpath.join(test_data_path, "decorators.configure"))
+
+ self.assertNotIn("FOO", sandbox)
+ self.assertNotIn("BAR", sandbox)
+ self.assertNotIn("QUX", sandbox)
+
+ def test_set_config(self):
+ def get_config(*args):
+ return self.get_config(*args, configure="set_config.configure")
+
+ help, config = get_config(["--help"])
+ self.assertEqual(config, {})
+
+ config = get_config(["--set-foo"])
+ self.assertIn("FOO", config)
+ self.assertEqual(config["FOO"], True)
+
+ config = get_config(["--set-bar"])
+ self.assertNotIn("FOO", config)
+ self.assertIn("BAR", config)
+ self.assertEqual(config["BAR"], True)
+
+ config = get_config(["--set-value=qux"])
+ self.assertIn("VALUE", config)
+ self.assertEqual(config["VALUE"], "qux")
+
+ config = get_config(["--set-name=hoge"])
+ self.assertIn("hoge", config)
+ self.assertEqual(config["hoge"], True)
+
+ config = get_config([])
+ self.assertEqual(config, {"BAR": False})
+
+ with self.assertRaises(ConfigureError):
+ # Both --set-foo and --set-name=FOO are going to try to
+ # set_config('FOO'...)
+ get_config(["--set-foo", "--set-name=FOO"])
+
+ def test_set_config_when(self):
+ with self.moz_configure(
+ """
+ option('--with-qux', help='qux')
+ set_config('FOO', 'foo', when=True)
+ set_config('BAR', 'bar', when=False)
+ set_config('QUX', 'qux', when='--with-qux')
+ """
+ ):
+ config = self.get_config()
+ self.assertEqual(
+ config,
+ {
+ "FOO": "foo",
+ },
+ )
+ config = self.get_config(["--with-qux"])
+ self.assertEqual(
+ config,
+ {
+ "FOO": "foo",
+ "QUX": "qux",
+ },
+ )
+
+ def test_set_config_when_disable(self):
+ with self.moz_configure(
+ """
+ option('--disable-baz', help='Disable baz')
+ set_config('BAZ', True, when='--enable-baz')
+ """
+ ):
+ config = self.get_config()
+ self.assertEqual(config["BAZ"], True)
+ config = self.get_config(["--enable-baz"])
+ self.assertEqual(config["BAZ"], True)
+ config = self.get_config(["--disable-baz"])
+ self.assertEqual(config, {})
+
+ def test_set_define(self):
+ def get_config(*args):
+ return self.get_config(*args, configure="set_define.configure")
+
+ help, config = get_config(["--help"])
+ self.assertEqual(config, {"DEFINES": {}})
+
+ config = get_config(["--set-foo"])
+ self.assertIn("FOO", config["DEFINES"])
+ self.assertEqual(config["DEFINES"]["FOO"], True)
+
+ config = get_config(["--set-bar"])
+ self.assertNotIn("FOO", config["DEFINES"])
+ self.assertIn("BAR", config["DEFINES"])
+ self.assertEqual(config["DEFINES"]["BAR"], True)
+
+ config = get_config(["--set-value=qux"])
+ self.assertIn("VALUE", config["DEFINES"])
+ self.assertEqual(config["DEFINES"]["VALUE"], "qux")
+
+ config = get_config(["--set-name=hoge"])
+ self.assertIn("hoge", config["DEFINES"])
+ self.assertEqual(config["DEFINES"]["hoge"], True)
+
+ config = get_config([])
+ self.assertEqual(config["DEFINES"], {"BAR": False})
+
+ with self.assertRaises(ConfigureError):
+ # Both --set-foo and --set-name=FOO are going to try to
+ # set_define('FOO'...)
+ get_config(["--set-foo", "--set-name=FOO"])
+
+ def test_set_define_when(self):
+ with self.moz_configure(
+ """
+ option('--with-qux', help='qux')
+ set_define('FOO', 'foo', when=True)
+ set_define('BAR', 'bar', when=False)
+ set_define('QUX', 'qux', when='--with-qux')
+ """
+ ):
+ config = self.get_config()
+ self.assertEqual(
+ config["DEFINES"],
+ {
+ "FOO": "foo",
+ },
+ )
+ config = self.get_config(["--with-qux"])
+ self.assertEqual(
+ config["DEFINES"],
+ {
+ "FOO": "foo",
+ "QUX": "qux",
+ },
+ )
+
+ def test_set_define_when_disable(self):
+ with self.moz_configure(
+ """
+ option('--disable-baz', help='Disable baz')
+ set_define('BAZ', True, when='--enable-baz')
+ """
+ ):
+ config = self.get_config()
+ self.assertEqual(config["DEFINES"]["BAZ"], True)
+ config = self.get_config(["--enable-baz"])
+ self.assertEqual(config["DEFINES"]["BAZ"], True)
+ config = self.get_config(["--disable-baz"])
+ self.assertEqual(config["DEFINES"], {})
+
+ def test_imply_option_simple(self):
+ def get_config(*args):
+ return self.get_config(*args, configure="imply_option/simple.configure")
+
+ help, config = get_config(["--help"])
+ self.assertEqual(config, {})
+
+ config = get_config([])
+ self.assertEqual(config, {})
+
+ config = get_config(["--enable-foo"])
+ self.assertIn("BAR", config)
+ self.assertEqual(config["BAR"], PositiveOptionValue())
+
+ with self.assertRaises(InvalidOptionError) as e:
+ get_config(["--enable-foo", "--disable-bar"])
+
+ self.assertEqual(
+ str(e.exception),
+ "'--enable-bar' implied by '--enable-foo' conflicts with "
+ "'--disable-bar' from the command-line",
+ )
+
+ def test_imply_option_negative(self):
+ def get_config(*args):
+ return self.get_config(*args, configure="imply_option/negative.configure")
+
+ help, config = get_config(["--help"])
+ self.assertEqual(config, {})
+
+ config = get_config([])
+ self.assertEqual(config, {})
+
+ config = get_config(["--enable-foo"])
+ self.assertIn("BAR", config)
+ self.assertEqual(config["BAR"], NegativeOptionValue())
+
+ with self.assertRaises(InvalidOptionError) as e:
+ get_config(["--enable-foo", "--enable-bar"])
+
+ self.assertEqual(
+ str(e.exception),
+ "'--disable-bar' implied by '--enable-foo' conflicts with "
+ "'--enable-bar' from the command-line",
+ )
+
+ config = get_config(["--disable-hoge"])
+ self.assertIn("BAR", config)
+ self.assertEqual(config["BAR"], NegativeOptionValue())
+
+ with self.assertRaises(InvalidOptionError) as e:
+ get_config(["--disable-hoge", "--enable-bar"])
+
+ self.assertEqual(
+ str(e.exception),
+ "'--disable-bar' implied by '--disable-hoge' conflicts with "
+ "'--enable-bar' from the command-line",
+ )
+
+ def test_imply_option_values(self):
+ def get_config(*args):
+ return self.get_config(*args, configure="imply_option/values.configure")
+
+ help, config = get_config(["--help"])
+ self.assertEqual(config, {})
+
+ config = get_config([])
+ self.assertEqual(config, {})
+
+ config = get_config(["--enable-foo=a"])
+ self.assertIn("BAR", config)
+ self.assertEqual(config["BAR"], PositiveOptionValue(("a",)))
+
+ config = get_config(["--enable-foo=a,b"])
+ self.assertIn("BAR", config)
+ self.assertEqual(config["BAR"], PositiveOptionValue(("a", "b")))
+
+ with self.assertRaises(InvalidOptionError) as e:
+ get_config(["--enable-foo=a,b", "--disable-bar"])
+
+ self.assertEqual(
+ str(e.exception),
+ "'--enable-bar=a,b' implied by '--enable-foo' conflicts with "
+ "'--disable-bar' from the command-line",
+ )
+
+ def test_imply_option_infer(self):
+ def get_config(*args):
+ return self.get_config(*args, configure="imply_option/infer.configure")
+
+ help, config = get_config(["--help"])
+ self.assertEqual(config, {})
+
+ config = get_config([])
+ self.assertEqual(config, {})
+
+ with self.assertRaises(InvalidOptionError) as e:
+ get_config(["--enable-foo", "--disable-bar"])
+
+ self.assertEqual(
+ str(e.exception),
+ "'--enable-bar' implied by '--enable-foo' conflicts with "
+ "'--disable-bar' from the command-line",
+ )
+
+ with self.assertRaises(ConfigureError) as e:
+ self.get_config([], configure="imply_option/infer_ko.configure")
+
+ self.assertEqual(
+ str(e.exception),
+ "Cannot infer what implies '--enable-bar'. Please add a `reason` "
+ "to the `imply_option` call.",
+ )
+
+ def test_imply_option_immediate_value(self):
+ def get_config(*args):
+ return self.get_config(*args, configure="imply_option/imm.configure")
+
+ help, config = get_config(["--help"])
+ self.assertEqual(config, {})
+
+ config = get_config([])
+ self.assertEqual(config, {})
+
+ config_path = mozpath.abspath(
+ mozpath.join(test_data_path, "imply_option", "imm.configure")
+ )
+
+ with self.assertRaisesRegexp(
+ InvalidOptionError,
+ "--enable-foo' implied by 'imply_option at %s:7' conflicts "
+ "with '--disable-foo' from the command-line" % config_path,
+ ):
+ get_config(["--disable-foo"])
+
+ with self.assertRaisesRegexp(
+ InvalidOptionError,
+ "--enable-bar=foo,bar' implied by 'imply_option at %s:18' "
+ "conflicts with '--enable-bar=a,b,c' from the command-line" % config_path,
+ ):
+ get_config(["--enable-bar=a,b,c"])
+
+ with self.assertRaisesRegexp(
+ InvalidOptionError,
+ "--enable-baz=BAZ' implied by 'imply_option at %s:29' "
+ "conflicts with '--enable-baz=QUUX' from the command-line" % config_path,
+ ):
+ get_config(["--enable-baz=QUUX"])
+
+ def test_imply_option_failures(self):
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure(
+ """
+ imply_option('--with-foo', ('a',), 'bar')
+ """
+ ):
+ self.get_config()
+
+ self.assertEqual(
+ str(e.exception),
+ "`--with-foo`, emitted from `%s` line 2, is unknown."
+ % mozpath.join(test_data_path, "moz.configure"),
+ )
+
+ with self.assertRaises(TypeError) as e:
+ with self.moz_configure(
+ """
+ imply_option('--with-foo', 42, 'bar')
+
+ option('--with-foo', help='foo')
+ @depends('--with-foo')
+ def foo(value):
+ return value
+ """
+ ):
+ self.get_config()
+
+ self.assertEqual(str(e.exception), "Unexpected type: 'int'")
+
+ def test_imply_option_when(self):
+ with self.moz_configure(
+ """
+ option('--with-foo', help='foo')
+ imply_option('--with-qux', True, when='--with-foo')
+ option('--with-qux', help='qux')
+ set_config('QUX', depends('--with-qux')(lambda x: x))
+ """
+ ):
+ config = self.get_config()
+ self.assertEqual(
+ config,
+ {
+ "QUX": NegativeOptionValue(),
+ },
+ )
+
+ config = self.get_config(["--with-foo"])
+ self.assertEqual(
+ config,
+ {
+ "QUX": PositiveOptionValue(),
+ },
+ )
+
+ def test_imply_option_dependency_loop(self):
+ with self.moz_configure(
+ """
+ option('--without-foo', help='foo')
+
+ @depends('--with-foo')
+ def qux_default(foo):
+ return bool(foo)
+
+ option('--with-qux', default=qux_default, help='qux')
+
+ imply_option('--with-foo', depends('--with-qux')(lambda x: x or None))
+
+ set_config('FOO', depends('--with-foo')(lambda x: x))
+ set_config('QUX', depends('--with-qux')(lambda x: x))
+ """
+ ):
+ config = self.get_config()
+ self.assertEqual(
+ config,
+ {
+ "FOO": PositiveOptionValue(),
+ "QUX": PositiveOptionValue(),
+ },
+ )
+
+ config = self.get_config(["--without-foo"])
+ self.assertEqual(
+ config,
+ {
+ "FOO": NegativeOptionValue(),
+ "QUX": NegativeOptionValue(),
+ },
+ )
+
+ config = self.get_config(["--with-qux"])
+ self.assertEqual(
+ config,
+ {
+ "FOO": PositiveOptionValue(),
+ "QUX": PositiveOptionValue(),
+ },
+ )
+
+ with self.assertRaises(InvalidOptionError) as e:
+ config = self.get_config(["--without-foo", "--with-qux"])
+
+ self.assertEqual(
+ str(e.exception),
+ "'--with-foo' implied by '--with-qux' conflicts "
+ "with '--without-foo' from the command-line",
+ )
+
+ config = self.get_config(["--without-qux"])
+ self.assertEqual(
+ config,
+ {
+ "FOO": PositiveOptionValue(),
+ "QUX": NegativeOptionValue(),
+ },
+ )
+
+ with self.moz_configure(
+ """
+ option('--with-foo', help='foo')
+
+ @depends('--with-foo')
+ def qux_default(foo):
+ return bool(foo)
+
+ option('--with-qux', default=qux_default, help='qux')
+
+ imply_option('--with-foo', depends('--with-qux')(lambda x: x or None))
+
+ set_config('FOO', depends('--with-foo')(lambda x: x))
+ set_config('QUX', depends('--with-qux')(lambda x: x))
+ """
+ ):
+ config = self.get_config()
+ self.assertEqual(
+ config,
+ {
+ "FOO": NegativeOptionValue(),
+ "QUX": NegativeOptionValue(),
+ },
+ )
+
+ config = self.get_config(["--with-foo"])
+ self.assertEqual(
+ config,
+ {
+ "FOO": PositiveOptionValue(),
+ "QUX": PositiveOptionValue(),
+ },
+ )
+
+ with self.assertRaises(InvalidOptionError) as e:
+ config = self.get_config(["--with-qux"])
+
+ self.assertEqual(
+ str(e.exception),
+ "'--with-foo' implied by '--with-qux' conflicts "
+ "with '--without-foo' from the default",
+ )
+
+ with self.assertRaises(InvalidOptionError) as e:
+ config = self.get_config(["--without-foo", "--with-qux"])
+
+ self.assertEqual(
+ str(e.exception),
+ "'--with-foo' implied by '--with-qux' conflicts "
+ "with '--without-foo' from the command-line",
+ )
+
+ config = self.get_config(["--without-qux"])
+ self.assertEqual(
+ config,
+ {
+ "FOO": NegativeOptionValue(),
+ "QUX": NegativeOptionValue(),
+ },
+ )
+
+ config_path = mozpath.abspath(mozpath.join(test_data_path, "moz.configure"))
+
+ # Same test as above, but using `when` in the `imply_option`.
+ with self.moz_configure(
+ """
+ option('--with-foo', help='foo')
+
+ @depends('--with-foo')
+ def qux_default(foo):
+ return bool(foo)
+
+ option('--with-qux', default=qux_default, help='qux')
+
+ imply_option('--with-foo', True, when='--with-qux')
+
+ set_config('FOO', depends('--with-foo')(lambda x: x))
+ set_config('QUX', depends('--with-qux')(lambda x: x))
+ """
+ ):
+ config = self.get_config()
+ self.assertEqual(
+ config,
+ {
+ "FOO": NegativeOptionValue(),
+ "QUX": NegativeOptionValue(),
+ },
+ )
+
+ config = self.get_config(["--with-foo"])
+ self.assertEqual(
+ config,
+ {
+ "FOO": PositiveOptionValue(),
+ "QUX": PositiveOptionValue(),
+ },
+ )
+
+ with self.assertRaises(InvalidOptionError) as e:
+ config = self.get_config(["--with-qux"])
+
+ self.assertEqual(
+ str(e.exception),
+ "'--with-foo' implied by 'imply_option at %s:10' conflicts "
+ "with '--without-foo' from the default" % config_path,
+ )
+
+ with self.assertRaises(InvalidOptionError) as e:
+ config = self.get_config(["--without-foo", "--with-qux"])
+
+ self.assertEqual(
+ str(e.exception),
+ "'--with-foo' implied by 'imply_option at %s:10' conflicts "
+ "with '--without-foo' from the command-line" % config_path,
+ )
+
+ config = self.get_config(["--without-qux"])
+ self.assertEqual(
+ config,
+ {
+ "FOO": NegativeOptionValue(),
+ "QUX": NegativeOptionValue(),
+ },
+ )
+
+ def test_imply_option_recursion(self):
+ config_path = mozpath.abspath(mozpath.join(test_data_path, "moz.configure"))
+
+ message = (
+ "'--without-foo' appears somewhere in the direct or indirect dependencies "
+ "when resolving imply_option at %s:8" % config_path
+ )
+
+ with self.moz_configure(
+ """
+ option('--without-foo', help='foo')
+
+ imply_option('--with-qux', depends('--with-foo')(lambda x: x or None))
+
+ option('--with-qux', help='qux')
+
+ imply_option('--with-foo', depends('--with-qux')(lambda x: x or None))
+
+ set_config('FOO', depends('--with-foo')(lambda x: x))
+ set_config('QUX', depends('--with-qux')(lambda x: x))
+ """
+ ):
+ # Note: no error is detected when the depends function in the
+ # imply_options resolve to None, which disables the imply_option.
+
+ with self.assertRaises(ConfigureError) as e:
+ self.get_config()
+
+ self.assertEqual(str(e.exception), message)
+
+ with self.assertRaises(ConfigureError) as e:
+ self.get_config(["--with-qux"])
+
+ self.assertEqual(str(e.exception), message)
+
+ with self.assertRaises(ConfigureError) as e:
+ self.get_config(["--without-foo", "--with-qux"])
+
+ self.assertEqual(str(e.exception), message)
+
+ def test_option_failures(self):
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure('option("--with-foo", help="foo")'):
+ self.get_config()
+
+ self.assertEqual(
+ str(e.exception),
+ "Option `--with-foo` is not handled ; reference it with a @depends",
+ )
+
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure(
+ """
+ option("--with-foo", help="foo")
+ option("--with-foo", help="foo")
+ """
+ ):
+ self.get_config()
+
+ self.assertEqual(str(e.exception), "Option `--with-foo` already defined")
+
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure(
+ """
+ option(env="MOZ_FOO", help="foo")
+ option(env="MOZ_FOO", help="foo")
+ """
+ ):
+ self.get_config()
+
+ self.assertEqual(str(e.exception), "Option `MOZ_FOO` already defined")
+
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure(
+ """
+ option('--with-foo', env="MOZ_FOO", help="foo")
+ option(env="MOZ_FOO", help="foo")
+ """
+ ):
+ self.get_config()
+
+ self.assertEqual(str(e.exception), "Option `MOZ_FOO` already defined")
+
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure(
+ """
+ option(env="MOZ_FOO", help="foo")
+ option('--with-foo', env="MOZ_FOO", help="foo")
+ """
+ ):
+ self.get_config()
+
+ self.assertEqual(str(e.exception), "Option `MOZ_FOO` already defined")
+
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure(
+ """
+ option('--with-foo', env="MOZ_FOO", help="foo")
+ option('--with-foo', help="foo")
+ """
+ ):
+ self.get_config()
+
+ self.assertEqual(str(e.exception), "Option `--with-foo` already defined")
+
+ def test_option_when(self):
+ with self.moz_configure(
+ """
+ option('--with-foo', help='foo', when=True)
+ option('--with-bar', help='bar', when=False)
+ option('--with-qux', env="QUX", help='qux', when='--with-foo')
+
+ set_config('FOO', depends('--with-foo', when=True)(lambda x: x))
+ set_config('BAR', depends('--with-bar', when=False)(lambda x: x))
+ set_config('QUX', depends('--with-qux', when='--with-foo')(lambda x: x))
+ """
+ ):
+ config = self.get_config()
+ self.assertEqual(
+ config,
+ {
+ "FOO": NegativeOptionValue(),
+ },
+ )
+
+ config = self.get_config(["--with-foo"])
+ self.assertEqual(
+ config,
+ {
+ "FOO": PositiveOptionValue(),
+ "QUX": NegativeOptionValue(),
+ },
+ )
+
+ config = self.get_config(["--with-foo", "--with-qux"])
+ self.assertEqual(
+ config,
+ {
+ "FOO": PositiveOptionValue(),
+ "QUX": PositiveOptionValue(),
+ },
+ )
+
+ with self.assertRaises(InvalidOptionError) as e:
+ self.get_config(["--with-bar"])
+
+ self.assertEqual(
+ str(e.exception), "--with-bar is not available in this configuration"
+ )
+
+ with self.assertRaises(InvalidOptionError) as e:
+ self.get_config(["--with-qux"])
+
+ self.assertEqual(
+ str(e.exception), "--with-qux is not available in this configuration"
+ )
+
+ with self.assertRaises(InvalidOptionError) as e:
+ self.get_config(["QUX=1"])
+
+ self.assertEqual(
+ str(e.exception), "QUX is not available in this configuration"
+ )
+
+ config = self.get_config(env={"QUX": "1"})
+ self.assertEqual(
+ config,
+ {
+ "FOO": NegativeOptionValue(),
+ },
+ )
+
+ help, config = self.get_config(["--help"])
+ self.assertEqual(
+ help.replace("\\", "/"),
+ textwrap.dedent(
+ """\
+ Usage: configure [options]
+
+ Options: [defaults in brackets after descriptions]
+ Help options:
+ --help print this message
+
+ Options from python/mozbuild/mozbuild/test/configure/data/moz.configure:
+ --with-foo foo
+
+
+ Environment variables:
+ """
+ ),
+ )
+
+ help, config = self.get_config(["--help", "--with-foo"])
+ self.assertEqual(
+ help.replace("\\", "/"),
+ textwrap.dedent(
+ """\
+ Usage: configure [options]
+
+ Options: [defaults in brackets after descriptions]
+ Help options:
+ --help print this message
+
+ Options from python/mozbuild/mozbuild/test/configure/data/moz.configure:
+ --with-foo foo
+ --with-qux qux
+
+
+ Environment variables:
+ """
+ ),
+ )
+
+ with self.moz_configure(
+ """
+ option('--with-foo', help='foo', when=True)
+ set_config('FOO', depends('--with-foo')(lambda x: x))
+ """
+ ):
+ with self.assertRaises(ConfigureError) as e:
+ self.get_config()
+
+ self.assertEqual(
+ str(e.exception),
+ "@depends function needs the same `when` as " "options it depends on",
+ )
+
+ with self.moz_configure(
+ """
+ @depends(when=True)
+ def always():
+ return True
+ @depends(when=True)
+ def always2():
+ return True
+ option('--with-foo', help='foo', when=always)
+ set_config('FOO', depends('--with-foo', when=always2)(lambda x: x))
+ """
+ ):
+ with self.assertRaises(ConfigureError) as e:
+ self.get_config()
+
+ self.assertEqual(
+ str(e.exception),
+ "@depends function needs the same `when` as " "options it depends on",
+ )
+
+ with self.moz_configure(
+ """
+ @depends(when=True)
+ def always():
+ return True
+ @depends(when=True)
+ def always2():
+ return True
+ with only_when(always2):
+ option('--with-foo', help='foo', when=always)
+ # include() triggers resolution of its dependencies, and their
+ # side effects.
+ include(depends('--with-foo', when=always)(lambda x: x))
+ # The sandbox should figure that the `when` here is
+ # appropriate. Bad behavior in CombinedDependsFunction.__eq__
+ # made this fail in the past.
+ set_config('FOO', depends('--with-foo', when=always)(lambda x: x))
+ """
+ ):
+ self.get_config()
+
+ with self.moz_configure(
+ """
+ option('--with-foo', help='foo')
+ option('--without-bar', help='bar', when='--with-foo')
+ option('--with-qux', help='qux', when='--with-bar')
+ set_config('QUX', True, when='--with-qux')
+ """
+ ):
+ # These are valid:
+ self.get_config(["--with-foo"])
+ self.get_config(["--with-foo", "--with-bar"])
+ self.get_config(["--with-foo", "--without-bar"])
+ self.get_config(["--with-foo", "--with-bar", "--with-qux"])
+ self.get_config(["--with-foo", "--with-bar", "--without-qux"])
+ with self.assertRaises(InvalidOptionError) as e:
+ self.get_config(["--with-bar"])
+ with self.assertRaises(InvalidOptionError) as e:
+ self.get_config(["--without-bar"])
+ with self.assertRaises(InvalidOptionError) as e:
+ self.get_config(["--with-qux"])
+ with self.assertRaises(InvalidOptionError) as e:
+ self.get_config(["--without-qux"])
+ with self.assertRaises(InvalidOptionError) as e:
+ self.get_config(["--with-foo", "--without-bar", "--with-qux"])
+ with self.assertRaises(InvalidOptionError) as e:
+ self.get_config(["--with-foo", "--without-bar", "--without-qux"])
+
+ def test_include_failures(self):
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure('include("../foo.configure")'):
+ self.get_config()
+
+ self.assertEqual(
+ str(e.exception),
+ "Cannot include `%s` because it is not in a subdirectory of `%s`"
+ % (
+ mozpath.normpath(mozpath.join(test_data_path, "..", "foo.configure")),
+ mozpath.normsep(test_data_path),
+ ),
+ )
+
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure(
+ """
+ include('extra.configure')
+ include('extra.configure')
+ """
+ ):
+ self.get_config()
+
+ self.assertEqual(
+ str(e.exception),
+ "Cannot include `%s` because it was included already."
+ % mozpath.normpath(mozpath.join(test_data_path, "extra.configure")),
+ )
+
+ with self.assertRaises(TypeError) as e:
+ with self.moz_configure(
+ """
+ include(42)
+ """
+ ):
+ self.get_config()
+
+ self.assertEqual(str(e.exception), "Unexpected type: 'int'")
+
+ def test_include_when(self):
+ with MockedOpen(
+ {
+ os.path.join(test_data_path, "moz.configure"): textwrap.dedent(
+ """
+ option('--with-foo', help='foo')
+
+ include('always.configure', when=True)
+ include('never.configure', when=False)
+ include('foo.configure', when='--with-foo')
+
+ set_config('FOO', foo)
+ set_config('BAR', bar)
+ set_config('QUX', qux)
+ """
+ ),
+ os.path.join(test_data_path, "always.configure"): textwrap.dedent(
+ """
+ option('--with-bar', help='bar')
+ @depends('--with-bar')
+ def bar(x):
+ if x:
+ return 'bar'
+ """
+ ),
+ os.path.join(test_data_path, "never.configure"): textwrap.dedent(
+ """
+ option('--with-qux', help='qux')
+ @depends('--with-qux')
+ def qux(x):
+ if x:
+ return 'qux'
+ """
+ ),
+ os.path.join(test_data_path, "foo.configure"): textwrap.dedent(
+ """
+ option('--with-foo-really', help='really foo')
+ @depends('--with-foo-really')
+ def foo(x):
+ if x:
+ return 'foo'
+
+ include('foo2.configure', when='--with-foo-really')
+ """
+ ),
+ os.path.join(test_data_path, "foo2.configure"): textwrap.dedent(
+ """
+ set_config('FOO2', True)
+ """
+ ),
+ }
+ ):
+ config = self.get_config()
+ self.assertEqual(config, {})
+
+ config = self.get_config(["--with-foo"])
+ self.assertEqual(config, {})
+
+ config = self.get_config(["--with-bar"])
+ self.assertEqual(
+ config,
+ {
+ "BAR": "bar",
+ },
+ )
+
+ with self.assertRaises(InvalidOptionError) as e:
+ self.get_config(["--with-qux"])
+
+ self.assertEqual(
+ str(e.exception), "--with-qux is not available in this configuration"
+ )
+
+ config = self.get_config(["--with-foo", "--with-foo-really"])
+ self.assertEqual(
+ config,
+ {
+ "FOO": "foo",
+ "FOO2": True,
+ },
+ )
+
+ def test_sandbox_failures(self):
+ with self.assertRaises(KeyError) as e:
+ with self.moz_configure(
+ """
+ include = 42
+ """
+ ):
+ self.get_config()
+
+ self.assertIn("Cannot reassign builtins", str(e.exception))
+
+ with self.assertRaises(KeyError) as e:
+ with self.moz_configure(
+ """
+ foo = 42
+ """
+ ):
+ self.get_config()
+
+ self.assertIn(
+ "Cannot assign `foo` because it is neither a @depends nor a " "@template",
+ str(e.exception),
+ )
+
+ def test_depends_failures(self):
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure(
+ """
+ @depends()
+ def foo():
+ return
+ """
+ ):
+ self.get_config()
+
+ self.assertEqual(str(e.exception), "@depends needs at least one argument")
+
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure(
+ """
+ @depends('--with-foo')
+ def foo(value):
+ return value
+ """
+ ):
+ self.get_config()
+
+ self.assertEqual(
+ str(e.exception),
+ "'--with-foo' is not a known option. Maybe it's " "declared too late?",
+ )
+
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure(
+ """
+ @depends('--with-foo=42')
+ def foo(value):
+ return value
+ """
+ ):
+ self.get_config()
+
+ self.assertEqual(str(e.exception), "Option must not contain an '='")
+
+ with self.assertRaises(TypeError) as e:
+ with self.moz_configure(
+ """
+ @depends(42)
+ def foo(value):
+ return value
+ """
+ ):
+ self.get_config()
+
+ self.assertEqual(
+ str(e.exception),
+ "Cannot use object of type 'int' as argument " "to @depends",
+ )
+
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure(
+ """
+ @depends('--help')
+ def foo(value):
+ yield
+ """
+ ):
+ self.get_config()
+
+ self.assertEqual(
+ str(e.exception), "Cannot decorate generator functions with @depends"
+ )
+
+ with self.assertRaises(TypeError) as e:
+ with self.moz_configure(
+ """
+ @depends('--help')
+ def foo(value):
+ return value
+
+ depends('--help')(foo)
+ """
+ ):
+ self.get_config()
+
+ self.assertEqual(str(e.exception), "Cannot nest @depends functions")
+
+ with self.assertRaises(TypeError) as e:
+ with self.moz_configure(
+ """
+ @template
+ def foo(f):
+ pass
+
+ depends('--help')(foo)
+ """
+ ):
+ self.get_config()
+
+ self.assertEqual(str(e.exception), "Cannot use a @template function here")
+
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure(
+ """
+ option('--foo', help='foo')
+ @depends('--foo')
+ def foo(value):
+ return value
+
+ foo()
+ """
+ ):
+ self.get_config()
+
+ self.assertEqual(str(e.exception), "The `foo` function may not be called")
+
+ with self.assertRaises(TypeError) as e:
+ with self.moz_configure(
+ """
+ @depends('--help', foo=42)
+ def foo(_):
+ return
+ """
+ ):
+ self.get_config()
+
+ self.assertEqual(
+ str(e.exception), "depends_impl() got an unexpected keyword argument 'foo'"
+ )
+
+ def test_depends_when(self):
+ with self.moz_configure(
+ """
+ @depends(when=True)
+ def foo():
+ return 'foo'
+
+ set_config('FOO', foo)
+
+ @depends(when=False)
+ def bar():
+ return 'bar'
+
+ set_config('BAR', bar)
+
+ option('--with-qux', help='qux')
+ @depends(when='--with-qux')
+ def qux():
+ return 'qux'
+
+ set_config('QUX', qux)
+ """
+ ):
+ config = self.get_config()
+ self.assertEqual(
+ config,
+ {
+ "FOO": "foo",
+ },
+ )
+
+ config = self.get_config(["--with-qux"])
+ self.assertEqual(
+ config,
+ {
+ "FOO": "foo",
+ "QUX": "qux",
+ },
+ )
+
+ def test_depends_value(self):
+ with self.moz_configure(
+ """
+ foo = depends(when=True)('foo')
+
+ set_config('FOO', foo)
+
+ bar = depends(when=False)('bar')
+
+ set_config('BAR', bar)
+
+ option('--with-qux', help='qux')
+ @depends(when='--with-qux')
+ def qux():
+ return 'qux'
+
+ set_config('QUX', qux)
+ """
+ ):
+ config = self.get_config()
+ self.assertEqual(
+ config,
+ {
+ "FOO": "foo",
+ },
+ )
+
+ with self.assertRaises(TypeError) as e:
+ with self.moz_configure(
+ """
+ option('--foo', help='foo')
+
+ depends('--foo')('foo')
+ """
+ ):
+ self.get_config()
+
+ self.assertEqual(
+ str(e.exception), "Cannot wrap literal values in @depends with dependencies"
+ )
+
+ def test_imports_failures(self):
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure(
+ """
+ @imports('os')
+ @template
+ def foo(value):
+ return value
+ """
+ ):
+ self.get_config()
+
+ self.assertEqual(str(e.exception), "@imports must appear after @template")
+
+ with self.assertRaises(ConfigureError) as e:
+ with self.moz_configure(
+ """
+ option('--foo', help='foo')
+ @imports('os')
+ @depends('--foo')
+ def foo(value):
+ return value
+ """
+ ):
+ self.get_config()
+
+ self.assertEqual(str(e.exception), "@imports must appear after @depends")
+
+ for import_ in (
+ "42",
+ "_from=42, _import='os'",
+ "_from='os', _import='path', _as=42",
+ ):
+ with self.assertRaises(TypeError) as e:
+ with self.moz_configure(
+ """
+ @imports(%s)
+ @template
+ def foo(value):
+ return value
+ """
+ % import_
+ ):
+ self.get_config()
+
+ self.assertEqual(str(e.exception), "Unexpected type: 'int'")
+
+ with self.assertRaises(TypeError) as e:
+ with self.moz_configure(
+ """
+ @imports('os', 42)
+ @template
+ def foo(value):
+ return value
+ """
+ ):
+ self.get_config()
+
+ self.assertEqual(str(e.exception), "Unexpected type: 'int'")
+
+ with self.assertRaises(ValueError) as e:
+ with self.moz_configure(
+ """
+ @imports('os*')
+ def foo(value):
+ return value
+ """
+ ):
+ self.get_config()
+
+ self.assertEqual(str(e.exception), "Invalid argument to @imports: 'os*'")
+
+ def test_only_when(self):
+ moz_configure = """
+ option('--enable-when', help='when')
+ @depends('--enable-when', '--help')
+ def when(value, _):
+ return bool(value)
+
+ with only_when(when):
+ option('--foo', nargs='*', help='foo')
+ @depends('--foo')
+ def foo(value):
+ return value
+
+ set_config('FOO', foo)
+ set_define('FOO', foo)
+
+ # It is possible to depend on a function defined in a only_when
+ # block. It then resolves to `None`.
+ set_config('BAR', depends(foo)(lambda x: x))
+ set_define('BAR', depends(foo)(lambda x: x))
+ """
+
+ with self.moz_configure(moz_configure):
+ config = self.get_config()
+ self.assertEqual(
+ config,
+ {
+ "DEFINES": {},
+ },
+ )
+
+ config = self.get_config(["--enable-when"])
+ self.assertEqual(
+ config,
+ {
+ "BAR": NegativeOptionValue(),
+ "FOO": NegativeOptionValue(),
+ "DEFINES": {
+ "BAR": NegativeOptionValue(),
+ "FOO": NegativeOptionValue(),
+ },
+ },
+ )
+
+ config = self.get_config(["--enable-when", "--foo=bar"])
+ self.assertEqual(
+ config,
+ {
+ "BAR": PositiveOptionValue(["bar"]),
+ "FOO": PositiveOptionValue(["bar"]),
+ "DEFINES": {
+ "BAR": PositiveOptionValue(["bar"]),
+ "FOO": PositiveOptionValue(["bar"]),
+ },
+ },
+ )
+
+ # The --foo option doesn't exist when --enable-when is not given.
+ with self.assertRaises(InvalidOptionError) as e:
+ self.get_config(["--foo"])
+
+ self.assertEqual(
+ str(e.exception), "--foo is not available in this configuration"
+ )
+
+ # Cannot depend on an option defined in a only_when block, because we
+ # don't know what OptionValue would make sense.
+ with self.moz_configure(
+ moz_configure
+ + """
+ set_config('QUX', depends('--foo')(lambda x: x))
+ """
+ ):
+ with self.assertRaises(ConfigureError) as e:
+ self.get_config()
+
+ self.assertEqual(
+ str(e.exception),
+ "@depends function needs the same `when` as " "options it depends on",
+ )
+
+ with self.moz_configure(
+ moz_configure
+ + """
+ set_config('QUX', depends('--foo', when=when)(lambda x: x))
+ """
+ ):
+ self.get_config(["--enable-when"])
+
+ # Using imply_option for an option defined in a only_when block fails
+ # similarly if the imply_option happens outside the block.
+ with self.moz_configure(
+ """
+ imply_option('--foo', True)
+ """
+ + moz_configure
+ ):
+ with self.assertRaises(InvalidOptionError) as e:
+ self.get_config()
+
+ self.assertEqual(
+ str(e.exception), "--foo is not available in this configuration"
+ )
+
+ # And similarly doesn't fail when the condition is true.
+ with self.moz_configure(
+ """
+ imply_option('--foo', True)
+ """
+ + moz_configure
+ ):
+ self.get_config(["--enable-when"])
+
+ def test_depends_binary_ops(self):
+ with self.moz_configure(
+ """
+ option('--foo', nargs=1, help='foo')
+ @depends('--foo')
+ def foo(value):
+ return value or 0
+
+ option('--bar', nargs=1, help='bar')
+ @depends('--bar')
+ def bar(value):
+ return value or ''
+
+ option('--baz', nargs=1, help='baz')
+ @depends('--baz')
+ def baz(value):
+ return value
+
+ set_config('FOOorBAR', foo | bar)
+ set_config('FOOorBARorBAZ', foo | bar | baz)
+ set_config('FOOandBAR', foo & bar)
+ set_config('FOOandBARandBAZ', foo & bar & baz)
+ """
+ ):
+ for foo_opt, foo_value in (
+ ("", 0),
+ ("--foo=foo", PositiveOptionValue(("foo",))),
+ ):
+ for bar_opt, bar_value in (
+ ("", ""),
+ ("--bar=bar", PositiveOptionValue(("bar",))),
+ ):
+ for baz_opt, baz_value in (
+ ("", NegativeOptionValue()),
+ ("--baz=baz", PositiveOptionValue(("baz",))),
+ ):
+ config = self.get_config(
+ [x for x in (foo_opt, bar_opt, baz_opt) if x]
+ )
+ self.assertEqual(
+ config,
+ {
+ "FOOorBAR": foo_value or bar_value,
+ "FOOorBARorBAZ": foo_value or bar_value or baz_value,
+ "FOOandBAR": foo_value and bar_value,
+ "FOOandBARandBAZ": foo_value
+ and bar_value
+ and baz_value,
+ },
+ )
+
+ def test_depends_getattr(self):
+ with self.moz_configure(
+ """
+ @imports(_from='mozbuild.util', _import='ReadOnlyNamespace')
+ def namespace(**kwargs):
+ return ReadOnlyNamespace(**kwargs)
+
+ option('--foo', nargs=1, help='foo')
+ @depends('--foo')
+ def foo(value):
+ return value
+
+ option('--bar', nargs=1, help='bar')
+ @depends('--bar')
+ def bar(value):
+ return value or None
+
+ @depends(foo, bar)
+ def foobar(foo, bar):
+ return namespace(foo=foo, bar=bar)
+
+ set_config('FOO', foobar.foo)
+ set_config('BAR', foobar.bar)
+ set_config('BAZ', foobar.baz)
+ """
+ ):
+ config = self.get_config()
+ self.assertEqual(
+ config,
+ {
+ "FOO": NegativeOptionValue(),
+ },
+ )
+
+ config = self.get_config(["--foo=foo"])
+ self.assertEqual(
+ config,
+ {
+ "FOO": PositiveOptionValue(("foo",)),
+ },
+ )
+
+ config = self.get_config(["--bar=bar"])
+ self.assertEqual(
+ config,
+ {
+ "FOO": NegativeOptionValue(),
+ "BAR": PositiveOptionValue(("bar",)),
+ },
+ )
+
+ config = self.get_config(["--foo=foo", "--bar=bar"])
+ self.assertEqual(
+ config,
+ {
+ "FOO": PositiveOptionValue(("foo",)),
+ "BAR": PositiveOptionValue(("bar",)),
+ },
+ )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/configure/test_lint.py b/python/mozbuild/mozbuild/test/configure/test_lint.py
new file mode 100644
index 0000000000..7ecac769c3
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/test_lint.py
@@ -0,0 +1,487 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import contextlib
+import os
+import sys
+import textwrap
+import traceback
+import unittest
+
+import mozpack.path as mozpath
+from mozunit import MockedOpen, main
+
+from mozbuild.configure import ConfigureError
+from mozbuild.configure.lint import LintSandbox
+
+test_data_path = mozpath.abspath(mozpath.dirname(__file__))
+test_data_path = mozpath.join(test_data_path, "data")
+
+
+class TestLint(unittest.TestCase):
+ def lint_test(self, options=[], env={}):
+ sandbox = LintSandbox(env, ["configure"] + options)
+
+ sandbox.run(mozpath.join(test_data_path, "moz.configure"))
+
+ def moz_configure(self, source):
+ return MockedOpen(
+ {os.path.join(test_data_path, "moz.configure"): textwrap.dedent(source)}
+ )
+
+ @contextlib.contextmanager
+ def assertRaisesFromLine(self, exc_type, line):
+ with self.assertRaises(exc_type) as e:
+ yield e
+
+ _, _, tb = sys.exc_info()
+ self.assertEqual(
+ traceback.extract_tb(tb)[-1][:2],
+ (mozpath.join(test_data_path, "moz.configure"), line),
+ )
+
+ def test_configure_testcase(self):
+ # Lint python/mozbuild/mozbuild/test/configure/data/moz.configure
+ self.lint_test()
+
+ def test_depends_failures(self):
+ with self.moz_configure(
+ """
+ option('--foo', help='foo')
+ @depends('--foo')
+ def foo(value):
+ return value
+
+ @depends('--help', foo)
+ @imports('os')
+ def bar(help, foo):
+ return foo
+ """
+ ):
+ self.lint_test()
+
+ with self.assertRaisesFromLine(ConfigureError, 7) as e:
+ with self.moz_configure(
+ """
+ option('--foo', help='foo')
+ @depends('--foo')
+ def foo(value):
+ return value
+
+ @depends('--help', foo)
+ def bar(help, foo):
+ return foo
+ """
+ ):
+ self.lint_test()
+
+ self.assertEqual(str(e.exception), "The dependency on `--help` is unused")
+
+ with self.assertRaisesFromLine(ConfigureError, 3) as e:
+ with self.moz_configure(
+ """
+ option('--foo', help='foo')
+ @depends('--foo')
+ @imports('os')
+ def foo(value):
+ return value
+
+ @depends('--help', foo)
+ @imports('os')
+ def bar(help, foo):
+ return foo
+ """
+ ):
+ self.lint_test()
+
+ self.assertEqual(
+ str(e.exception),
+ "Missing '--help' dependency because `bar` depends on '--help' and `foo`",
+ )
+
+ with self.assertRaisesFromLine(ConfigureError, 7) as e:
+ with self.moz_configure(
+ """
+ @template
+ def tmpl():
+ qux = 42
+
+ option('--foo', help='foo')
+ @depends('--foo')
+ def foo(value):
+ qux
+ return value
+
+ @depends('--help', foo)
+ @imports('os')
+ def bar(help, foo):
+ return foo
+ tmpl()
+ """
+ ):
+ self.lint_test()
+
+ self.assertEqual(
+ str(e.exception),
+ "Missing '--help' dependency because `bar` depends on '--help' and `foo`",
+ )
+
+ with self.moz_configure(
+ """
+ option('--foo', help='foo')
+ @depends('--foo')
+ def foo(value):
+ return value
+
+ include(foo)
+ """
+ ):
+ self.lint_test()
+
+ with self.assertRaisesFromLine(ConfigureError, 3) as e:
+ with self.moz_configure(
+ """
+ option('--foo', help='foo')
+ @depends('--foo')
+ @imports('os')
+ def foo(value):
+ return value
+
+ include(foo)
+ """
+ ):
+ self.lint_test()
+
+ self.assertEqual(str(e.exception), "Missing '--help' dependency")
+
+ with self.assertRaisesFromLine(ConfigureError, 3) as e:
+ with self.moz_configure(
+ """
+ option('--foo', help='foo')
+ @depends('--foo')
+ @imports('os')
+ def foo(value):
+ return value
+
+ @depends(foo)
+ def bar(value):
+ return value
+
+ include(bar)
+ """
+ ):
+ self.lint_test()
+
+ self.assertEqual(str(e.exception), "Missing '--help' dependency")
+
+ with self.assertRaisesFromLine(ConfigureError, 3) as e:
+ with self.moz_configure(
+ """
+ option('--foo', help='foo')
+ @depends('--foo')
+ @imports('os')
+ def foo(value):
+ return value
+
+ option('--bar', help='bar', when=foo)
+ """
+ ):
+ self.lint_test()
+
+ self.assertEqual(str(e.exception), "Missing '--help' dependency")
+
+ # This would have failed with "Missing '--help' dependency"
+ # in the past, because of the reference to the builtin False.
+ with self.moz_configure(
+ """
+ option('--foo', help='foo')
+ @depends('--foo')
+ def foo(value):
+ return False or value
+
+ option('--bar', help='bar', when=foo)
+ """
+ ):
+ self.lint_test()
+
+ # However, when something that is normally a builtin is overridden,
+ # we should still want the dependency on --help.
+ with self.assertRaisesFromLine(ConfigureError, 7) as e:
+ with self.moz_configure(
+ """
+ @template
+ def tmpl():
+ sorted = 42
+
+ option('--foo', help='foo')
+ @depends('--foo')
+ def foo(value):
+ return sorted
+
+ option('--bar', help='bar', when=foo)
+ tmpl()
+ """
+ ):
+ self.lint_test()
+
+ self.assertEqual(str(e.exception), "Missing '--help' dependency")
+
+ # There is a default restricted `os` module when there is no explicit
+ # @imports, and it's fine to use it without a dependency on --help.
+ with self.moz_configure(
+ """
+ option('--foo', help='foo')
+ @depends('--foo')
+ def foo(value):
+ os
+ return value
+
+ include(foo)
+ """
+ ):
+ self.lint_test()
+
+ with self.assertRaisesFromLine(ConfigureError, 3) as e:
+ with self.moz_configure(
+ """
+ option('--foo', help='foo')
+ @depends('--foo')
+ def foo(value):
+ return
+
+ include(foo)
+ """
+ ):
+ self.lint_test()
+
+ self.assertEqual(str(e.exception), "The dependency on `--foo` is unused")
+
+ with self.assertRaisesFromLine(ConfigureError, 5) as e:
+ with self.moz_configure(
+ """
+ @depends(when=True)
+ def bar():
+ return
+ @depends(bar)
+ def foo(value):
+ return
+
+ include(foo)
+ """
+ ):
+ self.lint_test()
+
+ self.assertEqual(str(e.exception), "The dependency on `bar` is unused")
+
+ with self.assertRaisesFromLine(ConfigureError, 2) as e:
+ with self.moz_configure(
+ """
+ @depends(depends(when=True)(lambda: None))
+ def foo(value):
+ return
+
+ include(foo)
+ """
+ ):
+ self.lint_test()
+
+ self.assertEqual(str(e.exception), "The dependency on `<lambda>` is unused")
+
+ with self.assertRaisesFromLine(ConfigureError, 9) as e:
+ with self.moz_configure(
+ """
+ @template
+ def tmpl():
+ @depends(when=True)
+ def bar():
+ return
+ return bar
+ qux = tmpl()
+ @depends(qux)
+ def foo(value):
+ return
+
+ include(foo)
+ """
+ ):
+ self.lint_test()
+
+ self.assertEqual(str(e.exception), "The dependency on `qux` is unused")
+
+ def test_default_enable(self):
+ # --enable-* with default=True is not allowed.
+ with self.moz_configure(
+ """
+ option('--enable-foo', default=False, help='foo')
+ """
+ ):
+ self.lint_test()
+ with self.assertRaisesFromLine(ConfigureError, 2) as e:
+ with self.moz_configure(
+ """
+ option('--enable-foo', default=True, help='foo')
+ """
+ ):
+ self.lint_test()
+ self.assertEqual(
+ str(e.exception),
+ "--disable-foo should be used instead of " "--enable-foo with default=True",
+ )
+
+ def test_default_disable(self):
+ # --disable-* with default=False is not allowed.
+ with self.moz_configure(
+ """
+ option('--disable-foo', default=True, help='foo')
+ """
+ ):
+ self.lint_test()
+ with self.assertRaisesFromLine(ConfigureError, 2) as e:
+ with self.moz_configure(
+ """
+ option('--disable-foo', default=False, help='foo')
+ """
+ ):
+ self.lint_test()
+ self.assertEqual(
+ str(e.exception),
+ "--enable-foo should be used instead of "
+ "--disable-foo with default=False",
+ )
+
+ def test_default_with(self):
+ # --with-* with default=True is not allowed.
+ with self.moz_configure(
+ """
+ option('--with-foo', default=False, help='foo')
+ """
+ ):
+ self.lint_test()
+ with self.assertRaisesFromLine(ConfigureError, 2) as e:
+ with self.moz_configure(
+ """
+ option('--with-foo', default=True, help='foo')
+ """
+ ):
+ self.lint_test()
+ self.assertEqual(
+ str(e.exception),
+ "--without-foo should be used instead of " "--with-foo with default=True",
+ )
+
+ def test_default_without(self):
+ # --without-* with default=False is not allowed.
+ with self.moz_configure(
+ """
+ option('--without-foo', default=True, help='foo')
+ """
+ ):
+ self.lint_test()
+ with self.assertRaisesFromLine(ConfigureError, 2) as e:
+ with self.moz_configure(
+ """
+ option('--without-foo', default=False, help='foo')
+ """
+ ):
+ self.lint_test()
+ self.assertEqual(
+ str(e.exception),
+ "--with-foo should be used instead of " "--without-foo with default=False",
+ )
+
+ def test_default_func(self):
+ # Help text for an option with variable default should contain
+ # {enable|disable} rule.
+ with self.moz_configure(
+ """
+ option(env='FOO', help='foo')
+ option('--enable-bar', default=depends('FOO')(lambda x: bool(x)),
+ help='{Enable|Disable} bar')
+ """
+ ):
+ self.lint_test()
+ with self.assertRaisesFromLine(ConfigureError, 3) as e:
+ with self.moz_configure(
+ """
+ option(env='FOO', help='foo')
+ option('--enable-bar', default=depends('FOO')(lambda x: bool(x)),\
+ help='Enable bar')
+ """
+ ):
+ self.lint_test()
+ self.assertEqual(
+ str(e.exception),
+ '`help` should contain "{Enable|Disable}" because of '
+ "non-constant default",
+ )
+
+ def test_large_offset(self):
+ with self.assertRaisesFromLine(ConfigureError, 375):
+ with self.moz_configure(
+ """
+ option(env='FOO', help='foo')
+ """
+ + "\n" * 371
+ + """
+ option('--enable-bar', default=depends('FOO')(lambda x: bool(x)),\
+ help='Enable bar')
+ """
+ ):
+ self.lint_test()
+
+ def test_undefined_global(self):
+ with self.assertRaisesFromLine(NameError, 6) as e:
+ with self.moz_configure(
+ """
+ option(env='FOO', help='foo')
+ @depends('FOO')
+ def foo(value):
+ if value:
+ return unknown
+ return value
+ """
+ ):
+ self.lint_test()
+
+ self.assertEqual(str(e.exception), "global name 'unknown' is not defined")
+
+ # Ideally, this would raise on line 4, where `unknown` is used, but
+ # python disassembly doesn't give use the information.
+ with self.assertRaisesFromLine(NameError, 2) as e:
+ with self.moz_configure(
+ """
+ @template
+ def tmpl():
+ @depends(unknown)
+ def foo(value):
+ if value:
+ return True
+ return foo
+ tmpl()
+ """
+ ):
+ self.lint_test()
+
+ self.assertEqual(str(e.exception), "global name 'unknown' is not defined")
+
+ def test_unnecessary_imports(self):
+ with self.assertRaisesFromLine(NameError, 3) as e:
+ with self.moz_configure(
+ """
+ option(env='FOO', help='foo')
+ @depends('FOO')
+ @imports(_from='__builtin__', _import='list')
+ def foo(value):
+ if value:
+ return list()
+ return value
+ """
+ ):
+ self.lint_test()
+
+ self.assertEqual(str(e.exception), "builtin 'list' doesn't need to be imported")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/configure/test_moz_configure.py b/python/mozbuild/mozbuild/test/configure/test_moz_configure.py
new file mode 100644
index 0000000000..22129a3970
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/test_moz_configure.py
@@ -0,0 +1,185 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozunit import main
+
+from common import BaseConfigureTest, ConfigureTestSandbox
+from mozbuild.util import ReadOnlyNamespace, exec_, memoized_property
+
+
+def sandbox_class(platform):
+ class ConfigureTestSandboxOverridingPlatform(ConfigureTestSandbox):
+ @memoized_property
+ def _wrapped_sys(self):
+ sys = {}
+ exec_("from sys import *", sys)
+ sys["platform"] = platform
+ return ReadOnlyNamespace(**sys)
+
+ return ConfigureTestSandboxOverridingPlatform
+
+
+class TargetTest(BaseConfigureTest):
+ def get_target(self, args, env={}):
+ if "linux" in self.HOST:
+ platform = "linux2"
+ elif "mingw" in self.HOST or "windows" in self.HOST:
+ platform = "win32"
+ elif "openbsd6" in self.HOST:
+ platform = "openbsd6"
+ else:
+ raise Exception("Missing platform for HOST {}".format(self.HOST))
+ sandbox = self.get_sandbox({}, {}, args, env, cls=sandbox_class(platform))
+ return sandbox._value_for(sandbox["target"]).alias
+
+
+class TestTargetLinux(TargetTest):
+ def test_target(self):
+ self.assertEqual(self.get_target([]), self.HOST)
+ self.assertEqual(self.get_target(["--target=i686"]), "i686-pc-linux-gnu")
+ self.assertEqual(
+ self.get_target(["--target=i686-unknown-linux-gnu"]),
+ "i686-unknown-linux-gnu",
+ )
+ self.assertEqual(
+ self.get_target(["--target=i686-pc-windows-msvc"]), "i686-pc-windows-msvc"
+ )
+
+
+class TestTargetWindows(TargetTest):
+ # BaseConfigureTest uses this as the return value for config.guess
+ HOST = "i686-pc-windows-msvc"
+
+ def test_target(self):
+ self.assertEqual(self.get_target([]), self.HOST)
+ self.assertEqual(
+ self.get_target(["--target=x86_64-pc-windows-msvc"]),
+ "x86_64-pc-windows-msvc",
+ )
+ self.assertEqual(self.get_target(["--target=x86_64"]), "x86_64-pc-windows-msvc")
+
+ # The tests above are actually not realistic, because most Windows
+ # machines will have a few environment variables that make us not
+ # use config.guess.
+
+ # 32-bits process on x86_64 host.
+ env = {
+ "PROCESSOR_ARCHITECTURE": "x86",
+ "PROCESSOR_ARCHITEW6432": "AMD64",
+ }
+ self.assertEqual(self.get_target([], env), "x86_64-pc-windows-msvc")
+ self.assertEqual(
+ self.get_target(["--target=i686-pc-windows-msvc"]), "i686-pc-windows-msvc"
+ )
+ self.assertEqual(self.get_target(["--target=i686"]), "i686-pc-windows-msvc")
+
+ # 64-bits process on x86_64 host.
+ env = {
+ "PROCESSOR_ARCHITECTURE": "AMD64",
+ }
+ self.assertEqual(self.get_target([], env), "x86_64-pc-windows-msvc")
+ self.assertEqual(
+ self.get_target(["--target=i686-pc-windows-msvc"]), "i686-pc-windows-msvc"
+ )
+ self.assertEqual(self.get_target(["--target=i686"]), "i686-pc-windows-msvc")
+
+ # 32-bits process on x86 host.
+ env = {
+ "PROCESSOR_ARCHITECTURE": "x86",
+ }
+ self.assertEqual(self.get_target([], env), "i686-pc-windows-msvc")
+ self.assertEqual(
+ self.get_target(["--target=x86_64-pc-windows-msvc"]),
+ "x86_64-pc-windows-msvc",
+ )
+ self.assertEqual(self.get_target(["--target=x86_64"]), "x86_64-pc-windows-msvc")
+
+ # While host autodection will give us a -windows-msvc triplet, setting host
+ # is expecting to implicitly set the target.
+ self.assertEqual(
+ self.get_target(["--host=x86_64-pc-windows-gnu"]), "x86_64-pc-windows-gnu"
+ )
+ self.assertEqual(
+ self.get_target(["--host=x86_64-pc-mingw32"]), "x86_64-pc-mingw32"
+ )
+
+
+class TestTargetAndroid(TargetTest):
+ HOST = "x86_64-pc-linux-gnu"
+
+ def test_target(self):
+ self.assertEqual(
+ self.get_target(["--enable-project=mobile/android"]),
+ "arm-unknown-linux-androideabi",
+ )
+ self.assertEqual(
+ self.get_target(["--enable-project=mobile/android", "--target=i686"]),
+ "i686-unknown-linux-android",
+ )
+ self.assertEqual(
+ self.get_target(["--enable-project=mobile/android", "--target=x86_64"]),
+ "x86_64-unknown-linux-android",
+ )
+ self.assertEqual(
+ self.get_target(["--enable-project=mobile/android", "--target=aarch64"]),
+ "aarch64-unknown-linux-android",
+ )
+ self.assertEqual(
+ self.get_target(["--enable-project=mobile/android", "--target=arm"]),
+ "arm-unknown-linux-androideabi",
+ )
+
+
+class TestTargetOpenBSD(TargetTest):
+ # config.guess returns amd64 on OpenBSD, which we need to pass through to
+ # config.sub so that it canonicalizes to x86_64.
+ HOST = "amd64-unknown-openbsd6.4"
+
+ def test_target(self):
+ self.assertEqual(self.get_target([]), "x86_64-unknown-openbsd6.4")
+
+ def config_sub(self, stdin, args):
+ if args[0] == "amd64-unknown-openbsd6.4":
+ return 0, "x86_64-unknown-openbsd6.4", ""
+ return super(TestTargetOpenBSD, self).config_sub(stdin, args)
+
+
+class TestMozConfigure(BaseConfigureTest):
+ def test_nsis_version(self):
+ this = self
+
+ class FakeNSIS(object):
+ def __init__(self, version):
+ self.version = version
+
+ def __call__(self, stdin, args):
+ this.assertEqual(args, ("-version",))
+ return 0, self.version, ""
+
+ def check_nsis_version(version):
+ sandbox = self.get_sandbox(
+ {"/usr/bin/makensis": FakeNSIS(version)},
+ {},
+ ["--target=x86_64-pc-windows-msvc", "--disable-bootstrap"],
+ {"PATH": "/usr/bin", "MAKENSISU": "/usr/bin/makensis"},
+ )
+ return sandbox._value_for(sandbox["nsis_version"])
+
+ with self.assertRaises(SystemExit):
+ check_nsis_version("v2.5")
+
+ with self.assertRaises(SystemExit):
+ check_nsis_version("v3.0a2")
+
+ self.assertEqual(check_nsis_version("v3.0b1"), "3.0b1")
+ self.assertEqual(check_nsis_version("v3.0b2"), "3.0b2")
+ self.assertEqual(check_nsis_version("v3.0rc1"), "3.0rc1")
+ self.assertEqual(check_nsis_version("v3.0"), "3.0")
+ self.assertEqual(check_nsis_version("v3.0-2"), "3.0")
+ self.assertEqual(check_nsis_version("v3.0.1"), "3.0")
+ self.assertEqual(check_nsis_version("v3.1"), "3.1")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/configure/test_options.py b/python/mozbuild/mozbuild/test/configure/test_options.py
new file mode 100644
index 0000000000..59ba616355
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/test_options.py
@@ -0,0 +1,905 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+
+from mozunit import main
+
+from mozbuild.configure.options import (
+ CommandLineHelper,
+ ConflictingOptionError,
+ InvalidOptionError,
+ NegativeOptionValue,
+ Option,
+ OptionValue,
+ PositiveOptionValue,
+)
+
+
+class Option(Option):
+ def __init__(self, *args, **kwargs):
+ kwargs["help"] = "Dummy help"
+ super(Option, self).__init__(*args, **kwargs)
+
+
+class TestOption(unittest.TestCase):
+ def test_option(self):
+ option = Option("--option")
+ self.assertEqual(option.prefix, "")
+ self.assertEqual(option.name, "option")
+ self.assertEqual(option.env, None)
+ self.assertFalse(option.default)
+
+ option = Option("--enable-option")
+ self.assertEqual(option.prefix, "enable")
+ self.assertEqual(option.name, "option")
+ self.assertEqual(option.env, None)
+ self.assertFalse(option.default)
+
+ option = Option("--disable-option")
+ self.assertEqual(option.prefix, "disable")
+ self.assertEqual(option.name, "option")
+ self.assertEqual(option.env, None)
+ self.assertTrue(option.default)
+
+ option = Option("--with-option")
+ self.assertEqual(option.prefix, "with")
+ self.assertEqual(option.name, "option")
+ self.assertEqual(option.env, None)
+ self.assertFalse(option.default)
+
+ option = Option("--without-option")
+ self.assertEqual(option.prefix, "without")
+ self.assertEqual(option.name, "option")
+ self.assertEqual(option.env, None)
+ self.assertTrue(option.default)
+
+ option = Option("--without-option-foo", env="MOZ_OPTION")
+ self.assertEqual(option.env, "MOZ_OPTION")
+
+ option = Option(env="MOZ_OPTION")
+ self.assertEqual(option.prefix, "")
+ self.assertEqual(option.name, None)
+ self.assertEqual(option.env, "MOZ_OPTION")
+ self.assertFalse(option.default)
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option("--option", nargs=0, default=("a",))
+ self.assertEqual(
+ str(e.exception), "The given `default` doesn't satisfy `nargs`"
+ )
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option("--option", nargs=1, default=())
+ self.assertEqual(
+ str(e.exception), "default must be a bool, a string or a tuple of strings"
+ )
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option("--option", nargs=1, default=True)
+ self.assertEqual(
+ str(e.exception), "The given `default` doesn't satisfy `nargs`"
+ )
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option("--option", nargs=1, default=("a", "b"))
+ self.assertEqual(
+ str(e.exception), "The given `default` doesn't satisfy `nargs`"
+ )
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option("--option", nargs=2, default=())
+ self.assertEqual(
+ str(e.exception), "default must be a bool, a string or a tuple of strings"
+ )
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option("--option", nargs=2, default=True)
+ self.assertEqual(
+ str(e.exception), "The given `default` doesn't satisfy `nargs`"
+ )
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option("--option", nargs=2, default=("a",))
+ self.assertEqual(
+ str(e.exception), "The given `default` doesn't satisfy `nargs`"
+ )
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option("--option", nargs="?", default=("a", "b"))
+ self.assertEqual(
+ str(e.exception), "The given `default` doesn't satisfy `nargs`"
+ )
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option("--option", nargs="+", default=())
+ self.assertEqual(
+ str(e.exception), "default must be a bool, a string or a tuple of strings"
+ )
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option("--option", nargs="+", default=True)
+ self.assertEqual(
+ str(e.exception), "The given `default` doesn't satisfy `nargs`"
+ )
+
+ # --disable options with a nargs value that requires at least one
+ # argument need to be given a default.
+ with self.assertRaises(InvalidOptionError) as e:
+ Option("--disable-option", nargs=1)
+ self.assertEqual(
+ str(e.exception), "The given `default` doesn't satisfy `nargs`"
+ )
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option("--disable-option", nargs="+")
+ self.assertEqual(
+ str(e.exception), "The given `default` doesn't satisfy `nargs`"
+ )
+
+ # Test nargs inference from default value
+ option = Option("--with-foo", default=True)
+ self.assertEqual(option.nargs, 0)
+
+ option = Option("--with-foo", default=False)
+ self.assertEqual(option.nargs, 0)
+
+ option = Option("--with-foo", default="a")
+ self.assertEqual(option.nargs, "?")
+
+ option = Option("--with-foo", default=("a",))
+ self.assertEqual(option.nargs, "?")
+
+ option = Option("--with-foo", default=("a", "b"))
+ self.assertEqual(option.nargs, "*")
+
+ option = Option(env="FOO", default=True)
+ self.assertEqual(option.nargs, 0)
+
+ option = Option(env="FOO", default=False)
+ self.assertEqual(option.nargs, 0)
+
+ option = Option(env="FOO", default="a")
+ self.assertEqual(option.nargs, "?")
+
+ option = Option(env="FOO", default=("a",))
+ self.assertEqual(option.nargs, "?")
+
+ option = Option(env="FOO", default=("a", "b"))
+ self.assertEqual(option.nargs, "*")
+
+ def test_option_option(self):
+ for option in (
+ "--option",
+ "--enable-option",
+ "--disable-option",
+ "--with-option",
+ "--without-option",
+ ):
+ self.assertEqual(Option(option).option, option)
+ self.assertEqual(Option(option, env="FOO").option, option)
+
+ opt = Option(option, default=False)
+ self.assertEqual(
+ opt.option,
+ option.replace("-disable-", "-enable-").replace("-without-", "-with-"),
+ )
+
+ opt = Option(option, default=True)
+ self.assertEqual(
+ opt.option,
+ option.replace("-enable-", "-disable-").replace("-with-", "-without-"),
+ )
+
+ self.assertEqual(Option(env="FOO").option, "FOO")
+
+ def test_option_choices(self):
+ with self.assertRaises(InvalidOptionError) as e:
+ Option("--option", nargs=3, choices=("a", "b"))
+ self.assertEqual(str(e.exception), "Not enough `choices` for `nargs`")
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option("--without-option", nargs=1, choices=("a", "b"))
+ self.assertEqual(
+ str(e.exception), "A `default` must be given along with `choices`"
+ )
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option("--without-option", nargs="+", choices=("a", "b"))
+ self.assertEqual(
+ str(e.exception), "A `default` must be given along with `choices`"
+ )
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option("--without-option", default="c", choices=("a", "b"))
+ self.assertEqual(
+ str(e.exception), "The `default` value must be one of 'a', 'b'"
+ )
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option(
+ "--without-option",
+ default=(
+ "a",
+ "c",
+ ),
+ choices=("a", "b"),
+ )
+ self.assertEqual(
+ str(e.exception), "The `default` value must be one of 'a', 'b'"
+ )
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option("--without-option", default=("c",), choices=("a", "b"))
+ self.assertEqual(
+ str(e.exception), "The `default` value must be one of 'a', 'b'"
+ )
+
+ option = Option("--with-option", nargs="+", choices=("a", "b"))
+ with self.assertRaises(InvalidOptionError) as e:
+ option.get_value("--with-option=c")
+ self.assertEqual(str(e.exception), "'c' is not one of 'a', 'b'")
+
+ value = option.get_value("--with-option=b,a")
+ self.assertTrue(value)
+ self.assertEqual(PositiveOptionValue(("b", "a")), value)
+
+ option = Option("--without-option", nargs="*", default="a", choices=("a", "b"))
+ with self.assertRaises(InvalidOptionError) as e:
+ option.get_value("--with-option=c")
+ self.assertEqual(str(e.exception), "'c' is not one of 'a', 'b'")
+
+ value = option.get_value("--with-option=b,a")
+ self.assertTrue(value)
+ self.assertEqual(PositiveOptionValue(("b", "a")), value)
+
+ # Test nargs inference from choices
+ option = Option("--with-option", choices=("a", "b"))
+ self.assertEqual(option.nargs, 1)
+
+ # Test "relative" values
+ option = Option(
+ "--with-option", nargs="*", default=("b", "c"), choices=("a", "b", "c", "d")
+ )
+
+ value = option.get_value("--with-option=+d")
+ self.assertEqual(PositiveOptionValue(("b", "c", "d")), value)
+
+ value = option.get_value("--with-option=-b")
+ self.assertEqual(PositiveOptionValue(("c",)), value)
+
+ value = option.get_value("--with-option=-b,+d")
+ self.assertEqual(PositiveOptionValue(("c", "d")), value)
+
+ # Adding something that is in the default is fine
+ value = option.get_value("--with-option=+b")
+ self.assertEqual(PositiveOptionValue(("b", "c")), value)
+
+ # Removing something that is not in the default is fine, as long as it
+ # is one of the choices
+ value = option.get_value("--with-option=-a")
+ self.assertEqual(PositiveOptionValue(("b", "c")), value)
+
+ with self.assertRaises(InvalidOptionError) as e:
+ option.get_value("--with-option=-e")
+ self.assertEqual(str(e.exception), "'e' is not one of 'a', 'b', 'c', 'd'")
+
+ # Other "not a choice" errors.
+ with self.assertRaises(InvalidOptionError) as e:
+ option.get_value("--with-option=+e")
+ self.assertEqual(str(e.exception), "'e' is not one of 'a', 'b', 'c', 'd'")
+
+ with self.assertRaises(InvalidOptionError) as e:
+ option.get_value("--with-option=e")
+ self.assertEqual(str(e.exception), "'e' is not one of 'a', 'b', 'c', 'd'")
+
+ def test_option_value_compare(self):
+ # OptionValue are tuple and equivalence should compare as tuples.
+ val = PositiveOptionValue(("foo",))
+
+ self.assertEqual(val[0], "foo")
+ self.assertEqual(val, PositiveOptionValue(("foo",)))
+ self.assertNotEqual(val, PositiveOptionValue(("foo", "bar")))
+
+ # Can compare a tuple to an OptionValue.
+ self.assertEqual(val, ("foo",))
+ self.assertNotEqual(val, ("foo", "bar"))
+
+ # Different OptionValue types are never equal.
+ self.assertNotEqual(val, OptionValue(("foo",)))
+
+ # For usability reasons, we raise TypeError when attempting to compare
+ # against a non-tuple.
+ with self.assertRaisesRegexp(TypeError, "cannot compare a"):
+ val == "foo"
+
+ # But we allow empty option values to compare otherwise we can't
+ # easily compare value-less types like PositiveOptionValue and
+ # NegativeOptionValue.
+ empty_positive = PositiveOptionValue()
+ empty_negative = NegativeOptionValue()
+ self.assertEqual(empty_positive, ())
+ self.assertEqual(empty_positive, PositiveOptionValue())
+ self.assertEqual(empty_negative, ())
+ self.assertEqual(empty_negative, NegativeOptionValue())
+ self.assertNotEqual(empty_positive, "foo")
+ self.assertNotEqual(empty_positive, ("foo",))
+ self.assertNotEqual(empty_negative, "foo")
+ self.assertNotEqual(empty_negative, ("foo",))
+
+ def test_option_value_format(self):
+ val = PositiveOptionValue()
+ self.assertEqual("--with-value", val.format("--with-value"))
+ self.assertEqual("--with-value", val.format("--without-value"))
+ self.assertEqual("--enable-value", val.format("--enable-value"))
+ self.assertEqual("--enable-value", val.format("--disable-value"))
+ self.assertEqual("--value", val.format("--value"))
+ self.assertEqual("VALUE=1", val.format("VALUE"))
+
+ val = PositiveOptionValue(("a",))
+ self.assertEqual("--with-value=a", val.format("--with-value"))
+ self.assertEqual("--with-value=a", val.format("--without-value"))
+ self.assertEqual("--enable-value=a", val.format("--enable-value"))
+ self.assertEqual("--enable-value=a", val.format("--disable-value"))
+ self.assertEqual("--value=a", val.format("--value"))
+ self.assertEqual("VALUE=a", val.format("VALUE"))
+
+ val = PositiveOptionValue(("a", "b"))
+ self.assertEqual("--with-value=a,b", val.format("--with-value"))
+ self.assertEqual("--with-value=a,b", val.format("--without-value"))
+ self.assertEqual("--enable-value=a,b", val.format("--enable-value"))
+ self.assertEqual("--enable-value=a,b", val.format("--disable-value"))
+ self.assertEqual("--value=a,b", val.format("--value"))
+ self.assertEqual("VALUE=a,b", val.format("VALUE"))
+
+ val = NegativeOptionValue()
+ self.assertEqual("--without-value", val.format("--with-value"))
+ self.assertEqual("--without-value", val.format("--without-value"))
+ self.assertEqual("--disable-value", val.format("--enable-value"))
+ self.assertEqual("--disable-value", val.format("--disable-value"))
+ self.assertEqual("", val.format("--value"))
+ self.assertEqual("VALUE=", val.format("VALUE"))
+
+ def test_option_value(self, name="option", nargs=0, default=None):
+ disabled = name.startswith(("disable-", "without-"))
+ if disabled:
+ negOptionValue = PositiveOptionValue
+ posOptionValue = NegativeOptionValue
+ else:
+ posOptionValue = PositiveOptionValue
+ negOptionValue = NegativeOptionValue
+ defaultValue = PositiveOptionValue(default) if default else negOptionValue()
+
+ option = Option("--%s" % name, nargs=nargs, default=default)
+
+ if nargs in (0, "?", "*") or disabled:
+ value = option.get_value("--%s" % name, "option")
+ self.assertEqual(value, posOptionValue())
+ self.assertEqual(value.origin, "option")
+ else:
+ with self.assertRaises(InvalidOptionError) as e:
+ option.get_value("--%s" % name)
+ if nargs == 1:
+ self.assertEqual(str(e.exception), "--%s takes 1 value" % name)
+ elif nargs == "+":
+ self.assertEqual(str(e.exception), "--%s takes 1 or more values" % name)
+ else:
+ self.assertEqual(str(e.exception), "--%s takes 2 values" % name)
+
+ value = option.get_value("")
+ self.assertEqual(value, defaultValue)
+ self.assertEqual(value.origin, "default")
+
+ value = option.get_value(None)
+ self.assertEqual(value, defaultValue)
+ self.assertEqual(value.origin, "default")
+
+ with self.assertRaises(AssertionError):
+ value = option.get_value("MOZ_OPTION=", "environment")
+
+ with self.assertRaises(AssertionError):
+ value = option.get_value("MOZ_OPTION=1", "environment")
+
+ with self.assertRaises(AssertionError):
+ value = option.get_value("--foo")
+
+ if nargs in (1, "?", "*", "+") and not disabled:
+ value = option.get_value("--%s=" % name, "option")
+ self.assertEqual(value, PositiveOptionValue(("",)))
+ self.assertEqual(value.origin, "option")
+ else:
+ with self.assertRaises(InvalidOptionError) as e:
+ option.get_value("--%s=" % name)
+ if disabled:
+ self.assertEqual(str(e.exception), "Cannot pass a value to --%s" % name)
+ else:
+ self.assertEqual(
+ str(e.exception), "--%s takes %d values" % (name, nargs)
+ )
+
+ if nargs in (1, "?", "*", "+") and not disabled:
+ value = option.get_value("--%s=foo" % name, "option")
+ self.assertEqual(value, PositiveOptionValue(("foo",)))
+ self.assertEqual(value.origin, "option")
+ else:
+ with self.assertRaises(InvalidOptionError) as e:
+ option.get_value("--%s=foo" % name)
+ if disabled:
+ self.assertEqual(str(e.exception), "Cannot pass a value to --%s" % name)
+ else:
+ self.assertEqual(
+ str(e.exception), "--%s takes %d values" % (name, nargs)
+ )
+
+ if nargs in (2, "*", "+") and not disabled:
+ value = option.get_value("--%s=foo,bar" % name, "option")
+ self.assertEqual(value, PositiveOptionValue(("foo", "bar")))
+ self.assertEqual(value.origin, "option")
+ else:
+ with self.assertRaises(InvalidOptionError) as e:
+ option.get_value("--%s=foo,bar" % name, "option")
+ if disabled:
+ self.assertEqual(str(e.exception), "Cannot pass a value to --%s" % name)
+ elif nargs == "?":
+ self.assertEqual(str(e.exception), "--%s takes 0 or 1 values" % name)
+ else:
+ self.assertEqual(
+ str(e.exception),
+ "--%s takes %d value%s" % (name, nargs, "s" if nargs != 1 else ""),
+ )
+
+ option = Option("--%s" % name, env="MOZ_OPTION", nargs=nargs, default=default)
+ if nargs in (0, "?", "*") or disabled:
+ value = option.get_value("--%s" % name, "option")
+ self.assertEqual(value, posOptionValue())
+ self.assertEqual(value.origin, "option")
+ else:
+ with self.assertRaises(InvalidOptionError) as e:
+ option.get_value("--%s" % name)
+ if disabled:
+ self.assertEqual(str(e.exception), "Cannot pass a value to --%s" % name)
+ elif nargs == "+":
+ self.assertEqual(str(e.exception), "--%s takes 1 or more values" % name)
+ else:
+ self.assertEqual(
+ str(e.exception),
+ "--%s takes %d value%s" % (name, nargs, "s" if nargs != 1 else ""),
+ )
+
+ value = option.get_value("")
+ self.assertEqual(value, defaultValue)
+ self.assertEqual(value.origin, "default")
+
+ value = option.get_value(None)
+ self.assertEqual(value, defaultValue)
+ self.assertEqual(value.origin, "default")
+
+ value = option.get_value("MOZ_OPTION=", "environment")
+ self.assertEqual(value, NegativeOptionValue())
+ self.assertEqual(value.origin, "environment")
+
+ if nargs in (0, "?", "*"):
+ value = option.get_value("MOZ_OPTION=1", "environment")
+ self.assertEqual(value, PositiveOptionValue())
+ self.assertEqual(value.origin, "environment")
+ elif nargs in (1, "+"):
+ value = option.get_value("MOZ_OPTION=1", "environment")
+ self.assertEqual(value, PositiveOptionValue(("1",)))
+ self.assertEqual(value.origin, "environment")
+ else:
+ with self.assertRaises(InvalidOptionError) as e:
+ option.get_value("MOZ_OPTION=1", "environment")
+ self.assertEqual(str(e.exception), "MOZ_OPTION takes 2 values")
+
+ if nargs in (1, "?", "*", "+") and not disabled:
+ value = option.get_value("--%s=" % name, "option")
+ self.assertEqual(value, PositiveOptionValue(("",)))
+ self.assertEqual(value.origin, "option")
+ else:
+ with self.assertRaises(InvalidOptionError) as e:
+ option.get_value("--%s=" % name, "option")
+ if disabled:
+ self.assertEqual(str(e.exception), "Cannot pass a value to --%s" % name)
+ else:
+ self.assertEqual(
+ str(e.exception), "--%s takes %d values" % (name, nargs)
+ )
+
+ with self.assertRaises(AssertionError):
+ value = option.get_value("--foo", "option")
+
+ if nargs in (1, "?", "*", "+"):
+ value = option.get_value("MOZ_OPTION=foo", "environment")
+ self.assertEqual(value, PositiveOptionValue(("foo",)))
+ self.assertEqual(value.origin, "environment")
+ else:
+ with self.assertRaises(InvalidOptionError) as e:
+ option.get_value("MOZ_OPTION=foo", "environment")
+ self.assertEqual(str(e.exception), "MOZ_OPTION takes %d values" % nargs)
+
+ if nargs in (2, "*", "+"):
+ value = option.get_value("MOZ_OPTION=foo,bar", "environment")
+ self.assertEqual(value, PositiveOptionValue(("foo", "bar")))
+ self.assertEqual(value.origin, "environment")
+ else:
+ with self.assertRaises(InvalidOptionError) as e:
+ option.get_value("MOZ_OPTION=foo,bar", "environment")
+ if nargs == "?":
+ self.assertEqual(str(e.exception), "MOZ_OPTION takes 0 or 1 values")
+ else:
+ self.assertEqual(
+ str(e.exception),
+ "MOZ_OPTION takes %d value%s" % (nargs, "s" if nargs != 1 else ""),
+ )
+
+ if disabled:
+ return option
+
+ env_option = Option(env="MOZ_OPTION", nargs=nargs, default=default)
+ with self.assertRaises(AssertionError):
+ env_option.get_value("--%s" % name)
+
+ value = env_option.get_value("")
+ self.assertEqual(value, defaultValue)
+ self.assertEqual(value.origin, "default")
+
+ value = env_option.get_value("MOZ_OPTION=", "environment")
+ self.assertEqual(value, negOptionValue())
+ self.assertEqual(value.origin, "environment")
+
+ if nargs in (0, "?", "*"):
+ value = env_option.get_value("MOZ_OPTION=1", "environment")
+ self.assertEqual(value, posOptionValue())
+ self.assertTrue(value)
+ self.assertEqual(value.origin, "environment")
+ elif nargs in (1, "+"):
+ value = env_option.get_value("MOZ_OPTION=1", "environment")
+ self.assertEqual(value, PositiveOptionValue(("1",)))
+ self.assertEqual(value.origin, "environment")
+ else:
+ with self.assertRaises(InvalidOptionError) as e:
+ env_option.get_value("MOZ_OPTION=1", "environment")
+ self.assertEqual(str(e.exception), "MOZ_OPTION takes 2 values")
+
+ with self.assertRaises(AssertionError) as e:
+ env_option.get_value("--%s" % name)
+
+ with self.assertRaises(AssertionError) as e:
+ env_option.get_value("--foo")
+
+ if nargs in (1, "?", "*", "+"):
+ value = env_option.get_value("MOZ_OPTION=foo", "environment")
+ self.assertEqual(value, PositiveOptionValue(("foo",)))
+ self.assertEqual(value.origin, "environment")
+ else:
+ with self.assertRaises(InvalidOptionError) as e:
+ env_option.get_value("MOZ_OPTION=foo", "environment")
+ self.assertEqual(str(e.exception), "MOZ_OPTION takes %d values" % nargs)
+
+ if nargs in (2, "*", "+"):
+ value = env_option.get_value("MOZ_OPTION=foo,bar", "environment")
+ self.assertEqual(value, PositiveOptionValue(("foo", "bar")))
+ self.assertEqual(value.origin, "environment")
+ else:
+ with self.assertRaises(InvalidOptionError) as e:
+ env_option.get_value("MOZ_OPTION=foo,bar", "environment")
+ if nargs == "?":
+ self.assertEqual(str(e.exception), "MOZ_OPTION takes 0 or 1 values")
+ else:
+ self.assertEqual(
+ str(e.exception),
+ "MOZ_OPTION takes %d value%s" % (nargs, "s" if nargs != 1 else ""),
+ )
+
+ return option
+
+ def test_option_value_enable(
+ self, enable="enable", disable="disable", nargs=0, default=None
+ ):
+ option = self.test_option_value(
+ "%s-option" % enable, nargs=nargs, default=default
+ )
+
+ value = option.get_value("--%s-option" % disable, "option")
+ self.assertEqual(value, NegativeOptionValue())
+ self.assertEqual(value.origin, "option")
+
+ option = self.test_option_value(
+ "%s-option" % disable, nargs=nargs, default=default
+ )
+
+ if nargs in (0, "?", "*"):
+ value = option.get_value("--%s-option" % enable, "option")
+ self.assertEqual(value, PositiveOptionValue())
+ self.assertEqual(value.origin, "option")
+ else:
+ with self.assertRaises(InvalidOptionError) as e:
+ option.get_value("--%s-option" % enable, "option")
+ if nargs == 1:
+ self.assertEqual(str(e.exception), "--%s-option takes 1 value" % enable)
+ elif nargs == "+":
+ self.assertEqual(
+ str(e.exception), "--%s-option takes 1 or more values" % enable
+ )
+ else:
+ self.assertEqual(
+ str(e.exception), "--%s-option takes 2 values" % enable
+ )
+
+ def test_option_value_with(self):
+ self.test_option_value_enable("with", "without")
+
+ def test_option_value_invalid_nargs(self):
+ with self.assertRaises(InvalidOptionError) as e:
+ Option("--option", nargs="foo")
+ self.assertEqual(
+ str(e.exception), "nargs must be a positive integer, '?', '*' or '+'"
+ )
+
+ with self.assertRaises(InvalidOptionError) as e:
+ Option("--option", nargs=-2)
+ self.assertEqual(
+ str(e.exception), "nargs must be a positive integer, '?', '*' or '+'"
+ )
+
+ def test_option_value_nargs_1(self):
+ self.test_option_value(nargs=1)
+ self.test_option_value(nargs=1, default=("a",))
+ self.test_option_value_enable(nargs=1, default=("a",))
+
+ # A default is required
+ with self.assertRaises(InvalidOptionError) as e:
+ Option("--disable-option", nargs=1)
+ self.assertEqual(
+ str(e.exception), "The given `default` doesn't satisfy `nargs`"
+ )
+
+ def test_option_value_nargs_2(self):
+ self.test_option_value(nargs=2)
+ self.test_option_value(nargs=2, default=("a", "b"))
+ self.test_option_value_enable(nargs=2, default=("a", "b"))
+
+ # A default is required
+ with self.assertRaises(InvalidOptionError) as e:
+ Option("--disable-option", nargs=2)
+ self.assertEqual(
+ str(e.exception), "The given `default` doesn't satisfy `nargs`"
+ )
+
    def test_option_value_nargs_0_or_1(self):
        """Exercise options with nargs='?' (zero or one value)."""
        self.test_option_value(nargs="?")
        self.test_option_value(nargs="?", default=("a",))
        self.test_option_value_enable(nargs="?")
        self.test_option_value_enable(nargs="?", default=("a",))
+
    def test_option_value_nargs_0_or_more(self):
        """Exercise options with nargs='*' (any number of values)."""
        self.test_option_value(nargs="*")
        self.test_option_value(nargs="*", default=("a",))
        self.test_option_value(nargs="*", default=("a", "b"))
        self.test_option_value_enable(nargs="*")
        self.test_option_value_enable(nargs="*", default=("a",))
        self.test_option_value_enable(nargs="*", default=("a", "b"))
+
+ def test_option_value_nargs_1_or_more(self):
+ self.test_option_value(nargs="+")
+ self.test_option_value(nargs="+", default=("a",))
+ self.test_option_value(nargs="+", default=("a", "b"))
+ self.test_option_value_enable(nargs="+", default=("a",))
+ self.test_option_value_enable(nargs="+", default=("a", "b"))
+
+ # A default is required
+ with self.assertRaises(InvalidOptionError) as e:
+ Option("--disable-option", nargs="+")
+ self.assertEqual(
+ str(e.exception), "The given `default` doesn't satisfy `nargs`"
+ )
+
+
class TestCommandLineHelper(unittest.TestCase):
    """Tests for CommandLineHelper, which dispenses command-line and
    environment arguments to Option handling."""

    def test_basic(self):
        """Handling an option consumes it; absent options are negative."""
        helper = CommandLineHelper({}, ["cmd", "--foo", "--bar"])

        self.assertEqual(["--foo", "--bar"], list(helper))

        helper.add("--enable-qux")

        self.assertEqual(["--foo", "--bar", "--enable-qux"], list(helper))

        # Handling --bar removes it from the remaining arguments.
        value, option = helper.handle(Option("--bar"))
        self.assertEqual(["--foo", "--enable-qux"], list(helper))
        self.assertEqual(PositiveOptionValue(), value)
        self.assertEqual("--bar", option)

        # An option not on the command line yields a negative value and no
        # matched argument.
        value, option = helper.handle(Option("--baz"))
        self.assertEqual(["--foo", "--enable-qux"], list(helper))
        self.assertEqual(NegativeOptionValue(), value)
        self.assertEqual(None, option)

        # Constructing without the leading "cmd" program name is rejected.
        with self.assertRaises(AssertionError):
            CommandLineHelper({}, ["--foo", "--bar"])

    def test_precedence(self):
        """Later command-line arguments win over earlier ones, and the
        command line wins over the environment."""
        foo = Option("--with-foo", nargs="*")
        helper = CommandLineHelper({}, ["cmd", "--with-foo=a,b"])
        value, option = helper.handle(foo)
        self.assertEqual(PositiveOptionValue(("a", "b")), value)
        self.assertEqual("command-line", value.origin)
        self.assertEqual("--with-foo=a,b", option)

        # Of two conflicting command-line forms, the last wins.
        helper = CommandLineHelper({}, ["cmd", "--with-foo=a,b", "--without-foo"])
        value, option = helper.handle(foo)
        self.assertEqual(NegativeOptionValue(), value)
        self.assertEqual("command-line", value.origin)
        self.assertEqual("--without-foo", option)

        helper = CommandLineHelper({}, ["cmd", "--without-foo", "--with-foo=a,b"])
        value, option = helper.handle(foo)
        self.assertEqual(PositiveOptionValue(("a", "b")), value)
        self.assertEqual("command-line", value.origin)
        self.assertEqual("--with-foo=a,b", option)

        # Command line beats the environment variable.
        foo = Option("--with-foo", env="FOO", nargs="*")
        helper = CommandLineHelper({"FOO": ""}, ["cmd", "--with-foo=a,b"])
        value, option = helper.handle(foo)
        self.assertEqual(PositiveOptionValue(("a", "b")), value)
        self.assertEqual("command-line", value.origin)
        self.assertEqual("--with-foo=a,b", option)

        helper = CommandLineHelper({"FOO": "a,b"}, ["cmd", "--without-foo"])
        value, option = helper.handle(foo)
        self.assertEqual(NegativeOptionValue(), value)
        self.assertEqual("command-line", value.origin)
        self.assertEqual("--without-foo", option)

        # With only the environment variable set, it provides the value.
        helper = CommandLineHelper({"FOO": ""}, ["cmd", "--with-bar=a,b"])
        value, option = helper.handle(foo)
        self.assertEqual(NegativeOptionValue(), value)
        self.assertEqual("environment", value.origin)
        self.assertEqual("FOO=", option)

        helper = CommandLineHelper({"FOO": "a,b"}, ["cmd", "--without-bar"])
        value, option = helper.handle(foo)
        self.assertEqual(PositiveOptionValue(("a", "b")), value)
        self.assertEqual("environment", value.origin)
        self.assertEqual("FOO=a,b", option)

        # A VAR=value argument on the command line has command-line origin
        # and beats the option form given earlier.
        helper = CommandLineHelper({}, ["cmd", "--with-foo=a,b", "FOO="])
        value, option = helper.handle(foo)
        self.assertEqual(NegativeOptionValue(), value)
        self.assertEqual("command-line", value.origin)
        self.assertEqual("FOO=", option)

        helper = CommandLineHelper({}, ["cmd", "--without-foo", "FOO=a,b"])
        value, option = helper.handle(foo)
        self.assertEqual(PositiveOptionValue(("a", "b")), value)
        self.assertEqual("command-line", value.origin)
        self.assertEqual("FOO=a,b", option)

        helper = CommandLineHelper({}, ["cmd", "FOO=", "--with-foo=a,b"])
        value, option = helper.handle(foo)
        self.assertEqual(PositiveOptionValue(("a", "b")), value)
        self.assertEqual("command-line", value.origin)
        self.assertEqual("--with-foo=a,b", option)

        helper = CommandLineHelper({}, ["cmd", "FOO=a,b", "--without-foo"])
        value, option = helper.handle(foo)
        self.assertEqual(NegativeOptionValue(), value)
        self.assertEqual("command-line", value.origin)
        self.assertEqual("--without-foo", option)

    def test_extra_args(self):
        """Arguments injected via add() interact with existing arguments:
        duplicates are allowed, conflicting values are not."""
        foo = Option("--with-foo", env="FOO", nargs="*")
        helper = CommandLineHelper({}, ["cmd"])
        helper.add("FOO=a,b,c", "other-origin")
        value, option = helper.handle(foo)
        self.assertEqual(PositiveOptionValue(("a", "b", "c")), value)
        self.assertEqual("other-origin", value.origin)
        self.assertEqual("FOO=a,b,c", option)

        helper = CommandLineHelper({}, ["cmd"])
        helper.add("FOO=a,b,c", "other-origin")
        helper.add("--with-foo=a,b,c", "other-origin")
        value, option = helper.handle(foo)
        self.assertEqual(PositiveOptionValue(("a", "b", "c")), value)
        self.assertEqual("other-origin", value.origin)
        self.assertEqual("--with-foo=a,b,c", option)

        # Adding conflicting options is not allowed.
        helper = CommandLineHelper({}, ["cmd"])
        helper.add("FOO=a,b,c", "other-origin")
        with self.assertRaises(ConflictingOptionError) as cm:
            helper.add("FOO=", "other-origin")
        self.assertEqual("FOO=", cm.exception.arg)
        self.assertEqual("other-origin", cm.exception.origin)
        self.assertEqual("FOO=a,b,c", cm.exception.old_arg)
        self.assertEqual("other-origin", cm.exception.old_origin)
        with self.assertRaises(ConflictingOptionError) as cm:
            helper.add("FOO=a,b", "other-origin")
        self.assertEqual("FOO=a,b", cm.exception.arg)
        self.assertEqual("other-origin", cm.exception.origin)
        self.assertEqual("FOO=a,b,c", cm.exception.old_arg)
        self.assertEqual("other-origin", cm.exception.old_origin)
        # But adding the same is allowed.
        helper.add("FOO=a,b,c", "other-origin")
        value, option = helper.handle(foo)
        self.assertEqual(PositiveOptionValue(("a", "b", "c")), value)
        self.assertEqual("other-origin", value.origin)
        self.assertEqual("FOO=a,b,c", option)

        # The same rule as above applies when using the option form vs. the
        # variable form. But we can't detect it when .add is called.
        helper = CommandLineHelper({}, ["cmd"])
        helper.add("FOO=a,b,c", "other-origin")
        helper.add("--without-foo", "other-origin")
        with self.assertRaises(ConflictingOptionError) as cm:
            helper.handle(foo)
        self.assertEqual("--without-foo", cm.exception.arg)
        self.assertEqual("other-origin", cm.exception.origin)
        self.assertEqual("FOO=a,b,c", cm.exception.old_arg)
        self.assertEqual("other-origin", cm.exception.old_origin)
        helper = CommandLineHelper({}, ["cmd"])
        helper.add("FOO=a,b,c", "other-origin")
        helper.add("--with-foo=a,b", "other-origin")
        with self.assertRaises(ConflictingOptionError) as cm:
            helper.handle(foo)
        self.assertEqual("--with-foo=a,b", cm.exception.arg)
        self.assertEqual("other-origin", cm.exception.origin)
        self.assertEqual("FOO=a,b,c", cm.exception.old_arg)
        self.assertEqual("other-origin", cm.exception.old_origin)
        helper = CommandLineHelper({}, ["cmd"])
        helper.add("FOO=a,b,c", "other-origin")
        helper.add("--with-foo=a,b,c", "other-origin")
        value, option = helper.handle(foo)
        self.assertEqual(PositiveOptionValue(("a", "b", "c")), value)
        self.assertEqual("other-origin", value.origin)
        self.assertEqual("--with-foo=a,b,c", option)

        # Conflicts are also not allowed against what is in the
        # environment/on the command line.
        helper = CommandLineHelper({}, ["cmd", "--with-foo=a,b"])
        helper.add("FOO=a,b,c", "other-origin")
        with self.assertRaises(ConflictingOptionError) as cm:
            helper.handle(foo)
        self.assertEqual("FOO=a,b,c", cm.exception.arg)
        self.assertEqual("other-origin", cm.exception.origin)
        self.assertEqual("--with-foo=a,b", cm.exception.old_arg)
        self.assertEqual("command-line", cm.exception.old_origin)

        helper = CommandLineHelper({}, ["cmd", "--with-foo=a,b"])
        helper.add("--without-foo", "other-origin")
        with self.assertRaises(ConflictingOptionError) as cm:
            helper.handle(foo)
        self.assertEqual("--without-foo", cm.exception.arg)
        self.assertEqual("other-origin", cm.exception.origin)
        self.assertEqual("--with-foo=a,b", cm.exception.old_arg)
        self.assertEqual("command-line", cm.exception.old_origin)

    def test_possible_origins(self):
        """Options only accept values from their declared possible_origins."""
        # possible_origins must be a tuple, not a bare string.
        with self.assertRaises(InvalidOptionError):
            Option("--foo", possible_origins="command-line")

        helper = CommandLineHelper({"BAZ": "1"}, ["cmd", "--foo", "--bar"])
        foo = Option("--foo", possible_origins=("command-line",))
        value, option = helper.handle(foo)
        self.assertEqual(PositiveOptionValue(), value)
        self.assertEqual("command-line", value.origin)
        self.assertEqual("--foo", option)

        # assertRaisesRegex: the old assertRaisesRegexp alias was removed in
        # Python 3.12.
        bar = Option("--bar", possible_origins=("mozconfig",))
        with self.assertRaisesRegex(
            InvalidOptionError,
            "--bar can not be set by command-line. Values are accepted from: mozconfig",
        ):
            helper.handle(bar)

        baz = Option(env="BAZ", possible_origins=("implied",))
        with self.assertRaisesRegex(
            InvalidOptionError,
            "BAZ=1 can not be set by environment. Values are accepted from: implied",
        ):
            helper.handle(baz)
+
+
# Allow running this test file directly; `main` comes from mozunit.
if __name__ == "__main__":
    main()
diff --git a/python/mozbuild/mozbuild/test/configure/test_toolchain_configure.py b/python/mozbuild/mozbuild/test/configure/test_toolchain_configure.py
new file mode 100644
index 0000000000..c6af3d99d4
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/test_toolchain_configure.py
@@ -0,0 +1,2056 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import logging
+import os
+
+import six
+from mozboot.util import MINIMUM_RUST_VERSION
+from mozpack import path as mozpath
+from mozunit import main
+from six import StringIO
+from test_toolchain_helpers import CompilerResult, FakeCompiler, PrependFlags
+
+from common import BaseConfigureTest
+from mozbuild.configure.util import Version
+from mozbuild.util import ReadOnlyNamespace, memoize
+
# Macro sets describing which C/C++ language standard a fake compiler
# advertises by default (values of __STDC_VERSION__ / __cplusplus).
DEFAULT_C99 = {"__STDC_VERSION__": "199901L"}

DEFAULT_C11 = {"__STDC_VERSION__": "201112L"}

DEFAULT_C17 = {"__STDC_VERSION__": "201710L"}

DEFAULT_CXX_97 = {"__cplusplus": "199711L"}

DEFAULT_CXX_11 = {"__cplusplus": "201103L"}

# Draft/pre-release __cplusplus values reported by some compiler versions
# before the corresponding standard was finalized.
DRAFT_CXX_14 = {"__cplusplus": "201300L"}

DEFAULT_CXX_14 = {"__cplusplus": "201402L"}

DRAFT_CXX17_201500 = {"__cplusplus": "201500L"}

DRAFT_CXX17_201406 = {"__cplusplus": "201406L"}

DEFAULT_CXX_17 = {"__cplusplus": "201703L"}

# Macro sets keyed by the -std= flag that enables them: the fake compiler
# only defines these macros when given the corresponding flag.
SUPPORTS_GNU99 = {"-std=gnu99": DEFAULT_C99}

SUPPORTS_GNUXX11 = {"-std=gnu++11": DEFAULT_CXX_11}

SUPPORTS_GNUXX14 = {"-std=gnu++14": DEFAULT_CXX_14}

SUPPORTS_CXX14 = {"-std=c++14": DEFAULT_CXX_14}

SUPPORTS_GNUXX17 = {"-std=gnu++17": DEFAULT_CXX_17}

SUPPORTS_CXX17 = {"-std=c++17": DEFAULT_CXX_17}
+
+
@memoize
def GCC_BASE(version):
    """Fake GCC core: the version and __STDC__ macros shared by gcc and g++."""
    ver = Version(version)
    macros = {
        "__GNUC__": ver.major,
        "__GNUC_MINOR__": ver.minor,
        "__GNUC_PATCHLEVEL__": ver.patch,
        "__STDC__": 1,
    }
    return FakeCompiler(macros)
+
+
@memoize
def GCC(version):
    """Fake gcc C driver for `version`, accepting -std=gnu99."""
    base = GCC_BASE(version)
    return base + SUPPORTS_GNU99
+
+
@memoize
def GXX(version):
    """Fake g++ driver for `version`: C++97 by default, with -std=gnu++11."""
    base = GCC_BASE(version)
    return base + DEFAULT_CXX_97 + SUPPORTS_GNUXX11
+
+
# Flag-conditional draft-standard support for specific compiler versions.
SUPPORTS_DRAFT_CXX14_VERSION = {"-std=gnu++14": DRAFT_CXX_14}

SUPPORTS_GNUXX1Z = {"-std=gnu++1z": DRAFT_CXX17_201406}

SUPPORTS_DRAFT_CXX17_201500_VERSION = {"-std=gnu++17": DRAFT_CXX17_201500}

# Concrete fake GCC/G++ releases used by the toolchain tests below.
GCC_4_9 = GCC("4.9.3")
GXX_4_9 = GXX("4.9.3") + SUPPORTS_DRAFT_CXX14_VERSION
GCC_5 = GCC("5.2.1") + DEFAULT_C11
GXX_5 = GXX("5.2.1") + SUPPORTS_GNUXX14
GCC_6 = GCC("6.4.0") + DEFAULT_C11
GXX_6 = (
    GXX("6.4.0")
    + DEFAULT_CXX_14
    + SUPPORTS_GNUXX17
    + SUPPORTS_DRAFT_CXX17_201500_VERSION
)
GCC_7 = GCC("7.3.0") + DEFAULT_C11
GXX_7 = GXX("7.3.0") + DEFAULT_CXX_14 + SUPPORTS_GNUXX17 + SUPPORTS_CXX17
GCC_8 = GCC("8.3.0") + DEFAULT_C11
GXX_8 = GXX("8.3.0") + DEFAULT_CXX_14 + SUPPORTS_GNUXX17 + SUPPORTS_CXX17
GCC_10 = GCC("10.2.1") + DEFAULT_C17
GXX_10 = GXX("10.2.1") + DEFAULT_CXX_14 + SUPPORTS_GNUXX17 + SUPPORTS_CXX17

DEFAULT_GCC = GCC_8
DEFAULT_GXX = GXX_8

# Byte-order macros as defined by GCC-compatible compilers.
GCC_PLATFORM_LITTLE_ENDIAN = {
    "__ORDER_LITTLE_ENDIAN__": 1234,
    "__ORDER_BIG_ENDIAN__": 4321,
    "__BYTE_ORDER__": 1234,
}

GCC_PLATFORM_BIG_ENDIAN = {
    "__ORDER_LITTLE_ENDIAN__": 1234,
    "__ORDER_BIG_ENDIAN__": 4321,
    "__BYTE_ORDER__": 4321,
}

# Architecture macro sets; -m64/-m32 flip between the two x86 variants.
GCC_PLATFORM_X86 = FakeCompiler(GCC_PLATFORM_LITTLE_ENDIAN) + {
    None: {"__i386__": 1},
    "-m64": {"__i386__": False, "__x86_64__": 1},
}

GCC_PLATFORM_X86_64 = FakeCompiler(GCC_PLATFORM_LITTLE_ENDIAN) + {
    None: {"__x86_64__": 1},
    "-m32": {"__x86_64__": False, "__i386__": 1},
}

GCC_PLATFORM_ARM = FakeCompiler(GCC_PLATFORM_LITTLE_ENDIAN) + {"__arm__": 1}

# Operating-system macro sets.
GCC_PLATFORM_LINUX = {"__linux__": 1}

GCC_PLATFORM_DARWIN = {"__APPLE__": 1}

GCC_PLATFORM_WIN = {"_WIN32": 1, "WINNT": 1}

GCC_PLATFORM_OPENBSD = {"__OpenBSD__": 1}

# Architecture + OS combinations used by individual tests.
GCC_PLATFORM_X86_LINUX = FakeCompiler(GCC_PLATFORM_X86, GCC_PLATFORM_LINUX)
GCC_PLATFORM_X86_64_LINUX = FakeCompiler(GCC_PLATFORM_X86_64, GCC_PLATFORM_LINUX)
GCC_PLATFORM_ARM_LINUX = FakeCompiler(GCC_PLATFORM_ARM, GCC_PLATFORM_LINUX)
GCC_PLATFORM_X86_OSX = FakeCompiler(GCC_PLATFORM_X86, GCC_PLATFORM_DARWIN)
GCC_PLATFORM_X86_64_OSX = FakeCompiler(GCC_PLATFORM_X86_64, GCC_PLATFORM_DARWIN)
GCC_PLATFORM_X86_WIN = FakeCompiler(GCC_PLATFORM_X86, GCC_PLATFORM_WIN)
GCC_PLATFORM_X86_64_WIN = FakeCompiler(GCC_PLATFORM_X86_64, GCC_PLATFORM_WIN)
+
+
@memoize
def CLANG_BASE(version):
    """Fake clang core: the __clang__ and clang version macros."""
    ver = Version(version)
    macros = {
        "__clang__": 1,
        "__clang_major__": ver.major,
        "__clang_minor__": ver.minor,
        "__clang_patchlevel__": ver.patch,
    }
    return FakeCompiler(macros)
+
+
@memoize
def CLANG(version):
    """Fake clang C driver: clang version macros over a GCC 4.2.1 baseline."""
    baseline = GCC_BASE("4.2.1") + CLANG_BASE(version)
    return baseline + SUPPORTS_GNU99
+
+
@memoize
def CLANGXX(version):
    """Fake clang++ driver: C++97 by default, with -std=gnu++11/14 support."""
    baseline = GCC_BASE("4.2.1") + CLANG_BASE(version)
    return baseline + DEFAULT_CXX_97 + SUPPORTS_GNUXX11 + SUPPORTS_GNUXX14
+
+
# Concrete fake clang releases; the XCODE_* variants add the Apple-only
# __apple_build_version__ macro and use Apple's own version numbering.
CLANG_3_3 = CLANG("3.3.0") + DEFAULT_C99
CLANGXX_3_3 = CLANGXX("3.3.0")
CLANG_4_0 = CLANG("4.0.2") + DEFAULT_C11
CLANGXX_4_0 = CLANGXX("4.0.2") + SUPPORTS_GNUXX1Z
CLANG_7_0 = CLANG("7.0.0") + DEFAULT_C11
CLANGXX_7_0 = CLANGXX("7.0.0") + DEFAULT_CXX_14 + SUPPORTS_GNUXX17
XCODE_CLANG_3_3 = (
    CLANG("5.0")
    + DEFAULT_C99
    + {
        # Real Xcode clang has a full version here, but we don't care about it.
        "__apple_build_version__": "1"
    }
)
XCODE_CLANGXX_3_3 = CLANGXX("5.0") + {"__apple_build_version__": "1"}
XCODE_CLANG_4_0 = CLANG("9.0.0") + DEFAULT_C11 + {"__apple_build_version__": "1"}
XCODE_CLANGXX_4_0 = (
    CLANGXX("9.0.0") + SUPPORTS_GNUXX1Z + {"__apple_build_version__": "1"}
)
XCODE_CLANG_7_0 = CLANG("10.0.1") + DEFAULT_C11 + {"__apple_build_version__": "1"}
XCODE_CLANGXX_7_0 = (
    CLANGXX("10.0.1") + SUPPORTS_GNUXX17 + {"__apple_build_version__": "1"}
)
DEFAULT_CLANG = CLANG_7_0
DEFAULT_CLANGXX = CLANGXX_7_0
+
+
def CLANG_PLATFORM(gcc_platform):
    """Derive a clang-flavored platform from a GCC one, adding --target=
    flags that swap in each supported cross-target's macro set."""
    targets = {
        "--target=x86_64-linux-gnu": GCC_PLATFORM_X86_64_LINUX[None],
        "--target=x86_64-apple-darwin11.2.0": GCC_PLATFORM_X86_64_OSX[None],
        "--target=i686-linux-gnu": GCC_PLATFORM_X86_LINUX[None],
        "--target=i686-apple-darwin11.2.0": GCC_PLATFORM_X86_OSX[None],
        "--target=arm-linux-gnu": GCC_PLATFORM_ARM_LINUX[None],
    }
    # Each --target= flag first clears the host platform's own macros so
    # that only the cross-target's definitions remain in effect.
    host_macros = gcc_platform[None]
    undo_gcc_platform = {
        flag: dict.fromkeys(host_macros, False) for flag in targets
    }
    return FakeCompiler(gcc_platform, undo_gcc_platform, targets)
+
+
# Clang platform definitions derived from the GCC ones above.
CLANG_PLATFORM_X86_LINUX = CLANG_PLATFORM(GCC_PLATFORM_X86_LINUX)
CLANG_PLATFORM_X86_64_LINUX = CLANG_PLATFORM(GCC_PLATFORM_X86_64_LINUX)
CLANG_PLATFORM_X86_OSX = CLANG_PLATFORM(GCC_PLATFORM_X86_OSX)
CLANG_PLATFORM_X86_64_OSX = CLANG_PLATFORM(GCC_PLATFORM_X86_64_OSX)
CLANG_PLATFORM_X86_WIN = CLANG_PLATFORM(GCC_PLATFORM_X86_WIN)
CLANG_PLATFORM_X86_64_WIN = CLANG_PLATFORM(GCC_PLATFORM_X86_64_WIN)
+
+
@memoize
def VS(version):
    """Fake MSVC (cl.exe) for `version`: _MSC_VER/_MSC_FULL_VER/_MT macros,
    plus the C++97 default when compiling *.cpp files."""
    ver = Version(version)
    msc_ver = "%02d%02d" % (ver.major, ver.minor)
    msc_full_ver = "%02d%02d%05d" % (ver.major, ver.minor, ver.patch)
    return FakeCompiler(
        {
            None: {
                "_MSC_VER": msc_ver,
                "_MSC_FULL_VER": msc_full_ver,
                "_MT": "1",
            },
            "*.cpp": DEFAULT_CXX_97,
        }
    )
+
+
VS_2017u8 = VS("19.15.26726")

# MSVC target-architecture macro sets.
VS_PLATFORM_X86 = {"_M_IX86": 600, "_WIN32": 1}

VS_PLATFORM_X86_64 = {"_M_X64": 100, "_WIN32": 1, "_WIN64": 1}

# Despite the 32 in the name, this macro is defined for 32- and 64-bit.
MINGW32 = {"__MINGW32__": True}

# Fake clang-cl: clang macros combined with MSVC macros; *.cpp compiles
# report C++11 and no __STDC_VERSION__.
# Note: In reality, the -std=gnu* options are only supported when preceded by
# -Xclang.
CLANG_CL_3_9 = (
    CLANG_BASE("3.9.0")
    + VS("18.00.00000")
    + DEFAULT_C11
    + SUPPORTS_GNU99
    + SUPPORTS_GNUXX11
    + SUPPORTS_CXX14
) + {"*.cpp": {"__STDC_VERSION__": False, "__cplusplus": "201103L"}}
CLANG_CL_9_0 = (
    CLANG_BASE("9.0.0")
    + VS("18.00.00000")
    + DEFAULT_C11
    + SUPPORTS_GNU99
    + SUPPORTS_GNUXX11
    + SUPPORTS_CXX14
    + SUPPORTS_CXX17
) + {"*.cpp": {"__STDC_VERSION__": False, "__cplusplus": "201103L"}}

CLANG_CL_PLATFORM_X86 = FakeCompiler(
    VS_PLATFORM_X86, GCC_PLATFORM_X86[None], GCC_PLATFORM_LITTLE_ENDIAN
)
CLANG_CL_PLATFORM_X86_64 = FakeCompiler(
    VS_PLATFORM_X86_64, GCC_PLATFORM_X86_64[None], GCC_PLATFORM_LITTLE_ENDIAN
)

# Expected library naming (prefixes/suffixes) keyed by target OS; used by
# BaseToolchainTest.do_library_name_info_test to check configure's output.
LIBRARY_NAME_INFOS = {
    "linux-gnu": {
        "DLL_PREFIX": "lib",
        "DLL_SUFFIX": ".so",
        "LIB_PREFIX": "lib",
        "LIB_SUFFIX": "a",
        "IMPORT_LIB_SUFFIX": "",
        "OBJ_SUFFIX": "o",
    },
    "darwin11.2.0": {
        "DLL_PREFIX": "lib",
        "DLL_SUFFIX": ".dylib",
        "LIB_PREFIX": "lib",
        "LIB_SUFFIX": "a",
        "IMPORT_LIB_SUFFIX": "",
        "OBJ_SUFFIX": "o",
    },
    "mingw32": {
        "DLL_PREFIX": "",
        "DLL_SUFFIX": ".dll",
        "LIB_PREFIX": "lib",
        "LIB_SUFFIX": "a",
        "IMPORT_LIB_SUFFIX": "a",
        "OBJ_SUFFIX": "o",
    },
    "windows-msvc": {
        "DLL_PREFIX": "",
        "DLL_SUFFIX": ".dll",
        "LIB_PREFIX": "",
        "LIB_SUFFIX": "lib",
        "IMPORT_LIB_SUFFIX": "lib",
        "OBJ_SUFFIX": "obj",
    },
    "windows-gnu": {
        "DLL_PREFIX": "",
        "DLL_SUFFIX": ".dll",
        "LIB_PREFIX": "lib",
        "LIB_SUFFIX": "a",
        "IMPORT_LIB_SUFFIX": "a",
        "OBJ_SUFFIX": "o",
    },
    "openbsd6.1": {
        "DLL_PREFIX": "lib",
        "DLL_SUFFIX": ".so.1.0",
        "LIB_PREFIX": "lib",
        "LIB_SUFFIX": "a",
        "IMPORT_LIB_SUFFIX": "",
        "OBJ_SUFFIX": "o",
    },
}
+
+
class BaseToolchainTest(BaseConfigureTest):
    """Base class for toolchain.configure tests: captures configure logging
    and provides do_toolchain_test to drive compiler detection checks."""

    def setUp(self):
        # Route configure's ERROR-level logging into self.out so error
        # messages can be compared against expected results.
        super(BaseToolchainTest, self).setUp()
        self.out = StringIO()
        self.logger = logging.getLogger("BaseToolchainTest")
        self.logger.setLevel(logging.ERROR)
        self.handler = logging.StreamHandler(self.out)
        self.logger.addHandler(self.handler)

    def tearDown(self):
        self.logger.removeHandler(self.handler)
        del self.handler
        del self.out
        super(BaseToolchainTest, self).tearDown()

    # NOTE(review): the mutable default arguments are safe here because
    # both are copied (`dict(environ)`, `args + [...]`) before any change.
    def do_toolchain_test(self, paths, results, args=[], environ={}):
        """Helper to test the toolchain checks from toolchain.configure.

        - `paths` is a dict associating compiler paths to FakeCompiler
          definitions from above.
        - `results` is a dict associating result variable names from
          toolchain.configure (c_compiler, cxx_compiler, host_c_compiler,
          host_cxx_compiler) with a result.
          The result can either be an error string, or a CompilerResult
          corresponding to the object returned by toolchain.configure checks.
          When the results for host_c_compiler are identical to c_compiler,
          they can be omitted. Likewise for host_cxx_compiler vs.
          cxx_compiler.
        """
        environ = dict(environ)
        if "PATH" not in environ:
            environ["PATH"] = os.pathsep.join(
                mozpath.abspath(p) for p in ("/bin", "/usr/bin")
            )

        args = args + ["--enable-release", "--disable-bootstrap"]

        sandbox = self.get_sandbox(paths, {}, args, environ, logger=self.logger)

        for var in (
            "c_compiler",
            "cxx_compiler",
            "host_c_compiler",
            "host_cxx_compiler",
        ):
            if var in results:
                result = results[var]
            elif var.startswith("host_"):
                # Host results default to the corresponding target result.
                result = results.get(var[5:], {})
            else:
                result = {}
            try:
                # Reset captured log output before evaluating each variable.
                self.out.truncate(0)
                self.out.seek(0)
                compiler = sandbox._value_for(sandbox[var])
                # Add var on both ends to make it clear which of the
                # variables is failing the test when that happens.
                self.assertEqual((var, compiler), (var, result))
            except SystemExit:
                # Configure errors exit; compare the logged error message
                # against the expected error string instead.
                self.assertEqual((var, result), (var, self.out.getvalue().strip()))
                return

        # Normalize the target os to match what we have as keys in
        # LIBRARY_NAME_INFOS.
        target_os = getattr(self, "TARGET", self.HOST).split("-", 2)[2]
        if target_os == "mingw32":
            compiler_type = sandbox._value_for(sandbox["c_compiler"]).type
            if compiler_type == "clang-cl":
                target_os = "windows-msvc"
        elif target_os == "linux-gnuabi64":
            target_os = "linux-gnu"

        self.do_library_name_info_test(target_os, sandbox)

        # Try again on artifact builds. In that case, we always get library
        # name info for msvc on Windows
        if target_os == "mingw32":
            target_os = "windows-msvc"

        sandbox = self.get_sandbox(
            paths, {}, args + ["--enable-artifact-builds"], environ, logger=self.logger
        )

        self.do_library_name_info_test(target_os, sandbox)

    def do_library_name_info_test(self, target_os, sandbox):
        """Compare the sandbox's library-naming config values against the
        expectations in LIBRARY_NAME_INFOS for `target_os`."""
        library_name_info = LIBRARY_NAME_INFOS[target_os]
        for k in (
            "DLL_PREFIX",
            "DLL_SUFFIX",
            "LIB_PREFIX",
            "LIB_SUFFIX",
            "IMPORT_LIB_SUFFIX",
            "OBJ_SUFFIX",
        ):
            # Compare "KEY=value" strings so failures name the offending key.
            self.assertEqual(
                "%s=%s" % (k, sandbox.get_config(k)),
                "%s=%s" % (k, library_name_info[k]),
            )
+
+
def old_gcc_message(old_ver):
    """Error text toolchain.configure emits for a GCC older than 8.1."""
    template = "Only GCC 8.1 or newer is supported (found version {})."
    return template.format(old_ver)
+
+
+class LinuxToolchainTest(BaseToolchainTest):
+ PATHS = {
+ "/usr/bin/gcc": DEFAULT_GCC + GCC_PLATFORM_X86_64_LINUX,
+ "/usr/bin/g++": DEFAULT_GXX + GCC_PLATFORM_X86_64_LINUX,
+ "/usr/bin/gcc-4.9": GCC_4_9 + GCC_PLATFORM_X86_64_LINUX,
+ "/usr/bin/g++-4.9": GXX_4_9 + GCC_PLATFORM_X86_64_LINUX,
+ "/usr/bin/gcc-5": GCC_5 + GCC_PLATFORM_X86_64_LINUX,
+ "/usr/bin/g++-5": GXX_5 + GCC_PLATFORM_X86_64_LINUX,
+ "/usr/bin/gcc-6": GCC_6 + GCC_PLATFORM_X86_64_LINUX,
+ "/usr/bin/g++-6": GXX_6 + GCC_PLATFORM_X86_64_LINUX,
+ "/usr/bin/gcc-7": GCC_7 + GCC_PLATFORM_X86_64_LINUX,
+ "/usr/bin/g++-7": GXX_7 + GCC_PLATFORM_X86_64_LINUX,
+ "/usr/bin/gcc-8": GCC_8 + GCC_PLATFORM_X86_64_LINUX,
+ "/usr/bin/g++-8": GXX_8 + GCC_PLATFORM_X86_64_LINUX,
+ "/usr/bin/gcc-10": GCC_10 + GCC_PLATFORM_X86_64_LINUX,
+ "/usr/bin/g++-10": GXX_10 + GCC_PLATFORM_X86_64_LINUX,
+ "/usr/bin/clang": DEFAULT_CLANG + CLANG_PLATFORM_X86_64_LINUX,
+ "/usr/bin/clang++": DEFAULT_CLANGXX + CLANG_PLATFORM_X86_64_LINUX,
+ "/usr/bin/clang-7.0": CLANG_7_0 + CLANG_PLATFORM_X86_64_LINUX,
+ "/usr/bin/clang++-7.0": CLANGXX_7_0 + CLANG_PLATFORM_X86_64_LINUX,
+ "/usr/bin/clang-4.0": CLANG_4_0 + CLANG_PLATFORM_X86_64_LINUX,
+ "/usr/bin/clang++-4.0": CLANGXX_4_0 + CLANG_PLATFORM_X86_64_LINUX,
+ "/usr/bin/clang-3.3": CLANG_3_3 + CLANG_PLATFORM_X86_64_LINUX,
+ "/usr/bin/clang++-3.3": CLANGXX_3_3 + CLANG_PLATFORM_X86_64_LINUX,
+ }
+
+ GCC_4_7_RESULT = old_gcc_message("4.7.3")
+ GXX_4_7_RESULT = GCC_4_7_RESULT
+ GCC_4_9_RESULT = old_gcc_message("4.9.3")
+ GXX_4_9_RESULT = GCC_4_9_RESULT
+ GCC_5_RESULT = old_gcc_message("5.2.1")
+ GXX_5_RESULT = GCC_5_RESULT
+ GCC_6_RESULT = old_gcc_message("6.4.0")
+ GXX_6_RESULT = GCC_6_RESULT
+ GCC_7_RESULT = old_gcc_message("7.3.0")
+ GXX_7_RESULT = GCC_7_RESULT
+ GCC_8_RESULT = CompilerResult(
+ flags=["-std=gnu99"],
+ version="8.3.0",
+ type="gcc",
+ compiler="/usr/bin/gcc-8",
+ language="C",
+ )
+ GXX_8_RESULT = CompilerResult(
+ flags=["-std=gnu++17"],
+ version="8.3.0",
+ type="gcc",
+ compiler="/usr/bin/g++-8",
+ language="C++",
+ )
+ DEFAULT_GCC_RESULT = GCC_8_RESULT + {"compiler": "/usr/bin/gcc"}
+ DEFAULT_GXX_RESULT = GXX_8_RESULT + {"compiler": "/usr/bin/g++"}
+
+ CLANG_3_3_RESULT = (
+ "Only clang/llvm 7.0 or newer is supported (found version 3.3.0)."
+ )
+ CLANGXX_3_3_RESULT = (
+ "Only clang/llvm 7.0 or newer is supported (found version 3.3.0)."
+ )
+ CLANG_4_0_RESULT = (
+ "Only clang/llvm 7.0 or newer is supported (found version 4.0.2)."
+ )
+ CLANGXX_4_0_RESULT = (
+ "Only clang/llvm 7.0 or newer is supported (found version 4.0.2)."
+ )
+ CLANG_7_0_RESULT = CompilerResult(
+ flags=["-std=gnu99"],
+ version="7.0.0",
+ type="clang",
+ compiler="/usr/bin/clang-7.0",
+ language="C",
+ )
+ CLANGXX_7_0_RESULT = CompilerResult(
+ flags=["-std=gnu++17"],
+ version="7.0.0",
+ type="clang",
+ compiler="/usr/bin/clang++-7.0",
+ language="C++",
+ )
+ DEFAULT_CLANG_RESULT = CLANG_7_0_RESULT + {"compiler": "/usr/bin/clang"}
+ DEFAULT_CLANGXX_RESULT = CLANGXX_7_0_RESULT + {"compiler": "/usr/bin/clang++"}
+
+ def test_default(self):
+ # We'll try clang and gcc, and find clang first.
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": self.DEFAULT_CLANG_RESULT,
+ "cxx_compiler": self.DEFAULT_CLANGXX_RESULT,
+ },
+ )
+
+ def test_gcc(self):
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": self.DEFAULT_GCC_RESULT,
+ "cxx_compiler": self.DEFAULT_GXX_RESULT,
+ },
+ environ={"CC": "gcc", "CXX": "g++"},
+ )
+
+ def test_unsupported_gcc(self):
+ self.do_toolchain_test(
+ self.PATHS,
+ {"c_compiler": self.GCC_4_9_RESULT},
+ environ={"CC": "gcc-4.9", "CXX": "g++-4.9"},
+ )
+
+ # Maybe this should be reporting the mismatched version instead.
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": self.DEFAULT_GCC_RESULT,
+ "cxx_compiler": self.GXX_4_9_RESULT,
+ },
+ environ={"CC": "gcc", "CXX": "g++-4.9"},
+ )
+
+ def test_overridden_gcc(self):
+ self.do_toolchain_test(
+ self.PATHS,
+ {"c_compiler": self.GCC_7_RESULT, "cxx_compiler": self.GXX_7_RESULT},
+ environ={"CC": "gcc-7", "CXX": "g++-7"},
+ )
+
+ def test_guess_cxx(self):
+ # When CXX is not set, we guess it from CC.
+ self.do_toolchain_test(
+ self.PATHS,
+ {"c_compiler": self.GCC_7_RESULT, "cxx_compiler": self.GXX_7_RESULT},
+ environ={"CC": "gcc-7"},
+ )
+
+ def test_mismatched_gcc(self):
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": self.DEFAULT_GCC_RESULT,
+ "cxx_compiler": (
+ "The target C compiler is version 8.3.0, while the target "
+ "C++ compiler is version 10.2.1. Need to use the same compiler "
+ "version."
+ ),
+ },
+ environ={"CC": "gcc", "CXX": "g++-10"},
+ )
+
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": self.DEFAULT_GCC_RESULT,
+ "cxx_compiler": self.DEFAULT_GXX_RESULT,
+ "host_c_compiler": self.DEFAULT_GCC_RESULT,
+ "host_cxx_compiler": (
+ "The host C compiler is version 8.3.0, while the host "
+ "C++ compiler is version 10.2.1. Need to use the same compiler "
+ "version."
+ ),
+ },
+ environ={"CC": "gcc", "HOST_CXX": "g++-10"},
+ )
+
+ def test_mismatched_compiler(self):
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": self.DEFAULT_CLANG_RESULT,
+ "cxx_compiler": (
+ "The target C compiler is clang, while the target C++ compiler "
+ "is gcc. Need to use the same compiler suite."
+ ),
+ },
+ environ={"CXX": "g++"},
+ )
+
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": self.DEFAULT_CLANG_RESULT,
+ "cxx_compiler": self.DEFAULT_CLANGXX_RESULT,
+ "host_c_compiler": self.DEFAULT_CLANG_RESULT,
+ "host_cxx_compiler": (
+ "The host C compiler is clang, while the host C++ compiler "
+ "is gcc. Need to use the same compiler suite."
+ ),
+ },
+ environ={"HOST_CXX": "g++"},
+ )
+
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": "`%s` is not a C compiler."
+ % mozpath.abspath("/usr/bin/g++")
+ },
+ environ={"CC": "g++"},
+ )
+
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": self.DEFAULT_CLANG_RESULT,
+ "cxx_compiler": "`%s` is not a C++ compiler."
+ % mozpath.abspath("/usr/bin/clang"),
+ },
+ environ={"CXX": "clang"},
+ )
+
+ def test_clang(self):
+ # We'll try gcc and clang, but since there is no gcc (gcc-x.y doesn't
+ # count), find clang.
+ paths = {
+ k: v
+ for k, v in six.iteritems(self.PATHS)
+ if os.path.basename(k) not in ("gcc", "g++")
+ }
+ self.do_toolchain_test(
+ paths,
+ {
+ "c_compiler": self.DEFAULT_CLANG_RESULT,
+ "cxx_compiler": self.DEFAULT_CLANGXX_RESULT,
+ },
+ )
+
+ def test_guess_cxx_clang(self):
+ # When CXX is not set, we guess it from CC.
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": self.CLANG_7_0_RESULT,
+ "cxx_compiler": self.CLANGXX_7_0_RESULT,
+ },
+ environ={"CC": "clang-7.0"},
+ )
+
+ def test_unsupported_clang(self):
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": self.CLANG_3_3_RESULT,
+ "cxx_compiler": self.CLANGXX_3_3_RESULT,
+ },
+ environ={"CC": "clang-3.3", "CXX": "clang++-3.3"},
+ )
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": self.CLANG_4_0_RESULT,
+ "cxx_compiler": self.CLANGXX_4_0_RESULT,
+ },
+ environ={"CC": "clang-4.0", "CXX": "clang++-4.0"},
+ )
+
+ def test_no_supported_compiler(self):
+ # Even if there are gcc-x.y or clang-x.y compilers available, we
+ # don't try them. This could be considered something to improve.
+ paths = {
+ k: v
+ for k, v in six.iteritems(self.PATHS)
+ if os.path.basename(k) not in ("gcc", "g++", "clang", "clang++")
+ }
+ self.do_toolchain_test(
+ paths, {"c_compiler": "Cannot find the target C compiler"}
+ )
+
+ def test_absolute_path(self):
+ paths = dict(self.PATHS)
+ paths.update(
+ {
+ "/opt/clang/bin/clang": paths["/usr/bin/clang"],
+ "/opt/clang/bin/clang++": paths["/usr/bin/clang++"],
+ }
+ )
+ result = {
+ "c_compiler": self.DEFAULT_CLANG_RESULT
+ + {"compiler": "/opt/clang/bin/clang"},
+ "cxx_compiler": self.DEFAULT_CLANGXX_RESULT
+ + {"compiler": "/opt/clang/bin/clang++"},
+ }
+ self.do_toolchain_test(
+ paths,
+ result,
+ environ={"CC": "/opt/clang/bin/clang", "CXX": "/opt/clang/bin/clang++"},
+ )
+ # With CXX guess too.
+ self.do_toolchain_test(paths, result, environ={"CC": "/opt/clang/bin/clang"})
+
+ def test_atypical_name(self):
+ paths = dict(self.PATHS)
+ paths.update(
+ {
+ "/usr/bin/afl-clang-fast": paths["/usr/bin/clang"],
+ "/usr/bin/afl-clang-fast++": paths["/usr/bin/clang++"],
+ }
+ )
+ self.do_toolchain_test(
+ paths,
+ {
+ "c_compiler": self.DEFAULT_CLANG_RESULT
+ + {"compiler": "/usr/bin/afl-clang-fast"},
+ "cxx_compiler": self.DEFAULT_CLANGXX_RESULT
+ + {"compiler": "/usr/bin/afl-clang-fast++"},
+ },
+ environ={"CC": "afl-clang-fast", "CXX": "afl-clang-fast++"},
+ )
+
+ def test_mixed_compilers(self):
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": self.DEFAULT_CLANG_RESULT,
+ "cxx_compiler": self.DEFAULT_CLANGXX_RESULT,
+ "host_c_compiler": self.DEFAULT_GCC_RESULT,
+ "host_cxx_compiler": self.DEFAULT_GXX_RESULT,
+ },
+ environ={"CC": "clang", "HOST_CC": "gcc"},
+ )
+
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": self.DEFAULT_CLANG_RESULT,
+ "cxx_compiler": self.DEFAULT_CLANGXX_RESULT,
+ "host_c_compiler": self.DEFAULT_GCC_RESULT,
+ "host_cxx_compiler": self.DEFAULT_GXX_RESULT,
+ },
+ environ={"CC": "clang", "CXX": "clang++", "HOST_CC": "gcc"},
+ )
+
+
+class LinuxSimpleCrossToolchainTest(BaseToolchainTest):
+ TARGET = "i686-pc-linux-gnu"
+ PATHS = LinuxToolchainTest.PATHS
+ DEFAULT_GCC_RESULT = LinuxToolchainTest.DEFAULT_GCC_RESULT
+ DEFAULT_GXX_RESULT = LinuxToolchainTest.DEFAULT_GXX_RESULT
+ DEFAULT_CLANG_RESULT = LinuxToolchainTest.DEFAULT_CLANG_RESULT
+ DEFAULT_CLANGXX_RESULT = LinuxToolchainTest.DEFAULT_CLANGXX_RESULT
+
+ def test_cross_gcc(self):
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": self.DEFAULT_GCC_RESULT + {"flags": ["-m32"]},
+ "cxx_compiler": self.DEFAULT_GXX_RESULT + {"flags": ["-m32"]},
+ "host_c_compiler": self.DEFAULT_GCC_RESULT,
+ "host_cxx_compiler": self.DEFAULT_GXX_RESULT,
+ },
+ environ={"CC": "gcc"},
+ )
+
+ def test_cross_clang(self):
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": self.DEFAULT_CLANG_RESULT + {"flags": ["-m32"]},
+ "cxx_compiler": self.DEFAULT_CLANGXX_RESULT + {"flags": ["-m32"]},
+ "host_c_compiler": self.DEFAULT_CLANG_RESULT,
+ "host_cxx_compiler": self.DEFAULT_CLANGXX_RESULT,
+ },
+ )
+
+
+class LinuxX86_64CrossToolchainTest(BaseToolchainTest):
+ HOST = "i686-pc-linux-gnu"
+ TARGET = "x86_64-pc-linux-gnu"
+ PATHS = {
+ "/usr/bin/gcc": DEFAULT_GCC + GCC_PLATFORM_X86_LINUX,
+ "/usr/bin/g++": DEFAULT_GXX + GCC_PLATFORM_X86_LINUX,
+ "/usr/bin/clang": DEFAULT_CLANG + CLANG_PLATFORM_X86_LINUX,
+ "/usr/bin/clang++": DEFAULT_CLANGXX + CLANG_PLATFORM_X86_LINUX,
+ }
+ DEFAULT_GCC_RESULT = LinuxToolchainTest.DEFAULT_GCC_RESULT
+ DEFAULT_GXX_RESULT = LinuxToolchainTest.DEFAULT_GXX_RESULT
+ DEFAULT_CLANG_RESULT = LinuxToolchainTest.DEFAULT_CLANG_RESULT
+ DEFAULT_CLANGXX_RESULT = LinuxToolchainTest.DEFAULT_CLANGXX_RESULT
+
+ def test_cross_gcc(self):
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": self.DEFAULT_GCC_RESULT + {"flags": ["-m64"]},
+ "cxx_compiler": self.DEFAULT_GXX_RESULT + {"flags": ["-m64"]},
+ "host_c_compiler": self.DEFAULT_GCC_RESULT,
+ "host_cxx_compiler": self.DEFAULT_GXX_RESULT,
+ },
+ environ={"CC": "gcc"},
+ )
+
+ def test_cross_clang(self):
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": self.DEFAULT_CLANG_RESULT + {"flags": ["-m64"]},
+ "cxx_compiler": self.DEFAULT_CLANGXX_RESULT + {"flags": ["-m64"]},
+ "host_c_compiler": self.DEFAULT_CLANG_RESULT,
+ "host_cxx_compiler": self.DEFAULT_CLANGXX_RESULT,
+ },
+ )
+
+
+def xcrun(stdin, args):
+ if args == ("--show-sdk-path",):
+ return (
+ 0,
+ mozpath.join(os.path.abspath(os.path.dirname(__file__)), "macos_fake_sdk"),
+ "",
+ )
+ raise NotImplementedError()
+
+
+class OSXToolchainTest(BaseToolchainTest):
+ HOST = "x86_64-apple-darwin11.2.0"
+ PATHS = {
+ "/usr/bin/gcc-5": GCC_5 + GCC_PLATFORM_X86_64_OSX,
+ "/usr/bin/g++-5": GXX_5 + GCC_PLATFORM_X86_64_OSX,
+ "/usr/bin/gcc-8": GCC_8 + GCC_PLATFORM_X86_64_OSX,
+ "/usr/bin/g++-8": GXX_8 + GCC_PLATFORM_X86_64_OSX,
+ "/usr/bin/clang": XCODE_CLANG_7_0 + CLANG_PLATFORM_X86_64_OSX,
+ "/usr/bin/clang++": XCODE_CLANGXX_7_0 + CLANG_PLATFORM_X86_64_OSX,
+ "/usr/bin/clang-4.0": XCODE_CLANG_4_0 + CLANG_PLATFORM_X86_64_OSX,
+ "/usr/bin/clang++-4.0": XCODE_CLANGXX_4_0 + CLANG_PLATFORM_X86_64_OSX,
+ "/usr/bin/clang-3.3": XCODE_CLANG_3_3 + CLANG_PLATFORM_X86_64_OSX,
+ "/usr/bin/clang++-3.3": XCODE_CLANGXX_3_3 + CLANG_PLATFORM_X86_64_OSX,
+ "/usr/bin/xcrun": xcrun,
+ }
+ CLANG_3_3_RESULT = (
+ "Only clang/llvm 7.0 or newer is supported (found version 4.0.0.or.less)."
+ )
+ CLANGXX_3_3_RESULT = (
+ "Only clang/llvm 7.0 or newer is supported (found version 4.0.0.or.less)."
+ )
+ CLANG_4_0_RESULT = (
+ "Only clang/llvm 7.0 or newer is supported (found version 4.0.0.or.less)."
+ )
+ CLANGXX_4_0_RESULT = (
+ "Only clang/llvm 7.0 or newer is supported (found version 4.0.0.or.less)."
+ )
+ DEFAULT_CLANG_RESULT = CompilerResult(
+ flags=["-std=gnu99"],
+ version="7.0.0",
+ type="clang",
+ compiler="/usr/bin/clang",
+ language="C",
+ )
+ DEFAULT_CLANGXX_RESULT = CompilerResult(
+ flags=["-stdlib=libc++", "-std=gnu++17"],
+ version="7.0.0",
+ type="clang",
+ compiler="/usr/bin/clang++",
+ language="C++",
+ )
+ GCC_5_RESULT = LinuxToolchainTest.GCC_5_RESULT
+ GXX_5_RESULT = LinuxToolchainTest.GXX_5_RESULT
+ GCC_8_RESULT = LinuxToolchainTest.GCC_8_RESULT
+ GXX_8_RESULT = LinuxToolchainTest.GXX_8_RESULT
+ SYSROOT_FLAGS = {
+ "flags": PrependFlags(
+ [
+ "-isysroot",
+ xcrun("", ("--show-sdk-path",))[1],
+ "-mmacosx-version-min=10.12",
+ ]
+ )
+ }
+
+ def test_clang(self):
+ # We only try clang because gcc is known not to work.
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": self.DEFAULT_CLANG_RESULT + self.SYSROOT_FLAGS,
+ "cxx_compiler": self.DEFAULT_CLANGXX_RESULT + self.SYSROOT_FLAGS,
+ },
+ )
+
+ def test_not_gcc(self):
+ # We won't pick GCC if it's the only thing available.
+ paths = {
+ k: v
+ for k, v in six.iteritems(self.PATHS)
+ if os.path.basename(k) not in ("clang", "clang++")
+ }
+ self.do_toolchain_test(
+ paths, {"c_compiler": "Cannot find the target C compiler"}
+ )
+
+ def test_unsupported_clang(self):
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": self.CLANG_3_3_RESULT,
+ "cxx_compiler": self.CLANGXX_3_3_RESULT,
+ },
+ environ={"CC": "clang-3.3", "CXX": "clang++-3.3"},
+ )
+ # When targeting mac, we require at least version 5.
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": self.CLANG_4_0_RESULT,
+ "cxx_compiler": self.CLANGXX_4_0_RESULT,
+ },
+ environ={"CC": "clang-4.0", "CXX": "clang++-4.0"},
+ )
+
+ def test_forced_gcc(self):
+ # GCC can still be forced if the user really wants it.
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": self.GCC_8_RESULT + self.SYSROOT_FLAGS,
+ "cxx_compiler": self.GXX_8_RESULT + self.SYSROOT_FLAGS,
+ },
+ environ={"CC": "gcc-8", "CXX": "g++-8"},
+ )
+
+ def test_forced_unsupported_gcc(self):
+ self.do_toolchain_test(
+ self.PATHS,
+ {"c_compiler": self.GCC_5_RESULT},
+ environ={"CC": "gcc-5", "CXX": "g++-5"},
+ )
+
+
+class MingwToolchainTest(BaseToolchainTest):
+ HOST = "i686-pc-mingw32"
+
+ # For the purpose of this test, it doesn't matter that the paths are not
+ # real Windows paths.
+ PATHS = {
+ "/usr/bin/cl": VS_2017u8 + VS_PLATFORM_X86,
+ "/usr/bin/clang-cl-3.9": CLANG_CL_3_9 + CLANG_CL_PLATFORM_X86,
+ "/usr/bin/clang-cl": CLANG_CL_9_0 + CLANG_CL_PLATFORM_X86,
+ "/usr/bin/gcc": DEFAULT_GCC + GCC_PLATFORM_X86_WIN + MINGW32,
+ "/usr/bin/g++": DEFAULT_GXX + GCC_PLATFORM_X86_WIN + MINGW32,
+ "/usr/bin/gcc-4.9": GCC_4_9 + GCC_PLATFORM_X86_WIN + MINGW32,
+ "/usr/bin/g++-4.9": GXX_4_9 + GCC_PLATFORM_X86_WIN + MINGW32,
+ "/usr/bin/gcc-5": GCC_5 + GCC_PLATFORM_X86_WIN + MINGW32,
+ "/usr/bin/g++-5": GXX_5 + GCC_PLATFORM_X86_WIN + MINGW32,
+ "/usr/bin/gcc-6": GCC_6 + GCC_PLATFORM_X86_WIN + MINGW32,
+ "/usr/bin/g++-6": GXX_6 + GCC_PLATFORM_X86_WIN + MINGW32,
+ "/usr/bin/gcc-7": GCC_7 + GCC_PLATFORM_X86_WIN + MINGW32,
+ "/usr/bin/g++-7": GXX_7 + GCC_PLATFORM_X86_WIN + MINGW32,
+ "/usr/bin/clang": DEFAULT_CLANG + CLANG_PLATFORM_X86_WIN,
+ "/usr/bin/clang++": DEFAULT_CLANGXX + CLANG_PLATFORM_X86_WIN,
+ "/usr/bin/clang-7.0": CLANG_7_0 + CLANG_PLATFORM_X86_WIN,
+ "/usr/bin/clang++-7.0": CLANGXX_7_0 + CLANG_PLATFORM_X86_WIN,
+ "/usr/bin/clang-4.0": CLANG_4_0 + CLANG_PLATFORM_X86_WIN,
+ "/usr/bin/clang++-4.0": CLANGXX_4_0 + CLANG_PLATFORM_X86_WIN,
+ "/usr/bin/clang-3.3": CLANG_3_3 + CLANG_PLATFORM_X86_WIN,
+ "/usr/bin/clang++-3.3": CLANGXX_3_3 + CLANG_PLATFORM_X86_WIN,
+ }
+
+ CLANG_CL_3_9_RESULT = (
+ "Only clang-cl 9.0 or newer is supported (found version 3.9.0)"
+ )
+ CLANG_CL_9_0_RESULT = CompilerResult(
+ version="9.0.0",
+ flags=["-Xclang", "-std=gnu99"],
+ type="clang-cl",
+ compiler="/usr/bin/clang-cl",
+ language="C",
+ )
+ CLANGXX_CL_3_9_RESULT = (
+ "Only clang-cl 9.0 or newer is supported (found version 3.9.0)"
+ )
+ CLANGXX_CL_9_0_RESULT = CompilerResult(
+ version="9.0.0",
+ flags=["-Xclang", "-std=c++17"],
+ type="clang-cl",
+ compiler="/usr/bin/clang-cl",
+ language="C++",
+ )
+ CLANG_3_3_RESULT = LinuxToolchainTest.CLANG_3_3_RESULT
+ CLANGXX_3_3_RESULT = LinuxToolchainTest.CLANGXX_3_3_RESULT
+ CLANG_4_0_RESULT = LinuxToolchainTest.CLANG_4_0_RESULT
+ CLANGXX_4_0_RESULT = LinuxToolchainTest.CLANGXX_4_0_RESULT
+ DEFAULT_CLANG_RESULT = LinuxToolchainTest.DEFAULT_CLANG_RESULT
+ DEFAULT_CLANGXX_RESULT = LinuxToolchainTest.DEFAULT_CLANGXX_RESULT
+
+ def test_unsupported_msvc(self):
+ self.do_toolchain_test(
+ self.PATHS,
+ {"c_compiler": "Unknown compiler or compiler not supported."},
+ environ={"CC": "/usr/bin/cl"},
+ )
+
+ def test_unsupported_clang_cl(self):
+ self.do_toolchain_test(
+ self.PATHS,
+ {"c_compiler": self.CLANG_CL_3_9_RESULT},
+ environ={"CC": "/usr/bin/clang-cl-3.9"},
+ )
+
+ def test_clang_cl(self):
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": self.CLANG_CL_9_0_RESULT,
+ "cxx_compiler": self.CLANGXX_CL_9_0_RESULT,
+ },
+ )
+
+ def test_gcc(self):
+ # GCC is unsupported, if you try it should find clang.
+ paths = {
+ k: v
+ for k, v in six.iteritems(self.PATHS)
+ if os.path.basename(k) != "clang-cl"
+ }
+ self.do_toolchain_test(
+ paths,
+ {
+ "c_compiler": self.DEFAULT_CLANG_RESULT,
+ "cxx_compiler": self.DEFAULT_CLANGXX_RESULT,
+ },
+ )
+
+ # This test is not perfect, as the GCC version needs to be updated when we
+ # bump the minimum GCC version, but the idea is that even supported GCC
+ # on other platforms should not be supported on Windows.
+ def test_overridden_supported_elsewhere_gcc(self):
+ self.do_toolchain_test(
+ self.PATHS,
+ {"c_compiler": "Unknown compiler or compiler not supported."},
+ environ={"CC": "gcc-7", "CXX": "g++-7"},
+ )
+
+ def test_overridden_unsupported_gcc(self):
+ self.do_toolchain_test(
+ self.PATHS,
+ {"c_compiler": "Unknown compiler or compiler not supported."},
+ environ={"CC": "gcc-5", "CXX": "g++-5"},
+ )
+
+ def test_clang(self):
+ # We'll pick clang if nothing else is found.
+ paths = {
+ k: v
+ for k, v in six.iteritems(self.PATHS)
+ if os.path.basename(k) not in ("clang-cl", "gcc")
+ }
+ self.do_toolchain_test(
+ paths,
+ {
+ "c_compiler": self.DEFAULT_CLANG_RESULT,
+ "cxx_compiler": self.DEFAULT_CLANGXX_RESULT,
+ },
+ )
+
+ def test_overridden_unsupported_clang(self):
+ # clang 3.3 C compiler is perfectly fine, but we need more for C++.
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": self.CLANG_3_3_RESULT,
+ "cxx_compiler": self.CLANGXX_3_3_RESULT,
+ },
+ environ={"CC": "clang-3.3", "CXX": "clang++-3.3"},
+ )
+
+
+class Mingw64ToolchainTest(MingwToolchainTest):
+ HOST = "x86_64-pc-mingw32"
+
+ # For the purpose of this test, it doesn't matter that the paths are not
+ # real Windows paths.
+ PATHS = {
+ "/usr/bin/cl": VS_2017u8 + VS_PLATFORM_X86_64,
+ "/usr/bin/clang-cl": CLANG_CL_9_0 + CLANG_CL_PLATFORM_X86_64,
+ "/usr/bin/clang-cl-3.9": CLANG_CL_3_9 + CLANG_CL_PLATFORM_X86_64,
+ "/usr/bin/gcc": DEFAULT_GCC + GCC_PLATFORM_X86_64_WIN + MINGW32,
+ "/usr/bin/g++": DEFAULT_GXX + GCC_PLATFORM_X86_64_WIN + MINGW32,
+ "/usr/bin/gcc-4.9": GCC_4_9 + GCC_PLATFORM_X86_64_WIN + MINGW32,
+ "/usr/bin/g++-4.9": GXX_4_9 + GCC_PLATFORM_X86_64_WIN + MINGW32,
+ "/usr/bin/gcc-5": GCC_5 + GCC_PLATFORM_X86_64_WIN + MINGW32,
+ "/usr/bin/g++-5": GXX_5 + GCC_PLATFORM_X86_64_WIN + MINGW32,
+ "/usr/bin/gcc-6": GCC_6 + GCC_PLATFORM_X86_64_WIN + MINGW32,
+ "/usr/bin/g++-6": GXX_6 + GCC_PLATFORM_X86_64_WIN + MINGW32,
+ "/usr/bin/gcc-7": GCC_7 + GCC_PLATFORM_X86_64_WIN + MINGW32,
+ "/usr/bin/g++-7": GXX_7 + GCC_PLATFORM_X86_64_WIN + MINGW32,
+ "/usr/bin/clang": DEFAULT_CLANG + CLANG_PLATFORM_X86_64_WIN,
+ "/usr/bin/clang++": DEFAULT_CLANGXX + CLANG_PLATFORM_X86_64_WIN,
+ "/usr/bin/clang-7.0": CLANG_7_0 + CLANG_PLATFORM_X86_64_WIN,
+ "/usr/bin/clang++-7.0": CLANGXX_7_0 + CLANG_PLATFORM_X86_64_WIN,
+ "/usr/bin/clang-4.0": CLANG_4_0 + CLANG_PLATFORM_X86_64_WIN,
+ "/usr/bin/clang++-4.0": CLANGXX_4_0 + CLANG_PLATFORM_X86_64_WIN,
+ "/usr/bin/clang-3.3": CLANG_3_3 + CLANG_PLATFORM_X86_64_WIN,
+ "/usr/bin/clang++-3.3": CLANGXX_3_3 + CLANG_PLATFORM_X86_64_WIN,
+ }
+
+
+class WindowsToolchainTest(BaseToolchainTest):
+ HOST = "i686-pc-windows-msvc"
+
+ PATHS = MingwToolchainTest.PATHS
+
+ def test_unsupported_msvc(self):
+ self.do_toolchain_test(
+ self.PATHS,
+ {"c_compiler": "Unknown compiler or compiler not supported."},
+ environ={"CC": "/usr/bin/cl"},
+ )
+
+ def test_unsupported_clang_cl(self):
+ self.do_toolchain_test(
+ self.PATHS,
+ {"c_compiler": MingwToolchainTest.CLANG_CL_3_9_RESULT},
+ environ={"CC": "/usr/bin/clang-cl-3.9"},
+ )
+
+ def test_clang_cl(self):
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": MingwToolchainTest.CLANG_CL_9_0_RESULT,
+ "cxx_compiler": MingwToolchainTest.CLANGXX_CL_9_0_RESULT,
+ },
+ )
+
+ def test_unsupported_gcc(self):
+ paths = {
+ k: v
+ for k, v in six.iteritems(self.PATHS)
+ if os.path.basename(k) != "clang-cl"
+ }
+ self.do_toolchain_test(
+ paths,
+ {"c_compiler": "Cannot find the target C compiler"},
+ )
+
+ def test_overridden_unsupported_gcc(self):
+ self.do_toolchain_test(
+ self.PATHS,
+ {"c_compiler": "Unknown compiler or compiler not supported."},
+ environ={"CC": "gcc-5", "CXX": "g++-5"},
+ )
+
+ def test_unsupported_clang(self):
+ paths = {
+ k: v
+ for k, v in six.iteritems(self.PATHS)
+ if os.path.basename(k) not in ("clang-cl", "gcc")
+ }
+ self.do_toolchain_test(
+ paths,
+ {"c_compiler": "Cannot find the target C compiler"},
+ )
+
+ def test_overridden_unsupported_clang(self):
+ self.do_toolchain_test(
+ self.PATHS,
+ {"c_compiler": "Unknown compiler or compiler not supported."},
+ environ={"CC": "clang-3.3", "CXX": "clang++-3.3"},
+ )
+
+
+class Windows64ToolchainTest(WindowsToolchainTest):
+ HOST = "x86_64-pc-windows-msvc"
+
+ PATHS = Mingw64ToolchainTest.PATHS
+
+
+class WindowsGnuToolchainTest(BaseToolchainTest):
+ HOST = "i686-pc-windows-gnu"
+
+ PATHS = MingwToolchainTest.PATHS
+
+ def test_unsupported_msvc(self):
+ self.do_toolchain_test(
+ self.PATHS,
+ {"c_compiler": "Unknown compiler or compiler not supported."},
+ environ={"CC": "/usr/bin/cl"},
+ )
+
+ def test_unsupported_clang_cl(self):
+ paths = {
+ k: v
+ for k, v in six.iteritems(self.PATHS)
+ if os.path.basename(k) == "clang-cl"
+ }
+ self.do_toolchain_test(
+ paths,
+ {"c_compiler": "Cannot find the target C compiler"},
+ )
+
+ def test_overridden_unsupported_clang_cl(self):
+ self.do_toolchain_test(
+ self.PATHS,
+ {"c_compiler": "Unknown compiler or compiler not supported."},
+ environ={"CC": "clang-cl", "CXX": "clang-cl"},
+ )
+
+ def test_unsupported_gcc(self):
+ paths = {
+ k: v for k, v in six.iteritems(self.PATHS) if os.path.basename(k) == "gcc"
+ }
+ self.do_toolchain_test(
+ paths,
+ {"c_compiler": "Cannot find the target C compiler"},
+ )
+
+ def test_overridden_unsupported_gcc(self):
+ self.do_toolchain_test(
+ self.PATHS,
+ {"c_compiler": "Unknown compiler or compiler not supported."},
+ environ={"CC": "gcc-5", "CXX": "g++-5"},
+ )
+
+ def test_clang(self):
+ paths = {
+ k: v
+ for k, v in six.iteritems(self.PATHS)
+ if os.path.basename(k) not in ("clang-cl", "gcc")
+ }
+ self.do_toolchain_test(
+ paths,
+ {
+ "c_compiler": MingwToolchainTest.DEFAULT_CLANG_RESULT,
+ "cxx_compiler": MingwToolchainTest.DEFAULT_CLANGXX_RESULT,
+ },
+ )
+
+ def test_overridden_unsupported_clang(self):
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": MingwToolchainTest.CLANG_3_3_RESULT,
+ "cxx_compiler": MingwToolchainTest.CLANGXX_3_3_RESULT,
+ },
+ environ={"CC": "clang-3.3", "CXX": "clang++-3.3"},
+ )
+
+
+class WindowsGnu64ToolchainTest(WindowsGnuToolchainTest):
+ HOST = "x86_64-pc-windows-gnu"
+
+ PATHS = Mingw64ToolchainTest.PATHS
+
+
+class LinuxCrossCompileToolchainTest(BaseToolchainTest):
+ TARGET = "arm-unknown-linux-gnu"
+ PATHS = {
+ "/usr/bin/arm-linux-gnu-gcc-4.9": GCC_4_9 + GCC_PLATFORM_ARM_LINUX,
+ "/usr/bin/arm-linux-gnu-g++-4.9": GXX_4_9 + GCC_PLATFORM_ARM_LINUX,
+ "/usr/bin/arm-linux-gnu-gcc-5": GCC_5 + GCC_PLATFORM_ARM_LINUX,
+ "/usr/bin/arm-linux-gnu-g++-5": GXX_5 + GCC_PLATFORM_ARM_LINUX,
+ "/usr/bin/arm-linux-gnu-gcc": DEFAULT_GCC + GCC_PLATFORM_ARM_LINUX,
+ "/usr/bin/arm-linux-gnu-g++": DEFAULT_GXX + GCC_PLATFORM_ARM_LINUX,
+ "/usr/bin/arm-linux-gnu-gcc-7": GCC_7 + GCC_PLATFORM_ARM_LINUX,
+ "/usr/bin/arm-linux-gnu-g++-7": GXX_7 + GCC_PLATFORM_ARM_LINUX,
+ }
+ PATHS.update(LinuxToolchainTest.PATHS)
+ ARM_GCC_4_9_RESULT = LinuxToolchainTest.GCC_4_9_RESULT
+ ARM_GXX_4_9_RESULT = LinuxToolchainTest.GXX_4_9_RESULT
+ ARM_GCC_5_RESULT = LinuxToolchainTest.GCC_5_RESULT
+ ARM_GXX_5_RESULT = LinuxToolchainTest.GXX_5_RESULT
+ ARM_DEFAULT_GCC_RESULT = LinuxToolchainTest.DEFAULT_GCC_RESULT + {
+ "compiler": "/usr/bin/arm-linux-gnu-gcc"
+ }
+ ARM_DEFAULT_GXX_RESULT = LinuxToolchainTest.DEFAULT_GXX_RESULT + {
+ "compiler": "/usr/bin/arm-linux-gnu-g++"
+ }
+ ARM_GCC_7_RESULT = LinuxToolchainTest.GCC_7_RESULT
+ ARM_GXX_7_RESULT = LinuxToolchainTest.GXX_7_RESULT
+ DEFAULT_CLANG_RESULT = LinuxToolchainTest.DEFAULT_CLANG_RESULT
+ DEFAULT_CLANGXX_RESULT = LinuxToolchainTest.DEFAULT_CLANGXX_RESULT
+ DEFAULT_GCC_RESULT = LinuxToolchainTest.DEFAULT_GCC_RESULT
+ DEFAULT_GXX_RESULT = LinuxToolchainTest.DEFAULT_GXX_RESULT
+
+ little_endian = FakeCompiler(GCC_PLATFORM_LINUX, GCC_PLATFORM_LITTLE_ENDIAN)
+ big_endian = FakeCompiler(GCC_PLATFORM_LINUX, GCC_PLATFORM_BIG_ENDIAN)
+
+ PLATFORMS = {
+ "i686-pc-linux-gnu": GCC_PLATFORM_X86_LINUX,
+ "x86_64-pc-linux-gnu": GCC_PLATFORM_X86_64_LINUX,
+ "arm-unknown-linux-gnu": GCC_PLATFORM_ARM_LINUX,
+ "aarch64-unknown-linux-gnu": little_endian + {"__aarch64__": 1},
+ "ia64-unknown-linux-gnu": little_endian + {"__ia64__": 1},
+ "s390x-unknown-linux-gnu": big_endian + {"__s390x__": 1, "__s390__": 1},
+ "s390-unknown-linux-gnu": big_endian + {"__s390__": 1},
+ "powerpc64-unknown-linux-gnu": big_endian
+ + {
+ None: {"__powerpc64__": 1, "__powerpc__": 1},
+ "-m32": {"__powerpc64__": False},
+ },
+ "powerpc-unknown-linux-gnu": big_endian
+ + {None: {"__powerpc__": 1}, "-m64": {"__powerpc64__": 1}},
+ "alpha-unknown-linux-gnu": little_endian + {"__alpha__": 1},
+ "hppa-unknown-linux-gnu": big_endian + {"__hppa__": 1},
+ "sparc64-unknown-linux-gnu": big_endian
+ + {None: {"__arch64__": 1, "__sparc__": 1}, "-m32": {"__arch64__": False}},
+ "sparc-unknown-linux-gnu": big_endian
+ + {None: {"__sparc__": 1}, "-m64": {"__arch64__": 1}},
+ "m68k-unknown-linux-gnu": big_endian + {"__m68k__": 1},
+ "mips64-unknown-linux-gnuabi64": big_endian + {"__mips64": 1, "__mips__": 1},
+ "mips-unknown-linux-gnu": big_endian + {"__mips__": 1},
+ "riscv64-unknown-linux-gnu": little_endian + {"__riscv": 1, "__riscv_xlen": 64},
+ "sh4-unknown-linux-gnu": little_endian + {"__sh__": 1},
+ }
+
+ PLATFORMS["powerpc64le-unknown-linux-gnu"] = (
+ PLATFORMS["powerpc64-unknown-linux-gnu"] + GCC_PLATFORM_LITTLE_ENDIAN
+ )
+ PLATFORMS["mips64el-unknown-linux-gnuabi64"] = (
+ PLATFORMS["mips64-unknown-linux-gnuabi64"] + GCC_PLATFORM_LITTLE_ENDIAN
+ )
+ PLATFORMS["mipsel-unknown-linux-gnu"] = (
+ PLATFORMS["mips-unknown-linux-gnu"] + GCC_PLATFORM_LITTLE_ENDIAN
+ )
+
+ def do_test_cross_gcc_32_64(self, host, target):
+ self.HOST = host
+ self.TARGET = target
+ paths = {
+ "/usr/bin/gcc": DEFAULT_GCC + self.PLATFORMS[host],
+ "/usr/bin/g++": DEFAULT_GXX + self.PLATFORMS[host],
+ }
+ cross_flags = {"flags": ["-m64" if "64" in target else "-m32"]}
+ self.do_toolchain_test(
+ paths,
+ {
+ "c_compiler": self.DEFAULT_GCC_RESULT + cross_flags,
+ "cxx_compiler": self.DEFAULT_GXX_RESULT + cross_flags,
+ "host_c_compiler": self.DEFAULT_GCC_RESULT,
+ "host_cxx_compiler": self.DEFAULT_GXX_RESULT,
+ },
+ )
+ self.HOST = LinuxCrossCompileToolchainTest.HOST
+ self.TARGET = LinuxCrossCompileToolchainTest.TARGET
+
+ def test_cross_x86_x64(self):
+ self.do_test_cross_gcc_32_64("i686-pc-linux-gnu", "x86_64-pc-linux-gnu")
+ self.do_test_cross_gcc_32_64("x86_64-pc-linux-gnu", "i686-pc-linux-gnu")
+
+ def test_cross_sparc_sparc64(self):
+ self.do_test_cross_gcc_32_64(
+ "sparc-unknown-linux-gnu", "sparc64-unknown-linux-gnu"
+ )
+ self.do_test_cross_gcc_32_64(
+ "sparc64-unknown-linux-gnu", "sparc-unknown-linux-gnu"
+ )
+
+ def test_cross_ppc_ppc64(self):
+ self.do_test_cross_gcc_32_64(
+ "powerpc-unknown-linux-gnu", "powerpc64-unknown-linux-gnu"
+ )
+ self.do_test_cross_gcc_32_64(
+ "powerpc64-unknown-linux-gnu", "powerpc-unknown-linux-gnu"
+ )
+
+ def do_test_cross_gcc(self, host, target):
+ self.HOST = host
+ self.TARGET = target
+ host_cpu = host.split("-")[0]
+ cpu, manufacturer, os = target.split("-", 2)
+ toolchain_prefix = "/usr/bin/%s-%s" % (cpu, os)
+ paths = {
+ "/usr/bin/gcc": DEFAULT_GCC + self.PLATFORMS[host],
+ "/usr/bin/g++": DEFAULT_GXX + self.PLATFORMS[host],
+ }
+ self.do_toolchain_test(
+ paths,
+ {
+ "c_compiler": (
+ "Target C compiler target CPU (%s) "
+ "does not match --target CPU (%s)" % (host_cpu, cpu)
+ )
+ },
+ )
+
+ paths.update(
+ {
+ "%s-gcc" % toolchain_prefix: DEFAULT_GCC + self.PLATFORMS[target],
+ "%s-g++" % toolchain_prefix: DEFAULT_GXX + self.PLATFORMS[target],
+ }
+ )
+ self.do_toolchain_test(
+ paths,
+ {
+ "c_compiler": self.DEFAULT_GCC_RESULT
+ + {"compiler": "%s-gcc" % toolchain_prefix},
+ "cxx_compiler": self.DEFAULT_GXX_RESULT
+ + {"compiler": "%s-g++" % toolchain_prefix},
+ "host_c_compiler": self.DEFAULT_GCC_RESULT,
+ "host_cxx_compiler": self.DEFAULT_GXX_RESULT,
+ },
+ )
+ self.HOST = LinuxCrossCompileToolchainTest.HOST
+ self.TARGET = LinuxCrossCompileToolchainTest.TARGET
+
+ def test_cross_gcc_misc(self):
+ for target in self.PLATFORMS:
+ if not target.endswith("-pc-linux-gnu"):
+ self.do_test_cross_gcc("x86_64-pc-linux-gnu", target)
+
+ def test_cannot_cross(self):
+ self.TARGET = "mipsel-unknown-linux-gnu"
+
+ paths = {
+ "/usr/bin/gcc": DEFAULT_GCC + self.PLATFORMS["mips-unknown-linux-gnu"],
+ "/usr/bin/g++": DEFAULT_GXX + self.PLATFORMS["mips-unknown-linux-gnu"],
+ }
+ self.do_toolchain_test(
+ paths,
+ {
+ "c_compiler": (
+ "Target C compiler target endianness (big) "
+ "does not match --target endianness (little)"
+ )
+ },
+ )
+ self.TARGET = LinuxCrossCompileToolchainTest.TARGET
+
+ def test_overridden_cross_gcc(self):
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": self.ARM_GCC_7_RESULT,
+ "cxx_compiler": self.ARM_GXX_7_RESULT,
+ "host_c_compiler": self.DEFAULT_GCC_RESULT,
+ "host_cxx_compiler": self.DEFAULT_GXX_RESULT,
+ },
+ environ={"CC": "arm-linux-gnu-gcc-7", "CXX": "arm-linux-gnu-g++-7"},
+ )
+
+ def test_overridden_unsupported_cross_gcc(self):
+ self.do_toolchain_test(
+ self.PATHS,
+ {"c_compiler": self.ARM_GCC_4_9_RESULT},
+ environ={"CC": "arm-linux-gnu-gcc-4.9", "CXX": "arm-linux-gnu-g++-4.9"},
+ )
+
+ def test_guess_cross_cxx(self):
+ # When CXX is not set, we guess it from CC.
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": self.ARM_GCC_7_RESULT,
+ "cxx_compiler": self.ARM_GXX_7_RESULT,
+ "host_c_compiler": self.DEFAULT_GCC_RESULT,
+ "host_cxx_compiler": self.DEFAULT_GXX_RESULT,
+ },
+ environ={"CC": "arm-linux-gnu-gcc-7"},
+ )
+
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": self.ARM_DEFAULT_GCC_RESULT,
+ "cxx_compiler": self.ARM_DEFAULT_GXX_RESULT,
+ "host_c_compiler": self.DEFAULT_CLANG_RESULT,
+ "host_cxx_compiler": self.DEFAULT_CLANGXX_RESULT,
+ },
+ environ={"CC": "arm-linux-gnu-gcc", "HOST_CC": "clang"},
+ )
+
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": self.ARM_DEFAULT_GCC_RESULT,
+ "cxx_compiler": self.ARM_DEFAULT_GXX_RESULT,
+ "host_c_compiler": self.DEFAULT_CLANG_RESULT,
+ "host_cxx_compiler": self.DEFAULT_CLANGXX_RESULT,
+ },
+ environ={
+ "CC": "arm-linux-gnu-gcc",
+ "CXX": "arm-linux-gnu-g++",
+ "HOST_CC": "clang",
+ },
+ )
+
+ def test_cross_clang(self):
+ cross_clang_result = self.DEFAULT_CLANG_RESULT + {
+ "flags": ["--target=arm-linux-gnu"]
+ }
+ cross_clangxx_result = self.DEFAULT_CLANGXX_RESULT + {
+ "flags": ["--target=arm-linux-gnu"]
+ }
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": cross_clang_result,
+ "cxx_compiler": cross_clangxx_result,
+ "host_c_compiler": self.DEFAULT_CLANG_RESULT,
+ "host_cxx_compiler": self.DEFAULT_CLANGXX_RESULT,
+ },
+ environ={"CC": "clang", "HOST_CC": "clang"},
+ )
+
+ self.do_toolchain_test(
+ self.PATHS,
+ {
+ "c_compiler": cross_clang_result,
+ "cxx_compiler": cross_clangxx_result,
+ "host_c_compiler": self.DEFAULT_CLANG_RESULT,
+ "host_cxx_compiler": self.DEFAULT_CLANGXX_RESULT,
+ },
+ environ={"CC": "clang"},
+ )
+
+    def test_cross_atypical_clang(self):
+        # A clang-based wrapper with a non-standard name (afl-clang-fast,
+        # the AFL fuzzing wrapper) must still be detected as clang and be
+        # usable for cross-compilation.
+        paths = dict(self.PATHS)
+        paths.update(
+            {
+                "/usr/bin/afl-clang-fast": paths["/usr/bin/clang"],
+                "/usr/bin/afl-clang-fast++": paths["/usr/bin/clang++"],
+            }
+        )
+        afl_clang_result = self.DEFAULT_CLANG_RESULT + {
+            "compiler": "/usr/bin/afl-clang-fast"
+        }
+        afl_clangxx_result = self.DEFAULT_CLANGXX_RESULT + {
+            "compiler": "/usr/bin/afl-clang-fast++"
+        }
+        self.do_toolchain_test(
+            paths,
+            {
+                "c_compiler": afl_clang_result + {"flags": ["--target=arm-linux-gnu"]},
+                "cxx_compiler": afl_clangxx_result
+                + {"flags": ["--target=arm-linux-gnu"]},
+                "host_c_compiler": afl_clang_result,
+                "host_cxx_compiler": afl_clangxx_result,
+            },
+            environ={"CC": "afl-clang-fast", "CXX": "afl-clang-fast++"},
+        )
+
+
+class OSXCrossToolchainTest(BaseToolchainTest):
+    """Cross-compiling from a Linux host to a 32-bit macOS target."""
+
+    TARGET = "i686-apple-darwin11.2.0"
+    PATHS = dict(LinuxToolchainTest.PATHS)
+    PATHS.update(
+        {
+            "/usr/bin/clang": CLANG_7_0 + CLANG_PLATFORM_X86_64_LINUX,
+            "/usr/bin/clang++": CLANGXX_7_0 + CLANG_PLATFORM_X86_64_LINUX,
+        }
+    )
+    DEFAULT_CLANG_RESULT = CompilerResult(
+        flags=["-std=gnu99"],
+        version="7.0.0",
+        type="clang",
+        compiler="/usr/bin/clang",
+        language="C",
+    )
+    DEFAULT_CLANGXX_RESULT = CompilerResult(
+        flags=["-std=gnu++17"],
+        version="7.0.0",
+        type="clang",
+        compiler="/usr/bin/clang++",
+        language="C++",
+    )
+
+    def test_osx_cross(self):
+        # Target compilers gain the macOS SDK sysroot flags and the Darwin
+        # --target; host compilers stay the plain Linux clang results.
+        self.do_toolchain_test(
+            self.PATHS,
+            {
+                "c_compiler": self.DEFAULT_CLANG_RESULT
+                + OSXToolchainTest.SYSROOT_FLAGS
+                + {"flags": ["--target=i686-apple-darwin11.2.0"]},
+                "cxx_compiler": self.DEFAULT_CLANGXX_RESULT
+                + {"flags": PrependFlags(["-stdlib=libc++"])}
+                + OSXToolchainTest.SYSROOT_FLAGS
+                + {"flags": ["--target=i686-apple-darwin11.2.0"]},
+                "host_c_compiler": self.DEFAULT_CLANG_RESULT,
+                "host_cxx_compiler": self.DEFAULT_CLANGXX_RESULT,
+            },
+            environ={"CC": "clang"},
+            args=["--with-macos-sdk=%s" % OSXToolchainTest.SYSROOT_FLAGS["flags"][1]],
+        )
+
+    def test_cannot_osx_cross(self):
+        # gcc here targets Linux, so cross-compiling to Darwin must fail
+        # with a kernel mismatch error.
+        self.do_toolchain_test(
+            self.PATHS,
+            {
+                "c_compiler": "Target C compiler target kernel (Linux) does not "
+                "match --target kernel (Darwin)"
+            },
+            environ={"CC": "gcc"},
+            args=["--with-macos-sdk=%s" % OSXToolchainTest.SYSROOT_FLAGS["flags"][1]],
+        )
+
+
+class WindowsCrossToolchainTest(BaseToolchainTest):
+    """Cross-compiling from a Linux host to a Windows (MSVC ABI) target."""
+
+    TARGET = "x86_64-pc-windows-msvc"
+    DEFAULT_CLANG_RESULT = LinuxToolchainTest.DEFAULT_CLANG_RESULT
+    DEFAULT_CLANGXX_RESULT = LinuxToolchainTest.DEFAULT_CLANGXX_RESULT
+
+    def test_clang_cl_cross(self):
+        # clang-cl drives the MSVC-ABI target build while plain clang
+        # remains the host compiler.
+        paths = {"/usr/bin/clang-cl": CLANG_CL_9_0 + CLANG_CL_PLATFORM_X86_64}
+        paths.update(LinuxToolchainTest.PATHS)
+        self.do_toolchain_test(
+            paths,
+            {
+                "c_compiler": MingwToolchainTest.CLANG_CL_9_0_RESULT,
+                "cxx_compiler": MingwToolchainTest.CLANGXX_CL_9_0_RESULT,
+                "host_c_compiler": self.DEFAULT_CLANG_RESULT,
+                "host_cxx_compiler": self.DEFAULT_CLANGXX_RESULT,
+            },
+        )
+
+
+class OpenBSDToolchainTest(BaseToolchainTest):
+    """Native (host == target) toolchain detection on OpenBSD with gcc."""
+
+    HOST = "x86_64-unknown-openbsd6.1"
+    TARGET = "x86_64-unknown-openbsd6.1"
+    PATHS = {
+        "/usr/bin/gcc": DEFAULT_GCC + GCC_PLATFORM_X86_64 + GCC_PLATFORM_OPENBSD,
+        "/usr/bin/g++": DEFAULT_GXX + GCC_PLATFORM_X86_64 + GCC_PLATFORM_OPENBSD,
+    }
+    DEFAULT_GCC_RESULT = LinuxToolchainTest.DEFAULT_GCC_RESULT
+    DEFAULT_GXX_RESULT = LinuxToolchainTest.DEFAULT_GXX_RESULT
+
+    def test_gcc(self):
+        self.do_toolchain_test(
+            self.PATHS,
+            {
+                "c_compiler": self.DEFAULT_GCC_RESULT,
+                "cxx_compiler": self.DEFAULT_GXX_RESULT,
+            },
+        )
+
+
+@memoize
+def gen_invoke_cargo(version, rustup_wrapper=False):
+    """Return a fake ``cargo`` invocation callable reporting ``version``.
+
+    Unless ``rustup_wrapper`` is True, the ``+stable`` probe (used by
+    configure to detect rustup shims) fails with exit code 101,
+    identifying this as a real cargo binary.
+    """
+
+    def invoke_cargo(stdin, args):
+        args = tuple(args)
+        if not rustup_wrapper and args == ("+stable",):
+            return (101, "", "we are the real thing")
+        if args == ("--version", "--verbose"):
+            return 0, "cargo %s\nrelease: %s" % (version, version), ""
+        raise NotImplementedError("unsupported arguments")
+
+    return invoke_cargo
+
+
+@memoize
+def gen_invoke_rustc(version, rustup_wrapper=False):
+    """Return a fake ``rustc`` invocation callable for the given ``version``.
+
+    Handles the version probe, the ``--print target-list`` query (with the
+    returned target list adjusted to match what each rustc release shipped),
+    and a staticlib compilation used by configure's rustc sanity check.
+    """
+
+    def invoke_rustc(stdin, args):
+        args = tuple(args)
+        # TODO: we don't have enough machinery set up to test the `rustup which`
+        # fallback yet.
+        if not rustup_wrapper and args == ("+stable",):
+            return (1, "", "error: couldn't read +stable: No such file or directory")
+        if args == ("--version", "--verbose"):
+            return (
+                0,
+                "rustc %s\nrelease: %s\nhost: x86_64-unknown-linux-gnu"
+                % (version, version),
+                "",
+            )
+        if args == ("--print", "target-list"):
+            # Raw list returned by rustc version 1.32, + ios, which somehow
+            # don't appear in the default list.
+            # https://github.com/rust-lang/rust/issues/36156
+            rust_targets = [
+                "aarch64-apple-ios",
+                "aarch64-fuchsia",
+                "aarch64-linux-android",
+                "aarch64-pc-windows-msvc",
+                "aarch64-unknown-cloudabi",
+                "aarch64-unknown-freebsd",
+                "aarch64-unknown-hermit",
+                "aarch64-unknown-linux-gnu",
+                "aarch64-unknown-linux-musl",
+                "aarch64-unknown-netbsd",
+                "aarch64-unknown-none",
+                "aarch64-unknown-openbsd",
+                "arm-linux-androideabi",
+                "arm-unknown-linux-gnueabi",
+                "arm-unknown-linux-gnueabihf",
+                "arm-unknown-linux-musleabi",
+                "arm-unknown-linux-musleabihf",
+                "armebv7r-none-eabi",
+                "armebv7r-none-eabihf",
+                "armv4t-unknown-linux-gnueabi",
+                "armv5te-unknown-linux-gnueabi",
+                "armv5te-unknown-linux-musleabi",
+                "armv6-unknown-netbsd-eabihf",
+                "armv7-linux-androideabi",
+                "armv7-unknown-cloudabi-eabihf",
+                "armv7-unknown-linux-gnueabihf",
+                "armv7-unknown-linux-musleabihf",
+                "armv7-unknown-netbsd-eabihf",
+                "armv7r-none-eabi",
+                "armv7r-none-eabihf",
+                "armv7s-apple-ios",
+                "asmjs-unknown-emscripten",
+                "i386-apple-ios",
+                "i586-pc-windows-msvc",
+                "i586-unknown-linux-gnu",
+                "i586-unknown-linux-musl",
+                "i686-apple-darwin",
+                "i686-linux-android",
+                "i686-pc-windows-gnu",
+                "i686-pc-windows-msvc",
+                "i686-unknown-cloudabi",
+                "i686-unknown-dragonfly",
+                "i686-unknown-freebsd",
+                "i686-unknown-haiku",
+                "i686-unknown-linux-gnu",
+                "i686-unknown-linux-musl",
+                "i686-unknown-netbsd",
+                "i686-unknown-openbsd",
+                "mips-unknown-linux-gnu",
+                "mips-unknown-linux-musl",
+                "mips-unknown-linux-uclibc",
+                "mips64-unknown-linux-gnuabi64",
+                "mips64el-unknown-linux-gnuabi64",
+                "mipsel-unknown-linux-gnu",
+                "mipsel-unknown-linux-musl",
+                "mipsel-unknown-linux-uclibc",
+                "msp430-none-elf",
+                "powerpc-unknown-linux-gnu",
+                "powerpc-unknown-linux-gnuspe",
+                "powerpc-unknown-linux-musl",
+                "powerpc-unknown-netbsd",
+                "powerpc64-unknown-linux-gnu",
+                "powerpc64-unknown-linux-musl",
+                "powerpc64le-unknown-linux-gnu",
+                "powerpc64le-unknown-linux-musl",
+                "riscv32imac-unknown-none-elf",
+                "riscv32imc-unknown-none-elf",
+                "s390x-unknown-linux-gnu",
+                "sparc-unknown-linux-gnu",
+                "sparc64-unknown-linux-gnu",
+                "sparc64-unknown-netbsd",
+                "sparcv9-sun-solaris",
+                "thumbv6m-none-eabi",
+                "thumbv7a-pc-windows-msvc",
+                "thumbv7em-none-eabi",
+                "thumbv7em-none-eabihf",
+                "thumbv7m-none-eabi",
+                "thumbv8m.base-none-eabi",
+                "wasm32-experimental-emscripten",
+                "wasm32-unknown-emscripten",
+                "wasm32-unknown-unknown",
+                "x86_64-apple-darwin",
+                "x86_64-apple-ios",
+                "x86_64-fortanix-unknown-sgx",
+                "x86_64-fuchsia",
+                "x86_64-linux-android",
+                "x86_64-pc-windows-gnu",
+                "x86_64-pc-windows-msvc",
+                "x86_64-rumprun-netbsd",
+                "x86_64-sun-solaris",
+                "x86_64-unknown-bitrig",
+                "x86_64-unknown-cloudabi",
+                "x86_64-unknown-dragonfly",
+                "x86_64-unknown-freebsd",
+                "x86_64-unknown-haiku",
+                "x86_64-unknown-hermit",
+                "x86_64-unknown-l4re-uclibc",
+                "x86_64-unknown-linux-gnu",
+                "x86_64-unknown-linux-gnux32",
+                "x86_64-unknown-linux-musl",
+                "x86_64-unknown-netbsd",
+                "x86_64-unknown-openbsd",
+                "x86_64-unknown-redox",
+            ]
+            # Additional targets from 1.33
+            if Version(version) >= "1.33.0":
+                rust_targets += [
+                    "thumbv7neon-linux-androideabi",
+                    "thumbv7neon-unknown-linux-gnueabihf",
+                    "x86_64-unknown-uefi",
+                    "thumbv8m.main-none-eabi",
+                    "thumbv8m.main-none-eabihf",
+                ]
+            # Additional targets from 1.34
+            if Version(version) >= "1.34.0":
+                rust_targets += [
+                    "nvptx64-nvidia-cuda",
+                    "powerpc64-unknown-freebsd",
+                    "riscv64gc-unknown-none-elf",
+                    "riscv64imac-unknown-none-elf",
+                ]
+            # Additional targets from 1.35
+            if Version(version) >= "1.35.0":
+                rust_targets += [
+                    "armv6-unknown-freebsd",
+                    "armv7-unknown-freebsd",
+                    "mipsisa32r6-unknown-linux-gnu",
+                    "mipsisa32r6el-unknown-linux-gnu",
+                    "mipsisa64r6-unknown-linux-gnuabi64",
+                    "mipsisa64r6el-unknown-linux-gnuabi64",
+                    "wasm32-unknown-wasi",
+                ]
+            # Additional targets from 1.36 (wasm32-unknown-wasi was renamed
+            # to wasm32-wasi, and bitrig support was dropped).
+            if Version(version) >= "1.36.0":
+                rust_targets += ["wasm32-wasi"]
+                rust_targets.remove("wasm32-unknown-wasi")
+                rust_targets.remove("x86_64-unknown-bitrig")
+            # Additional targets from 1.37
+            if Version(version) >= "1.37.0":
+                rust_targets += ["x86_64-pc-solaris"]
+            # Additional targets from 1.38
+            if Version(version) >= "1.38.0":
+                rust_targets += [
+                    "aarch64-unknown-redox",
+                    "aarch64-wrs-vxworks",
+                    "armv7-unknown-linux-gnueabi",
+                    "armv7-unknown-linux-musleabi",
+                    "armv7-wrs-vxworks",
+                    "hexagon-unknown-linux-musl",
+                    "i586-wrs-vxworks",
+                    "i686-uwp-windows-gnu",
+                    "i686-wrs-vxworks",
+                    "powerpc-wrs-vxworks",
+                    "powerpc-wrs-vxworks-spe",
+                    "powerpc64-wrs-vxworks",
+                    "riscv32i-unknown-none-elf",
+                    "x86_64-uwp-windows-gnu",
+                    "x86_64-wrs-vxworks",
+                ]
+            # Additional targets from 1.39
+            if Version(version) >= "1.39.0":
+                rust_targets += [
+                    "aarch64-uwp-windows-msvc",
+                    "armv7-wrs-vxworks-eabihf",
+                    "i686-unknown-uefi",
+                    "i686-uwp-windows-msvc",
+                    "mips64-unknown-linux-muslabi64",
+                    "mips64el-unknown-linux-muslabi64",
+                    "sparc64-unknown-openbsd",
+                    "x86_64-linux-kernel",
+                    "x86_64-uwp-windows-msvc",
+                ]
+                rust_targets.remove("armv7-wrs-vxworks")
+                rust_targets.remove("i586-wrs-vxworks")
+
+            return 0, "\n".join(sorted(rust_targets)), ""
+        # configure's rustc sanity check: compile a staticlib to a file
+        # given by -o; write a dummy file so the check finds output.
+        if (
+            len(args) == 6
+            and args[:2] == ("--crate-type", "staticlib")
+            and args[2].startswith("--target=")
+            and args[3] == "-o"
+        ):
+            with open(args[4], "w") as fh:
+                fh.write("foo")
+            return 0, "", ""
+        raise NotImplementedError("unsupported arguments")
+
+    return invoke_rustc
+
+
+class RustTest(BaseConfigureTest):
+    def get_rust_target(
+        self, target, compiler_type="gcc", version=MINIMUM_RUST_VERSION, arm_target=None
+    ):
+        """Run configure's rust target detection for ``target``.
+
+        ``compiler_type`` is the fake C compiler type seen by configure,
+        ``version`` the fake rustc/cargo version, and ``arm_target`` an
+        optional ReadOnlyNamespace overriding the ARM CPU feature probe.
+        Returns the computed rust target triple.
+        """
+        environ = {
+            "PATH": os.pathsep.join(mozpath.abspath(p) for p in ("/bin", "/usr/bin"))
+        }
+
+        paths = {
+            mozpath.abspath("/usr/bin/cargo"): gen_invoke_cargo(version),
+            mozpath.abspath("/usr/bin/rustc"): gen_invoke_rustc(version),
+        }
+
+        self.TARGET = target
+        sandbox = self.get_sandbox(paths, {}, [], environ)
+
+        # Trick the sandbox into not running the target compiler check
+        dep = sandbox._depends[sandbox["c_compiler"]]
+        getattr(sandbox, "__value_for_depends")[(dep,)] = CompilerResult(
+            type=compiler_type
+        )
+        # Same for the arm_target checks.
+        dep = sandbox._depends[sandbox["arm_target"]]
+        getattr(sandbox, "__value_for_depends")[
+            (dep,)
+        ] = arm_target or ReadOnlyNamespace(
+            arm_arch=7, thumb2=False, fpu="vfpv2", float_abi="softfp"
+        )
+        return sandbox._value_for(sandbox["rust_target_triple"])
+
+    def test_rust_target(self):
+        # Cases where the output of config.sub matches a rust target
+        for straightforward in (
+            "x86_64-unknown-dragonfly",
+            "aarch64-unknown-freebsd",
+            "i686-unknown-freebsd",
+            "x86_64-unknown-freebsd",
+            "sparc64-unknown-netbsd",
+            "i686-unknown-netbsd",
+            "x86_64-unknown-netbsd",
+            "i686-unknown-openbsd",
+            "x86_64-unknown-openbsd",
+            "aarch64-unknown-linux-gnu",
+            "sparc64-unknown-linux-gnu",
+            "i686-unknown-linux-gnu",
+            "i686-apple-darwin",
+            "x86_64-apple-darwin",
+            "mips-unknown-linux-gnu",
+            "mipsel-unknown-linux-gnu",
+            "mips64-unknown-linux-gnuabi64",
+            "mips64el-unknown-linux-gnuabi64",
+            "powerpc64-unknown-linux-gnu",
+            "powerpc64le-unknown-linux-gnu",
+            "i686-pc-windows-msvc",
+            "x86_64-pc-windows-msvc",
+            "aarch64-pc-windows-msvc",
+            "i686-pc-windows-gnu",
+            "x86_64-pc-windows-gnu",
+        ):
+            self.assertEqual(self.get_rust_target(straightforward), straightforward)
+
+        # Cases where the output of config.sub is different
+        for autoconf, rust in (
+            ("aarch64-unknown-linux-android", "aarch64-linux-android"),
+            ("arm-unknown-linux-androideabi", "armv7-linux-androideabi"),
+            ("armv7-unknown-linux-androideabi", "armv7-linux-androideabi"),
+            ("i386-unknown-linux-android", "i686-linux-android"),
+            ("i686-unknown-linux-android", "i686-linux-android"),
+            ("i686-pc-linux-gnu", "i686-unknown-linux-gnu"),
+            ("x86_64-unknown-linux-android", "x86_64-linux-android"),
+            ("x86_64-pc-linux-gnu", "x86_64-unknown-linux-gnu"),
+            ("sparcv9-sun-solaris2", "sparcv9-sun-solaris"),
+            ("x86_64-sun-solaris2", "x86_64-sun-solaris"),
+        ):
+            self.assertEqual(self.get_rust_target(autoconf), rust)
+
+        # Windows: the rust ABI (msvc vs. gnu) follows the C compiler type,
+        # not the autoconf triple alone.
+        for autoconf, building_with_gcc, rust in (
+            ("i686-pc-mingw32", "clang-cl", "i686-pc-windows-msvc"),
+            ("x86_64-pc-mingw32", "clang-cl", "x86_64-pc-windows-msvc"),
+            ("i686-pc-mingw32", "clang", "i686-pc-windows-gnu"),
+            ("x86_64-pc-mingw32", "clang", "x86_64-pc-windows-gnu"),
+            ("i686-w64-mingw32", "clang", "i686-pc-windows-gnu"),
+            ("x86_64-w64-mingw32", "clang", "x86_64-pc-windows-gnu"),
+            ("aarch64-windows-mingw32", "clang-cl", "aarch64-pc-windows-msvc"),
+        ):
+            self.assertEqual(self.get_rust_target(autoconf, building_with_gcc), rust)
+
+        # Arm special cases: the rust triple is refined from the detected
+        # ARM architecture level, thumb2 support, and FPU.
+        self.assertEqual(
+            self.get_rust_target(
+                "arm-unknown-linux-androideabi",
+                arm_target=ReadOnlyNamespace(
+                    arm_arch=7, fpu="neon", thumb2=True, float_abi="softfp"
+                ),
+            ),
+            "thumbv7neon-linux-androideabi",
+        )
+
+        self.assertEqual(
+            self.get_rust_target(
+                "arm-unknown-linux-androideabi",
+                arm_target=ReadOnlyNamespace(
+                    arm_arch=7, fpu="neon", thumb2=False, float_abi="softfp"
+                ),
+            ),
+            "armv7-linux-androideabi",
+        )
+
+        self.assertEqual(
+            self.get_rust_target(
+                "arm-unknown-linux-androideabi",
+                arm_target=ReadOnlyNamespace(
+                    arm_arch=7, fpu="vfpv2", thumb2=True, float_abi="softfp"
+                ),
+            ),
+            "armv7-linux-androideabi",
+        )
+
+        self.assertEqual(
+            self.get_rust_target(
+                "armv7-unknown-linux-gnueabihf",
+                arm_target=ReadOnlyNamespace(
+                    arm_arch=7, fpu="neon", thumb2=True, float_abi="hard"
+                ),
+            ),
+            "thumbv7neon-unknown-linux-gnueabihf",
+        )
+
+        self.assertEqual(
+            self.get_rust_target(
+                "armv7-unknown-linux-gnueabihf",
+                arm_target=ReadOnlyNamespace(
+                    arm_arch=7, fpu="neon", thumb2=False, float_abi="hard"
+                ),
+            ),
+            "armv7-unknown-linux-gnueabihf",
+        )
+
+        self.assertEqual(
+            self.get_rust_target(
+                "armv7-unknown-linux-gnueabihf",
+                arm_target=ReadOnlyNamespace(
+                    arm_arch=7, fpu="vfpv2", thumb2=True, float_abi="hard"
+                ),
+            ),
+            "armv7-unknown-linux-gnueabihf",
+        )
+
+        self.assertEqual(
+            self.get_rust_target(
+                "arm-unknown-freebsd13.0-gnueabihf",
+                arm_target=ReadOnlyNamespace(
+                    arm_arch=7, fpu="vfpv2", thumb2=True, float_abi="hard"
+                ),
+            ),
+            "armv7-unknown-freebsd",
+        )
+
+        self.assertEqual(
+            self.get_rust_target(
+                "arm-unknown-freebsd13.0-gnueabihf",
+                arm_target=ReadOnlyNamespace(
+                    arm_arch=6, fpu=None, thumb2=False, float_abi="hard"
+                ),
+            ),
+            "armv6-unknown-freebsd",
+        )
+
+        self.assertEqual(
+            self.get_rust_target(
+                "arm-unknown-linux-gnueabi",
+                arm_target=ReadOnlyNamespace(
+                    arm_arch=4, fpu=None, thumb2=False, float_abi="softfp"
+                ),
+            ),
+            "armv4t-unknown-linux-gnueabi",
+        )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/configure/test_toolchain_helpers.py b/python/mozbuild/mozbuild/test/configure/test_toolchain_helpers.py
new file mode 100644
index 0000000000..f42778215b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/test_toolchain_helpers.py
@@ -0,0 +1,433 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import copy
+import re
+import unittest
+from fnmatch import fnmatch
+from textwrap import dedent
+
+import six
+from mozpack import path as mozpath
+from mozunit import MockedOpen, main
+from six import StringIO
+
+from mozbuild.preprocessor import Preprocessor
+from mozbuild.util import ReadOnlyNamespace
+
+
+class CompilerPreprocessor(Preprocessor):
+ # The C preprocessor only expands macros when they are not in C strings.
+ # For now, we don't look very hard for C strings because they don't matter
+ # that much for our unit tests, but we at least avoid expanding in the
+ # simple "FOO" case.
+ VARSUBST = re.compile('(?<!")(?P<VAR>\w+)(?!")', re.U)
+ NON_WHITESPACE = re.compile("\S")
+ HAS_FEATURE_OR_BUILTIN = re.compile(
+ '(__has_(?:feature|builtin|attribute|warning))\("?([^"\)]*)"?\)'
+ )
+
+ def __init__(self, *args, **kwargs):
+ Preprocessor.__init__(self, *args, **kwargs)
+ self.do_filter("c_substitution")
+ self.setMarker("#\s*")
+
+ def do_if(self, expression, **kwargs):
+ # The C preprocessor handles numbers following C rules, which is a
+ # different handling than what our Preprocessor does out of the box.
+ # Hack around it enough that the configure tests work properly.
+ context = self.context
+
+ def normalize_numbers(value):
+ if isinstance(value, six.string_types):
+ if value[-1:] == "L" and value[:-1].isdigit():
+ value = int(value[:-1])
+ return value
+
+ # Our Preprocessor doesn't handle macros with parameters, so we hack
+ # around that for __has_feature()-like things.
+
+ def normalize_has_feature_or_builtin(expr):
+ return (
+ self.HAS_FEATURE_OR_BUILTIN.sub(r"\1\2", expr)
+ .replace("-", "_")
+ .replace("+", "_")
+ )
+
+ self.context = self.Context(
+ (normalize_has_feature_or_builtin(k), normalize_numbers(v))
+ for k, v in six.iteritems(context)
+ )
+ try:
+ return Preprocessor.do_if(
+ self, normalize_has_feature_or_builtin(expression), **kwargs
+ )
+ finally:
+ self.context = context
+
+ class Context(dict):
+ def __missing__(self, key):
+ return None
+
+ def filter_c_substitution(self, line):
+ def repl(matchobj):
+ varname = matchobj.group("VAR")
+ if varname in self.context:
+ result = six.text_type(self.context[varname])
+ # The C preprocessor inserts whitespaces around expanded
+ # symbols.
+ start, end = matchobj.span("VAR")
+ if self.NON_WHITESPACE.match(line[start - 1 : start]):
+ result = " " + result
+ if self.NON_WHITESPACE.match(line[end : end + 1]):
+ result = result + " "
+ return result
+ return matchobj.group(0)
+
+ return self.VARSUBST.sub(repl, line)
+
+
+class TestCompilerPreprocessor(unittest.TestCase):
+    """Unit tests for CompilerPreprocessor itself."""
+
+    def test_expansion(self):
+        # Defined macros expand (with C-style surrounding spaces); macros
+        # inside double quotes are left alone.
+        pp = CompilerPreprocessor({"A": 1, "B": "2", "C": "c", "D": "d"})
+        pp.out = StringIO()
+        input = StringIO('A.B.C "D"')
+        input.name = "foo"
+        pp.do_include(input)
+
+        self.assertEqual(pp.out.getvalue(), '1 . 2 . c "D"')
+
+    def test_normalization(self):
+        # __has_attribute()/__has_warning() pseudo-macros are matched by
+        # normalized name; unknown features evaluate false.
+        pp = CompilerPreprocessor(
+            {"__has_attribute(bar)": 1, '__has_warning("-Wc++98-foo")': 1}
+        )
+        pp.out = StringIO()
+        input = StringIO(
+            dedent(
+                """\
+            #if __has_warning("-Wbar")
+            WBAR
+            #endif
+            #if __has_warning("-Wc++98-foo")
+            WFOO
+            #endif
+            #if !__has_warning("-Wc++98-foo")
+            NO_WFOO
+            #endif
+            #if __has_attribute(bar)
+            BAR
+            #else
+            NO_BAR
+            #endif
+            #if !__has_attribute(foo)
+            NO_FOO
+            #endif
+            """
+            )
+        )
+
+        input.name = "foo"
+        pp.do_include(input)
+
+        self.assertEqual(pp.out.getvalue(), "WFOO\nBAR\nNO_FOO\n")
+
+    def test_condition(self):
+        # #ifdef/#if follow C numeric semantics; "0L" is falsy and
+        # whitespace after '#' is tolerated.
+        pp = CompilerPreprocessor({"A": 1, "B": "2", "C": "0L"})
+        pp.out = StringIO()
+        input = StringIO(
+            dedent(
+                """\
+            #ifdef A
+            IFDEF_A
+            #endif
+            #if A
+            IF_A
+            #endif
+            # if B
+            IF_B
+            # else
+            IF_NOT_B
+            # endif
+            #if !C
+            IF_NOT_C
+            #else
+            IF_C
+            #endif
+            """
+            )
+        )
+        input.name = "foo"
+        pp.do_include(input)
+
+        self.assertEqual("IFDEF_A\nIF_A\nIF_NOT_B\nIF_NOT_C\n", pp.out.getvalue())
+
+
+class FakeCompiler(dict):
+    """Defines a fake compiler for use in toolchain tests below.
+
+    The definitions given when creating an instance can have one of two
+    forms:
+    - a dict giving preprocessor symbols and their respective value, e.g.
+      { '__GNUC__': 4, '__STDC__': 1 }
+    - a dict associating flags to preprocessor symbols. An entry for `None`
+      is required in this case. Those are the baseline preprocessor symbols.
+      Additional entries describe additional flags to set or existing flags
+      to unset (with a value of `False`).
+      {
+        None: { '__GNUC__': 4, '__STDC__': 1, '__STRICT_ANSI__': 1 },
+        '-std=gnu99': { '__STDC_VERSION__': '199901L',
+                        '__STRICT_ANSI__': False },
+      }
+      With the dict above, invoking the preprocessor with no additional flags
+      would define __GNUC__, __STDC__ and __STRICT_ANSI__, and with -std=gnu99,
+      __GNUC__, __STDC__, and __STDC_VERSION__ (__STRICT_ANSI__ would be
+      unset).
+      It is also possible to have different symbols depending on the source
+      file extension. In this case, the key is '*.ext'. e.g.
+      {
+        '*.c': { '__STDC__': 1 },
+        '*.cpp': { '__cplusplus': '199711L' },
+      }
+
+    All the given definitions are merged together.
+
+    A FakeCompiler instance itself can be used as a definition to create
+    another FakeCompiler.
+
+    For convenience, FakeCompiler instances can be added (+) to one another.
+    """
+
+    def __init__(self, *definitions):
+        for definition in definitions:
+            # A flat symbol dict (no nested dicts) is treated as the
+            # baseline, i.e. as if it were {None: definition}.
+            if all(not isinstance(d, dict) for d in six.itervalues(definition)):
+                definition = {None: definition}
+            for key, value in six.iteritems(definition):
+                self.setdefault(key, {}).update(value)
+
+    def __call__(self, stdin, args):
+        # Emulate a compiler invocation: split args into flags and files,
+        # then either preprocess (-E) or pretend to compile (-c).
+        files = []
+        flags = []
+        args = iter(args)
+        while True:
+            arg = next(args, None)
+            if arg is None:
+                break
+            if arg.startswith("-"):
+                # Ignore -isysroot/--sysroot and the argument that follows it.
+                if arg in ("-isysroot", "--sysroot"):
+                    next(args, None)
+                else:
+                    flags.append(arg)
+            else:
+                files.append(arg)
+
+        if "-E" in flags:
+            assert len(files) == 1
+            file = files[0]
+            pp = CompilerPreprocessor(self[None])
+
+            def apply_defn(defn):
+                # A value of False unsets a symbol; anything else sets it.
+                for k, v in six.iteritems(defn):
+                    if v is False:
+                        if k in pp.context:
+                            del pp.context[k]
+                    else:
+                        pp.context[k] = v
+
+            # Extension-glob definitions apply first, then flag definitions
+            # in the order the flags were given (later flags win).
+            for glob, defn in six.iteritems(self):
+                if glob and not glob.startswith("-") and fnmatch(file, glob):
+                    apply_defn(defn)
+
+            for flag in flags:
+                apply_defn(self.get(flag, {}))
+
+            pp.out = StringIO()
+            pp.do_include(file)
+            return 0, pp.out.getvalue(), ""
+        elif "-c" in flags:
+            # Compilation succeeds unless an unknown flag is passed.
+            if "-funknown-flag" in flags:
+                return 1, "", ""
+            return 0, "", ""
+
+        # Any other invocation fails.
+        return 1, "", ""
+
+    def __add__(self, other):
+        return FakeCompiler(self, other)
+
+
+class TestFakeCompiler(unittest.TestCase):
+    """Unit tests for FakeCompiler's flag/extension handling and merging."""
+
+    def test_fake_compiler(self):
+        with MockedOpen({"file": "A B C", "file.c": "A B C"}):
+            # Flat definition: baseline symbols only.
+            compiler = FakeCompiler({"A": "1", "B": "2"})
+            self.assertEqual(compiler(None, ["-E", "file"]), (0, "1 2 C", ""))
+
+            # Flag-keyed and extension-keyed definitions; flag order and
+            # unsetting (False) matter.
+            compiler = FakeCompiler(
+                {
+                    None: {"A": "1", "B": "2"},
+                    "-foo": {"C": "foo"},
+                    "-bar": {"B": "bar", "C": "bar"},
+                    "-qux": {"B": False},
+                    "*.c": {"B": "42"},
+                }
+            )
+            self.assertEqual(compiler(None, ["-E", "file"]), (0, "1 2 C", ""))
+            self.assertEqual(compiler(None, ["-E", "-foo", "file"]), (0, "1 2 foo", ""))
+            self.assertEqual(
+                compiler(None, ["-E", "-bar", "file"]), (0, "1 bar bar", "")
+            )
+            self.assertEqual(compiler(None, ["-E", "-qux", "file"]), (0, "1 B C", ""))
+            self.assertEqual(
+                compiler(None, ["-E", "-foo", "-bar", "file"]), (0, "1 bar bar", "")
+            )
+            self.assertEqual(
+                compiler(None, ["-E", "-bar", "-foo", "file"]), (0, "1 bar foo", "")
+            )
+            self.assertEqual(
+                compiler(None, ["-E", "-bar", "-qux", "file"]), (0, "1 B bar", "")
+            )
+            self.assertEqual(
+                compiler(None, ["-E", "-qux", "-bar", "file"]), (0, "1 bar bar", "")
+            )
+            self.assertEqual(compiler(None, ["-E", "file.c"]), (0, "1 42 C", ""))
+            self.assertEqual(
+                compiler(None, ["-E", "-bar", "file.c"]), (0, "1 bar bar", "")
+            )
+
+    def test_multiple_definitions(self):
+        # Later definitions override earlier ones, per-key.
+        compiler = FakeCompiler({"A": 1, "B": 2}, {"C": 3})
+
+        self.assertEqual(compiler, {None: {"A": 1, "B": 2, "C": 3}})
+        compiler = FakeCompiler({"A": 1, "B": 2}, {"B": 4, "C": 3})
+
+        self.assertEqual(compiler, {None: {"A": 1, "B": 4, "C": 3}})
+        compiler = FakeCompiler(
+            {"A": 1, "B": 2}, {None: {"B": 4, "C": 3}, "-foo": {"D": 5}}
+        )
+
+        self.assertEqual(compiler, {None: {"A": 1, "B": 4, "C": 3}, "-foo": {"D": 5}})
+
+        compiler = FakeCompiler(
+            {None: {"A": 1, "B": 2}, "-foo": {"D": 5}},
+            {"-foo": {"D": 5}, "-bar": {"E": 6}},
+        )
+
+        self.assertEqual(
+            compiler, {None: {"A": 1, "B": 2}, "-foo": {"D": 5}, "-bar": {"E": 6}}
+        )
+
+
+class PrependFlags(list):
+    """Wrapper to allow prepending to flags instead of appending, in
+    CompilerResult.
+    """
+
+
+class CompilerResult(ReadOnlyNamespace):
+    """Helper of convenience to manipulate toolchain results in unit tests
+
+    When adding a dict, the result is a new CompilerResult with the values
+    from the dict replacing those from the CompilerResult, except for `flags`,
+    where the value from the dict extends the `flags` in `self`.
+    """
+
+    def __init__(
+        self, wrapper=None, compiler="", version="", type="", language="", flags=None
+    ):
+        # None defaults avoid the shared-mutable-default pitfall.
+        if flags is None:
+            flags = []
+        if wrapper is None:
+            wrapper = []
+        super(CompilerResult, self).__init__(
+            flags=flags,
+            version=version,
+            type=type,
+            compiler=mozpath.abspath(compiler),
+            wrapper=wrapper,
+            language=language,
+        )
+
+    def __add__(self, other):
+        # Deep-copy so the original's flags list is never mutated.
+        assert isinstance(other, dict)
+        result = copy.deepcopy(self.__dict__)
+        for k, v in six.iteritems(other):
+            if k == "flags":
+                flags = result.setdefault(k, [])
+                # PrependFlags inserts at the front; plain lists append.
+                if isinstance(v, PrependFlags):
+                    flags[:0] = v
+                else:
+                    flags.extend(v)
+            else:
+                result[k] = v
+        return CompilerResult(**result)
+
+
+class TestCompilerResult(unittest.TestCase):
+    """Unit tests for CompilerResult construction and `+ dict` merging."""
+
+    def test_compiler_result(self):
+        # Default construction fills empty values; compiler path is
+        # normalized with mozpath.abspath.
+        result = CompilerResult()
+        self.assertEqual(
+            result.__dict__,
+            {
+                "wrapper": [],
+                "compiler": mozpath.abspath(""),
+                "version": "",
+                "type": "",
+                "language": "",
+                "flags": [],
+            },
+        )
+
+        result = CompilerResult(
+            compiler="/usr/bin/gcc",
+            version="4.2.1",
+            type="gcc",
+            language="C",
+            flags=["-std=gnu99"],
+        )
+        self.assertEqual(
+            result.__dict__,
+            {
+                "wrapper": [],
+                "compiler": mozpath.abspath("/usr/bin/gcc"),
+                "version": "4.2.1",
+                "type": "gcc",
+                "language": "C",
+                "flags": ["-std=gnu99"],
+            },
+        )
+
+        # Adding a dict extends flags rather than replacing them.
+        result2 = result + {"flags": ["-m32"]}
+        self.assertEqual(
+            result2.__dict__,
+            {
+                "wrapper": [],
+                "compiler": mozpath.abspath("/usr/bin/gcc"),
+                "version": "4.2.1",
+                "type": "gcc",
+                "language": "C",
+                "flags": ["-std=gnu99", "-m32"],
+            },
+        )
+        # Original flags are untouched.
+        self.assertEqual(result.flags, ["-std=gnu99"])
+
+        # Non-flags keys are replaced outright.
+        result3 = result + {
+            "compiler": "/usr/bin/gcc-4.7",
+            "version": "4.7.3",
+            "flags": ["-m32"],
+        }
+        self.assertEqual(
+            result3.__dict__,
+            {
+                "wrapper": [],
+                "compiler": mozpath.abspath("/usr/bin/gcc-4.7"),
+                "version": "4.7.3",
+                "type": "gcc",
+                "language": "C",
+                "flags": ["-std=gnu99", "-m32"],
+            },
+        )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/configure/test_toolkit_moz_configure.py b/python/mozbuild/mozbuild/test/configure/test_toolkit_moz_configure.py
new file mode 100644
index 0000000000..e6b96b3627
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/test_toolkit_moz_configure.py
@@ -0,0 +1,102 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+
+from buildconfig import topsrcdir
+from mozpack import path as mozpath
+from mozunit import MockedOpen, main
+
+from common import BaseConfigureTest
+from mozbuild.configure.options import InvalidOptionError
+
+
+class TestToolkitMozConfigure(BaseConfigureTest):
+    def test_moz_configure_options(self):
+        # NOTE(review): args=[]/environ={} are mutable defaults; harmless
+        # here since the helper never mutates them, but worth confirming
+        # if this helper is ever extended.
+        def get_value_for(args=[], environ={}, mozconfig=""):
+            sandbox = self.get_sandbox({}, {}, args, environ, mozconfig)
+
+            # Add a fake old-configure option
+            sandbox.option_impl(
+                "--with-foo", nargs="*", help="Help missing for old configure options"
+            )
+
+            # Remove all implied options, otherwise, getting
+            # all_configure_options below triggers them, and that triggers
+            # configure parts that aren't expected to run during this test.
+            del sandbox._implied_options[:]
+            result = sandbox._value_for(sandbox["all_configure_options"])
+            # Strip the CONFIG_SHELL prefix so assertions stay simple.
+            shell = mozpath.abspath("/bin/sh")
+            return result.replace("CONFIG_SHELL=%s " % shell, "")
+
+        self.assertEqual(
+            "--enable-application=browser",
+            get_value_for(["--enable-application=browser"]),
+        )
+
+        self.assertEqual(
+            "--enable-application=browser " "MOZ_VTUNE=1",
+            get_value_for(["--enable-application=browser", "MOZ_VTUNE=1"]),
+        )
+
+        # Options may come from the environment or the mozconfig as well.
+        value = get_value_for(
+            environ={"MOZ_VTUNE": "1"},
+            mozconfig="ac_add_options --enable-application=browser",
+        )
+
+        self.assertEqual("--enable-application=browser MOZ_VTUNE=1", value)
+
+        # --disable-js-shell is the default, so it's filtered out.
+        self.assertEqual(
+            "--enable-application=browser",
+            get_value_for(["--enable-application=browser", "--disable-js-shell"]),
+        )
+
+        # Normally, --without-foo would be filtered out because that's the
+        # default, but since it is a (fake) old-configure option, it always
+        # appears.
+        self.assertEqual(
+            "--enable-application=browser --without-foo",
+            get_value_for(["--enable-application=browser", "--without-foo"]),
+        )
+        self.assertEqual(
+            "--enable-application=browser --with-foo",
+            get_value_for(["--enable-application=browser", "--with-foo"]),
+        )
+
+        # Values with spaces are shell-quoted in the output.
+        self.assertEqual(
+            "--enable-application=browser '--with-foo=foo bar'",
+            get_value_for(["--enable-application=browser", "--with-foo=foo bar"]),
+        )
+
+    def test_developer_options(self, milestone="42.0a1"):
+        def get_value(args=[], environ={}):
+            sandbox = self.get_sandbox({}, {}, args, environ)
+            return sandbox._value_for(sandbox["developer_options"])
+
+        milestone_path = os.path.join(topsrcdir, "config", "milestone.txt")
+        with MockedOpen({milestone_path: milestone}):
+            # developer options are enabled by default on "nightly" milestone
+            # only
+            self.assertEqual(get_value(), "a" in milestone or None)
+
+            self.assertEqual(get_value(["--enable-release"]), None)
+
+            self.assertEqual(get_value(environ={"MOZILLA_OFFICIAL": 1}), None)
+
+            self.assertEqual(
+                get_value(["--enable-release"], environ={"MOZILLA_OFFICIAL": 1}), None
+            )
+
+            # --disable-release contradicts MOZILLA_OFFICIAL and is rejected.
+            with self.assertRaises(InvalidOptionError):
+                get_value(["--disable-release"], environ={"MOZILLA_OFFICIAL": 1})
+
+            self.assertEqual(get_value(environ={"MOZ_AUTOMATION": 1}), None)
+
+    def test_developer_options_release(self):
+        # Same checks against a non-nightly (release) milestone.
+        self.test_developer_options("42.0")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/configure/test_util.py b/python/mozbuild/mozbuild/test/configure/test_util.py
new file mode 100644
index 0000000000..81c2e2a8bf
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/configure/test_util.py
@@ -0,0 +1,539 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import logging
+import os
+import sys
+import tempfile
+import textwrap
+import unittest
+
+import six
+from buildconfig import topsrcdir
+from mozpack import path as mozpath
+from mozunit import main
+from six import StringIO
+
+from common import ConfigureTestSandbox
+from mozbuild.configure import ConfigureSandbox
+from mozbuild.configure.util import (
+ ConfigureOutputHandler,
+ LineIO,
+ Version,
+ getpreferredencoding,
+)
+from mozbuild.util import exec_
+
+
class TestConfigureOutputHandler(unittest.TestCase):
    """Tests for ConfigureOutputHandler.

    Covers routing of log records between the out/err streams, the
    "checking foo... result" line-continuation behavior, and the buffering
    of DEBUG records via queue_debug().
    """

    def test_separation(self):
        """INFO records go to the out stream; WARNING and ERROR go to err."""
        out = StringIO()
        err = StringIO()
        name = "%s.test_separation" % self.__class__.__name__
        logger = logging.getLogger(name)
        logger.setLevel(logging.DEBUG)
        logger.addHandler(ConfigureOutputHandler(out, err))

        logger.error("foo")
        logger.warning("bar")
        logger.info("baz")
        # DEBUG level is not printed out by this handler
        logger.debug("qux")

        self.assertEqual(out.getvalue(), "baz\n")
        self.assertEqual(err.getvalue(), "foo\nbar\n")

    def test_format(self):
        """A formatter set on the handler is applied to the err stream.

        Per the assertions below, INFO output on the out stream stays raw
        (no "INFO:" prefix) while err-stream records are formatted.
        """
        out = StringIO()
        err = StringIO()
        name = "%s.test_format" % self.__class__.__name__
        logger = logging.getLogger(name)
        logger.setLevel(logging.DEBUG)
        handler = ConfigureOutputHandler(out, err)
        handler.setFormatter(logging.Formatter("%(levelname)s:%(message)s"))
        logger.addHandler(handler)

        logger.error("foo")
        logger.warning("bar")
        logger.info("baz")
        # DEBUG level is not printed out by this handler
        logger.debug("qux")

        self.assertEqual(out.getvalue(), "baz\n")
        self.assertEqual(err.getvalue(), "ERROR:foo\n" "WARNING:bar\n")

    def test_continuation(self):
        """INFO messages ending in "... " start a continued line.

        The following INFO record is appended on the same line. If other
        records interrupt (warnings here), the line is broken and the
        eventual result is printed as " ... <result>". With separate
        out/err streams, interruptions on err don't break the out-stream
        continuation at all.
        """
        out = StringIO()
        name = "%s.test_continuation" % self.__class__.__name__
        logger = logging.getLogger(name)
        logger.setLevel(logging.DEBUG)
        # Same stream for out and err: interruptions are visible inline.
        handler = ConfigureOutputHandler(out, out)
        handler.setFormatter(logging.Formatter("%(levelname)s:%(message)s"))
        logger.addHandler(handler)

        # Uninterrupted: "checking bar... " and "yes" join on one line.
        logger.info("foo")
        logger.info("checking bar... ")
        logger.info("yes")
        logger.info("qux")

        self.assertEqual(out.getvalue(), "foo\n" "checking bar... yes\n" "qux\n")

        out.seek(0)
        out.truncate()

        # One warning interrupts the continuation.
        logger.info("foo")
        logger.info("checking bar... ")
        logger.warning("hoge")
        logger.info("no")
        logger.info("qux")

        self.assertEqual(
            out.getvalue(),
            "foo\n" "checking bar... \n" "WARNING:hoge\n" " ... no\n" "qux\n",
        )

        out.seek(0)
        out.truncate()

        # Multiple warnings interrupt; the result is still resumed once.
        logger.info("foo")
        logger.info("checking bar... ")
        logger.warning("hoge")
        logger.warning("fuga")
        logger.info("no")
        logger.info("qux")

        self.assertEqual(
            out.getvalue(),
            "foo\n"
            "checking bar... \n"
            "WARNING:hoge\n"
            "WARNING:fuga\n"
            " ... no\n"
            "qux\n",
        )

        out.seek(0)
        out.truncate()
        err = StringIO()

        # Distinct out/err streams: warnings go to err and don't break the
        # out-stream continuation.
        logger.removeHandler(handler)
        handler = ConfigureOutputHandler(out, err)
        handler.setFormatter(logging.Formatter("%(levelname)s:%(message)s"))
        logger.addHandler(handler)

        logger.info("foo")
        logger.info("checking bar... ")
        logger.warning("hoge")
        logger.warning("fuga")
        logger.info("no")
        logger.info("qux")

        self.assertEqual(out.getvalue(), "foo\n" "checking bar... no\n" "qux\n")

        self.assertEqual(err.getvalue(), "WARNING:hoge\n" "WARNING:fuga\n")

    def test_queue_debug(self):
        """queue_debug() buffers DEBUG records inside the context.

        Buffered records are discarded on success, replayed when an ERROR is
        logged, and truncated to the handler's maxlen (3 here) with a
        "<truncated...>" marker. An exception escaping the context also
        flushes the buffered records.
        """
        out = StringIO()
        name = "%s.test_queue_debug" % self.__class__.__name__
        logger = logging.getLogger(name)
        logger.setLevel(logging.DEBUG)
        handler = ConfigureOutputHandler(out, out, maxlen=3)
        handler.setFormatter(logging.Formatter("%(levelname)s:%(message)s"))
        logger.addHandler(handler)

        # Success: the queued DEBUG record is dropped.
        with handler.queue_debug():
            logger.info("checking bar... ")
            logger.debug("do foo")
            logger.info("yes")
        logger.info("qux")

        self.assertEqual(out.getvalue(), "checking bar... yes\n" "qux\n")

        out.seek(0)
        out.truncate()

        # An ERROR inside the context replays the queued DEBUG record.
        with handler.queue_debug():
            logger.info("checking bar... ")
            logger.debug("do foo")
            logger.info("no")
            logger.error("fail")

        self.assertEqual(
            out.getvalue(), "checking bar... no\n" "DEBUG:do foo\n" "ERROR:fail\n"
        )

        out.seek(0)
        out.truncate()

        # Exactly maxlen (3) DEBUG records: all replayed, no truncation.
        with handler.queue_debug():
            logger.info("checking bar... ")
            logger.debug("do foo")
            logger.debug("do bar")
            logger.debug("do baz")
            logger.info("no")
            logger.error("fail")

        self.assertEqual(
            out.getvalue(),
            "checking bar... no\n"
            "DEBUG:do foo\n"
            "DEBUG:do bar\n"
            "DEBUG:do baz\n"
            "ERROR:fail\n",
        )

        out.seek(0)
        out.truncate()

        # More than maxlen DEBUG records: only the last 3 are kept, preceded
        # by a truncation marker.
        with handler.queue_debug():
            logger.info("checking bar... ")
            logger.debug("do foo")
            logger.debug("do bar")
            logger.debug("do baz")
            logger.debug("do qux")
            logger.debug("do hoge")
            logger.info("no")
            logger.error("fail")

        self.assertEqual(
            out.getvalue(),
            "checking bar... no\n"
            "DEBUG:<truncated - see config.log for full output>\n"
            "DEBUG:do baz\n"
            "DEBUG:do qux\n"
            "DEBUG:do hoge\n"
            "ERROR:fail\n",
        )

        out.seek(0)
        out.truncate()

        # An exception escaping the context propagates unchanged and flushes
        # the queued DEBUG records.
        try:
            with handler.queue_debug():
                logger.info("checking bar... ")
                logger.debug("do foo")
                logger.debug("do bar")
                logger.info("no")
                e = Exception("fail")
                raise e
        except Exception as caught:
            self.assertIs(caught, e)

        self.assertEqual(
            out.getvalue(), "checking bar... no\n" "DEBUG:do foo\n" "DEBUG:do bar\n"
        )

    def test_queue_debug_reentrant(self):
        """Nested queue_debug() contexts share one queue.

        Whether the failure (exception or ERROR record) happens in the inner
        or the outer context, the DEBUG records from both levels are
        replayed together.
        """
        out = StringIO()
        name = "%s.test_queue_debug_reentrant" % self.__class__.__name__
        logger = logging.getLogger(name)
        logger.setLevel(logging.DEBUG)
        handler = ConfigureOutputHandler(out, out, maxlen=10)
        handler.setFormatter(logging.Formatter("%(levelname)s| %(message)s"))
        logger.addHandler(handler)

        # Exception raised from the inner context.
        try:
            with handler.queue_debug():
                logger.info("outer info")
                logger.debug("outer debug")
                with handler.queue_debug():
                    logger.info("inner info")
                    logger.debug("inner debug")
                    e = Exception("inner exception")
                    raise e
        except Exception as caught:
            self.assertIs(caught, e)

        self.assertEqual(
            out.getvalue(),
            "outer info\n" "inner info\n" "DEBUG| outer debug\n" "DEBUG| inner debug\n",
        )

        out.seek(0)
        out.truncate()

        # Exception raised from the outer context, after the inner one
        # exited successfully.
        try:
            with handler.queue_debug():
                logger.info("outer info")
                logger.debug("outer debug")
                with handler.queue_debug():
                    logger.info("inner info")
                    logger.debug("inner debug")
                e = Exception("outer exception")
                raise e
        except Exception as caught:
            self.assertIs(caught, e)

        self.assertEqual(
            out.getvalue(),
            "outer info\n" "inner info\n" "DEBUG| outer debug\n" "DEBUG| inner debug\n",
        )

        out.seek(0)
        out.truncate()

        # ERROR logged inside the inner context.
        with handler.queue_debug():
            logger.info("outer info")
            logger.debug("outer debug")
            with handler.queue_debug():
                logger.info("inner info")
                logger.debug("inner debug")
                logger.error("inner error")
        self.assertEqual(
            out.getvalue(),
            "outer info\n"
            "inner info\n"
            "DEBUG| outer debug\n"
            "DEBUG| inner debug\n"
            "ERROR| inner error\n",
        )

        out.seek(0)
        out.truncate()

        # ERROR logged in the outer context, after the inner one exited.
        with handler.queue_debug():
            logger.info("outer info")
            logger.debug("outer debug")
            with handler.queue_debug():
                logger.info("inner info")
                logger.debug("inner debug")
            logger.error("outer error")
        self.assertEqual(
            out.getvalue(),
            "outer info\n"
            "inner info\n"
            "DEBUG| outer debug\n"
            "DEBUG| inner debug\n"
            "ERROR| outer error\n",
        )

    def test_is_same_output(self):
        """_is_same_output() detects file descriptors for the same file.

        A dup()'ed descriptor compares equal to the original; a descriptor
        for a different file does not; two descriptors opened independently
        on the same path compare equal.
        """
        fd1 = sys.stderr.fileno()
        fd2 = os.dup(fd1)
        try:
            self.assertTrue(ConfigureOutputHandler._is_same_output(fd1, fd2))
        finally:
            os.close(fd2)

        fd2, path = tempfile.mkstemp()
        try:
            self.assertFalse(ConfigureOutputHandler._is_same_output(fd1, fd2))

            fd3 = os.dup(fd2)
            try:
                self.assertTrue(ConfigureOutputHandler._is_same_output(fd2, fd3))
            finally:
                os.close(fd3)

            # A separately-opened handle to the same path also matches.
            with open(path, "a") as fh:
                fd3 = fh.fileno()
                self.assertTrue(ConfigureOutputHandler._is_same_output(fd2, fd3))

        finally:
            os.close(fd2)
            os.remove(path)
+
+
class TestLineIO(unittest.TestCase):
    """Exercise LineIO: a file-like wrapper that invokes a callback once per
    complete line, buffering any trailing partial line until close()."""

    def test_lineio(self):
        seen = []
        lio = LineIO(seen.append)

        # Partial writes are buffered; nothing is emitted without a newline.
        lio.write("a")
        self.assertEqual(seen, [])

        lio.write("b")
        self.assertEqual(seen, [])

        # A newline flushes the accumulated characters as one line.
        lio.write("\n")
        self.assertEqual(seen, ["ab"])

        lio.write("cdef")
        self.assertEqual(seen, ["ab"])

        lio.write("\n")
        self.assertEqual(seen, ["ab", "cdef"])

        # A single write can complete one line and begin the next.
        lio.write("ghi\njklm")
        self.assertEqual(seen, ["ab", "cdef", "ghi"])

        lio.write("nop\nqrst\nuv\n")
        self.assertEqual(seen, ["ab", "cdef", "ghi", "jklmnop", "qrst", "uv"])

        lio.write("wx\nyz")
        self.assertEqual(seen, ["ab", "cdef", "ghi", "jklmnop", "qrst", "uv", "wx"])

        # close() flushes whatever partial line remains.
        lio.close()
        self.assertEqual(
            seen, ["ab", "cdef", "ghi", "jklmnop", "qrst", "uv", "wx", "yz"]
        )

    def test_lineio_contextmanager(self):
        seen = []
        with LineIO(seen.append) as lio:
            lio.write("a\nb\nc")

            # Inside the context, the trailing "c" is still buffered.
            self.assertEqual(seen, ["a", "b"])

        # Leaving the context closes the LineIO, flushing the last line.
        self.assertEqual(seen, ["a", "b", "c"])
+
+
class TestLogSubprocessOutput(unittest.TestCase):
    """Check that subprocess output captured through the configure sandbox
    survives non-ASCII characters."""

    def test_non_ascii_subprocess_output(self):
        out = StringIO()
        sandbox = ConfigureSandbox({}, {}, ["configure"], out, out)

        util_configure = mozpath.join(
            topsrcdir, "build", "moz.configure", "util.configure"
        )
        subprocess_configure = mozpath.join(
            topsrcdir,
            "python",
            "mozbuild",
            "mozbuild",
            "test",
            "configure",
            "data",
            "subprocess.configure",
        )
        sandbox.include_file(util_configure)
        sandbox.include_file(subprocess_configure)

        status = 0
        try:
            sandbox.run()
        except SystemExit as exc:
            status = exc.code

        self.assertEqual(status, 0)
        # NOTE(review): presumably the included configure script echoes
        # U+00B4 (acute accent); under a non-UTF-8 preferred encoding an
        # ASCII apostrophe is expected instead — confirm against
        # data/subprocess.configure.
        quote_char = "\u00B4" if getpreferredencoding().lower() == "utf-8" else "'"
        self.assertEqual(six.ensure_text(out.getvalue().strip()), quote_char)
+
+
class TestVersion(unittest.TestCase):
    """Tests for the Version parsing and comparison helper."""

    def test_version_simple(self):
        # Missing components default to 0.
        version = Version("1")
        self.assertEqual(version, "1")
        self.assertLess(version, "2")
        self.assertGreater(version, "0.5")
        for attr, expected in (("major", 1), ("minor", 0), ("patch", 0)):
            self.assertEqual(getattr(version, attr), expected)

    def test_version_more(self):
        # A letter suffix on the last component doesn't affect the numbers.
        version = Version("1.2.3b")
        self.assertLess(version, "2")
        for attr, expected in (("major", 1), ("minor", 2), ("patch", 3)):
            self.assertEqual(getattr(version, attr), expected)

    def test_version_bad(self):
        # A version with a letter in the middle doesn't really make sense,
        # so everything after it should be ignored.
        version = Version("1.2b.3")
        self.assertLess(version, "2")
        for attr, expected in (("major", 1), ("minor", 2), ("patch", 0)):
            self.assertEqual(getattr(version, attr), expected)

    def test_version_badder(self):
        # Same, with the letter in the very first component.
        version = Version("1b.2.3")
        self.assertLess(version, "2")
        for attr, expected in (("major", 1), ("minor", 0), ("patch", 0)):
            self.assertEqual(getattr(version, attr), expected)
+
+
class TestCheckCmdOutput(unittest.TestCase):
    """Tests for the check_cmd_output() helper from util.configure, driven
    through a ConfigureTestSandbox with mocked executables."""

    def get_result(self, command="", paths=None):
        """Execute `command` in a fresh sandbox with util.configure loaded.

        `paths` maps mocked executable paths to callables. Returns a
        (config, captured output, exit status) triple.
        """
        config = {}
        out = StringIO()
        sandbox = ConfigureTestSandbox(
            paths or {}, config, {}, ["/bin/configure"], out, out
        )
        sandbox.include_file(
            mozpath.join(topsrcdir, "build", "moz.configure", "util.configure")
        )
        status = 0
        try:
            exec_(command, sandbox)
            sandbox.run()
        except SystemExit as exc:
            status = exc.code
        return config, out.getvalue(), status

    def test_simple_program(self):
        # A successful command: its stdout is returned and logged.
        def mock_simple_prog(_, args):
            if len(args) != 1 or args[0] != "--help":
                self.fail("Unexpected arguments to mock_simple_program: %s" % args)
            return 0, "simple program help...", ""

        prog_path = mozpath.abspath("/simple/prog")
        cmd = f"log.info(check_cmd_output('{prog_path}', '--help'))"
        config, out, status = self.get_result(cmd, paths={prog_path: mock_simple_prog})
        self.assertEqual(config, {})
        self.assertEqual(status, 0)
        self.assertEqual(out, "simple program help...\n")

    def test_failing_program(self):
        # A failing command without onerror: debug details plus a generic
        # ERROR line, and configure exits with status 1.
        def mock_error_prog(_, args):
            if len(args) != 1 or args[0] != "--error":
                self.fail("Unexpected arguments to mock_error_program: %s" % args)
            return (127, "simple program output", "simple program error output")

        prog_path = mozpath.abspath("/simple/prog")
        cmd = f"log.info(check_cmd_output('{prog_path}', '--error'))"
        config, out, status = self.get_result(cmd, paths={prog_path: mock_error_prog})
        self.assertEqual(config, {})
        self.assertEqual(status, 1)
        expected = textwrap.dedent(
            """\
            DEBUG: Executing: `%s --error`
            DEBUG: The command returned non-zero exit status 127.
            DEBUG: Its output was:
            DEBUG: | simple program output
            DEBUG: Its error output was:
            DEBUG: | simple program error output
            ERROR: Command `%s --error` failed with exit status 127.
            """
            % (prog_path, prog_path)
        )
        self.assertEqual(out, expected)

    def test_error_callback(self):
        # With an onerror callback, the callback's message replaces the
        # generic ERROR line.
        def mock_error_prog(_, args):
            if len(args) != 1 or args[0] != "--error":
                self.fail("Unexpected arguments to mock_error_program: %s" % args)
            return 127, "simple program error...", ""

        prog_path = mozpath.abspath("/simple/prog")
        cmd = textwrap.dedent(
            """\
            check_cmd_output('%s', '--error',
                onerror=lambda: die('`prog` produced an error'))
            """
            % prog_path
        )
        config, out, status = self.get_result(cmd, paths={prog_path: mock_error_prog})
        self.assertEqual(config, {})
        self.assertEqual(status, 1)
        expected = textwrap.dedent(
            """\
            DEBUG: Executing: `%s --error`
            DEBUG: The command returned non-zero exit status 127.
            DEBUG: Its output was:
            DEBUG: | simple program error...
            ERROR: `prog` produced an error
            """
            % prog_path
        )
        self.assertEqual(out, expected)
+
+
# Delegate to mozunit's test runner when executed directly.
if __name__ == "__main__":
    main()
diff --git a/python/mozbuild/mozbuild/test/controller/__init__.py b/python/mozbuild/mozbuild/test/controller/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/controller/__init__.py
diff --git a/python/mozbuild/mozbuild/test/controller/test_ccachestats.py b/python/mozbuild/mozbuild/test/controller/test_ccachestats.py
new file mode 100644
index 0000000000..f1efa78c3a
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/controller/test_ccachestats.py
@@ -0,0 +1,866 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import time
+import unittest
+
+from mozunit import main
+
+from mozbuild.controller.building import CCacheStats
+
# Reference epoch timestamps captured at import time (TIMESTAMP2 roughly 10
# seconds later), plus their locale-formatted forms. The formatted strings
# are substituted into the STAT8/STAT9 fixtures below because ccache prints
# locale-dependent dates; the tests then compare parsed results against the
# integer epoch seconds.
TIMESTAMP = time.time()
TIMESTAMP2 = time.time() + 10
TIMESTAMP_STR = time.strftime("%c", time.localtime(TIMESTAMP))
TIMESTAMP2_STR = time.strftime("%c", time.localtime(TIMESTAMP2))
+
+
+class TestCcacheStats(unittest.TestCase):
+ STAT_GARBAGE = """A garbage line which should be failed to parse"""
+
+ STAT0 = """
+ cache directory /home/tlin/.ccache
+ cache hit (direct) 0
+ cache hit (preprocessed) 0
+ cache miss 0
+ files in cache 0
+ cache size 0 Kbytes
+ max cache size 16.0 Gbytes"""
+
+ STAT1 = """
+ cache directory /home/tlin/.ccache
+ cache hit (direct) 100
+ cache hit (preprocessed) 200
+ cache miss 2500
+ called for link 180
+ called for preprocessing 6
+ compile failed 11
+ preprocessor error 3
+ bad compiler arguments 6
+ unsupported source language 9
+ autoconf compile/link 60
+ unsupported compiler option 2
+ no input file 21
+ files in cache 7344
+ cache size 1.9 Gbytes
+ max cache size 16.0 Gbytes"""
+
+ STAT2 = """
+ cache directory /home/tlin/.ccache
+ cache hit (direct) 1900
+ cache hit (preprocessed) 300
+ cache miss 2600
+ called for link 361
+ called for preprocessing 12
+ compile failed 22
+ preprocessor error 6
+ bad compiler arguments 12
+ unsupported source language 18
+ autoconf compile/link 120
+ unsupported compiler option 4
+ no input file 48
+ files in cache 7392
+ cache size 2.0 Gbytes
+ max cache size 16.0 Gbytes"""
+
+ STAT3 = """
+ cache directory /Users/tlin/.ccache
+ primary config /Users/tlin/.ccache/ccache.conf
+ secondary config (readonly) /usr/local/Cellar/ccache/3.2/etc/ccache.conf
+ cache hit (direct) 12004
+ cache hit (preprocessed) 1786
+ cache miss 26348
+ called for link 2338
+ called for preprocessing 6313
+ compile failed 399
+ preprocessor error 390
+ bad compiler arguments 86
+ unsupported source language 66
+ autoconf compile/link 2439
+ unsupported compiler option 187
+ no input file 1068
+ files in cache 18044
+ cache size 7.5 GB
+ max cache size 8.6 GB
+ """
+
+ STAT4 = """
+ cache directory /Users/tlin/.ccache
+ primary config /Users/tlin/.ccache/ccache.conf
+ secondary config (readonly) /usr/local/Cellar/ccache/3.2.1/etc/ccache.conf
+ cache hit (direct) 21039
+ cache hit (preprocessed) 2315
+ cache miss 39370
+ called for link 3651
+ called for preprocessing 6693
+ compile failed 723
+ ccache internal error 1
+ preprocessor error 588
+ bad compiler arguments 128
+ unsupported source language 99
+ autoconf compile/link 3669
+ unsupported compiler option 187
+ no input file 1711
+ files in cache 18313
+ cache size 6.3 GB
+ max cache size 6.0 GB
+ """
+
+ STAT5 = """
+ cache directory /Users/tlin/.ccache
+ primary config /Users/tlin/.ccache/ccache.conf
+ secondary config (readonly) /usr/local/Cellar/ccache/3.2.1/etc/ccache.conf
+ cache hit (direct) 21039
+ cache hit (preprocessed) 2315
+ cache miss 39372
+ called for link 3653
+ called for preprocessing 6693
+ compile failed 723
+ ccache internal error 1
+ preprocessor error 588
+ bad compiler arguments 128
+ unsupported source language 99
+ autoconf compile/link 3669
+ unsupported compiler option 187
+ no input file 1711
+ files in cache 17411
+ cache size 6.0 GB
+ max cache size 6.0 GB
+ """
+
+ STAT6 = """
+ cache directory /Users/tlin/.ccache
+ primary config /Users/tlin/.ccache/ccache.conf
+ secondary config (readonly) /usr/local/Cellar/ccache/3.3.2/etc/ccache.conf
+ cache hit (direct) 319287
+ cache hit (preprocessed) 125987
+ cache miss 749959
+ cache hit rate 37.25 %
+ called for link 87978
+ called for preprocessing 418591
+ multiple source files 1861
+ compiler produced no output 122
+ compiler produced empty output 174
+ compile failed 14330
+ ccache internal error 1
+ preprocessor error 9459
+ can't use precompiled header 4
+ bad compiler arguments 2077
+ unsupported source language 18195
+ autoconf compile/link 51485
+ unsupported compiler option 322
+ no input file 309538
+ cleanups performed 1
+ files in cache 17358
+ cache size 15.4 GB
+ max cache size 17.2 GB
+ """
+
+ STAT7 = """
+ cache directory /Users/tlin/.ccache
+ primary config /Users/tlin/.ccache/ccache.conf
+ secondary config (readonly) /usr/local/Cellar/ccache/3.3.3/etc/ccache.conf
+ cache hit (direct) 27035
+ cache hit (preprocessed) 13939
+ cache miss 62630
+ cache hit rate 39.55 %
+ called for link 1280
+ called for preprocessing 736
+ compile failed 550
+ preprocessor error 638
+ bad compiler arguments 20
+ autoconf compile/link 1751
+ unsupported code directive 2
+ no input file 2378
+ cleanups performed 1792
+ files in cache 3479
+ cache size 4.4 GB
+ max cache size 5.0 GB
+ """
+
+ # Substitute a locally-generated timestamp because the timestamp format is
+ # locale-dependent.
+ STAT8 = f"""
+ cache directory /home/psimonyi/.ccache
+ primary config /home/psimonyi/.ccache/ccache.conf
+ secondary config (readonly) /etc/ccache.conf
+ stats zero time {TIMESTAMP_STR}
+ cache hit (direct) 571
+ cache hit (preprocessed) 1203
+ cache miss 11747
+ cache hit rate 13.12 %
+ called for link 623
+ called for preprocessing 7194
+ compile failed 32
+ preprocessor error 137
+ bad compiler arguments 4
+ autoconf compile/link 348
+ no input file 162
+ cleanups performed 77
+ files in cache 13464
+ cache size 6.2 GB
+ max cache size 7.0 GB
+ """
+
+ STAT9 = f"""
+ cache directory /Users/tlin/.ccache
+ primary config /Users/tlin/.ccache/ccache.conf
+ secondary config (readonly) /usr/local/Cellar/ccache/3.5/etc/ccache.conf
+ stats updated {TIMESTAMP2_STR}
+ stats zeroed {TIMESTAMP_STR}
+ cache hit (direct) 80147
+ cache hit (preprocessed) 21413
+ cache miss 191128
+ cache hit rate 34.70 %
+ called for link 5194
+ called for preprocessing 1721
+ compile failed 825
+ preprocessor error 3838
+ cache file missing 4863
+ bad compiler arguments 32
+ autoconf compile/link 3554
+ unsupported code directive 4
+ no input file 5545
+ cleanups performed 3154
+ files in cache 18525
+ cache size 13.4 GB
+ max cache size 15.0 GB
+ """
+
+ VERSION_3_5_GIT = """
+ ccache version 3.5.1+2_gf5309092_dirty
+
+ Copyright (C) 2002-2007 Andrew Tridgell
+ Copyright (C) 2009-2019 Joel Rosdahl
+
+ This program is free software; you can redistribute it and/or modify it under
+ the terms of the GNU General Public License as published by the Free Software
+ Foundation; either version 3 of the License, or (at your option) any later
+ version.
+ """
+
+ VERSION_4_2 = """
+ ccache version 4.2.1
+
+ Copyright (C) 2002-2007 Andrew Tridgell
+ Copyright (C) 2009-2021 Joel Rosdahl and other contributors
+
+ See <https://ccache.dev/credits.html> for a complete list of contributors.
+
+ This program is free software; you can redistribute it and/or modify it under
+ the terms of the GNU General Public License as published by the Free Software
+ Foundation; either version 3 of the License, or (at your option) any later
+ version.
+ """
+
+ VERSION_4_4 = """
+ ccache version 4.4
+ Features: file-storage http-storage
+
+ Copyright (C) 2002-2007 Andrew Tridgell
+ Copyright (C) 2009-2021 Joel Rosdahl and other contributors
+
+ See <https://ccache.dev/credits.html> for a complete list of contributors.
+
+ This program is free software; you can redistribute it and/or modify it under
+ the terms of the GNU General Public License as published by the Free Software
+ Foundation; either version 3 of the License, or (at your option) any later
+ version.
+ """
+
+ VERSION_4_4_2 = """
+ ccache version 4.4.2
+ Features: file-storage http-storage
+
+ Copyright (C) 2002-2007 Andrew Tridgell
+ Copyright (C) 2009-2021 Joel Rosdahl and other contributors
+
+ See <https://ccache.dev/credits.html> for a complete list of contributors.
+
+ This program is free software; you can redistribute it and/or modify it under
+ the terms of the GNU General Public License as published by the Free Software
+ Foundation; either version 3 of the License, or (at your option) any later
+ version.
+ """
+
+ VERSION_4_5 = """
+ ccache version 4.5.1
+ Features: file-storage http-storage redis-storage
+
+ Copyright (C) 2002-2007 Andrew Tridgell
+ Copyright (C) 2009-2021 Joel Rosdahl and other contributors
+
+ See <https://ccache.dev/credits.html> for a complete list of contributors.
+
+ This program is free software; you can redistribute it and/or modify it under
+ the terms of the GNU General Public License as published by the Free Software
+ Foundation; either version 3 of the License, or (at your option) any later
+ version.
+ """
+
+ STAT10 = f"""\
+stats_updated_timestamp\t{int(TIMESTAMP)}
+stats_zeroed_timestamp\t0
+direct_cache_hit\t197
+preprocessed_cache_hit\t719
+cache_miss\t8427
+called_for_link\t569
+called_for_preprocessing\t110
+multiple_source_files\t0
+compiler_produced_stdout\t0
+compiler_produced_no_output\t0
+compiler_produced_empty_output\t0
+compile_failed\t49
+internal_error\t1
+preprocessor_error\t90
+could_not_use_precompiled_header\t0
+could_not_use_modules\t0
+could_not_find_compiler\t0
+missing_cache_file\t1
+bad_compiler_arguments\t6
+unsupported_source_language\t0
+compiler_check_failed\t0
+autoconf_test\t418
+unsupported_compiler_option\t0
+unsupported_code_directive\t1
+output_to_stdout\t0
+bad_output_file\t0
+no_input_file\t9
+error_hashing_extra_file\t0
+cleanups_performed\t161
+files_in_cache\t4425
+cache_size_kibibyte\t4624220
+"""
+
+ STAT11 = f"""\
+stats_updated_timestamp\t{int(TIMESTAMP)}
+stats_zeroed_timestamp\t{int(TIMESTAMP2)}
+direct_cache_hit\t0
+preprocessed_cache_hit\t0
+cache_miss\t0
+called_for_link\t0
+called_for_preprocessing\t0
+multiple_source_files\t0
+compiler_produced_stdout\t0
+compiler_produced_no_output\t0
+compiler_produced_empty_output\t0
+compile_failed\t0
+internal_error\t0
+preprocessor_error\t0
+could_not_use_precompiled_header\t0
+could_not_use_modules\t0
+could_not_find_compiler\t0
+missing_cache_file\t0
+bad_compiler_arguments\t0
+unsupported_source_language\t0
+compiler_check_failed\t0
+autoconf_test\t0
+unsupported_compiler_option\t0
+unsupported_code_directive\t0
+output_to_stdout\t0
+bad_output_file\t0
+no_input_file\t0
+error_hashing_extra_file\t0
+cleanups_performed\t16
+files_in_cache\t0
+cache_size_kibibyte\t0
+"""
+
+ STAT12 = """\
+stats_updated_timestamp\t0
+stats_zeroed_timestamp\t0
+direct_cache_hit\t0
+preprocessed_cache_hit\t0
+cache_miss\t0
+called_for_link\t0
+called_for_preprocessing\t0
+multiple_source_files\t0
+compiler_produced_stdout\t0
+compiler_produced_no_output\t0
+compiler_produced_empty_output\t0
+compile_failed\t0
+internal_error\t0
+preprocessor_error\t0
+could_not_use_precompiled_header\t0
+could_not_use_modules\t0
+could_not_find_compiler\t0
+missing_cache_file\t0
+bad_compiler_arguments\t0
+unsupported_source_language\t0
+compiler_check_failed\t0
+autoconf_test\t0
+unsupported_compiler_option\t0
+unsupported_code_directive\t0
+output_to_stdout\t0
+bad_output_file\t0
+no_input_file\t0
+error_hashing_extra_file\t0
+cleanups_performed\t16
+files_in_cache\t0
+cache_size_kibibyte\t0
+"""
+
+ STAT13 = f"""\
+stats_updated_timestamp\t{int(TIMESTAMP)}
+stats_zeroed_timestamp\t{int(TIMESTAMP2)}
+direct_cache_hit\t280542
+preprocessed_cache_hit\t0
+cache_miss\t387653
+called_for_link\t0
+called_for_preprocessing\t0
+multiple_source_files\t0
+compiler_produced_stdout\t0
+compiler_produced_no_output\t0
+compiler_produced_empty_output\t0
+compile_failed\t1665
+internal_error\t1
+preprocessor_error\t0
+could_not_use_precompiled_header\t0
+could_not_use_modules\t0
+could_not_find_compiler\t0
+missing_cache_file\t0
+bad_compiler_arguments\t0
+unsupported_source_language\t0
+compiler_check_failed\t0
+autoconf_test\t0
+unsupported_compiler_option\t0
+unsupported_code_directive\t0
+output_to_stdout\t0
+bad_output_file\t0
+no_input_file\t2
+error_hashing_extra_file\t0
+cleanups_performed\t364
+files_in_cache\t335104
+cache_size_kibibyte\t18224250
+"""
+
+ maxDiff = None
+
+ def test_parse_garbage_stats_message(self):
+ self.assertRaises(ValueError, CCacheStats, self.STAT_GARBAGE)
+
+ def test_parse_zero_stats_message(self):
+ stats = CCacheStats(self.STAT0)
+ self.assertEqual(stats.hit_rates(), (0, 0, 0))
+
+ def test_hit_rate_of_diff_stats(self):
+ stats1 = CCacheStats(self.STAT1)
+ stats2 = CCacheStats(self.STAT2)
+ stats_diff = stats2 - stats1
+ self.assertEqual(stats_diff.hit_rates(), (0.9, 0.05, 0.05))
+
+ def test_stats_contains_data(self):
+ stats0 = CCacheStats(self.STAT0)
+ stats1 = CCacheStats(self.STAT1)
+ stats2 = CCacheStats(self.STAT2)
+ stats_diff_zero = stats1 - stats1
+ stats_diff_negative1 = stats0 - stats1
+ stats_diff_negative2 = stats1 - stats2
+
+ self.assertFalse(stats0)
+ self.assertTrue(stats1)
+ self.assertTrue(stats2)
+ self.assertFalse(stats_diff_zero)
+ self.assertFalse(stats_diff_negative1)
+ self.assertFalse(stats_diff_negative2)
+
+ def test_stats_version32(self):
+ stat2 = CCacheStats(self.STAT2)
+ stat3 = CCacheStats(self.STAT3)
+ stats_diff = stat3 - stat2
+ self.assertEqual(
+ str(stat3),
+ "cache hit (direct) 12004\n"
+ "cache hit (preprocessed) 1786\n"
+ "cache miss 26348\n"
+ "called for link 2338\n"
+ "called for preprocessing 6313\n"
+ "compile failed 399\n"
+ "preprocessor error 390\n"
+ "bad compiler arguments 86\n"
+ "unsupported source language 66\n"
+ "autoconf compile/link 2439\n"
+ "unsupported compiler option 187\n"
+ "no input file 1068\n"
+ "files in cache 18044\n"
+ "cache size 7.5 Gbytes\n"
+ "max cache size 8.6 Gbytes",
+ )
+ self.assertEqual(
+ str(stats_diff),
+ "cache hit (direct) 10104\n"
+ "cache hit (preprocessed) 1486\n"
+ "cache miss 23748\n"
+ "called for link 1977\n"
+ "called for preprocessing 6301\n"
+ "compile failed 377\n"
+ "preprocessor error 384\n"
+ "bad compiler arguments 74\n"
+ "unsupported source language 48\n"
+ "autoconf compile/link 2319\n"
+ "unsupported compiler option 183\n"
+ "no input file 1020\n"
+ "files in cache 18044\n"
+ "cache size 7.5 Gbytes\n"
+ "max cache size 8.6 Gbytes",
+ )
+
+ def test_cache_size_shrinking(self):
+ stat4 = CCacheStats(self.STAT4)
+ stat5 = CCacheStats(self.STAT5)
+ stats_diff = stat5 - stat4
+ self.assertEqual(
+ str(stat4),
+ "cache hit (direct) 21039\n"
+ "cache hit (preprocessed) 2315\n"
+ "cache miss 39370\n"
+ "called for link 3651\n"
+ "called for preprocessing 6693\n"
+ "compile failed 723\n"
+ "ccache internal error 1\n"
+ "preprocessor error 588\n"
+ "bad compiler arguments 128\n"
+ "unsupported source language 99\n"
+ "autoconf compile/link 3669\n"
+ "unsupported compiler option 187\n"
+ "no input file 1711\n"
+ "files in cache 18313\n"
+ "cache size 6.3 Gbytes\n"
+ "max cache size 6.0 Gbytes",
+ )
+ self.assertEqual(
+ str(stat5),
+ "cache hit (direct) 21039\n"
+ "cache hit (preprocessed) 2315\n"
+ "cache miss 39372\n"
+ "called for link 3653\n"
+ "called for preprocessing 6693\n"
+ "compile failed 723\n"
+ "ccache internal error 1\n"
+ "preprocessor error 588\n"
+ "bad compiler arguments 128\n"
+ "unsupported source language 99\n"
+ "autoconf compile/link 3669\n"
+ "unsupported compiler option 187\n"
+ "no input file 1711\n"
+ "files in cache 17411\n"
+ "cache size 6.0 Gbytes\n"
+ "max cache size 6.0 Gbytes",
+ )
+ self.assertEqual(
+ str(stats_diff),
+ "cache hit (direct) 0\n"
+ "cache hit (preprocessed) 0\n"
+ "cache miss 2\n"
+ "called for link 2\n"
+ "called for preprocessing 0\n"
+ "compile failed 0\n"
+ "ccache internal error 0\n"
+ "preprocessor error 0\n"
+ "bad compiler arguments 0\n"
+ "unsupported source language 0\n"
+ "autoconf compile/link 0\n"
+ "unsupported compiler option 0\n"
+ "no input file 0\n"
+ "files in cache 17411\n"
+ "cache size 6.0 Gbytes\n"
+ "max cache size 6.0 Gbytes",
+ )
+
+ def test_stats_version33(self):
+ # Test stats for 3.3.2.
+ stat3 = CCacheStats(self.STAT3)
+ stat6 = CCacheStats(self.STAT6)
+ stats_diff = stat6 - stat3
+ self.assertEqual(
+ str(stat6),
+ "cache hit (direct) 319287\n"
+ "cache hit (preprocessed) 125987\n"
+ "cache hit rate 37\n"
+ "cache miss 749959\n"
+ "called for link 87978\n"
+ "called for preprocessing 418591\n"
+ "multiple source files 1861\n"
+ "compiler produced no output 122\n"
+ "compiler produced empty output 174\n"
+ "compile failed 14330\n"
+ "ccache internal error 1\n"
+ "preprocessor error 9459\n"
+ "can't use precompiled header 4\n"
+ "bad compiler arguments 2077\n"
+ "unsupported source language 18195\n"
+ "autoconf compile/link 51485\n"
+ "unsupported compiler option 322\n"
+ "no input file 309538\n"
+ "cleanups performed 1\n"
+ "files in cache 17358\n"
+ "cache size 15.4 Gbytes\n"
+ "max cache size 17.2 Gbytes",
+ )
+ self.assertEqual(
+ str(stat3),
+ "cache hit (direct) 12004\n"
+ "cache hit (preprocessed) 1786\n"
+ "cache miss 26348\n"
+ "called for link 2338\n"
+ "called for preprocessing 6313\n"
+ "compile failed 399\n"
+ "preprocessor error 390\n"
+ "bad compiler arguments 86\n"
+ "unsupported source language 66\n"
+ "autoconf compile/link 2439\n"
+ "unsupported compiler option 187\n"
+ "no input file 1068\n"
+ "files in cache 18044\n"
+ "cache size 7.5 Gbytes\n"
+ "max cache size 8.6 Gbytes",
+ )
+ self.assertEqual(
+ str(stats_diff),
+ "cache hit (direct) 307283\n"
+ "cache hit (preprocessed) 124201\n"
+ "cache hit rate 37\n"
+ "cache miss 723611\n"
+ "called for link 85640\n"
+ "called for preprocessing 412278\n"
+ "multiple source files 1861\n"
+ "compiler produced no output 122\n"
+ "compiler produced empty output 174\n"
+ "compile failed 13931\n"
+ "ccache internal error 1\n"
+ "preprocessor error 9069\n"
+ "can't use precompiled header 4\n"
+ "bad compiler arguments 1991\n"
+ "unsupported source language 18129\n"
+ "autoconf compile/link 49046\n"
+ "unsupported compiler option 135\n"
+ "no input file 308470\n"
+ "cleanups performed 1\n"
+ "files in cache 17358\n"
+ "cache size 15.4 Gbytes\n"
+ "max cache size 17.2 Gbytes",
+ )
+
+ # Test stats for 3.3.3.
+ stat7 = CCacheStats(self.STAT7)
+ self.assertEqual(
+ str(stat7),
+ "cache hit (direct) 27035\n"
+ "cache hit (preprocessed) 13939\n"
+ "cache hit rate 39\n"
+ "cache miss 62630\n"
+ "called for link 1280\n"
+ "called for preprocessing 736\n"
+ "compile failed 550\n"
+ "preprocessor error 638\n"
+ "bad compiler arguments 20\n"
+ "autoconf compile/link 1751\n"
+ "unsupported code directive 2\n"
+ "no input file 2378\n"
+ "cleanups performed 1792\n"
+ "files in cache 3479\n"
+ "cache size 4.4 Gbytes\n"
+ "max cache size 5.0 Gbytes",
+ )
+
+ def test_stats_version34(self):
+ # Test parsing 3.4 output.
+ stat8 = CCacheStats(self.STAT8)
+ self.assertEqual(
+ str(stat8),
+ f"stats zeroed {int(TIMESTAMP)}\n"
+ "cache hit (direct) 571\n"
+ "cache hit (preprocessed) 1203\n"
+ "cache hit rate 13\n"
+ "cache miss 11747\n"
+ "called for link 623\n"
+ "called for preprocessing 7194\n"
+ "compile failed 32\n"
+ "preprocessor error 137\n"
+ "bad compiler arguments 4\n"
+ "autoconf compile/link 348\n"
+ "no input file 162\n"
+ "cleanups performed 77\n"
+ "files in cache 13464\n"
+ "cache size 6.2 Gbytes\n"
+ "max cache size 7.0 Gbytes",
+ )
+
+ def test_stats_version35(self):
+ # Test parsing 3.5 output.
+ stat9 = CCacheStats(self.STAT9)
+ self.assertEqual(
+ str(stat9),
+ f"stats zeroed {int(TIMESTAMP)}\n"
+ f"stats updated {int(TIMESTAMP2)}\n"
+ "cache hit (direct) 80147\n"
+ "cache hit (preprocessed) 21413\n"
+ "cache hit rate 34\n"
+ "cache miss 191128\n"
+ "called for link 5194\n"
+ "called for preprocessing 1721\n"
+ "compile failed 825\n"
+ "preprocessor error 3838\n"
+ "cache file missing 4863\n"
+ "bad compiler arguments 32\n"
+ "autoconf compile/link 3554\n"
+ "unsupported code directive 4\n"
+ "no input file 5545\n"
+ "cleanups performed 3154\n"
+ "files in cache 18525\n"
+ "cache size 13.4 Gbytes\n"
+ "max cache size 15.0 Gbytes",
+ )
+
+ def test_stats_version37(self):
+ # verify version checks
+ self.assertFalse(CCacheStats._is_version_3_7_or_newer(self.VERSION_3_5_GIT))
+ self.assertTrue(CCacheStats._is_version_3_7_or_newer(self.VERSION_4_2))
+ self.assertTrue(CCacheStats._is_version_3_7_or_newer(self.VERSION_4_4))
+ self.assertTrue(CCacheStats._is_version_3_7_or_newer(self.VERSION_4_4_2))
+ self.assertTrue(CCacheStats._is_version_3_7_or_newer(self.VERSION_4_5))
+
+ # Test parsing 3.7+ output.
+ stat10 = CCacheStats(self.STAT10, True)
+ self.assertEqual(
+ str(stat10),
+ "stats zeroed 0\n"
+ f"stats updated {int(TIMESTAMP)}\n"
+ "cache hit (direct) 197\n"
+ "cache hit (preprocessed) 719\n"
+ "cache hit rate 9\n"
+ "cache miss 8427\n"
+ "called for link 569\n"
+ "called for preprocessing 110\n"
+ "multiple source files 0\n"
+ "compiler produced stdout 0\n"
+ "compiler produced no output 0\n"
+ "compiler produced empty output 0\n"
+ "compile failed 49\n"
+ "ccache internal error 1\n"
+ "preprocessor error 90\n"
+ "can't use precompiled header 0\n"
+ "couldn't find the compiler 0\n"
+ "cache file missing 1\n"
+ "bad compiler arguments 6\n"
+ "unsupported source language 0\n"
+ "compiler check failed 0\n"
+ "autoconf compile/link 418\n"
+ "unsupported code directive 1\n"
+ "unsupported compiler option 0\n"
+ "output to stdout 0\n"
+ "no input file 9\n"
+ "error hashing extra file 0\n"
+ "cleanups performed 161\n"
+ "files in cache 4425\n"
+ "cache size 4.4 Gbytes",
+ )
+
+ stat11 = CCacheStats(self.STAT11, True)
+ self.assertEqual(
+ str(stat11),
+ f"stats zeroed {int(TIMESTAMP2)}\n"
+ f"stats updated {int(TIMESTAMP)}\n"
+ "cache hit (direct) 0\n"
+ "cache hit (preprocessed) 0\n"
+ "cache hit rate 0\n"
+ "cache miss 0\n"
+ "called for link 0\n"
+ "called for preprocessing 0\n"
+ "multiple source files 0\n"
+ "compiler produced stdout 0\n"
+ "compiler produced no output 0\n"
+ "compiler produced empty output 0\n"
+ "compile failed 0\n"
+ "ccache internal error 0\n"
+ "preprocessor error 0\n"
+ "can't use precompiled header 0\n"
+ "couldn't find the compiler 0\n"
+ "cache file missing 0\n"
+ "bad compiler arguments 0\n"
+ "unsupported source language 0\n"
+ "compiler check failed 0\n"
+ "autoconf compile/link 0\n"
+ "unsupported code directive 0\n"
+ "unsupported compiler option 0\n"
+ "output to stdout 0\n"
+ "no input file 0\n"
+ "error hashing extra file 0\n"
+ "cleanups performed 16\n"
+ "files in cache 0\n"
+ "cache size 0.0 Kbytes",
+ )
+
+ stat12 = CCacheStats(self.STAT12, True)
+ self.assertEqual(
+ str(stat12),
+ "stats zeroed 0\n"
+ "stats updated 0\n"
+ "cache hit (direct) 0\n"
+ "cache hit (preprocessed) 0\n"
+ "cache hit rate 0\n"
+ "cache miss 0\n"
+ "called for link 0\n"
+ "called for preprocessing 0\n"
+ "multiple source files 0\n"
+ "compiler produced stdout 0\n"
+ "compiler produced no output 0\n"
+ "compiler produced empty output 0\n"
+ "compile failed 0\n"
+ "ccache internal error 0\n"
+ "preprocessor error 0\n"
+ "can't use precompiled header 0\n"
+ "couldn't find the compiler 0\n"
+ "cache file missing 0\n"
+ "bad compiler arguments 0\n"
+ "unsupported source language 0\n"
+ "compiler check failed 0\n"
+ "autoconf compile/link 0\n"
+ "unsupported code directive 0\n"
+ "unsupported compiler option 0\n"
+ "output to stdout 0\n"
+ "no input file 0\n"
+ "error hashing extra file 0\n"
+ "cleanups performed 16\n"
+ "files in cache 0\n"
+ "cache size 0.0 Kbytes",
+ )
+
+ stat13 = CCacheStats(self.STAT13, True)
+ self.assertEqual(
+ str(stat13),
+ f"stats zeroed {int(TIMESTAMP2)}\n"
+ f"stats updated {int(TIMESTAMP)}\n"
+ "cache hit (direct) 280542\n"
+ "cache hit (preprocessed) 0\n"
+ "cache hit rate 41\n"
+ "cache miss 387653\n"
+ "called for link 0\n"
+ "called for preprocessing 0\n"
+ "multiple source files 0\n"
+ "compiler produced stdout 0\n"
+ "compiler produced no output 0\n"
+ "compiler produced empty output 0\n"
+ "compile failed 1665\n"
+ "ccache internal error 1\n"
+ "preprocessor error 0\n"
+ "can't use precompiled header 0\n"
+ "couldn't find the compiler 0\n"
+ "cache file missing 0\n"
+ "bad compiler arguments 0\n"
+ "unsupported source language 0\n"
+ "compiler check failed 0\n"
+ "autoconf compile/link 0\n"
+ "unsupported code directive 0\n"
+ "unsupported compiler option 0\n"
+ "output to stdout 0\n"
+ "no input file 2\n"
+ "error hashing extra file 0\n"
+ "cleanups performed 364\n"
+ "files in cache 335104\n"
+ "cache size 17.4 Gbytes",
+ )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/controller/test_clobber.py b/python/mozbuild/mozbuild/test/controller/test_clobber.py
new file mode 100644
index 0000000000..fff3c5a438
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/controller/test_clobber.py
@@ -0,0 +1,214 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import shutil
+import tempfile
+import unittest
+
+from mozunit import main
+
+from mozbuild.base import MozbuildObject
+from mozbuild.controller.building import BuildDriver
+from mozbuild.controller.clobber import Clobberer
+from mozbuild.test.common import prepare_tmp_topsrcdir
+
+
+class TestClobberer(unittest.TestCase):
+ def setUp(self):
+ self._temp_dirs = []
+ self._old_env = dict(os.environ)
+ os.environ.pop("MOZCONFIG", None)
+ os.environ.pop("MOZ_OBJDIR", None)
+
+ return unittest.TestCase.setUp(self)
+
+ def tearDown(self):
+ os.environ.clear()
+ os.environ.update(self._old_env)
+
+ for d in self._temp_dirs:
+ shutil.rmtree(d, ignore_errors=True)
+
+ return unittest.TestCase.tearDown(self)
+
+ def get_tempdir(self):
+ t = tempfile.mkdtemp()
+ self._temp_dirs.append(t)
+ return t
+
+ def get_topsrcdir(self):
+ t = self.get_tempdir()
+ prepare_tmp_topsrcdir(t)
+ p = os.path.join(t, "CLOBBER")
+ with open(p, "a"):
+ pass
+
+ return t
+
+ def test_no_objdir(self):
+ """If topobjdir does not exist, no clobber is needed."""
+
+ tmp = os.path.join(self.get_tempdir(), "topobjdir")
+ self.assertFalse(os.path.exists(tmp))
+
+ c = Clobberer(self.get_topsrcdir(), tmp)
+ self.assertFalse(c.clobber_needed())
+
+ required, performed, reason = c.maybe_do_clobber(os.getcwd(), True)
+ self.assertFalse(required)
+ self.assertFalse(performed)
+ self.assertIsNone(reason)
+
+ self.assertFalse(os.path.isdir(tmp))
+ self.assertFalse(os.path.exists(os.path.join(tmp, "CLOBBER")))
+
+ def test_objdir_no_clobber_file(self):
+ """If CLOBBER does not exist in topobjdir, treat as empty."""
+
+ c = Clobberer(self.get_topsrcdir(), self.get_tempdir())
+ self.assertFalse(c.clobber_needed())
+
+ required, performed, reason = c.maybe_do_clobber(os.getcwd(), True)
+ self.assertFalse(required)
+ self.assertFalse(performed)
+ self.assertIsNone(reason)
+
+ self.assertFalse(os.path.exists(os.path.join(c.topobjdir, "CLOBBER")))
+
+ def test_objdir_clobber_newer(self):
+ """If CLOBBER in topobjdir is newer, do nothing."""
+
+ c = Clobberer(self.get_topsrcdir(), self.get_tempdir())
+ with open(c.obj_clobber, "a"):
+ pass
+
+ required, performed, reason = c.maybe_do_clobber(os.getcwd(), True)
+ self.assertFalse(required)
+ self.assertFalse(performed)
+ self.assertIsNone(reason)
+
+ def test_objdir_clobber_older(self):
+ """If CLOBBER in topobjdir is older, we clobber."""
+
+ c = Clobberer(self.get_topsrcdir(), self.get_tempdir())
+ with open(c.obj_clobber, "a"):
+ pass
+
+ dummy_path = os.path.join(c.topobjdir, "foo")
+ with open(dummy_path, "a"):
+ pass
+
+ self.assertTrue(os.path.exists(dummy_path))
+
+ old_time = os.path.getmtime(c.src_clobber) - 60
+ os.utime(c.obj_clobber, (old_time, old_time))
+
+ self.assertTrue(c.clobber_needed())
+
+ required, performed, reason = c.maybe_do_clobber(os.getcwd(), True)
+ self.assertTrue(required)
+ self.assertTrue(performed)
+
+ self.assertFalse(os.path.exists(dummy_path))
+ self.assertFalse(os.path.exists(c.obj_clobber))
+
+ def test_objdir_is_srcdir(self):
+ """If topobjdir is the topsrcdir, refuse to clobber."""
+
+ tmp = self.get_topsrcdir()
+ c = Clobberer(tmp, tmp)
+
+ self.assertFalse(c.clobber_needed())
+
+ def test_cwd_is_topobjdir(self):
+ """If cwd is topobjdir, we can still clobber."""
+ c = Clobberer(self.get_topsrcdir(), self.get_tempdir())
+
+ with open(c.obj_clobber, "a"):
+ pass
+
+ dummy_file = os.path.join(c.topobjdir, "dummy_file")
+ with open(dummy_file, "a"):
+ pass
+
+ dummy_dir = os.path.join(c.topobjdir, "dummy_dir")
+ os.mkdir(dummy_dir)
+
+ self.assertTrue(os.path.exists(dummy_file))
+ self.assertTrue(os.path.isdir(dummy_dir))
+
+ old_time = os.path.getmtime(c.src_clobber) - 60
+ os.utime(c.obj_clobber, (old_time, old_time))
+
+ self.assertTrue(c.clobber_needed())
+
+ required, performed, reason = c.maybe_do_clobber(c.topobjdir, True)
+ self.assertTrue(required)
+ self.assertTrue(performed)
+
+ self.assertFalse(os.path.exists(dummy_file))
+ self.assertFalse(os.path.exists(dummy_dir))
+
+ def test_cwd_under_topobjdir(self):
+ """If cwd is under topobjdir, we can't clobber."""
+
+ c = Clobberer(self.get_topsrcdir(), self.get_tempdir())
+
+ with open(c.obj_clobber, "a"):
+ pass
+
+ old_time = os.path.getmtime(c.src_clobber) - 60
+ os.utime(c.obj_clobber, (old_time, old_time))
+
+ d = os.path.join(c.topobjdir, "dummy_dir")
+ os.mkdir(d)
+
+ required, performed, reason = c.maybe_do_clobber(d, True)
+ self.assertTrue(required)
+ self.assertFalse(performed)
+ self.assertIn("Cannot clobber while the shell is inside", reason)
+
+ def test_mozconfig_opt_in(self):
+ """Auto clobber iff AUTOCLOBBER is in the environment."""
+
+ topsrcdir = self.get_topsrcdir()
+ topobjdir = self.get_tempdir()
+
+ obj_clobber = os.path.join(topobjdir, "CLOBBER")
+ with open(obj_clobber, "a"):
+ pass
+
+ dummy_file = os.path.join(topobjdir, "dummy_file")
+ with open(dummy_file, "a"):
+ pass
+
+ self.assertTrue(os.path.exists(dummy_file))
+
+ old_time = os.path.getmtime(os.path.join(topsrcdir, "CLOBBER")) - 60
+ os.utime(obj_clobber, (old_time, old_time))
+
+ # Check auto clobber is off by default
+ env = dict(os.environ)
+ if env.get("AUTOCLOBBER", False):
+ del env["AUTOCLOBBER"]
+
+ mbo = MozbuildObject(topsrcdir, None, None, topobjdir)
+ build = mbo._spawn(BuildDriver)
+
+ status = build._check_clobber(build.mozconfig, env)
+
+ self.assertEqual(status, True)
+ self.assertTrue(os.path.exists(dummy_file))
+
+ # Check auto clobber opt-in works
+ env["AUTOCLOBBER"] = "1"
+
+ status = build._check_clobber(build.mozconfig, env)
+ self.assertFalse(status)
+ self.assertFalse(os.path.exists(dummy_file))
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/data/Makefile b/python/mozbuild/mozbuild/test/data/Makefile
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/data/Makefile
diff --git a/python/mozbuild/mozbuild/test/data/bad.properties b/python/mozbuild/mozbuild/test/data/bad.properties
new file mode 100644
index 0000000000..d4d8109b69
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/data/bad.properties
@@ -0,0 +1,12 @@
+# A region.properties file with invalid unicode byte sequences. The
+# sequences were cribbed from Markus Kuhn's "UTF-8 decoder capability
+# and stress test", available at
+# http://www.cl.cam.ac.uk/~mgk25/ucs/examples/UTF-8-test.txt
+
+# 3.5 Impossible bytes |
+# |
+# The following two bytes cannot appear in a correct UTF-8 string |
+# |
+# 3.5.1 fe = "þ" |
+# 3.5.2 ff = "ÿ" |
+# 3.5.3 fe fe ff ff = "þþÿÿ" |
diff --git a/python/mozbuild/mozbuild/test/data/test-dir/Makefile b/python/mozbuild/mozbuild/test/data/test-dir/Makefile
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/data/test-dir/Makefile
diff --git a/python/mozbuild/mozbuild/test/data/test-dir/with/Makefile b/python/mozbuild/mozbuild/test/data/test-dir/with/Makefile
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/data/test-dir/with/Makefile
diff --git a/python/mozbuild/mozbuild/test/data/test-dir/with/without/with/Makefile b/python/mozbuild/mozbuild/test/data/test-dir/with/without/with/Makefile
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/data/test-dir/with/without/with/Makefile
diff --git a/python/mozbuild/mozbuild/test/data/test-dir/without/with/Makefile b/python/mozbuild/mozbuild/test/data/test-dir/without/with/Makefile
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/data/test-dir/without/with/Makefile
diff --git a/python/mozbuild/mozbuild/test/data/valid.properties b/python/mozbuild/mozbuild/test/data/valid.properties
new file mode 100644
index 0000000000..db64bf2eed
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/data/valid.properties
@@ -0,0 +1,11 @@
+# A region.properties file with unicode characters.
+
+# Danish.
+# #### ~~ Søren Munk Skrøder, sskroeder - 2009-05-30 @ #mozmae
+
+# Korean.
+A.title=한메ì¼
+
+# Russian.
+list.0 = test
+list.1 = ЯндекÑ
diff --git a/python/mozbuild/mozbuild/test/frontend/__init__.py b/python/mozbuild/mozbuild/test/frontend/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/__init__.py
diff --git a/python/mozbuild/mozbuild/test/frontend/data/allow-compiler-warnings/moz.build b/python/mozbuild/mozbuild/test/frontend/data/allow-compiler-warnings/moz.build
new file mode 100644
index 0000000000..0bf5b55ecb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/allow-compiler-warnings/moz.build
@@ -0,0 +1,20 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def AllowCompilerWarnings():
+ COMPILE_FLAGS["WARNINGS_AS_ERRORS"] = []
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+Library("dummy")
+
+UNIFIED_SOURCES += ["test1.c"]
+
+AllowCompilerWarnings()
diff --git a/python/mozbuild/mozbuild/test/frontend/data/allow-compiler-warnings/test1.c b/python/mozbuild/mozbuild/test/frontend/data/allow-compiler-warnings/test1.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/allow-compiler-warnings/test1.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/asflags/moz.build b/python/mozbuild/mozbuild/test/frontend/data/asflags/moz.build
new file mode 100644
index 0000000000..80f48a7d81
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/asflags/moz.build
@@ -0,0 +1,15 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+Library("dummy")
+
+SOURCES += ["test1.c", "test2.S"]
+
+ASFLAGS += ["-no-integrated-as"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/asflags/test1.c b/python/mozbuild/mozbuild/test/frontend/data/asflags/test1.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/asflags/test1.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/asflags/test2.S b/python/mozbuild/mozbuild/test/frontend/data/asflags/test2.S
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/asflags/test2.S
diff --git a/python/mozbuild/mozbuild/test/frontend/data/branding-files/bar.ico b/python/mozbuild/mozbuild/test/frontend/data/branding-files/bar.ico
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/branding-files/bar.ico
diff --git a/python/mozbuild/mozbuild/test/frontend/data/branding-files/baz.png b/python/mozbuild/mozbuild/test/frontend/data/branding-files/baz.png
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/branding-files/baz.png
diff --git a/python/mozbuild/mozbuild/test/frontend/data/branding-files/foo.xpm b/python/mozbuild/mozbuild/test/frontend/data/branding-files/foo.xpm
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/branding-files/foo.xpm
diff --git a/python/mozbuild/mozbuild/test/frontend/data/branding-files/moz.build b/python/mozbuild/mozbuild/test/frontend/data/branding-files/moz.build
new file mode 100644
index 0000000000..65f22d578b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/branding-files/moz.build
@@ -0,0 +1,12 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+BRANDING_FILES += [
+ "bar.ico",
+ "baz.png",
+ "foo.xpm",
+]
+
+BRANDING_FILES.icons += [
+ "quux.icns",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/branding-files/quux.icns b/python/mozbuild/mozbuild/test/frontend/data/branding-files/quux.icns
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/branding-files/quux.icns
diff --git a/python/mozbuild/mozbuild/test/frontend/data/compile-defines/moz.build b/python/mozbuild/mozbuild/test/frontend/data/compile-defines/moz.build
new file mode 100644
index 0000000000..65d71dae2b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/compile-defines/moz.build
@@ -0,0 +1,16 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+Library("dummy")
+
+UNIFIED_SOURCES += ["test1.c"]
+
+DEFINES["MOZ_TEST_DEFINE"] = True
+LIBRARY_DEFINES["MOZ_LIBRARY_DEFINE"] = "MOZ_TEST"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/compile-defines/test1.c b/python/mozbuild/mozbuild/test/frontend/data/compile-defines/test1.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/compile-defines/test1.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/compile-flags-field-validation/moz.build b/python/mozbuild/mozbuild/test/frontend/data/compile-flags-field-validation/moz.build
new file mode 100644
index 0000000000..70622bc4e1
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/compile-flags-field-validation/moz.build
@@ -0,0 +1,15 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+Library("dummy")
+
+COMPILE_FLAGS["STL_FLAGS"] = []
+
+UNIFIED_SOURCES += ["test1.c"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/compile-flags-field-validation/test1.c b/python/mozbuild/mozbuild/test/frontend/data/compile-flags-field-validation/test1.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/compile-flags-field-validation/test1.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/compile-flags-templates/moz.build b/python/mozbuild/mozbuild/test/frontend/data/compile-flags-templates/moz.build
new file mode 100644
index 0000000000..6e611fc598
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/compile-flags-templates/moz.build
@@ -0,0 +1,27 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+Library("dummy")
+
+
+@template
+def DisableStlWrapping():
+ COMPILE_FLAGS["STL"] = []
+
+
+@template
+def NoVisibilityFlags():
+ COMPILE_FLAGS["VISIBILITY"] = []
+
+
+UNIFIED_SOURCES += ["test1.c"]
+
+DisableStlWrapping()
+NoVisibilityFlags()
diff --git a/python/mozbuild/mozbuild/test/frontend/data/compile-flags-templates/test1.c b/python/mozbuild/mozbuild/test/frontend/data/compile-flags-templates/test1.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/compile-flags-templates/test1.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/compile-flags-type-validation/moz.build b/python/mozbuild/mozbuild/test/frontend/data/compile-flags-type-validation/moz.build
new file mode 100644
index 0000000000..31094736a7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/compile-flags-type-validation/moz.build
@@ -0,0 +1,15 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+Library("dummy")
+
+COMPILE_FLAGS["STL"] = [None, 123]
+
+UNIFIED_SOURCES += ["test1.c"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/compile-flags-type-validation/test1.c b/python/mozbuild/mozbuild/test/frontend/data/compile-flags-type-validation/test1.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/compile-flags-type-validation/test1.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/compile-flags/moz.build b/python/mozbuild/mozbuild/test/frontend/data/compile-flags/moz.build
new file mode 100644
index 0000000000..0e6f75cfa1
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/compile-flags/moz.build
@@ -0,0 +1,22 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+Library("dummy")
+
+
+@template
+def DisableStlWrapping():
+ COMPILE_FLAGS["STL"] = []
+
+
+UNIFIED_SOURCES += ["test1.c"]
+
+CXXFLAGS += ["-funroll-loops", "-Wall"]
+CFLAGS += ["-Wall", "-funroll-loops"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/compile-flags/test1.c b/python/mozbuild/mozbuild/test/frontend/data/compile-flags/test1.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/compile-flags/test1.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/compile-includes/moz.build b/python/mozbuild/mozbuild/test/frontend/data/compile-includes/moz.build
new file mode 100644
index 0000000000..10c28e2833
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/compile-includes/moz.build
@@ -0,0 +1,15 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+Library("dummy")
+
+UNIFIED_SOURCES += ["test1.c"]
+
+LOCAL_INCLUDES += ["subdir"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/compile-includes/subdir/header.h b/python/mozbuild/mozbuild/test/frontend/data/compile-includes/subdir/header.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/compile-includes/subdir/header.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/compile-includes/test1.c b/python/mozbuild/mozbuild/test/frontend/data/compile-includes/test1.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/compile-includes/test1.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/config-file-substitution/moz.build b/python/mozbuild/mozbuild/test/frontend/data/config-file-substitution/moz.build
new file mode 100644
index 0000000000..f42dc0a517
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/config-file-substitution/moz.build
@@ -0,0 +1,6 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+CONFIGURE_SUBST_FILES += ["foo"]
+CONFIGURE_SUBST_FILES += ["bar"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/Cargo.toml
new file mode 100644
index 0000000000..b080d53b5a
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/Cargo.toml
@@ -0,0 +1,18 @@
+[package]
+name = "random-crate"
+version = "0.1.0"
+authors = [
+ "The Mozilla Project Developers",
+]
+
+[lib]
+crate-type = ["staticlib"]
+
+[dependencies]
+deep-crate = { version = "0.1.0", path = "the/depths" }
+
+[profile.dev]
+panic = "abort"
+
+[profile.release]
+panic = "abort"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/moz.build b/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/moz.build
new file mode 100644
index 0000000000..de1967c519
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/moz.build
@@ -0,0 +1,19 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+@template
+def RustLibrary(name):
+ """Template for Rust libraries."""
+ Library(name)
+
+ IS_RUST_LIBRARY = True
+
+
+RustLibrary("random-crate")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/shallow/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/shallow/Cargo.toml
new file mode 100644
index 0000000000..e918f9228d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/shallow/Cargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "shallow-crate"
+version = "0.1.0"
+authors = [
+ "The Mozilla Project Developers",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/the/depths/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/the/depths/Cargo.toml
new file mode 100644
index 0000000000..cebcb38ab7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/crate-dependency-path-resolution/the/depths/Cargo.toml
@@ -0,0 +1,9 @@
+[package]
+name = "deep-crate"
+version = "0.1.0"
+authors = [
+ "The Mozilla Project Developers",
+]
+
+[dependencies]
+shallow-crate = { path = "../../shallow" }
diff --git a/python/mozbuild/mozbuild/test/frontend/data/defines/moz.build b/python/mozbuild/mozbuild/test/frontend/data/defines/moz.build
new file mode 100644
index 0000000000..6085619c58
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/defines/moz.build
@@ -0,0 +1,9 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+value = "xyz"
+DEFINES["FOO"] = True
+DEFINES["BAZ"] = '"abcd"'
+DEFINES["BAR"] = 7
+DEFINES["VALUE"] = value
+DEFINES["QUX"] = False
diff --git a/python/mozbuild/mozbuild/test/frontend/data/disable-compiler-warnings/moz.build b/python/mozbuild/mozbuild/test/frontend/data/disable-compiler-warnings/moz.build
new file mode 100644
index 0000000000..064fa09893
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/disable-compiler-warnings/moz.build
@@ -0,0 +1,20 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def DisableCompilerWarnings():
+ COMPILE_FLAGS["WARNINGS_CFLAGS"] = []
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+Library("dummy")
+
+UNIFIED_SOURCES += ["test1.c"]
+
+DisableCompilerWarnings()
diff --git a/python/mozbuild/mozbuild/test/frontend/data/disable-compiler-warnings/test1.c b/python/mozbuild/mozbuild/test/frontend/data/disable-compiler-warnings/test1.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/disable-compiler-warnings/test1.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/disable-stl-wrapping/moz.build b/python/mozbuild/mozbuild/test/frontend/data/disable-stl-wrapping/moz.build
new file mode 100644
index 0000000000..40cb3e7781
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/disable-stl-wrapping/moz.build
@@ -0,0 +1,21 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+Library("dummy")
+
+
+@template
+def DisableStlWrapping():
+ COMPILE_FLAGS["STL"] = []
+
+
+UNIFIED_SOURCES += ["test1.c"]
+
+DisableStlWrapping()
diff --git a/python/mozbuild/mozbuild/test/frontend/data/disable-stl-wrapping/test1.c b/python/mozbuild/mozbuild/test/frontend/data/disable-stl-wrapping/test1.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/disable-stl-wrapping/test1.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/dist-files-missing/install.rdf b/python/mozbuild/mozbuild/test/frontend/data/dist-files-missing/install.rdf
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/dist-files-missing/install.rdf
diff --git a/python/mozbuild/mozbuild/test/frontend/data/dist-files-missing/moz.build b/python/mozbuild/mozbuild/test/frontend/data/dist-files-missing/moz.build
new file mode 100644
index 0000000000..25961f149f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/dist-files-missing/moz.build
@@ -0,0 +1,8 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+FINAL_TARGET_PP_FILES += [
+ "install.rdf",
+ "main.js",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/dist-files/install.rdf b/python/mozbuild/mozbuild/test/frontend/data/dist-files/install.rdf
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/dist-files/install.rdf
diff --git a/python/mozbuild/mozbuild/test/frontend/data/dist-files/main.js b/python/mozbuild/mozbuild/test/frontend/data/dist-files/main.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/dist-files/main.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/dist-files/moz.build b/python/mozbuild/mozbuild/test/frontend/data/dist-files/moz.build
new file mode 100644
index 0000000000..25961f149f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/dist-files/moz.build
@@ -0,0 +1,8 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+FINAL_TARGET_PP_FILES += [
+ "install.rdf",
+ "main.js",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports-generated/foo.h b/python/mozbuild/mozbuild/test/frontend/data/exports-generated/foo.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports-generated/foo.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports-generated/moz.build b/python/mozbuild/mozbuild/test/frontend/data/exports-generated/moz.build
new file mode 100644
index 0000000000..bd3507c97b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports-generated/moz.build
@@ -0,0 +1,8 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+EXPORTS += ["foo.h"]
+EXPORTS.mozilla += ["mozilla1.h"]
+EXPORTS.mozilla += ["!mozilla2.h"]
+
+GENERATED_FILES += ["mozilla2.h"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports-generated/mozilla1.h b/python/mozbuild/mozbuild/test/frontend/data/exports-generated/mozilla1.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports-generated/mozilla1.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports-missing-generated/foo.h b/python/mozbuild/mozbuild/test/frontend/data/exports-missing-generated/foo.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports-missing-generated/foo.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports-missing-generated/moz.build b/python/mozbuild/mozbuild/test/frontend/data/exports-missing-generated/moz.build
new file mode 100644
index 0000000000..d81109d37d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports-missing-generated/moz.build
@@ -0,0 +1,5 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+EXPORTS += ["foo.h"]
+EXPORTS += ["!bar.h"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports-missing/foo.h b/python/mozbuild/mozbuild/test/frontend/data/exports-missing/foo.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports-missing/foo.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports-missing/moz.build b/python/mozbuild/mozbuild/test/frontend/data/exports-missing/moz.build
new file mode 100644
index 0000000000..3f94fbdccd
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports-missing/moz.build
@@ -0,0 +1,6 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+EXPORTS += ["foo.h"]
+EXPORTS.mozilla += ["mozilla1.h"]
+EXPORTS.mozilla += ["mozilla2.h"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports-missing/mozilla1.h b/python/mozbuild/mozbuild/test/frontend/data/exports-missing/mozilla1.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports-missing/mozilla1.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/bar.h b/python/mozbuild/mozbuild/test/frontend/data/exports/bar.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports/bar.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/baz.h b/python/mozbuild/mozbuild/test/frontend/data/exports/baz.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports/baz.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/dom1.h b/python/mozbuild/mozbuild/test/frontend/data/exports/dom1.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports/dom1.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/dom2.h b/python/mozbuild/mozbuild/test/frontend/data/exports/dom2.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports/dom2.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/dom3.h b/python/mozbuild/mozbuild/test/frontend/data/exports/dom3.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports/dom3.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/foo.h b/python/mozbuild/mozbuild/test/frontend/data/exports/foo.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports/foo.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/gfx.h b/python/mozbuild/mozbuild/test/frontend/data/exports/gfx.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports/gfx.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/mem.h b/python/mozbuild/mozbuild/test/frontend/data/exports/mem.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports/mem.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/mem2.h b/python/mozbuild/mozbuild/test/frontend/data/exports/mem2.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports/mem2.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/moz.build b/python/mozbuild/mozbuild/test/frontend/data/exports/moz.build
new file mode 100644
index 0000000000..64253b1cf0
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports/moz.build
@@ -0,0 +1,13 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+EXPORTS += ["foo.h"]
+EXPORTS += ["bar.h", "baz.h"]
+EXPORTS.mozilla += ["mozilla1.h"]
+EXPORTS.mozilla += ["mozilla2.h"]
+EXPORTS.mozilla.dom += ["dom1.h"]
+EXPORTS.mozilla.dom += ["dom2.h", "dom3.h"]
+EXPORTS.mozilla.gfx += ["gfx.h"]
+EXPORTS.vpx = ["mem.h"]
+EXPORTS.vpx += ["mem2.h"]
+EXPORTS.nspr.private = ["pprio.h", "pprthred.h"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/mozilla1.h b/python/mozbuild/mozbuild/test/frontend/data/exports/mozilla1.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports/mozilla1.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/mozilla2.h b/python/mozbuild/mozbuild/test/frontend/data/exports/mozilla2.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports/mozilla2.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/pprio.h b/python/mozbuild/mozbuild/test/frontend/data/exports/pprio.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports/pprio.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/exports/pprthred.h b/python/mozbuild/mozbuild/test/frontend/data/exports/pprthred.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/exports/pprthred.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/bad-assignment/moz.build b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/bad-assignment/moz.build
new file mode 100644
index 0000000000..693b6cc962
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/bad-assignment/moz.build
@@ -0,0 +1,2 @@
+with Files("*"):
+ BUG_COMPONENT = "bad value"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/different-matchers/moz.build b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/different-matchers/moz.build
new file mode 100644
index 0000000000..ca5c74fd6a
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/different-matchers/moz.build
@@ -0,0 +1,4 @@
+with Files("*.jsm"):
+ BUG_COMPONENT = ("Firefox", "JS")
+with Files("*.cpp"):
+ BUG_COMPONENT = ("Firefox", "C++")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/final/moz.build b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/final/moz.build
new file mode 100644
index 0000000000..9b1d05a9b0
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/final/moz.build
@@ -0,0 +1,3 @@
+with Files("**/Makefile.in"):
+ BUG_COMPONENT = ("Firefox Build System", "General")
+ FINAL = True
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/final/subcomponent/moz.build b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/final/subcomponent/moz.build
new file mode 100644
index 0000000000..9b21529812
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/final/subcomponent/moz.build
@@ -0,0 +1,2 @@
+with Files("**"):
+ BUG_COMPONENT = ("Another", "Component")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/moz.build b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/moz.build
new file mode 100644
index 0000000000..4bbca3dc09
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/moz.build
@@ -0,0 +1,2 @@
+with Files("**"):
+ BUG_COMPONENT = ("default_product", "default_component")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/simple/moz.build b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/simple/moz.build
new file mode 100644
index 0000000000..e8b99df68d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/simple/moz.build
@@ -0,0 +1,2 @@
+with Files("*"):
+ BUG_COMPONENT = ("Firefox Build System", "General")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/static/moz.build b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/static/moz.build
new file mode 100644
index 0000000000..49acf29196
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-info/bug_component/static/moz.build
@@ -0,0 +1,5 @@
+with Files("foo"):
+ BUG_COMPONENT = ("FooProduct", "FooComponent")
+
+with Files("bar"):
+ BUG_COMPONENT = ("BarProduct", "BarComponent")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/files-info/moz.build b/python/mozbuild/mozbuild/test/frontend/data/files-info/moz.build
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/files-info/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/final-target-pp-files-non-srcdir/moz.build b/python/mozbuild/mozbuild/test/frontend/data/final-target-pp-files-non-srcdir/moz.build
new file mode 100644
index 0000000000..67e5fb5dce
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/final-target-pp-files-non-srcdir/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+FINAL_TARGET_PP_FILES += [
+ "!foo.js",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files-absolute-script/moz.build b/python/mozbuild/mozbuild/test/frontend/data/generated-files-absolute-script/moz.build
new file mode 100644
index 0000000000..860f025eac
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-files-absolute-script/moz.build
@@ -0,0 +1,9 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+GENERATED_FILES += ["bar.c"]
+
+bar = GENERATED_FILES["bar.c"]
+bar.script = "/script.py:make_bar"
+bar.inputs = []
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files-absolute-script/script.py b/python/mozbuild/mozbuild/test/frontend/data/generated-files-absolute-script/script.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-files-absolute-script/script.py
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files-force/moz.build b/python/mozbuild/mozbuild/test/frontend/data/generated-files-force/moz.build
new file mode 100644
index 0000000000..33f54a17e8
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-files-force/moz.build
@@ -0,0 +1,11 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+GENERATED_FILES += [
+ "bar.c",
+ "foo.c",
+ ("xpidllex.py", "xpidlyacc.py"),
+]
+GENERATED_FILES["bar.c"].force = True
+GENERATED_FILES["foo.c"].force = False
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files-method-names/moz.build b/python/mozbuild/mozbuild/test/frontend/data/generated-files-method-names/moz.build
new file mode 100644
index 0000000000..298513383b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-files-method-names/moz.build
@@ -0,0 +1,13 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+GENERATED_FILES += ["bar.c", "foo.c"]
+
+bar = GENERATED_FILES["bar.c"]
+bar.script = "script.py:make_bar"
+bar.inputs = []
+
+foo = GENERATED_FILES["foo.c"]
+foo.script = "script.py"
+foo.inputs = []
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files-method-names/script.py b/python/mozbuild/mozbuild/test/frontend/data/generated-files-method-names/script.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-files-method-names/script.py
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-inputs/moz.build b/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-inputs/moz.build
new file mode 100644
index 0000000000..50f703c696
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-inputs/moz.build
@@ -0,0 +1,9 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+GENERATED_FILES += ["bar.c", "foo.c"]
+
+foo = GENERATED_FILES["foo.c"]
+foo.script = "script.py"
+foo.inputs = ["datafile"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-inputs/script.py b/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-inputs/script.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-inputs/script.py
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-python-script/moz.build b/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-python-script/moz.build
new file mode 100644
index 0000000000..ebdb7bfaf5
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-python-script/moz.build
@@ -0,0 +1,8 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+GENERATED_FILES += ["bar.c", "foo.c"]
+
+bar = GENERATED_FILES["bar.c"]
+bar.script = "script.rb"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-python-script/script.rb b/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-python-script/script.rb
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-python-script/script.rb
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-script/moz.build b/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-script/moz.build
new file mode 100644
index 0000000000..258a0f2325
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-files-no-script/moz.build
@@ -0,0 +1,8 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+GENERATED_FILES += ["bar.c", "foo.c"]
+
+bar = GENERATED_FILES["bar.c"]
+bar.script = "nonexistent-script.py"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-files/moz.build b/python/mozbuild/mozbuild/test/frontend/data/generated-files/moz.build
new file mode 100644
index 0000000000..97267c5d26
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-files/moz.build
@@ -0,0 +1,9 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+GENERATED_FILES += [
+ "bar.c",
+ "foo.c",
+ ("xpidllex.py", "xpidlyacc.py"),
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-sources/a.cpp b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/a.cpp
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/a.cpp
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-sources/b.cc b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/b.cc
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/b.cc
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-sources/c.cxx b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/c.cxx
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/c.cxx
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-sources/d.c b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/d.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/d.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-sources/e.m b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/e.m
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/e.m
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-sources/f.mm b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/f.mm
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/f.mm
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-sources/g.S b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/g.S
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/g.S
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-sources/h.s b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/h.s
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/h.s
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-sources/i.asm b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/i.asm
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/i.asm
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated-sources/moz.build b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/moz.build
new file mode 100644
index 0000000000..e305d9d32f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated-sources/moz.build
@@ -0,0 +1,39 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+Library("dummy")
+
+SOURCES += [
+ "!a.cpp",
+ "!b.cc",
+ "!c.cxx",
+]
+
+SOURCES += [
+ "!d.c",
+]
+
+SOURCES += [
+ "!e.m",
+]
+
+SOURCES += [
+ "!f.mm",
+]
+
+SOURCES += [
+ "!g.S",
+]
+
+SOURCES += [
+ "!h.s",
+ "!i.asm",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/generated_includes/moz.build b/python/mozbuild/mozbuild/test/frontend/data/generated_includes/moz.build
new file mode 100644
index 0000000000..31f9042c0a
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/generated_includes/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCAL_INCLUDES += ["!/bar/baz", "!foo"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-compile-flags/moz.build b/python/mozbuild/mozbuild/test/frontend/data/host-compile-flags/moz.build
new file mode 100644
index 0000000000..4225234c65
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/host-compile-flags/moz.build
@@ -0,0 +1,22 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def HostLibrary(name):
+ """Template for libraries."""
+ HOST_LIBRARY_NAME = name
+
+
+HostLibrary("dummy")
+
+HOST_SOURCES += ["test1.c"]
+
+value = "xyz"
+HOST_DEFINES["FOO"] = True
+HOST_DEFINES["BAZ"] = '"abcd"'
+HOST_DEFINES["BAR"] = 7
+HOST_DEFINES["VALUE"] = value
+HOST_DEFINES["QUX"] = False
+
+HOST_CFLAGS += ["-funroll-loops", "-host-arg"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-compile-flags/test1.c b/python/mozbuild/mozbuild/test/frontend/data/host-compile-flags/test1.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/host-compile-flags/test1.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-program-paths/final-target/moz.build b/python/mozbuild/mozbuild/test/frontend/data/host-program-paths/final-target/moz.build
new file mode 100644
index 0000000000..a2136749dc
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/host-program-paths/final-target/moz.build
@@ -0,0 +1,5 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+FINAL_TARGET = "final/target"
+HostProgram("final-target")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-program-paths/installed/moz.build b/python/mozbuild/mozbuild/test/frontend/data/host-program-paths/installed/moz.build
new file mode 100644
index 0000000000..0d10d35508
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/host-program-paths/installed/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+HostProgram("dist-host-bin")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-program-paths/moz.build b/python/mozbuild/mozbuild/test/frontend/data/host-program-paths/moz.build
new file mode 100644
index 0000000000..ef9175fa54
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/host-program-paths/moz.build
@@ -0,0 +1,14 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def HostProgram(name):
+ HOST_PROGRAM = name
+
+
+DIRS += [
+ "final-target",
+ "installed",
+ "not-installed",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-program-paths/not-installed/moz.build b/python/mozbuild/mozbuild/test/frontend/data/host-program-paths/not-installed/moz.build
new file mode 100644
index 0000000000..4a8451bc8f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/host-program-paths/not-installed/moz.build
@@ -0,0 +1,5 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIST_INSTALL = False
+HostProgram("not-installed")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-rust-libraries/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/host-rust-libraries/Cargo.toml
new file mode 100644
index 0000000000..aefcab3ddb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/host-rust-libraries/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "host-lib"
+version = "0.1.0"
+authors = [
+ "The Mozilla Project Developers",
+]
+
+[lib]
+crate-type = ["staticlib"]
+
+[profile.dev]
+panic = "abort"
+
+[profile.release]
+panic = "abort"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-rust-libraries/moz.build b/python/mozbuild/mozbuild/test/frontend/data/host-rust-libraries/moz.build
new file mode 100644
index 0000000000..37b6728ae3
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/host-rust-libraries/moz.build
@@ -0,0 +1,22 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def HostLibrary(name):
+ """Template for libraries."""
+ HOST_LIBRARY_NAME = name
+
+
+@template
+def HostRustLibrary(name, features=None):
+ """Template for Rust libraries."""
+ HostLibrary(name)
+
+ IS_RUST_LIBRARY = True
+
+ if features:
+ RUST_LIBRARY_FEATURES = features
+
+
+HostRustLibrary("host-lib")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-rust-program-no-cargo-toml/moz.build b/python/mozbuild/mozbuild/test/frontend/data/host-rust-program-no-cargo-toml/moz.build
new file mode 100644
index 0000000000..c60e731d99
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/host-rust-program-no-cargo-toml/moz.build
@@ -0,0 +1 @@
+HOST_RUST_PROGRAMS += ["none"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-rust-program-nonexistent-name/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/host-rust-program-nonexistent-name/Cargo.toml
new file mode 100644
index 0000000000..dee335937f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/host-rust-program-nonexistent-name/Cargo.toml
@@ -0,0 +1,7 @@
+[package]
+authors = ["The Mozilla Project Developers"]
+name = "testing"
+version = "0.0.1"
+
+[[bin]]
+name = "some"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-rust-program-nonexistent-name/moz.build b/python/mozbuild/mozbuild/test/frontend/data/host-rust-program-nonexistent-name/moz.build
new file mode 100644
index 0000000000..c60e731d99
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/host-rust-program-nonexistent-name/moz.build
@@ -0,0 +1 @@
+HOST_RUST_PROGRAMS += ["none"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-rust-programs/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/host-rust-programs/Cargo.toml
new file mode 100644
index 0000000000..dee335937f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/host-rust-programs/Cargo.toml
@@ -0,0 +1,7 @@
+[package]
+authors = ["The Mozilla Project Developers"]
+name = "testing"
+version = "0.0.1"
+
+[[bin]]
+name = "some"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-rust-programs/moz.build b/python/mozbuild/mozbuild/test/frontend/data/host-rust-programs/moz.build
new file mode 100644
index 0000000000..2d75958b07
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/host-rust-programs/moz.build
@@ -0,0 +1 @@
+HOST_RUST_PROGRAMS += ["some"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-sources/a.cpp b/python/mozbuild/mozbuild/test/frontend/data/host-sources/a.cpp
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/host-sources/a.cpp
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-sources/b.cc b/python/mozbuild/mozbuild/test/frontend/data/host-sources/b.cc
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/host-sources/b.cc
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-sources/c.cxx b/python/mozbuild/mozbuild/test/frontend/data/host-sources/c.cxx
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/host-sources/c.cxx
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-sources/d.c b/python/mozbuild/mozbuild/test/frontend/data/host-sources/d.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/host-sources/d.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-sources/e.mm b/python/mozbuild/mozbuild/test/frontend/data/host-sources/e.mm
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/host-sources/e.mm
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-sources/f.mm b/python/mozbuild/mozbuild/test/frontend/data/host-sources/f.mm
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/host-sources/f.mm
diff --git a/python/mozbuild/mozbuild/test/frontend/data/host-sources/moz.build b/python/mozbuild/mozbuild/test/frontend/data/host-sources/moz.build
new file mode 100644
index 0000000000..b1f5b98039
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/host-sources/moz.build
@@ -0,0 +1,27 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def HostLibrary(name):
+ """Template for libraries."""
+ HOST_LIBRARY_NAME = name
+
+
+HostLibrary("dummy")
+
+HOST_SOURCES += [
+ "a.cpp",
+ "b.cc",
+ "c.cxx",
+]
+
+HOST_SOURCES += [
+ "d.c",
+]
+
+HOST_SOURCES += [
+ "e.mm",
+ "f.mm",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-basic/included.build b/python/mozbuild/mozbuild/test/frontend/data/include-basic/included.build
new file mode 100644
index 0000000000..3532347e27
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-basic/included.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS += ["bar"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-basic/moz.build b/python/mozbuild/mozbuild/test/frontend/data/include-basic/moz.build
new file mode 100644
index 0000000000..b8e37c69ea
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-basic/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ["foo"]
+
+include("included.build")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-file-stack/included-1.build b/python/mozbuild/mozbuild/test/frontend/data/include-file-stack/included-1.build
new file mode 100644
index 0000000000..b5dc2728c6
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-file-stack/included-1.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include("included-2.build")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-file-stack/included-2.build b/python/mozbuild/mozbuild/test/frontend/data/include-file-stack/included-2.build
new file mode 100644
index 0000000000..9bfc65481d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-file-stack/included-2.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+ILLEGAL = True
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-file-stack/moz.build b/python/mozbuild/mozbuild/test/frontend/data/include-file-stack/moz.build
new file mode 100644
index 0000000000..def43513c7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-file-stack/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include("included-1.build")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-missing/moz.build b/python/mozbuild/mozbuild/test/frontend/data/include-missing/moz.build
new file mode 100644
index 0000000000..34129f7c93
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-missing/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include("missing.build")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-outside-topsrcdir/relative.build b/python/mozbuild/mozbuild/test/frontend/data/include-outside-topsrcdir/relative.build
new file mode 100644
index 0000000000..714a044436
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-outside-topsrcdir/relative.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include("../moz.build")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/child.build b/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/child.build
new file mode 100644
index 0000000000..ecae03ca7d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/child.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include("../parent.build")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/child2.build b/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/child2.build
new file mode 100644
index 0000000000..36210ba96b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/child2.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include("grandchild/grandchild.build")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/grandchild/grandchild.build b/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/grandchild/grandchild.build
new file mode 100644
index 0000000000..76dcdb899f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/child/grandchild/grandchild.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include("../../parent.build")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/parent.build b/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/parent.build
new file mode 100644
index 0000000000..eb1477d0df
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-relative-from-child/parent.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ["foo"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-topsrcdir-relative/moz.build b/python/mozbuild/mozbuild/test/frontend/data/include-topsrcdir-relative/moz.build
new file mode 100644
index 0000000000..879b832ed8
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-topsrcdir-relative/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include("/sibling.build")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/include-topsrcdir-relative/sibling.build b/python/mozbuild/mozbuild/test/frontend/data/include-topsrcdir-relative/sibling.build
new file mode 100644
index 0000000000..eb1477d0df
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/include-topsrcdir-relative/sibling.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ["foo"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/bar/moz.build b/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/bar/moz.build
new file mode 100644
index 0000000000..568f361a54
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/bar/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
diff --git a/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/foo/baz/moz.build b/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/foo/baz/moz.build
new file mode 100644
index 0000000000..9c392681c7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/foo/baz/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+XPIDL_MODULE = "baz"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/foo/moz.build b/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/foo/moz.build
new file mode 100644
index 0000000000..f3368867ad
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/foo/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+DIRS += ["baz"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/moz.build b/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/moz.build
new file mode 100644
index 0000000000..169e9d1554
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/inheriting-variables/moz.build
@@ -0,0 +1,10 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+XPIDL_MODULE = "foobar"
+export("XPIDL_MODULE")
+
+DIRS += ["foo", "bar"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/bar/moz.build b/python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/bar/moz.build
new file mode 100644
index 0000000000..b49ec1216b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/bar/moz.build
@@ -0,0 +1,14 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+PREPROCESSED_IPDL_SOURCES += [
+ "bar1.ipdl",
+]
+
+IPDL_SOURCES += [
+ "bar.ipdl",
+ "bar2.ipdlh",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/foo/moz.build b/python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/foo/moz.build
new file mode 100644
index 0000000000..c2e891572b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/foo/moz.build
@@ -0,0 +1,14 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+PREPROCESSED_IPDL_SOURCES += [
+ "foo1.ipdl",
+]
+
+IPDL_SOURCES += [
+ "foo.ipdl",
+ "foo2.ipdlh",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/moz.build b/python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/moz.build
new file mode 100644
index 0000000000..9fe7699519
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/ipdl_sources/moz.build
@@ -0,0 +1,10 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+DIRS += [
+ "bar",
+ "foo",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/jar-manifests-multiple-files/moz.build b/python/mozbuild/mozbuild/test/frontend/data/jar-manifests-multiple-files/moz.build
new file mode 100644
index 0000000000..fa61c94006
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/jar-manifests-multiple-files/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+JAR_MANIFESTS += ["jar.mn", "other.jar"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/jar-manifests/moz.build b/python/mozbuild/mozbuild/test/frontend/data/jar-manifests/moz.build
new file mode 100644
index 0000000000..d988c0ff9b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/jar-manifests/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+JAR_MANIFESTS += ["jar.mn"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/library-defines/liba/moz.build b/python/mozbuild/mozbuild/test/frontend/data/library-defines/liba/moz.build
new file mode 100644
index 0000000000..65fcc6d08e
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/library-defines/liba/moz.build
@@ -0,0 +1,5 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+Library("liba")
+LIBRARY_DEFINES["IN_LIBA"] = True
diff --git a/python/mozbuild/mozbuild/test/frontend/data/library-defines/libb/moz.build b/python/mozbuild/mozbuild/test/frontend/data/library-defines/libb/moz.build
new file mode 100644
index 0000000000..f4cf7b31a0
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/library-defines/libb/moz.build
@@ -0,0 +1,7 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+Library("libb")
+FINAL_LIBRARY = "liba"
+LIBRARY_DEFINES["IN_LIBB"] = True
+USE_LIBS += ["libd"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/library-defines/libc/moz.build b/python/mozbuild/mozbuild/test/frontend/data/library-defines/libc/moz.build
new file mode 100644
index 0000000000..022a67559d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/library-defines/libc/moz.build
@@ -0,0 +1,5 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+Library("libc")
+FINAL_LIBRARY = "libb"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/library-defines/libd/moz.build b/python/mozbuild/mozbuild/test/frontend/data/library-defines/libd/moz.build
new file mode 100644
index 0000000000..0bd94be069
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/library-defines/libd/moz.build
@@ -0,0 +1,5 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+Library("libd")
+FORCE_STATIC_LIB = True
diff --git a/python/mozbuild/mozbuild/test/frontend/data/library-defines/moz.build b/python/mozbuild/mozbuild/test/frontend/data/library-defines/moz.build
new file mode 100644
index 0000000000..dcc955cf28
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/library-defines/moz.build
@@ -0,0 +1,11 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+DIRS = ["liba", "libb", "libc", "libd"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/link-flags/moz.build b/python/mozbuild/mozbuild/test/frontend/data/link-flags/moz.build
new file mode 100644
index 0000000000..9e25efdcbf
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/link-flags/moz.build
@@ -0,0 +1,16 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+Library("dummy")
+
+UNIFIED_SOURCES += ["test1.c"]
+
+LDFLAGS += ["-Wl,-U_foo"]
+LDFLAGS += ["-framework Foo", "-x"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/link-flags/test1.c b/python/mozbuild/mozbuild/test/frontend/data/link-flags/test1.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/link-flags/test1.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/local_includes-filename/foo.h b/python/mozbuild/mozbuild/test/frontend/data/local_includes-filename/foo.h
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/local_includes-filename/foo.h
diff --git a/python/mozbuild/mozbuild/test/frontend/data/local_includes-filename/moz.build b/python/mozbuild/mozbuild/test/frontend/data/local_includes-filename/moz.build
new file mode 100644
index 0000000000..70259db75b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/local_includes-filename/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCAL_INCLUDES += ["foo.h"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/local_includes-invalid/objdir/moz.build b/python/mozbuild/mozbuild/test/frontend/data/local_includes-invalid/objdir/moz.build
new file mode 100644
index 0000000000..6dcbab537d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/local_includes-invalid/objdir/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCAL_INCLUDES += ["!/"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/local_includes-invalid/srcdir/moz.build b/python/mozbuild/mozbuild/test/frontend/data/local_includes-invalid/srcdir/moz.build
new file mode 100644
index 0000000000..6d8f6cd2af
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/local_includes-invalid/srcdir/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCAL_INCLUDES += ["/"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/local_includes/bar/baz/dummy_file_for_nonempty_directory b/python/mozbuild/mozbuild/test/frontend/data/local_includes/bar/baz/dummy_file_for_nonempty_directory
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/local_includes/bar/baz/dummy_file_for_nonempty_directory
diff --git a/python/mozbuild/mozbuild/test/frontend/data/local_includes/foo/dummy_file_for_nonempty_directory b/python/mozbuild/mozbuild/test/frontend/data/local_includes/foo/dummy_file_for_nonempty_directory
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/local_includes/foo/dummy_file_for_nonempty_directory
diff --git a/python/mozbuild/mozbuild/test/frontend/data/local_includes/moz.build b/python/mozbuild/mozbuild/test/frontend/data/local_includes/moz.build
new file mode 100644
index 0000000000..1c29ac2ea2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/local_includes/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCAL_INCLUDES += ["/bar/baz", "foo"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/localized-files-from-generated/moz.build b/python/mozbuild/mozbuild/test/frontend/data/localized-files-from-generated/moz.build
new file mode 100644
index 0000000000..491a026419
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/localized-files-from-generated/moz.build
@@ -0,0 +1,6 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCALIZED_GENERATED_FILES += ["abc.ini"]
+LOCALIZED_FILES += ["!abc.ini"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/localized-files-no-en-us/en-US/bar.ini b/python/mozbuild/mozbuild/test/frontend/data/localized-files-no-en-us/en-US/bar.ini
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/localized-files-no-en-us/en-US/bar.ini
diff --git a/python/mozbuild/mozbuild/test/frontend/data/localized-files-no-en-us/foo.js b/python/mozbuild/mozbuild/test/frontend/data/localized-files-no-en-us/foo.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/localized-files-no-en-us/foo.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/localized-files-no-en-us/inner/locales/en-US/bar.ini b/python/mozbuild/mozbuild/test/frontend/data/localized-files-no-en-us/inner/locales/en-US/bar.ini
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/localized-files-no-en-us/inner/locales/en-US/bar.ini
diff --git a/python/mozbuild/mozbuild/test/frontend/data/localized-files-no-en-us/moz.build b/python/mozbuild/mozbuild/test/frontend/data/localized-files-no-en-us/moz.build
new file mode 100644
index 0000000000..5c3efc8117
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/localized-files-no-en-us/moz.build
@@ -0,0 +1,9 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCALIZED_FILES.foo += [
+ "en-US/bar.ini",
+ "foo.js",
+ "inner/locales/en-US/bar.ini",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/localized-files-not-localized-generated/moz.build b/python/mozbuild/mozbuild/test/frontend/data/localized-files-not-localized-generated/moz.build
new file mode 100644
index 0000000000..678f503174
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/localized-files-not-localized-generated/moz.build
@@ -0,0 +1,6 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+GENERATED_FILES += ["abc.ini"]
+LOCALIZED_FILES += ["!abc.ini"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/localized-files/en-US/bar.ini b/python/mozbuild/mozbuild/test/frontend/data/localized-files/en-US/bar.ini
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/localized-files/en-US/bar.ini
diff --git a/python/mozbuild/mozbuild/test/frontend/data/localized-files/en-US/foo.js b/python/mozbuild/mozbuild/test/frontend/data/localized-files/en-US/foo.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/localized-files/en-US/foo.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/localized-files/moz.build b/python/mozbuild/mozbuild/test/frontend/data/localized-files/moz.build
new file mode 100644
index 0000000000..25a9030881
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/localized-files/moz.build
@@ -0,0 +1,9 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCALIZED_FILES.foo += [
+ "en-US/bar.ini",
+ "en-US/code/*.js",
+ "en-US/foo.js",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/localized-generated-files-final-target-files/moz.build b/python/mozbuild/mozbuild/test/frontend/data/localized-generated-files-final-target-files/moz.build
new file mode 100644
index 0000000000..48acff1447
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/localized-generated-files-final-target-files/moz.build
@@ -0,0 +1,6 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCALIZED_GENERATED_FILES += ["abc.ini"]
+FINAL_TARGET_FILES += ["!abc.ini"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/localized-generated-files-force/moz.build b/python/mozbuild/mozbuild/test/frontend/data/localized-generated-files-force/moz.build
new file mode 100644
index 0000000000..73685545de
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/localized-generated-files-force/moz.build
@@ -0,0 +1,6 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCALIZED_GENERATED_FILES += ["abc.ini", ("bar", "baz")]
+LOCALIZED_GENERATED_FILES["abc.ini"].force = True
diff --git a/python/mozbuild/mozbuild/test/frontend/data/localized-generated-files/moz.build b/python/mozbuild/mozbuild/test/frontend/data/localized-generated-files/moz.build
new file mode 100644
index 0000000000..cc306d5991
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/localized-generated-files/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCALIZED_GENERATED_FILES += ["abc.ini", ("bar", "baz")]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/localized-pp-files/en-US/bar.ini b/python/mozbuild/mozbuild/test/frontend/data/localized-pp-files/en-US/bar.ini
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/localized-pp-files/en-US/bar.ini
diff --git a/python/mozbuild/mozbuild/test/frontend/data/localized-pp-files/en-US/foo.js b/python/mozbuild/mozbuild/test/frontend/data/localized-pp-files/en-US/foo.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/localized-pp-files/en-US/foo.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/localized-pp-files/moz.build b/python/mozbuild/mozbuild/test/frontend/data/localized-pp-files/moz.build
new file mode 100644
index 0000000000..b2916a1226
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/localized-pp-files/moz.build
@@ -0,0 +1,8 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCALIZED_PP_FILES.foo += [
+ "en-US/bar.ini",
+ "en-US/foo.js",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/missing-local-includes/moz.build b/python/mozbuild/mozbuild/test/frontend/data/missing-local-includes/moz.build
new file mode 100644
index 0000000000..1c29ac2ea2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/missing-local-includes/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+LOCAL_INCLUDES += ["/bar/baz", "foo"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/missing-xpidl/moz.build b/python/mozbuild/mozbuild/test/frontend/data/missing-xpidl/moz.build
new file mode 100644
index 0000000000..e3a2a69d07
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/missing-xpidl/moz.build
@@ -0,0 +1,6 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+XPIDL_MODULE = "my_module"
+XPIDL_SOURCES = ["nonexistant.idl"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/moz.build b/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/moz.build
new file mode 100644
index 0000000000..7956580d14
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/moz.build
@@ -0,0 +1,29 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+@template
+def RustLibrary(name):
+ """Template for Rust libraries."""
+ Library(name)
+
+ IS_RUST_LIBRARY = True
+
+
+Library("test")
+
+DIRS += [
+ "rust1",
+ "rust2",
+]
+
+USE_LIBS += [
+ "rust1",
+ "rust2",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust1/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust1/Cargo.toml
new file mode 100644
index 0000000000..56273d5cf7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust1/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "rust1"
+version = "0.1.0"
+authors = [
+ "The Mozilla Project Developers",
+]
+
+[lib]
+crate-type = ["staticlib"]
+
+[profile.dev]
+panic = "abort"
+
+[profile.release]
+panic = "abort"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust1/moz.build b/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust1/moz.build
new file mode 100644
index 0000000000..0cc01e1e24
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust1/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+RustLibrary("rust1")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust2/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust2/Cargo.toml
new file mode 100644
index 0000000000..9c557f6c08
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust2/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "rust2"
+version = "0.1.0"
+authors = [
+ "The Mozilla Project Developers",
+]
+
+[lib]
+crate-type = ["staticlib"]
+
+[profile.dev]
+panic = "abort"
+
+[profile.release]
+panic = "abort"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust2/moz.build b/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust2/moz.build
new file mode 100644
index 0000000000..4ec4ea9c79
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/multiple-rust-libraries/rust2/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+RustLibrary("rust2")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/1/Test.c b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/1/Test.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/1/Test.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/1/Test.cpp b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/1/Test.cpp
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/1/Test.cpp
diff --git a/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/1/moz.build b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/1/moz.build
new file mode 100644
index 0000000000..44610a781c
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/1/moz.build
@@ -0,0 +1,4 @@
+SOURCES += [
+ "Test.c",
+ "Test.cpp",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/2/Test.cpp b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/2/Test.cpp
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/2/Test.cpp
diff --git a/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/2/moz.build b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/2/moz.build
new file mode 100644
index 0000000000..b1064ae0c0
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/2/moz.build
@@ -0,0 +1,4 @@
+SOURCES += [
+ "subdir/Test.cpp",
+ "Test.cpp",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/2/subdir/Test.cpp b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/2/subdir/Test.cpp
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/2/subdir/Test.cpp
diff --git a/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/3/Test.c b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/3/Test.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/3/Test.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/3/Test.cpp b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/3/Test.cpp
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/3/Test.cpp
diff --git a/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/3/moz.build b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/3/moz.build
new file mode 100644
index 0000000000..a225907cae
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/3/moz.build
@@ -0,0 +1,7 @@
+SOURCES += [
+ "Test.c",
+]
+
+UNIFIED_SOURCES += [
+ "Test.cpp",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/4/Test.c b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/4/Test.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/4/Test.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/4/Test.cpp b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/4/Test.cpp
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/4/Test.cpp
diff --git a/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/4/moz.build b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/4/moz.build
new file mode 100644
index 0000000000..ea5da28d88
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/object-conflicts/4/moz.build
@@ -0,0 +1,4 @@
+UNIFIED_SOURCES += [
+ "Test.c",
+ "Test.cpp",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/program-paths/dist-bin/moz.build b/python/mozbuild/mozbuild/test/frontend/data/program-paths/dist-bin/moz.build
new file mode 100644
index 0000000000..d8b952c014
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/program-paths/dist-bin/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+Program("dist-bin")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/program-paths/dist-subdir/moz.build b/python/mozbuild/mozbuild/test/frontend/data/program-paths/dist-subdir/moz.build
new file mode 100644
index 0000000000..fc2f664c01
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/program-paths/dist-subdir/moz.build
@@ -0,0 +1,5 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIST_SUBDIR = "foo"
+Program("dist-subdir")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/program-paths/final-target/moz.build b/python/mozbuild/mozbuild/test/frontend/data/program-paths/final-target/moz.build
new file mode 100644
index 0000000000..a0d5805262
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/program-paths/final-target/moz.build
@@ -0,0 +1,5 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+FINAL_TARGET = "final/target"
+Program("final-target")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/program-paths/moz.build b/python/mozbuild/mozbuild/test/frontend/data/program-paths/moz.build
new file mode 100644
index 0000000000..d1d087fd45
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/program-paths/moz.build
@@ -0,0 +1,15 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Program(name):
+ PROGRAM = name
+
+
+DIRS += [
+ "dist-bin",
+ "dist-subdir",
+ "final-target",
+ "not-installed",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/program-paths/not-installed/moz.build b/python/mozbuild/mozbuild/test/frontend/data/program-paths/not-installed/moz.build
new file mode 100644
index 0000000000..c725ab7326
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/program-paths/not-installed/moz.build
@@ -0,0 +1,5 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIST_INSTALL = False
+Program("not-installed")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/program/moz.build b/python/mozbuild/mozbuild/test/frontend/data/program/moz.build
new file mode 100644
index 0000000000..b3f7062732
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/program/moz.build
@@ -0,0 +1,18 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Program(name):
+ PROGRAM = name
+
+
+@template
+def SimplePrograms(names, ext=".cpp"):
+ SIMPLE_PROGRAMS += names
+ SOURCES += ["%s%s" % (name, ext) for name in names]
+
+
+Program("test_program")
+
+SimplePrograms(["test_program1", "test_program2"])
diff --git a/python/mozbuild/mozbuild/test/frontend/data/program/test_program1.cpp b/python/mozbuild/mozbuild/test/frontend/data/program/test_program1.cpp
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/program/test_program1.cpp
diff --git a/python/mozbuild/mozbuild/test/frontend/data/program/test_program2.cpp b/python/mozbuild/mozbuild/test/frontend/data/program/test_program2.cpp
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/program/test_program2.cpp
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-bad-dir/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-bad-dir/moz.build
new file mode 100644
index 0000000000..68581574b1
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-bad-dir/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ["foo"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-basic/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-basic/moz.build
new file mode 100644
index 0000000000..0a91c4692b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-basic/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+ILLEGAL = True
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-empty-list/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-empty-list/moz.build
new file mode 100644
index 0000000000..4dfba1c60f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-empty-list/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = []
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-error-func/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-error-func/moz.build
new file mode 100644
index 0000000000..d0f35c4c1d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-error-func/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+error("Some error.")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-included-from/child.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-included-from/child.build
new file mode 100644
index 0000000000..9bfc65481d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-included-from/child.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+ILLEGAL = True
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-included-from/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-included-from/moz.build
new file mode 100644
index 0000000000..603f3a7204
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-included-from/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include("child.build")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-missing-include/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-missing-include/moz.build
new file mode 100644
index 0000000000..34129f7c93
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-missing-include/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include("missing.build")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-outside-topsrcdir/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-outside-topsrcdir/moz.build
new file mode 100644
index 0000000000..040c1f5df1
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-outside-topsrcdir/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+include("../include-basic/moz.build")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-read-unknown-global/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-read-unknown-global/moz.build
new file mode 100644
index 0000000000..6fc10f766a
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-read-unknown-global/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+l = FOO
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-repeated-dir/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-repeated-dir/moz.build
new file mode 100644
index 0000000000..91845b337f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-repeated-dir/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ["foo"]
+
+DIRS += ["foo"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-script-error/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-script-error/moz.build
new file mode 100644
index 0000000000..a91d38b415
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-script-error/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+foo = True + None
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-syntax/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-syntax/moz.build
new file mode 100644
index 0000000000..70a0d2c066
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-syntax/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+foo =
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-write-bad-value/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-write-bad-value/moz.build
new file mode 100644
index 0000000000..2e8194b223
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-write-bad-value/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = "dir"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-error-write-unknown-global/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-error-write-unknown-global/moz.build
new file mode 100644
index 0000000000..5675031753
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-error-write-unknown-global/moz.build
@@ -0,0 +1,7 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ["dir1", "dir2"]
+
+FOO = "bar"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/a/file b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/a/file
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/a/file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/a/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/a/moz.build
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/a/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/b/file b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/b/file
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/b/file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/b/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/b/moz.build
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/b/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/moz.build
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/every-level/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/file1 b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/file1
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/file1
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/file2 b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/file2
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/file2
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/moz.build
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/no-intermediate-moz-build/child/file b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/no-intermediate-moz-build/child/file
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/no-intermediate-moz-build/child/file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/no-intermediate-moz-build/child/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/no-intermediate-moz-build/child/moz.build
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/no-intermediate-moz-build/child/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/parent-is-far/dir1/dir2/dir3/file b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/parent-is-far/dir1/dir2/dir3/file
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/parent-is-far/dir1/dir2/dir3/file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/parent-is-far/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/parent-is-far/moz.build
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d1/parent-is-far/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir1/file b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir1/file
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir1/file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir1/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir1/moz.build
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir1/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir2/file b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir2/file
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir2/file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir2/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir2/moz.build
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/dir2/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/moz.build
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/d2/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/file b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/file
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/moz.build b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/moz.build
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/reader-relevant-mozbuild/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/resolved-flags-error/moz.build b/python/mozbuild/mozbuild/test/frontend/data/resolved-flags-error/moz.build
new file mode 100644
index 0000000000..d4b9a3075d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/resolved-flags-error/moz.build
@@ -0,0 +1,17 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+Library("dummy")
+
+UNIFIED_SOURCES += ["test1.c"]
+
+DEFINES["MOZ_TEST_DEFINE"] = True
+LIBRARY_DEFINES["MOZ_LIBRARY_DEFINE"] = "MOZ_TEST"
+COMPILE_FLAGS["DEFINES"] = ["-DFOO"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/resolved-flags-error/test1.c b/python/mozbuild/mozbuild/test/frontend/data/resolved-flags-error/test1.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/resolved-flags-error/test1.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-dash-folding/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/rust-library-dash-folding/Cargo.toml
new file mode 100644
index 0000000000..fbb4ae087d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-dash-folding/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "random-crate"
+version = "0.1.0"
+authors = [
+ "The Mozilla Project Developers",
+]
+
+[lib]
+crate-type = ["staticlib"]
+
+[profile.dev]
+panic = "abort"
+
+[profile.release]
+panic = "abort"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-dash-folding/moz.build b/python/mozbuild/mozbuild/test/frontend/data/rust-library-dash-folding/moz.build
new file mode 100644
index 0000000000..de1967c519
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-dash-folding/moz.build
@@ -0,0 +1,19 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+@template
+def RustLibrary(name):
+ """Template for Rust libraries."""
+ Library(name)
+
+ IS_RUST_LIBRARY = True
+
+
+RustLibrary("random-crate")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-duplicate-features/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/rust-library-duplicate-features/Cargo.toml
new file mode 100644
index 0000000000..fbb4ae087d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-duplicate-features/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "random-crate"
+version = "0.1.0"
+authors = [
+ "The Mozilla Project Developers",
+]
+
+[lib]
+crate-type = ["staticlib"]
+
+[profile.dev]
+panic = "abort"
+
+[profile.release]
+panic = "abort"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-duplicate-features/moz.build b/python/mozbuild/mozbuild/test/frontend/data/rust-library-duplicate-features/moz.build
new file mode 100644
index 0000000000..ccd8ede3c0
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-duplicate-features/moz.build
@@ -0,0 +1,20 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+@template
+def RustLibrary(name, features):
+ """Template for Rust libraries."""
+ Library(name)
+
+ IS_RUST_LIBRARY = True
+ RUST_LIBRARY_FEATURES = features
+
+
+RustLibrary("random-crate", ["musthave", "cantlivewithout", "musthave"])
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-features/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/rust-library-features/Cargo.toml
new file mode 100644
index 0000000000..fbb4ae087d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-features/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "random-crate"
+version = "0.1.0"
+authors = [
+ "The Mozilla Project Developers",
+]
+
+[lib]
+crate-type = ["staticlib"]
+
+[profile.dev]
+panic = "abort"
+
+[profile.release]
+panic = "abort"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-features/moz.build b/python/mozbuild/mozbuild/test/frontend/data/rust-library-features/moz.build
new file mode 100644
index 0000000000..9d88bdea08
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-features/moz.build
@@ -0,0 +1,20 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+@template
+def RustLibrary(name, features):
+ """Template for Rust libraries."""
+ Library(name)
+
+ IS_RUST_LIBRARY = True
+ RUST_LIBRARY_FEATURES = features
+
+
+RustLibrary("random-crate", ["musthave", "cantlivewithout"])
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-invalid-crate-type/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/rust-library-invalid-crate-type/Cargo.toml
new file mode 100644
index 0000000000..3572550b76
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-invalid-crate-type/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "random-crate"
+version = "0.1.0"
+authors = [
+ "The Mozilla Project Developers",
+]
+
+[lib]
+crate-type = ["dylib"]
+
+[profile.dev]
+panic = "abort"
+
+[profile.release]
+panic = "abort"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-invalid-crate-type/moz.build b/python/mozbuild/mozbuild/test/frontend/data/rust-library-invalid-crate-type/moz.build
new file mode 100644
index 0000000000..de1967c519
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-invalid-crate-type/moz.build
@@ -0,0 +1,19 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+@template
+def RustLibrary(name):
+ """Template for Rust libraries."""
+ Library(name)
+
+ IS_RUST_LIBRARY = True
+
+
+RustLibrary("random-crate")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-name-mismatch/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/rust-library-name-mismatch/Cargo.toml
new file mode 100644
index 0000000000..9e05fe5cb1
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-name-mismatch/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "deterministic-crate"
+version = "0.1.0"
+authors = [
+ "The Mozilla Project Developers",
+]
+
+[profile.dev]
+panic = "abort"
+
+[profile.release]
+panic = "abort"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-name-mismatch/moz.build b/python/mozbuild/mozbuild/test/frontend/data/rust-library-name-mismatch/moz.build
new file mode 100644
index 0000000000..de1967c519
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-name-mismatch/moz.build
@@ -0,0 +1,19 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+@template
+def RustLibrary(name):
+ """Template for Rust libraries."""
+ Library(name)
+
+ IS_RUST_LIBRARY = True
+
+
+RustLibrary("random-crate")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-cargo-toml/moz.build b/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-cargo-toml/moz.build
new file mode 100644
index 0000000000..de1967c519
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-cargo-toml/moz.build
@@ -0,0 +1,19 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+@template
+def RustLibrary(name):
+ """Template for Rust libraries."""
+ Library(name)
+
+ IS_RUST_LIBRARY = True
+
+
+RustLibrary("random-crate")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-lib-section/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-lib-section/Cargo.toml
new file mode 100644
index 0000000000..0934afcc4f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-lib-section/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "random-crate"
+version = "0.1.0"
+authors = [
+ "The Mozilla Project Developers",
+]
+
+[profile.dev]
+panic = "abort"
+
+[profile.release]
+panic = "abort"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-lib-section/moz.build b/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-lib-section/moz.build
new file mode 100644
index 0000000000..de1967c519
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-library-no-lib-section/moz.build
@@ -0,0 +1,19 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+@template
+def RustLibrary(name):
+ """Template for Rust libraries."""
+ Library(name)
+
+ IS_RUST_LIBRARY = True
+
+
+RustLibrary("random-crate")
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-program-no-cargo-toml/moz.build b/python/mozbuild/mozbuild/test/frontend/data/rust-program-no-cargo-toml/moz.build
new file mode 100644
index 0000000000..56601854f9
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-program-no-cargo-toml/moz.build
@@ -0,0 +1 @@
+RUST_PROGRAMS += ["none"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-program-nonexistent-name/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/rust-program-nonexistent-name/Cargo.toml
new file mode 100644
index 0000000000..dee335937f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-program-nonexistent-name/Cargo.toml
@@ -0,0 +1,7 @@
+[package]
+authors = ["The Mozilla Project Developers"]
+name = "testing"
+version = "0.0.1"
+
+[[bin]]
+name = "some"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-program-nonexistent-name/moz.build b/python/mozbuild/mozbuild/test/frontend/data/rust-program-nonexistent-name/moz.build
new file mode 100644
index 0000000000..56601854f9
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-program-nonexistent-name/moz.build
@@ -0,0 +1 @@
+RUST_PROGRAMS += ["none"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-programs/Cargo.toml b/python/mozbuild/mozbuild/test/frontend/data/rust-programs/Cargo.toml
new file mode 100644
index 0000000000..dee335937f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-programs/Cargo.toml
@@ -0,0 +1,7 @@
+[package]
+authors = ["The Mozilla Project Developers"]
+name = "testing"
+version = "0.0.1"
+
+[[bin]]
+name = "some"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/rust-programs/moz.build b/python/mozbuild/mozbuild/test/frontend/data/rust-programs/moz.build
new file mode 100644
index 0000000000..80dc15120a
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/rust-programs/moz.build
@@ -0,0 +1 @@
+RUST_PROGRAMS += ["some"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/schedules/moz.build b/python/mozbuild/mozbuild/test/frontend/data/schedules/moz.build
new file mode 100644
index 0000000000..3f4f450d37
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/schedules/moz.build
@@ -0,0 +1,19 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+with Files("*.win"):
+ SCHEDULES.exclusive = ["windows"]
+
+with Files("*.osx"):
+ SCHEDULES.exclusive = ["macosx"]
+
+with Files("win.and.osx"):
+ # this conflicts with the previous clause and will cause an error
+ # when read
+ SCHEDULES.exclusive = ["macosx", "windows"]
+
+with Files("subd/**.py"):
+ SCHEDULES.inclusive += ["py-lint"]
+
+with Files("**/*.js"):
+ SCHEDULES.inclusive += ["js-lint"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/schedules/subd/moz.build b/python/mozbuild/mozbuild/test/frontend/data/schedules/subd/moz.build
new file mode 100644
index 0000000000..b9c3bf6c74
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/schedules/subd/moz.build
@@ -0,0 +1,5 @@
+with Files("yaml.py"):
+ SCHEDULES.inclusive += ["yaml-lint"]
+
+with Files("win.js"):
+ SCHEDULES.exclusive = ["windows"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/d.c b/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/d.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/d.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/e.m b/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/e.m
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/e.m
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/g.S b/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/g.S
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/g.S
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/h.s b/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/h.s
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/h.s
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/i.asm b/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/i.asm
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/i.asm
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/moz.build b/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/moz.build
new file mode 100644
index 0000000000..29abd6de5d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources-just-c/moz.build
@@ -0,0 +1,29 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+Library("dummy")
+
+SOURCES += [
+ "d.c",
+]
+
+SOURCES += [
+ "e.m",
+]
+
+SOURCES += [
+ "g.S",
+]
+
+SOURCES += [
+ "h.s",
+ "i.asm",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources/a.cpp b/python/mozbuild/mozbuild/test/frontend/data/sources/a.cpp
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources/a.cpp
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources/b.cc b/python/mozbuild/mozbuild/test/frontend/data/sources/b.cc
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources/b.cc
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources/c.cxx b/python/mozbuild/mozbuild/test/frontend/data/sources/c.cxx
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources/c.cxx
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources/d.c b/python/mozbuild/mozbuild/test/frontend/data/sources/d.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources/d.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources/e.m b/python/mozbuild/mozbuild/test/frontend/data/sources/e.m
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources/e.m
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources/f.mm b/python/mozbuild/mozbuild/test/frontend/data/sources/f.mm
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources/f.mm
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources/g.S b/python/mozbuild/mozbuild/test/frontend/data/sources/g.S
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources/g.S
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources/h.s b/python/mozbuild/mozbuild/test/frontend/data/sources/h.s
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources/h.s
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources/i.asm b/python/mozbuild/mozbuild/test/frontend/data/sources/i.asm
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources/i.asm
diff --git a/python/mozbuild/mozbuild/test/frontend/data/sources/moz.build b/python/mozbuild/mozbuild/test/frontend/data/sources/moz.build
new file mode 100644
index 0000000000..e25f865f72
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/sources/moz.build
@@ -0,0 +1,39 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+Library("dummy")
+
+SOURCES += [
+ "a.cpp",
+ "b.cc",
+ "c.cxx",
+]
+
+SOURCES += [
+ "d.c",
+]
+
+SOURCES += [
+ "e.m",
+]
+
+SOURCES += [
+ "f.mm",
+]
+
+SOURCES += [
+ "g.S",
+]
+
+SOURCES += [
+ "h.s",
+ "i.asm",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/templates/templates.mozbuild b/python/mozbuild/mozbuild/test/frontend/data/templates/templates.mozbuild
new file mode 100644
index 0000000000..290104bc72
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/templates/templates.mozbuild
@@ -0,0 +1,21 @@
+@template
+def Template(foo, bar=[]):
+ SOURCES += foo
+ DIRS += bar
+
+@template
+def TemplateError(foo):
+ ILLEGAL = foo
+
+@template
+def TemplateGlobalVariable():
+ SOURCES += illegal
+
+@template
+def TemplateGlobalUPPERVariable():
+ SOURCES += DIRS
+
+@template
+def TemplateInherit(foo):
+ USE_LIBS += ['foo']
+ Template(foo)
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-harness-files-root/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files-root/moz.build
new file mode 100644
index 0000000000..d7f6377d0d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files-root/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+TEST_HARNESS_FILES += ["foo.py"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/mochitest.ini b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/mochitest.ini
new file mode 100644
index 0000000000..d87114ac7d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/mochitest.ini
@@ -0,0 +1 @@
+# dummy file so the existence checks for TEST_HARNESS_FILES succeed
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/mochitest.py b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/mochitest.py
new file mode 100644
index 0000000000..d87114ac7d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/mochitest.py
@@ -0,0 +1 @@
+# dummy file so the existence checks for TEST_HARNESS_FILES succeed
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/moz.build
new file mode 100644
index 0000000000..ff3fed0ee0
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/moz.build
@@ -0,0 +1,7 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+TEST_HARNESS_FILES.mochitest += ["runtests.py"]
+TEST_HARNESS_FILES.mochitest += ["utils.py"]
+TEST_HARNESS_FILES.testing.mochitest += ["mochitest.py"]
+TEST_HARNESS_FILES.testing.mochitest += ["mochitest.ini"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/runtests.py b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/runtests.py
new file mode 100644
index 0000000000..d87114ac7d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/runtests.py
@@ -0,0 +1 @@
+# dummy file so the existence checks for TEST_HARNESS_FILES succeed
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/utils.py b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/utils.py
new file mode 100644
index 0000000000..d87114ac7d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-harness-files/utils.py
@@ -0,0 +1 @@
+# dummy file so the existence checks for TEST_HARNESS_FILES succeed
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-install-shared-lib/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-install-shared-lib/moz.build
new file mode 100644
index 0000000000..fa592c72a3
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-install-shared-lib/moz.build
@@ -0,0 +1,16 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def SharedLibrary(name):
+ LIBRARY_NAME = name
+ FORCE_SHARED_LIB = True
+
+
+DIST_INSTALL = False
+SharedLibrary("foo")
+
+TEST_HARNESS_FILES.foo.bar += [
+ "!%sfoo%s" % (CONFIG["DLL_PREFIX"], CONFIG["DLL_SUFFIX"])
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/moz.build
new file mode 100644
index 0000000000..0f84eb5554
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/moz.build
@@ -0,0 +1,14 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ["one", "two", "three"]
+
+
+@template
+def SharedLibrary(name):
+ LIBRARY_NAME = name
+ FORCE_SHARED_LIB = True
+
+
+SharedLibrary("cxx_shared")
+USE_LIBS += ["cxx_static"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/one/foo.cpp b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/one/foo.cpp
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/one/foo.cpp
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/one/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/one/moz.build
new file mode 100644
index 0000000000..f03a34c33f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/one/moz.build
@@ -0,0 +1,11 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ LIBRARY_NAME = name
+
+
+Library("cxx_static")
+SOURCES += ["foo.cpp"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/three/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/three/moz.build
new file mode 100644
index 0000000000..08e26c4eb3
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/three/moz.build
@@ -0,0 +1,5 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+SharedLibrary("just_c_shared")
+USE_LIBS += ["just_c_static"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/two/foo.c b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/two/foo.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/two/foo.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/two/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/two/moz.build
new file mode 100644
index 0000000000..d3bb738ba4
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-linkables-cxx-link/two/moz.build
@@ -0,0 +1,11 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ LIBRARY_NAME = name
+
+
+Library("just_c_static")
+SOURCES += ["foo.c"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/absolute-support.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/absolute-support.ini
new file mode 100644
index 0000000000..900f421584
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/absolute-support.ini
@@ -0,0 +1,4 @@
+[DEFAULT]
+support-files = /.well-known/foo.txt
+
+[test_file.js]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/foo.txt b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/foo.txt
new file mode 100644
index 0000000000..ce01362503
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/foo.txt
@@ -0,0 +1 @@
+hello
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/moz.build
new file mode 100644
index 0000000000..5ccb97c1bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+MOCHITEST_MANIFESTS += ["absolute-support.ini"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/test_file.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/test_file.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-absolute-support/test_file.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/bar.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/bar.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/bar.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/foo.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/foo.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/foo.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/mochitest.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/mochitest.ini
new file mode 100644
index 0000000000..2f1fc406a0
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/mochitest.ini
@@ -0,0 +1,7 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+[DEFAULT]
+support-files = bar.js foo.js bar.js
+
+[test_baz.js]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/moz.build
new file mode 100644
index 0000000000..4cc0c3d4cf
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+MOCHITEST_MANIFESTS += ["mochitest.ini"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/test_baz.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/test_baz.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-dupes/test_baz.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/included-reftest.list b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/included-reftest.list
new file mode 100644
index 0000000000..1caf9cc391
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/included-reftest.list
@@ -0,0 +1 @@
+!= reftest2.html reftest2-ref.html \ No newline at end of file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/moz.build
new file mode 100644
index 0000000000..8f321387af
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/moz.build
@@ -0,0 +1 @@
+REFTEST_MANIFESTS += ["reftest.list"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/reftest.list b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/reftest.list
new file mode 100644
index 0000000000..80caf8ffa4
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-emitted-includes/reftest.list
@@ -0,0 +1,2 @@
+== reftest1.html reftest1-ref.html
+include included-reftest.list
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-empty/empty.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-empty/empty.ini
new file mode 100644
index 0000000000..83a0cec0c6
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-empty/empty.ini
@@ -0,0 +1,2 @@
+[DEFAULT]
+foo = bar
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-empty/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-empty/moz.build
new file mode 100644
index 0000000000..486e879241
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-empty/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+MOCHITEST_MANIFESTS += ["empty.ini"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-inactive-ignored/test_inactive.html b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-inactive-ignored/test_inactive.html
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-inactive-ignored/test_inactive.html
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/common.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/common.ini
new file mode 100644
index 0000000000..753cd0ec0d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/common.ini
@@ -0,0 +1 @@
+[test_foo.html]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/mochitest.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/mochitest.ini
new file mode 100644
index 0000000000..fe0af1cd86
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/mochitest.ini
@@ -0,0 +1,3 @@
+[DEFAULT]
+
+[include:common.ini]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/moz.build
new file mode 100644
index 0000000000..4cc0c3d4cf
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+MOCHITEST_MANIFESTS += ["mochitest.ini"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/test_foo.html b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/test_foo.html
new file mode 100644
index 0000000000..18ecdcb795
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-install-includes/test_foo.html
@@ -0,0 +1 @@
+<html></html>
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/foo.txt b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/foo.txt
new file mode 100644
index 0000000000..ce01362503
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/foo.txt
@@ -0,0 +1 @@
+hello
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/just-support.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/just-support.ini
new file mode 100644
index 0000000000..efa2d4bc05
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/just-support.ini
@@ -0,0 +1,2 @@
+[DEFAULT]
+support-files = foo.txt
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/moz.build
new file mode 100644
index 0000000000..adf2a0d91c
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-just-support/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+MOCHITEST_MANIFESTS += ["just-support.ini"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y-support/dir1/bar b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y-support/dir1/bar
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y-support/dir1/bar
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y-support/foo b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y-support/foo
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y-support/foo
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y.ini
new file mode 100644
index 0000000000..9cf7989185
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/a11y.ini
@@ -0,0 +1,4 @@
+[DEFAULT]
+support-files = a11y-support/**
+
+[test_a11y.js]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/browser.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/browser.ini
new file mode 100644
index 0000000000..a81ee3acbb
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/browser.ini
@@ -0,0 +1,4 @@
+[DEFAULT]
+support-files = support1 support2
+
+[test_browser.js]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/chrome.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/chrome.ini
new file mode 100644
index 0000000000..1db07cfac9
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/chrome.ini
@@ -0,0 +1,3 @@
+[DEFAULT]
+
+[test_chrome.js]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/crashtest.list b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/crashtest.list
new file mode 100644
index 0000000000..b9d7f2685a
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/crashtest.list
@@ -0,0 +1 @@
+== crashtest1.html crashtest1-ref.html
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/metro.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/metro.ini
new file mode 100644
index 0000000000..a7eb6def41
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/metro.ini
@@ -0,0 +1,3 @@
+[DEFAULT]
+
+[test_metro.js]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/mochitest.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/mochitest.ini
new file mode 100644
index 0000000000..69fd71de0b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/mochitest.ini
@@ -0,0 +1,5 @@
+[DEFAULT]
+support-files = external1 external2
+generated-files = external1 external2
+
+[test_mochitest.js]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/moz.build
new file mode 100644
index 0000000000..9de10add3c
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/moz.build
@@ -0,0 +1,12 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+A11Y_MANIFESTS += ["a11y.ini"]
+BROWSER_CHROME_MANIFESTS += ["browser.ini"]
+METRO_CHROME_MANIFESTS += ["metro.ini"]
+MOCHITEST_MANIFESTS += ["mochitest.ini"]
+MOCHITEST_CHROME_MANIFESTS += ["chrome.ini"]
+XPCSHELL_TESTS_MANIFESTS += ["xpcshell.ini"]
+REFTEST_MANIFESTS += ["reftest.list"]
+CRASHTEST_MANIFESTS += ["crashtest.list"]
+PYTHON_UNITTEST_MANIFESTS += ["python.ini"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/python.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/python.ini
new file mode 100644
index 0000000000..97a9db6920
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/python.ini
@@ -0,0 +1 @@
+[test_foo.py]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/reftest.list b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/reftest.list
new file mode 100644
index 0000000000..3fc25b2966
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/reftest.list
@@ -0,0 +1 @@
+== reftest1.html reftest1-ref.html
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_a11y.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_a11y.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_a11y.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_browser.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_browser.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_browser.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_chrome.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_chrome.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_chrome.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_foo.py b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_foo.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_foo.py
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_metro.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_metro.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_metro.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_mochitest.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_mochitest.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_mochitest.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_xpcshell.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_xpcshell.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/test_xpcshell.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/xpcshell.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/xpcshell.ini
new file mode 100644
index 0000000000..c228c24ac1
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-keys-extracted/xpcshell.ini
@@ -0,0 +1,5 @@
+[DEFAULT]
+head = head1 head2
+dupe-manifest =
+
+[test_xpcshell.js]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-manifest/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-manifest/moz.build
new file mode 100644
index 0000000000..ec33a37d3d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-manifest/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+XPCSHELL_TESTS_MANIFESTS += ["does_not_exist.ini"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file-unfiltered/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file-unfiltered/moz.build
new file mode 100644
index 0000000000..d3878746bd
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file-unfiltered/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+XPCSHELL_TESTS_MANIFESTS += ["xpcshell.ini"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file-unfiltered/xpcshell.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file-unfiltered/xpcshell.ini
new file mode 100644
index 0000000000..9ab85c0cef
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file-unfiltered/xpcshell.ini
@@ -0,0 +1,4 @@
+[DEFAULT]
+support-files = support/**
+
+[missing.js]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file/mochitest.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file/mochitest.ini
new file mode 100644
index 0000000000..e3ef6216b7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file/mochitest.ini
@@ -0,0 +1 @@
+[test_missing.html]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file/moz.build
new file mode 100644
index 0000000000..4cc0c3d4cf
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-missing-test-file/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+MOCHITEST_MANIFESTS += ["mochitest.ini"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/child/mochitest.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/child/mochitest.ini
new file mode 100644
index 0000000000..c788224291
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/child/mochitest.ini
@@ -0,0 +1,4 @@
+[DEFAULT]
+support-files = ../support-file.txt
+
+[test_foo.js]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/child/test_foo.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/child/test_foo.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/child/test_foo.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/moz.build
new file mode 100644
index 0000000000..275a810a5e
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+MOCHITEST_MANIFESTS += ["child/mochitest.ini"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/support-file.txt b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/support-file.txt
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-parent-support-files-dir/support-file.txt
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/another-file.sjs b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/another-file.sjs
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/another-file.sjs
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/browser.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/browser.ini
new file mode 100644
index 0000000000..4f1335d6b1
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/browser.ini
@@ -0,0 +1,6 @@
+[DEFAULT]
+support-files =
+ another-file.sjs
+ data/**
+
+[test_sub.js] \ No newline at end of file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/data/one.txt b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/data/one.txt
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/data/one.txt
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/data/two.txt b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/data/two.txt
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/data/two.txt
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/test_sub.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/test_sub.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/child/test_sub.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/mochitest.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/mochitest.ini
new file mode 100644
index 0000000000..ada59d387d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/mochitest.ini
@@ -0,0 +1,9 @@
+[DEFAULT]
+support-files =
+ support-file.txt
+ !/child/test_sub.js
+ !/child/another-file.sjs
+ !/child/data/**
+ !/does/not/exist.sjs
+
+[test_foo.js]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/moz.build
new file mode 100644
index 0000000000..9df54dbc99
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/moz.build
@@ -0,0 +1,5 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+MOCHITEST_MANIFESTS += ["mochitest.ini"]
+BROWSER_CHROME_MANIFESTS += ["child/browser.ini"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/support-file.txt b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/support-file.txt
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/support-file.txt
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/test_foo.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/test_foo.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-missing/test_foo.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/another-file.sjs b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/another-file.sjs
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/another-file.sjs
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/browser.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/browser.ini
new file mode 100644
index 0000000000..4f1335d6b1
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/browser.ini
@@ -0,0 +1,6 @@
+[DEFAULT]
+support-files =
+ another-file.sjs
+ data/**
+
+[test_sub.js] \ No newline at end of file
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/data/one.txt b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/data/one.txt
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/data/one.txt
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/data/two.txt b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/data/two.txt
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/data/two.txt
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/test_sub.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/test_sub.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/child/test_sub.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/mochitest.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/mochitest.ini
new file mode 100644
index 0000000000..a9860f3de8
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/mochitest.ini
@@ -0,0 +1,8 @@
+[DEFAULT]
+support-files =
+ support-file.txt
+ !/child/test_sub.js
+ !/child/another-file.sjs
+ !/child/data/**
+
+[test_foo.js]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/moz.build
new file mode 100644
index 0000000000..9df54dbc99
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/moz.build
@@ -0,0 +1,5 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+MOCHITEST_MANIFESTS += ["mochitest.ini"]
+BROWSER_CHROME_MANIFESTS += ["child/browser.ini"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/support-file.txt b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/support-file.txt
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/support-file.txt
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/test_foo.js b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/test_foo.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-shared-support/test_foo.js
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/moz.build
new file mode 100644
index 0000000000..9d098e0eab
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/moz.build
@@ -0,0 +1,4 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+MOCHITEST_MANIFESTS += ["test.ini"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/test.ini b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/test.ini
new file mode 100644
index 0000000000..caf3911864
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/test.ini
@@ -0,0 +1,4 @@
+[DEFAULT]
+generated-files = does_not_exist
+
+[test_foo]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/test_foo b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/test_foo
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-manifest-unmatched-generated/test_foo
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir-missing-generated/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir-missing-generated/moz.build
new file mode 100644
index 0000000000..450af01d9a
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir-missing-generated/moz.build
@@ -0,0 +1,12 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def SharedLibrary(name):
+ LIBRARY_NAME = name
+ FORCE_SHARED_LIB = True
+
+
+SharedLibrary("foo")
+SYMBOLS_FILE = "!foo.symbols"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir/foo.py b/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir/foo.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir/foo.py
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir/moz.build
new file mode 100644
index 0000000000..7ea07b4ee9
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file-objdir/moz.build
@@ -0,0 +1,15 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def SharedLibrary(name):
+ LIBRARY_NAME = name
+ FORCE_SHARED_LIB = True
+
+
+SharedLibrary("foo")
+SYMBOLS_FILE = "!foo.symbols"
+
+GENERATED_FILES += ["foo.symbols"]
+GENERATED_FILES["foo.symbols"].script = "foo.py"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file/foo.symbols b/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file/foo.symbols
new file mode 100644
index 0000000000..257cc5642c
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file/foo.symbols
@@ -0,0 +1 @@
+foo
diff --git a/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file/moz.build b/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file/moz.build
new file mode 100644
index 0000000000..47e435dbf5
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/test-symbols-file/moz.build
@@ -0,0 +1,12 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def SharedLibrary(name):
+ LIBRARY_NAME = name
+ FORCE_SHARED_LIB = True
+
+
+SharedLibrary("foo")
+SYMBOLS_FILE = "foo.symbols"
diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/moz.build
new file mode 100644
index 0000000000..480808eb8a
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/moz.build
@@ -0,0 +1,6 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS += ["regular"]
+TEST_DIRS += ["test"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/parallel/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/parallel/moz.build
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/parallel/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/regular/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/regular/moz.build
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/regular/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/test/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/test/moz.build
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-all-vars/test/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-outside-topsrcdir/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-outside-topsrcdir/moz.build
new file mode 100644
index 0000000000..dbdc694a6a
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-outside-topsrcdir/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ["../../foo"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/bar/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/bar/moz.build
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/bar/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/foo/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/foo/moz.build
new file mode 100644
index 0000000000..4b42bbc5ab
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/foo/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ["../bar"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/moz.build
new file mode 100644
index 0000000000..68581574b1
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-relative-dirs/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ["foo"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/bar/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/bar/moz.build
new file mode 100644
index 0000000000..f204e245b4
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/bar/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ["../foo"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/foo/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/foo/moz.build
new file mode 100644
index 0000000000..4b42bbc5ab
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/foo/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ["../bar"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/moz.build
new file mode 100644
index 0000000000..5a9445a6e6
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-repeated-dirs/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ["foo", "bar"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/bar/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/bar/moz.build
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/bar/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/foo/biz/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/foo/biz/moz.build
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/foo/biz/moz.build
diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/foo/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/foo/moz.build
new file mode 100644
index 0000000000..3ad8a1501d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/foo/moz.build
@@ -0,0 +1,2 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+DIRS = ["biz"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/moz.build b/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/moz.build
new file mode 100644
index 0000000000..5a9445a6e6
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/traversal-simple/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIRS = ["foo", "bar"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/bar.cxx b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/bar.cxx
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/bar.cxx
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/c1.c b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/c1.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/c1.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/c2.c b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/c2.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/c2.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/foo.cpp b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/foo.cpp
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/foo.cpp
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/moz.build b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/moz.build
new file mode 100644
index 0000000000..217e43831f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/moz.build
@@ -0,0 +1,30 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+Library("dummy")
+
+UNIFIED_SOURCES += [
+ "bar.cxx",
+ "foo.cpp",
+ "quux.cc",
+]
+
+UNIFIED_SOURCES += [
+ "objc1.mm",
+ "objc2.mm",
+]
+
+UNIFIED_SOURCES += [
+ "c1.c",
+ "c2.c",
+]
+
+FILES_PER_UNIFIED_FILE = 1
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/objc1.mm b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/objc1.mm
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/objc1.mm
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/objc2.mm b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/objc2.mm
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/objc2.mm
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/quux.cc b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/quux.cc
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources-non-unified/quux.cc
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources/bar.cxx b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/bar.cxx
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/bar.cxx
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources/c1.c b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/c1.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/c1.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources/c2.c b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/c2.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/c2.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources/foo.cpp b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/foo.cpp
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/foo.cpp
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources/moz.build b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/moz.build
new file mode 100644
index 0000000000..8a86e055da
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/moz.build
@@ -0,0 +1,30 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+Library("dummy")
+
+UNIFIED_SOURCES += [
+ "bar.cxx",
+ "foo.cpp",
+ "quux.cc",
+]
+
+UNIFIED_SOURCES += [
+ "objc1.mm",
+ "objc2.mm",
+]
+
+UNIFIED_SOURCES += [
+ "c1.c",
+ "c2.c",
+]
+
+FILES_PER_UNIFIED_FILE = 32
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources/objc1.mm b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/objc1.mm
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/objc1.mm
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources/objc2.mm b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/objc2.mm
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/objc2.mm
diff --git a/python/mozbuild/mozbuild/test/frontend/data/unified-sources/quux.cc b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/quux.cc
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/unified-sources/quux.cc
diff --git a/python/mozbuild/mozbuild/test/frontend/data/use-nasm/moz.build b/python/mozbuild/mozbuild/test/frontend/data/use-nasm/moz.build
new file mode 100644
index 0000000000..63ac5283f6
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/use-nasm/moz.build
@@ -0,0 +1,15 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ LIBRARY_NAME = name
+
+
+Library("dummy")
+
+USE_NASM = True
+
+SOURCES += ["test1.S"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/use-nasm/test1.S b/python/mozbuild/mozbuild/test/frontend/data/use-nasm/test1.S
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/use-nasm/test1.S
diff --git a/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/bans.S b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/bans.S
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/bans.S
diff --git a/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/baz.def b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/baz.def
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/baz.def
diff --git a/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/moz.build b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/moz.build
new file mode 100644
index 0000000000..d080b00c92
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/moz.build
@@ -0,0 +1,13 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+DIST_INSTALL = False
+
+DELAYLOAD_DLLS = ["foo.dll", "bar.dll"]
+
+RCFILE = "foo.rc"
+RCINCLUDE = "bar.rc"
+DEFFILE = "baz.def"
+
+WIN32_EXE_LDFLAGS += ["-subsystem:console"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.c b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.cpp b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.cpp
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.cpp
diff --git a/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.mm b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.mm
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test1.mm
diff --git a/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.c b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.cpp b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.cpp
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.cpp
diff --git a/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.mm b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.mm
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/variable-passthru/test2.mm
diff --git a/python/mozbuild/mozbuild/test/frontend/data/visibility-flags/moz.build b/python/mozbuild/mozbuild/test/frontend/data/visibility-flags/moz.build
new file mode 100644
index 0000000000..630a3afd80
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/visibility-flags/moz.build
@@ -0,0 +1,21 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+
+@template
+def Library(name):
+ """Template for libraries."""
+ LIBRARY_NAME = name
+
+
+Library("dummy")
+
+
+@template
+def NoVisibilityFlags():
+ COMPILE_FLAGS["VISIBILITY"] = []
+
+
+UNIFIED_SOURCES += ["test1.c"]
+
+NoVisibilityFlags()
diff --git a/python/mozbuild/mozbuild/test/frontend/data/visibility-flags/test1.c b/python/mozbuild/mozbuild/test/frontend/data/visibility-flags/test1.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/visibility-flags/test1.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/wasm-compile-flags/moz.build b/python/mozbuild/mozbuild/test/frontend/data/wasm-compile-flags/moz.build
new file mode 100644
index 0000000000..e7cf13088f
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/wasm-compile-flags/moz.build
@@ -0,0 +1,14 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+SANDBOXED_WASM_LIBRARY_NAME = "dummy"
+
+WASM_SOURCES += ["test1.c"]
+
+value = "xyz"
+WASM_DEFINES["FOO"] = True
+WASM_DEFINES["BAZ"] = '"abcd"'
+WASM_DEFINES["BAR"] = 7
+WASM_DEFINES["VALUE"] = value
+WASM_DEFINES["QUX"] = False
+WASM_CFLAGS += ["-funroll-loops", "-wasm-arg"]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/wasm-compile-flags/test1.c b/python/mozbuild/mozbuild/test/frontend/data/wasm-compile-flags/test1.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/wasm-compile-flags/test1.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/wasm-sources/a.cpp b/python/mozbuild/mozbuild/test/frontend/data/wasm-sources/a.cpp
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/wasm-sources/a.cpp
diff --git a/python/mozbuild/mozbuild/test/frontend/data/wasm-sources/b.cc b/python/mozbuild/mozbuild/test/frontend/data/wasm-sources/b.cc
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/wasm-sources/b.cc
diff --git a/python/mozbuild/mozbuild/test/frontend/data/wasm-sources/c.cxx b/python/mozbuild/mozbuild/test/frontend/data/wasm-sources/c.cxx
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/wasm-sources/c.cxx
diff --git a/python/mozbuild/mozbuild/test/frontend/data/wasm-sources/d.c b/python/mozbuild/mozbuild/test/frontend/data/wasm-sources/d.c
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/wasm-sources/d.c
diff --git a/python/mozbuild/mozbuild/test/frontend/data/wasm-sources/moz.build b/python/mozbuild/mozbuild/test/frontend/data/wasm-sources/moz.build
new file mode 100644
index 0000000000..e266bcb0dd
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/wasm-sources/moz.build
@@ -0,0 +1,15 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+SANDBOXED_WASM_LIBRARY_NAME = "wasmSources"
+
+WASM_SOURCES += [
+ "a.cpp",
+ "b.cc",
+ "c.cxx",
+]
+
+WASM_SOURCES += [
+ "d.c",
+]
diff --git a/python/mozbuild/mozbuild/test/frontend/data/xpidl-module-no-sources/moz.build b/python/mozbuild/mozbuild/test/frontend/data/xpidl-module-no-sources/moz.build
new file mode 100644
index 0000000000..f0abd45382
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/data/xpidl-module-no-sources/moz.build
@@ -0,0 +1,5 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+XPIDL_MODULE = "xpidl_module"
diff --git a/python/mozbuild/mozbuild/test/frontend/test_context.py b/python/mozbuild/mozbuild/test/frontend/test_context.py
new file mode 100644
index 0000000000..fbf35e1c8c
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/test_context.py
@@ -0,0 +1,736 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import unittest
+
+import six
+from mozpack import path as mozpath
+from mozunit import main
+
+from mozbuild.frontend.context import (
+ FUNCTIONS,
+ SPECIAL_VARIABLES,
+ SUBCONTEXTS,
+ VARIABLES,
+ AbsolutePath,
+ Context,
+ ContextDerivedTypedHierarchicalStringList,
+ ContextDerivedTypedList,
+ ContextDerivedTypedListWithItems,
+ ContextDerivedTypedRecord,
+ Files,
+ ObjDirPath,
+ Path,
+ SourcePath,
+)
+from mozbuild.util import StrictOrderingOnAppendListWithFlagsFactory
+
+
+class TestContext(unittest.TestCase):
+ def test_defaults(self):
+ test = Context(
+ {
+ "foo": (int, int, ""),
+ "bar": (bool, bool, ""),
+ "baz": (dict, dict, ""),
+ }
+ )
+
+ self.assertEqual(list(test), [])
+
+ self.assertEqual(test["foo"], 0)
+
+ self.assertEqual(set(test.keys()), {"foo"})
+
+ self.assertEqual(test["bar"], False)
+
+ self.assertEqual(set(test.keys()), {"foo", "bar"})
+
+ self.assertEqual(test["baz"], {})
+
+ self.assertEqual(set(test.keys()), {"foo", "bar", "baz"})
+
+ with self.assertRaises(KeyError):
+ test["qux"]
+
+ self.assertEqual(set(test.keys()), {"foo", "bar", "baz"})
+
+ def test_type_check(self):
+ test = Context(
+ {
+ "foo": (int, int, ""),
+ "baz": (dict, list, ""),
+ }
+ )
+
+ test["foo"] = 5
+
+ self.assertEqual(test["foo"], 5)
+
+ with self.assertRaises(ValueError):
+ test["foo"] = {}
+
+ self.assertEqual(test["foo"], 5)
+
+ with self.assertRaises(KeyError):
+ test["bar"] = True
+
+ test["baz"] = [("a", 1), ("b", 2)]
+
+ self.assertEqual(test["baz"], {"a": 1, "b": 2})
+
+ def test_update(self):
+ test = Context(
+ {
+ "foo": (int, int, ""),
+ "bar": (bool, bool, ""),
+ "baz": (dict, list, ""),
+ }
+ )
+
+ self.assertEqual(list(test), [])
+
+ with self.assertRaises(ValueError):
+ test.update(bar=True, foo={})
+
+ self.assertEqual(list(test), [])
+
+ test.update(bar=True, foo=1)
+
+ self.assertEqual(set(test.keys()), {"foo", "bar"})
+ self.assertEqual(test["foo"], 1)
+ self.assertEqual(test["bar"], True)
+
+ test.update([("bar", False), ("foo", 2)])
+ self.assertEqual(test["foo"], 2)
+ self.assertEqual(test["bar"], False)
+
+ test.update([("foo", 0), ("baz", {"a": 1, "b": 2})])
+ self.assertEqual(test["foo"], 0)
+ self.assertEqual(test["baz"], {"a": 1, "b": 2})
+
+ test.update([("foo", 42), ("baz", [("c", 3), ("d", 4)])])
+ self.assertEqual(test["foo"], 42)
+ self.assertEqual(test["baz"], {"c": 3, "d": 4})
+
+ def test_context_paths(self):
+ test = Context()
+
+ # Newly created context has no paths.
+ self.assertIsNone(test.main_path)
+ self.assertIsNone(test.current_path)
+ self.assertEqual(test.all_paths, set())
+ self.assertEqual(test.source_stack, [])
+
+ foo = os.path.abspath("foo")
+ test.add_source(foo)
+
+ # Adding the first source makes it the main and current path.
+ self.assertEqual(test.main_path, foo)
+ self.assertEqual(test.current_path, foo)
+ self.assertEqual(test.all_paths, set([foo]))
+ self.assertEqual(test.source_stack, [foo])
+
+ bar = os.path.abspath("bar")
+ test.add_source(bar)
+
+ # Adding the second source makes leaves main and current paths alone.
+ self.assertEqual(test.main_path, foo)
+ self.assertEqual(test.current_path, foo)
+ self.assertEqual(test.all_paths, set([bar, foo]))
+ self.assertEqual(test.source_stack, [foo])
+
+ qux = os.path.abspath("qux")
+ test.push_source(qux)
+
+ # Pushing a source makes it the current path
+ self.assertEqual(test.main_path, foo)
+ self.assertEqual(test.current_path, qux)
+ self.assertEqual(test.all_paths, set([bar, foo, qux]))
+ self.assertEqual(test.source_stack, [foo, qux])
+
+ hoge = os.path.abspath("hoge")
+ test.push_source(hoge)
+ self.assertEqual(test.main_path, foo)
+ self.assertEqual(test.current_path, hoge)
+ self.assertEqual(test.all_paths, set([bar, foo, hoge, qux]))
+ self.assertEqual(test.source_stack, [foo, qux, hoge])
+
+ fuga = os.path.abspath("fuga")
+
+ # Adding a source after pushing doesn't change the source stack
+ test.add_source(fuga)
+ self.assertEqual(test.main_path, foo)
+ self.assertEqual(test.current_path, hoge)
+ self.assertEqual(test.all_paths, set([bar, foo, fuga, hoge, qux]))
+ self.assertEqual(test.source_stack, [foo, qux, hoge])
+
+ # Adding a source twice doesn't change anything
+ test.add_source(qux)
+ self.assertEqual(test.main_path, foo)
+ self.assertEqual(test.current_path, hoge)
+ self.assertEqual(test.all_paths, set([bar, foo, fuga, hoge, qux]))
+ self.assertEqual(test.source_stack, [foo, qux, hoge])
+
+ last = test.pop_source()
+
+ # Popping a source returns the last pushed one, not the last added one.
+ self.assertEqual(last, hoge)
+ self.assertEqual(test.main_path, foo)
+ self.assertEqual(test.current_path, qux)
+ self.assertEqual(test.all_paths, set([bar, foo, fuga, hoge, qux]))
+ self.assertEqual(test.source_stack, [foo, qux])
+
+ last = test.pop_source()
+ self.assertEqual(last, qux)
+ self.assertEqual(test.main_path, foo)
+ self.assertEqual(test.current_path, foo)
+ self.assertEqual(test.all_paths, set([bar, foo, fuga, hoge, qux]))
+ self.assertEqual(test.source_stack, [foo])
+
+ # Popping the main path is allowed.
+ last = test.pop_source()
+ self.assertEqual(last, foo)
+ self.assertEqual(test.main_path, foo)
+ self.assertIsNone(test.current_path)
+ self.assertEqual(test.all_paths, set([bar, foo, fuga, hoge, qux]))
+ self.assertEqual(test.source_stack, [])
+
+ # Popping past the main path asserts.
+ with self.assertRaises(AssertionError):
+ test.pop_source()
+
+ # Pushing after the main path was popped asserts.
+ with self.assertRaises(AssertionError):
+ test.push_source(foo)
+
+ test = Context()
+ test.push_source(foo)
+ test.push_source(bar)
+
+ # Pushing the same file twice is allowed.
+ test.push_source(bar)
+ test.push_source(foo)
+ self.assertEqual(last, foo)
+ self.assertEqual(test.main_path, foo)
+ self.assertEqual(test.current_path, foo)
+ self.assertEqual(test.all_paths, set([bar, foo]))
+ self.assertEqual(test.source_stack, [foo, bar, bar, foo])
+
+ def test_context_dirs(self):
+ class Config(object):
+ pass
+
+ config = Config()
+ config.topsrcdir = mozpath.abspath(os.curdir)
+ config.topobjdir = mozpath.abspath("obj")
+ test = Context(config=config)
+ foo = mozpath.abspath("foo")
+ test.push_source(foo)
+
+ self.assertEqual(test.srcdir, config.topsrcdir)
+ self.assertEqual(test.relsrcdir, "")
+ self.assertEqual(test.objdir, config.topobjdir)
+ self.assertEqual(test.relobjdir, "")
+
+ foobar = os.path.abspath("foo/bar")
+ test.push_source(foobar)
+ self.assertEqual(test.srcdir, mozpath.join(config.topsrcdir, "foo"))
+ self.assertEqual(test.relsrcdir, "foo")
+ self.assertEqual(test.objdir, config.topobjdir)
+ self.assertEqual(test.relobjdir, "")
+
+
+class TestSymbols(unittest.TestCase):
+ def _verify_doc(self, doc):
+ # Documentation should be of the format:
+ # """SUMMARY LINE
+ #
+ # EXTRA PARAGRAPHS
+ # """
+
+ self.assertNotIn("\r", doc)
+
+ lines = doc.split("\n")
+
+ # No trailing whitespace.
+ for line in lines[0:-1]:
+ self.assertEqual(line, line.rstrip())
+
+ self.assertGreater(len(lines), 0)
+ self.assertGreater(len(lines[0].strip()), 0)
+
+ # Last line should be empty.
+ self.assertEqual(lines[-1].strip(), "")
+
+ def test_documentation_formatting(self):
+ for typ, inp, doc in VARIABLES.values():
+ self._verify_doc(doc)
+
+ for attr, args, doc in FUNCTIONS.values():
+ self._verify_doc(doc)
+
+ for func, typ, doc in SPECIAL_VARIABLES.values():
+ self._verify_doc(doc)
+
+ for name, cls in SUBCONTEXTS.items():
+ self._verify_doc(cls.__doc__)
+
+ for name, v in cls.VARIABLES.items():
+ self._verify_doc(v[2])
+
+
+class TestPaths(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ class Config(object):
+ pass
+
+ cls.config = config = Config()
+ config.topsrcdir = mozpath.abspath(os.curdir)
+ config.topobjdir = mozpath.abspath("obj")
+
+ def test_path(self):
+ config = self.config
+ ctxt1 = Context(config=config)
+ ctxt1.push_source(mozpath.join(config.topsrcdir, "foo", "moz.build"))
+ ctxt2 = Context(config=config)
+ ctxt2.push_source(mozpath.join(config.topsrcdir, "bar", "moz.build"))
+
+ path1 = Path(ctxt1, "qux")
+ self.assertIsInstance(path1, SourcePath)
+ self.assertEqual(path1, "qux")
+ self.assertEqual(path1.full_path, mozpath.join(config.topsrcdir, "foo", "qux"))
+
+ path2 = Path(ctxt2, "../foo/qux")
+ self.assertIsInstance(path2, SourcePath)
+ self.assertEqual(path2, "../foo/qux")
+ self.assertEqual(path2.full_path, mozpath.join(config.topsrcdir, "foo", "qux"))
+
+ self.assertEqual(path1, path2)
+
+ self.assertEqual(
+ path1.join("../../bar/qux").full_path,
+ mozpath.join(config.topsrcdir, "bar", "qux"),
+ )
+
+ path1 = Path(ctxt1, "/qux/qux")
+ self.assertIsInstance(path1, SourcePath)
+ self.assertEqual(path1, "/qux/qux")
+ self.assertEqual(path1.full_path, mozpath.join(config.topsrcdir, "qux", "qux"))
+
+ path2 = Path(ctxt2, "/qux/qux")
+ self.assertIsInstance(path2, SourcePath)
+ self.assertEqual(path2, "/qux/qux")
+ self.assertEqual(path2.full_path, mozpath.join(config.topsrcdir, "qux", "qux"))
+
+ self.assertEqual(path1, path2)
+
+ path1 = Path(ctxt1, "!qux")
+ self.assertIsInstance(path1, ObjDirPath)
+ self.assertEqual(path1, "!qux")
+ self.assertEqual(path1.full_path, mozpath.join(config.topobjdir, "foo", "qux"))
+
+ path2 = Path(ctxt2, "!../foo/qux")
+ self.assertIsInstance(path2, ObjDirPath)
+ self.assertEqual(path2, "!../foo/qux")
+ self.assertEqual(path2.full_path, mozpath.join(config.topobjdir, "foo", "qux"))
+
+ self.assertEqual(path1, path2)
+
+ path1 = Path(ctxt1, "!/qux/qux")
+ self.assertIsInstance(path1, ObjDirPath)
+ self.assertEqual(path1, "!/qux/qux")
+ self.assertEqual(path1.full_path, mozpath.join(config.topobjdir, "qux", "qux"))
+
+ path2 = Path(ctxt2, "!/qux/qux")
+ self.assertIsInstance(path2, ObjDirPath)
+ self.assertEqual(path2, "!/qux/qux")
+ self.assertEqual(path2.full_path, mozpath.join(config.topobjdir, "qux", "qux"))
+
+ self.assertEqual(path1, path2)
+
+ path1 = Path(ctxt1, path1)
+ self.assertIsInstance(path1, ObjDirPath)
+ self.assertEqual(path1, "!/qux/qux")
+ self.assertEqual(path1.full_path, mozpath.join(config.topobjdir, "qux", "qux"))
+
+ path2 = Path(ctxt2, path2)
+ self.assertIsInstance(path2, ObjDirPath)
+ self.assertEqual(path2, "!/qux/qux")
+ self.assertEqual(path2.full_path, mozpath.join(config.topobjdir, "qux", "qux"))
+
+ self.assertEqual(path1, path2)
+
+ path1 = Path(path1)
+ self.assertIsInstance(path1, ObjDirPath)
+ self.assertEqual(path1, "!/qux/qux")
+ self.assertEqual(path1.full_path, mozpath.join(config.topobjdir, "qux", "qux"))
+
+ self.assertEqual(path1, path2)
+
+ path2 = Path(path2)
+ self.assertIsInstance(path2, ObjDirPath)
+ self.assertEqual(path2, "!/qux/qux")
+ self.assertEqual(path2.full_path, mozpath.join(config.topobjdir, "qux", "qux"))
+
+ self.assertEqual(path1, path2)
+
+ def test_source_path(self):
+ config = self.config
+ ctxt = Context(config=config)
+ ctxt.push_source(mozpath.join(config.topsrcdir, "foo", "moz.build"))
+
+ path = SourcePath(ctxt, "qux")
+ self.assertEqual(path, "qux")
+ self.assertEqual(path.full_path, mozpath.join(config.topsrcdir, "foo", "qux"))
+ self.assertEqual(path.translated, mozpath.join(config.topobjdir, "foo", "qux"))
+
+ path = SourcePath(ctxt, "../bar/qux")
+ self.assertEqual(path, "../bar/qux")
+ self.assertEqual(path.full_path, mozpath.join(config.topsrcdir, "bar", "qux"))
+ self.assertEqual(path.translated, mozpath.join(config.topobjdir, "bar", "qux"))
+
+ path = SourcePath(ctxt, "/qux/qux")
+ self.assertEqual(path, "/qux/qux")
+ self.assertEqual(path.full_path, mozpath.join(config.topsrcdir, "qux", "qux"))
+ self.assertEqual(path.translated, mozpath.join(config.topobjdir, "qux", "qux"))
+
+ with self.assertRaises(ValueError):
+ SourcePath(ctxt, "!../bar/qux")
+
+ with self.assertRaises(ValueError):
+ SourcePath(ctxt, "!/qux/qux")
+
+ path = SourcePath(path)
+ self.assertIsInstance(path, SourcePath)
+ self.assertEqual(path, "/qux/qux")
+ self.assertEqual(path.full_path, mozpath.join(config.topsrcdir, "qux", "qux"))
+ self.assertEqual(path.translated, mozpath.join(config.topobjdir, "qux", "qux"))
+
+ path = Path(path)
+ self.assertIsInstance(path, SourcePath)
+
+ def test_objdir_path(self):
+ config = self.config
+ ctxt = Context(config=config)
+ ctxt.push_source(mozpath.join(config.topsrcdir, "foo", "moz.build"))
+
+ path = ObjDirPath(ctxt, "!qux")
+ self.assertEqual(path, "!qux")
+ self.assertEqual(path.full_path, mozpath.join(config.topobjdir, "foo", "qux"))
+
+ path = ObjDirPath(ctxt, "!../bar/qux")
+ self.assertEqual(path, "!../bar/qux")
+ self.assertEqual(path.full_path, mozpath.join(config.topobjdir, "bar", "qux"))
+
+ path = ObjDirPath(ctxt, "!/qux/qux")
+ self.assertEqual(path, "!/qux/qux")
+ self.assertEqual(path.full_path, mozpath.join(config.topobjdir, "qux", "qux"))
+
+ with self.assertRaises(ValueError):
+ path = ObjDirPath(ctxt, "../bar/qux")
+
+ with self.assertRaises(ValueError):
+ path = ObjDirPath(ctxt, "/qux/qux")
+
+ path = ObjDirPath(path)
+ self.assertIsInstance(path, ObjDirPath)
+ self.assertEqual(path, "!/qux/qux")
+ self.assertEqual(path.full_path, mozpath.join(config.topobjdir, "qux", "qux"))
+
+ path = Path(path)
+ self.assertIsInstance(path, ObjDirPath)
+
+ def test_absolute_path(self):
+ config = self.config
+ ctxt = Context(config=config)
+ ctxt.push_source(mozpath.join(config.topsrcdir, "foo", "moz.build"))
+
+ path = AbsolutePath(ctxt, "%/qux")
+ self.assertEqual(path, "%/qux")
+ self.assertEqual(path.full_path, "/qux")
+
+ with self.assertRaises(ValueError):
+ path = AbsolutePath(ctxt, "%qux")
+
+ def test_path_with_mixed_contexts(self):
+ config = self.config
+ ctxt1 = Context(config=config)
+ ctxt1.push_source(mozpath.join(config.topsrcdir, "foo", "moz.build"))
+ ctxt2 = Context(config=config)
+ ctxt2.push_source(mozpath.join(config.topsrcdir, "bar", "moz.build"))
+
+ path1 = Path(ctxt1, "qux")
+ path2 = Path(ctxt2, path1)
+ self.assertEqual(path2, path1)
+ self.assertEqual(path2, "qux")
+ self.assertEqual(path2.context, ctxt1)
+ self.assertEqual(path2.full_path, mozpath.join(config.topsrcdir, "foo", "qux"))
+
+ path1 = Path(ctxt1, "../bar/qux")
+ path2 = Path(ctxt2, path1)
+ self.assertEqual(path2, path1)
+ self.assertEqual(path2, "../bar/qux")
+ self.assertEqual(path2.context, ctxt1)
+ self.assertEqual(path2.full_path, mozpath.join(config.topsrcdir, "bar", "qux"))
+
+ path1 = Path(ctxt1, "/qux/qux")
+ path2 = Path(ctxt2, path1)
+ self.assertEqual(path2, path1)
+ self.assertEqual(path2, "/qux/qux")
+ self.assertEqual(path2.context, ctxt1)
+ self.assertEqual(path2.full_path, mozpath.join(config.topsrcdir, "qux", "qux"))
+
+ path1 = Path(ctxt1, "!qux")
+ path2 = Path(ctxt2, path1)
+ self.assertEqual(path2, path1)
+ self.assertEqual(path2, "!qux")
+ self.assertEqual(path2.context, ctxt1)
+ self.assertEqual(path2.full_path, mozpath.join(config.topobjdir, "foo", "qux"))
+
+ path1 = Path(ctxt1, "!../bar/qux")
+ path2 = Path(ctxt2, path1)
+ self.assertEqual(path2, path1)
+ self.assertEqual(path2, "!../bar/qux")
+ self.assertEqual(path2.context, ctxt1)
+ self.assertEqual(path2.full_path, mozpath.join(config.topobjdir, "bar", "qux"))
+
+ path1 = Path(ctxt1, "!/qux/qux")
+ path2 = Path(ctxt2, path1)
+ self.assertEqual(path2, path1)
+ self.assertEqual(path2, "!/qux/qux")
+ self.assertEqual(path2.context, ctxt1)
+ self.assertEqual(path2.full_path, mozpath.join(config.topobjdir, "qux", "qux"))
+
+ def test_path_typed_list(self):
+ config = self.config
+ ctxt1 = Context(config=config)
+ ctxt1.push_source(mozpath.join(config.topsrcdir, "foo", "moz.build"))
+ ctxt2 = Context(config=config)
+ ctxt2.push_source(mozpath.join(config.topsrcdir, "bar", "moz.build"))
+
+ paths = [
+ "!../bar/qux",
+ "!/qux/qux",
+ "!qux",
+ "../bar/qux",
+ "/qux/qux",
+ "qux",
+ ]
+
+ MyList = ContextDerivedTypedList(Path)
+ l = MyList(ctxt1)
+ l += paths
+
+ for p_str, p_path in zip(paths, l):
+ self.assertEqual(p_str, p_path)
+ self.assertEqual(p_path, Path(ctxt1, p_str))
+ self.assertEqual(
+ p_path.join("foo"), Path(ctxt1, mozpath.join(p_str, "foo"))
+ )
+
+ l2 = MyList(ctxt2)
+ l2 += paths
+
+ for p_str, p_path in zip(paths, l2):
+ self.assertEqual(p_str, p_path)
+ self.assertEqual(p_path, Path(ctxt2, p_str))
+
+ # Assigning with Paths from another context doesn't rebase them
+ l2 = MyList(ctxt2)
+ l2 += l
+
+ for p_str, p_path in zip(paths, l2):
+ self.assertEqual(p_str, p_path)
+ self.assertEqual(p_path, Path(ctxt1, p_str))
+
+ MyListWithFlags = ContextDerivedTypedListWithItems(
+ Path,
+ StrictOrderingOnAppendListWithFlagsFactory(
+ {
+ "foo": bool,
+ }
+ ),
+ )
+ l = MyListWithFlags(ctxt1)
+ l += paths
+
+ for p in paths:
+ l[p].foo = True
+
+ for p_str, p_path in zip(paths, l):
+ self.assertEqual(p_str, p_path)
+ self.assertEqual(p_path, Path(ctxt1, p_str))
+ self.assertEqual(l[p_str].foo, True)
+ self.assertEqual(l[p_path].foo, True)
+
+ def test_path_typed_hierarchy_list(self):
+ config = self.config
+ ctxt1 = Context(config=config)
+ ctxt1.push_source(mozpath.join(config.topsrcdir, "foo", "moz.build"))
+ ctxt2 = Context(config=config)
+ ctxt2.push_source(mozpath.join(config.topsrcdir, "bar", "moz.build"))
+
+ paths = [
+ "!../bar/qux",
+ "!/qux/qux",
+ "!qux",
+ "../bar/qux",
+ "/qux/qux",
+ "qux",
+ ]
+
+ MyList = ContextDerivedTypedHierarchicalStringList(Path)
+ l = MyList(ctxt1)
+ l += paths
+ l.subdir += paths
+
+ for _, files in l.walk():
+ for p_str, p_path in zip(paths, files):
+ self.assertEqual(p_str, p_path)
+ self.assertEqual(p_path, Path(ctxt1, p_str))
+ self.assertEqual(
+ p_path.join("foo"), Path(ctxt1, mozpath.join(p_str, "foo"))
+ )
+
+ l2 = MyList(ctxt2)
+ l2 += paths
+ l2.subdir += paths
+
+ for _, files in l2.walk():
+ for p_str, p_path in zip(paths, files):
+ self.assertEqual(p_str, p_path)
+ self.assertEqual(p_path, Path(ctxt2, p_str))
+
+ # Assigning with Paths from another context doesn't rebase them
+ l2 = MyList(ctxt2)
+ l2 += l
+
+ for _, files in l2.walk():
+ for p_str, p_path in zip(paths, files):
+ self.assertEqual(p_str, p_path)
+ self.assertEqual(p_path, Path(ctxt1, p_str))
+
+
+class TestTypedRecord(unittest.TestCase):
+ def test_fields(self):
+ T = ContextDerivedTypedRecord(("field1", six.text_type), ("field2", list))
+ inst = T(None)
+ self.assertEqual(inst.field1, "")
+ self.assertEqual(inst.field2, [])
+
+ inst.field1 = "foo"
+ inst.field2 += ["bar"]
+
+ self.assertEqual(inst.field1, "foo")
+ self.assertEqual(inst.field2, ["bar"])
+
+ with self.assertRaises(AttributeError):
+ inst.field3 = []
+
+ def test_coercion(self):
+ T = ContextDerivedTypedRecord(("field1", six.text_type), ("field2", list))
+ inst = T(None)
+ inst.field1 = 3
+ inst.field2 += ("bar",)
+ self.assertEqual(inst.field1, "3")
+ self.assertEqual(inst.field2, ["bar"])
+
+ with self.assertRaises(TypeError):
+ inst.field2 = object()
+
+
+class TestFiles(unittest.TestCase):
+ def test_aggregate_empty(self):
+ c = Context({})
+
+ files = {"moz.build": Files(c, "**")}
+
+ self.assertEqual(
+ Files.aggregate(files),
+ {
+ "bug_component_counts": [],
+ "recommended_bug_component": None,
+ },
+ )
+
+ def test_single_bug_component(self):
+ c = Context({})
+ f = Files(c, "**")
+ f["BUG_COMPONENT"] = ("Product1", "Component1")
+
+ files = {"moz.build": f}
+ self.assertEqual(
+ Files.aggregate(files),
+ {
+ "bug_component_counts": [(("Product1", "Component1"), 1)],
+ "recommended_bug_component": ("Product1", "Component1"),
+ },
+ )
+
+ def test_multiple_bug_components(self):
+ c = Context({})
+ f1 = Files(c, "**")
+ f1["BUG_COMPONENT"] = ("Product1", "Component1")
+
+ f2 = Files(c, "**")
+ f2["BUG_COMPONENT"] = ("Product2", "Component2")
+
+ files = {"a": f1, "b": f2, "c": f1}
+ self.assertEqual(
+ Files.aggregate(files),
+ {
+ "bug_component_counts": [
+ (("Product1", "Component1"), 2),
+ (("Product2", "Component2"), 1),
+ ],
+ "recommended_bug_component": ("Product1", "Component1"),
+ },
+ )
+
+ def test_no_recommended_bug_component(self):
+ """If there is no clear count winner, we don't recommend a bug component."""
+ c = Context({})
+ f1 = Files(c, "**")
+ f1["BUG_COMPONENT"] = ("Product1", "Component1")
+
+ f2 = Files(c, "**")
+ f2["BUG_COMPONENT"] = ("Product2", "Component2")
+
+ files = {"a": f1, "b": f2}
+ self.assertEqual(
+ Files.aggregate(files),
+ {
+ "bug_component_counts": [
+ (("Product1", "Component1"), 1),
+ (("Product2", "Component2"), 1),
+ ],
+ "recommended_bug_component": None,
+ },
+ )
+
+ def test_multiple_patterns(self):
+ c = Context({})
+ f1 = Files(c, "a/**")
+ f1["BUG_COMPONENT"] = ("Product1", "Component1")
+ f2 = Files(c, "b/**", "a/bar")
+ f2["BUG_COMPONENT"] = ("Product2", "Component2")
+
+ files = {"a/foo": f1, "a/bar": f2, "b/foo": f2}
+ self.assertEqual(
+ Files.aggregate(files),
+ {
+ "bug_component_counts": [
+ (("Product2", "Component2"), 2),
+ (("Product1", "Component1"), 1),
+ ],
+ "recommended_bug_component": ("Product2", "Component2"),
+ },
+ )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/frontend/test_emitter.py b/python/mozbuild/mozbuild/test/frontend/test_emitter.py
new file mode 100644
index 0000000000..4bbab3942a
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/test_emitter.py
@@ -0,0 +1,1877 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import unittest
+
+import mozpack.path as mozpath
+import six
+from mozunit import main
+
+from mozbuild.frontend.context import ObjDirPath, Path
+from mozbuild.frontend.data import (
+ ComputedFlags,
+ ConfigFileSubstitution,
+ Defines,
+ DirectoryTraversal,
+ Exports,
+ FinalTargetPreprocessedFiles,
+ GeneratedFile,
+ HostProgram,
+ HostRustLibrary,
+ HostRustProgram,
+ HostSources,
+ IPDLCollection,
+ JARManifest,
+ LocalInclude,
+ LocalizedFiles,
+ LocalizedPreprocessedFiles,
+ Program,
+ RustLibrary,
+ RustProgram,
+ SharedLibrary,
+ SimpleProgram,
+ Sources,
+ StaticLibrary,
+ TestHarnessFiles,
+ TestManifest,
+ UnifiedSources,
+ VariablePassthru,
+ WasmSources,
+)
+from mozbuild.frontend.emitter import TreeMetadataEmitter
+from mozbuild.frontend.reader import (
+ BuildReader,
+ BuildReaderError,
+ SandboxValidationError,
+)
+from mozbuild.test.common import MockConfig
+
+data_path = mozpath.abspath(mozpath.dirname(__file__))
+data_path = mozpath.join(data_path, "data")
+
+
+class TestEmitterBasic(unittest.TestCase):
+ def setUp(self):
+ self._old_env = dict(os.environ)
+ os.environ.pop("MOZ_OBJDIR", None)
+
+ def tearDown(self):
+ os.environ.clear()
+ os.environ.update(self._old_env)
+
+ def reader(self, name, enable_tests=False, extra_substs=None):
+ substs = dict(
+ ENABLE_TESTS="1" if enable_tests else "",
+ BIN_SUFFIX=".prog",
+ HOST_BIN_SUFFIX=".hostprog",
+ OS_TARGET="WINNT",
+ COMPILE_ENVIRONMENT="1",
+ STL_FLAGS=["-I/path/to/topobjdir/dist/stl_wrappers"],
+ VISIBILITY_FLAGS=["-include", "$(topsrcdir)/config/gcc_hidden.h"],
+ OBJ_SUFFIX="obj",
+ WASM_OBJ_SUFFIX="wasm",
+ WASM_CFLAGS=["-foo"],
+ )
+ if extra_substs:
+ substs.update(extra_substs)
+ config = MockConfig(mozpath.join(data_path, name), extra_substs=substs)
+
+ return BuildReader(config)
+
+ def read_topsrcdir(self, reader, filter_common=True):
+ emitter = TreeMetadataEmitter(reader.config)
+ objs = list(emitter.emit(reader.read_topsrcdir()))
+ self.assertGreater(len(objs), 0)
+
+ filtered = []
+ for obj in objs:
+ if filter_common and isinstance(obj, DirectoryTraversal):
+ continue
+
+ filtered.append(obj)
+
+ return filtered
+
+ def test_dirs_traversal_simple(self):
+ reader = self.reader("traversal-simple")
+ objs = self.read_topsrcdir(reader, filter_common=False)
+ self.assertEqual(len(objs), 4)
+
+ for o in objs:
+ self.assertIsInstance(o, DirectoryTraversal)
+ self.assertTrue(os.path.isabs(o.context_main_path))
+ self.assertEqual(len(o.context_all_paths), 1)
+
+ reldirs = [o.relsrcdir for o in objs]
+ self.assertEqual(reldirs, ["", "foo", "foo/biz", "bar"])
+
+ dirs = [[d.full_path for d in o.dirs] for o in objs]
+ self.assertEqual(
+ dirs,
+ [
+ [
+ mozpath.join(reader.config.topsrcdir, "foo"),
+ mozpath.join(reader.config.topsrcdir, "bar"),
+ ],
+ [mozpath.join(reader.config.topsrcdir, "foo", "biz")],
+ [],
+ [],
+ ],
+ )
+
+ def test_traversal_all_vars(self):
+ reader = self.reader("traversal-all-vars")
+ objs = self.read_topsrcdir(reader, filter_common=False)
+ self.assertEqual(len(objs), 2)
+
+ for o in objs:
+ self.assertIsInstance(o, DirectoryTraversal)
+
+ reldirs = set([o.relsrcdir for o in objs])
+ self.assertEqual(reldirs, set(["", "regular"]))
+
+ for o in objs:
+ reldir = o.relsrcdir
+
+ if reldir == "":
+ self.assertEqual(
+ [d.full_path for d in o.dirs],
+ [mozpath.join(reader.config.topsrcdir, "regular")],
+ )
+
+ def test_traversal_all_vars_enable_tests(self):
+ reader = self.reader("traversal-all-vars", enable_tests=True)
+ objs = self.read_topsrcdir(reader, filter_common=False)
+ self.assertEqual(len(objs), 3)
+
+ for o in objs:
+ self.assertIsInstance(o, DirectoryTraversal)
+
+ reldirs = set([o.relsrcdir for o in objs])
+ self.assertEqual(reldirs, set(["", "regular", "test"]))
+
+ for o in objs:
+ reldir = o.relsrcdir
+
+ if reldir == "":
+ self.assertEqual(
+ [d.full_path for d in o.dirs],
+ [
+ mozpath.join(reader.config.topsrcdir, "regular"),
+ mozpath.join(reader.config.topsrcdir, "test"),
+ ],
+ )
+
+ def test_config_file_substitution(self):
+ reader = self.reader("config-file-substitution")
+ objs = self.read_topsrcdir(reader)
+ self.assertEqual(len(objs), 2)
+
+ self.assertIsInstance(objs[0], ConfigFileSubstitution)
+ self.assertIsInstance(objs[1], ConfigFileSubstitution)
+
+ topobjdir = mozpath.abspath(reader.config.topobjdir)
+ self.assertEqual(objs[0].relpath, "foo")
+ self.assertEqual(
+ mozpath.normpath(objs[0].output_path),
+ mozpath.normpath(mozpath.join(topobjdir, "foo")),
+ )
+ self.assertEqual(
+ mozpath.normpath(objs[1].output_path),
+ mozpath.normpath(mozpath.join(topobjdir, "bar")),
+ )
+
+ def test_variable_passthru(self):
+ reader = self.reader("variable-passthru")
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 1)
+ self.assertIsInstance(objs[0], VariablePassthru)
+
+ wanted = {
+ "NO_DIST_INSTALL": True,
+ "RCFILE": "foo.rc",
+ "RCINCLUDE": "bar.rc",
+ "WIN32_EXE_LDFLAGS": ["-subsystem:console"],
+ }
+
+ variables = objs[0].variables
+ maxDiff = self.maxDiff
+ self.maxDiff = None
+ self.assertEqual(wanted, variables)
+ self.maxDiff = maxDiff
+
+ def test_compile_flags(self):
+ reader = self.reader(
+ "compile-flags", extra_substs={"WARNINGS_AS_ERRORS": "-Werror"}
+ )
+ sources, ldflags, lib, flags = self.read_topsrcdir(reader)
+ self.assertIsInstance(flags, ComputedFlags)
+ self.assertEqual(flags.flags["STL"], reader.config.substs["STL_FLAGS"])
+ self.assertEqual(
+ flags.flags["VISIBILITY"], reader.config.substs["VISIBILITY_FLAGS"]
+ )
+ self.assertEqual(flags.flags["WARNINGS_AS_ERRORS"], ["-Werror"])
+ self.assertEqual(flags.flags["MOZBUILD_CFLAGS"], ["-Wall", "-funroll-loops"])
+ self.assertEqual(flags.flags["MOZBUILD_CXXFLAGS"], ["-funroll-loops", "-Wall"])
+
+ def test_asflags(self):
+ reader = self.reader("asflags", extra_substs={"ASFLAGS": ["-safeseh"]})
+ as_sources, sources, ldflags, lib, flags, asflags = self.read_topsrcdir(reader)
+ self.assertIsInstance(asflags, ComputedFlags)
+ self.assertEqual(asflags.flags["OS"], reader.config.substs["ASFLAGS"])
+ self.assertEqual(asflags.flags["MOZBUILD"], ["-no-integrated-as"])
+
+ def test_debug_flags(self):
+ reader = self.reader(
+ "compile-flags",
+ extra_substs={"MOZ_DEBUG_FLAGS": "-g", "MOZ_DEBUG_SYMBOLS": "1"},
+ )
+ sources, ldflags, lib, flags = self.read_topsrcdir(reader)
+ self.assertIsInstance(flags, ComputedFlags)
+ self.assertEqual(flags.flags["DEBUG"], ["-g"])
+
+ def test_disable_debug_flags(self):
+ reader = self.reader(
+ "compile-flags",
+ extra_substs={"MOZ_DEBUG_FLAGS": "-g", "MOZ_DEBUG_SYMBOLS": ""},
+ )
+ sources, ldflags, lib, flags = self.read_topsrcdir(reader)
+ self.assertIsInstance(flags, ComputedFlags)
+ self.assertEqual(flags.flags["DEBUG"], [])
+
+ def test_link_flags(self):
+ reader = self.reader(
+ "link-flags",
+ extra_substs={
+ "OS_LDFLAGS": ["-Wl,rpath-link=/usr/lib"],
+ "MOZ_OPTIMIZE": "",
+ "MOZ_OPTIMIZE_LDFLAGS": ["-Wl,-dead_strip"],
+ "MOZ_DEBUG_LDFLAGS": ["-framework ExceptionHandling"],
+ },
+ )
+ sources, ldflags, lib, compile_flags = self.read_topsrcdir(reader)
+ self.assertIsInstance(ldflags, ComputedFlags)
+ self.assertEqual(ldflags.flags["OS"], reader.config.substs["OS_LDFLAGS"])
+ self.assertEqual(
+ ldflags.flags["MOZBUILD"], ["-Wl,-U_foo", "-framework Foo", "-x"]
+ )
+ self.assertEqual(ldflags.flags["OPTIMIZE"], [])
+
+ def test_debug_ldflags(self):
+ reader = self.reader(
+ "link-flags",
+ extra_substs={
+ "MOZ_DEBUG_SYMBOLS": "1",
+ "MOZ_DEBUG_LDFLAGS": ["-framework ExceptionHandling"],
+ },
+ )
+ sources, ldflags, lib, compile_flags = self.read_topsrcdir(reader)
+ self.assertIsInstance(ldflags, ComputedFlags)
+ self.assertEqual(ldflags.flags["OS"], reader.config.substs["MOZ_DEBUG_LDFLAGS"])
+
+ def test_windows_opt_link_flags(self):
+ reader = self.reader(
+ "link-flags",
+ extra_substs={
+ "OS_ARCH": "WINNT",
+ "GNU_CC": "",
+ "MOZ_OPTIMIZE": "1",
+ "MOZ_DEBUG_LDFLAGS": ["-DEBUG"],
+ "MOZ_DEBUG_SYMBOLS": "1",
+ "MOZ_OPTIMIZE_FLAGS": [],
+ "MOZ_OPTIMIZE_LDFLAGS": [],
+ },
+ )
+ sources, ldflags, lib, compile_flags = self.read_topsrcdir(reader)
+ self.assertIsInstance(ldflags, ComputedFlags)
+ self.assertIn("-DEBUG", ldflags.flags["OS"])
+ self.assertIn("-OPT:REF,ICF", ldflags.flags["OS"])
+
+ def test_windows_dmd_link_flags(self):
+ reader = self.reader(
+ "link-flags",
+ extra_substs={
+ "OS_ARCH": "WINNT",
+ "GNU_CC": "",
+ "MOZ_DMD": "1",
+ "MOZ_DEBUG_LDFLAGS": ["-DEBUG"],
+ "MOZ_DEBUG_SYMBOLS": "1",
+ "MOZ_OPTIMIZE": "1",
+ "MOZ_OPTIMIZE_FLAGS": [],
+ },
+ )
+ sources, ldflags, lib, compile_flags = self.read_topsrcdir(reader)
+ self.assertIsInstance(ldflags, ComputedFlags)
+ self.assertEqual(ldflags.flags["OS"], ["-DEBUG", "-OPT:REF,ICF"])
+
+ def test_host_compile_flags(self):
+ reader = self.reader(
+ "host-compile-flags",
+ extra_substs={
+ "HOST_CXXFLAGS": ["-Wall", "-Werror"],
+ "HOST_CFLAGS": ["-Werror", "-Wall"],
+ },
+ )
+ sources, ldflags, flags, lib, target_flags = self.read_topsrcdir(reader)
+ self.assertIsInstance(flags, ComputedFlags)
+ self.assertEqual(
+ flags.flags["HOST_CXXFLAGS"], reader.config.substs["HOST_CXXFLAGS"]
+ )
+ self.assertEqual(
+ flags.flags["HOST_CFLAGS"], reader.config.substs["HOST_CFLAGS"]
+ )
+ self.assertEqual(
+ set(flags.flags["HOST_DEFINES"]),
+ set(["-DFOO", '-DBAZ="abcd"', "-UQUX", "-DBAR=7", "-DVALUE=xyz"]),
+ )
+ self.assertEqual(
+ flags.flags["MOZBUILD_HOST_CFLAGS"], ["-funroll-loops", "-host-arg"]
+ )
+ self.assertEqual(flags.flags["MOZBUILD_HOST_CXXFLAGS"], [])
+
+ def test_host_no_optimize_flags(self):
+ reader = self.reader(
+ "host-compile-flags",
+ extra_substs={"MOZ_OPTIMIZE": "", "MOZ_OPTIMIZE_FLAGS": ["-O2"]},
+ )
+ sources, ldflags, flags, lib, target_flags = self.read_topsrcdir(reader)
+ self.assertIsInstance(flags, ComputedFlags)
+ self.assertEqual(flags.flags["HOST_OPTIMIZE"], [])
+
+ def test_host_optimize_flags(self):
+ reader = self.reader(
+ "host-compile-flags",
+ extra_substs={"MOZ_OPTIMIZE": "1", "MOZ_OPTIMIZE_FLAGS": ["-O2"]},
+ )
+ sources, ldflags, flags, lib, target_flags = self.read_topsrcdir(reader)
+ self.assertIsInstance(flags, ComputedFlags)
+ self.assertEqual(flags.flags["HOST_OPTIMIZE"], ["-O2"])
+
+ def test_cross_optimize_flags(self):
+ reader = self.reader(
+ "host-compile-flags",
+ extra_substs={
+ "MOZ_OPTIMIZE": "1",
+ "MOZ_OPTIMIZE_FLAGS": ["-O2"],
+ "HOST_OPTIMIZE_FLAGS": ["-O3"],
+ "CROSS_COMPILE": "1",
+ },
+ )
+ sources, ldflags, flags, lib, target_flags = self.read_topsrcdir(reader)
+ self.assertIsInstance(flags, ComputedFlags)
+ self.assertEqual(flags.flags["HOST_OPTIMIZE"], ["-O3"])
+
+ def test_host_rtl_flag(self):
+ reader = self.reader(
+ "host-compile-flags", extra_substs={"OS_ARCH": "WINNT", "MOZ_DEBUG": "1"}
+ )
+ sources, ldflags, flags, lib, target_flags = self.read_topsrcdir(reader)
+ self.assertIsInstance(flags, ComputedFlags)
+ self.assertEqual(flags.flags["RTL"], ["-MDd"])
+
+ def test_compile_flags_validation(self):
+ reader = self.reader("compile-flags-field-validation")
+
+ with six.assertRaisesRegex(self, BuildReaderError, "Invalid value."):
+ self.read_topsrcdir(reader)
+
+ reader = self.reader("compile-flags-type-validation")
+ with six.assertRaisesRegex(
+ self, BuildReaderError, "A list of strings must be provided"
+ ):
+ self.read_topsrcdir(reader)
+
+ def test_compile_flags_templates(self):
+ reader = self.reader(
+ "compile-flags-templates",
+ extra_substs={
+ "NSPR_CFLAGS": ["-I/nspr/path"],
+ "NSS_CFLAGS": ["-I/nss/path"],
+ "MOZ_JPEG_CFLAGS": ["-I/jpeg/path"],
+ "MOZ_PNG_CFLAGS": ["-I/png/path"],
+ "MOZ_ZLIB_CFLAGS": ["-I/zlib/path"],
+ "MOZ_PIXMAN_CFLAGS": ["-I/pixman/path"],
+ },
+ )
+ sources, ldflags, lib, flags = self.read_topsrcdir(reader)
+ self.assertIsInstance(flags, ComputedFlags)
+ self.assertEqual(flags.flags["STL"], [])
+ self.assertEqual(flags.flags["VISIBILITY"], [])
+ self.assertEqual(
+ flags.flags["OS_INCLUDES"],
+ [
+ "-I/nspr/path",
+ "-I/nss/path",
+ "-I/jpeg/path",
+ "-I/png/path",
+ "-I/zlib/path",
+ "-I/pixman/path",
+ ],
+ )
+
+ def test_disable_stl_wrapping(self):
+ reader = self.reader("disable-stl-wrapping")
+ sources, ldflags, lib, flags = self.read_topsrcdir(reader)
+ self.assertIsInstance(flags, ComputedFlags)
+ self.assertEqual(flags.flags["STL"], [])
+
+ def test_visibility_flags(self):
+ reader = self.reader("visibility-flags")
+ sources, ldflags, lib, flags = self.read_topsrcdir(reader)
+ self.assertIsInstance(flags, ComputedFlags)
+ self.assertEqual(flags.flags["VISIBILITY"], [])
+
+ def test_defines_in_flags(self):
+ reader = self.reader("compile-defines")
+ defines, sources, ldflags, lib, flags = self.read_topsrcdir(reader)
+ self.assertIsInstance(flags, ComputedFlags)
+ self.assertEqual(
+ flags.flags["LIBRARY_DEFINES"], ["-DMOZ_LIBRARY_DEFINE=MOZ_TEST"]
+ )
+ self.assertEqual(flags.flags["DEFINES"], ["-DMOZ_TEST_DEFINE"])
+
+ def test_resolved_flags_error(self):
+ reader = self.reader("resolved-flags-error")
+ with six.assertRaisesRegex(
+ self,
+ BuildReaderError,
+ "`DEFINES` may not be set in COMPILE_FLAGS from moz.build",
+ ):
+ self.read_topsrcdir(reader)
+
+ def test_includes_in_flags(self):
+ reader = self.reader("compile-includes")
+ defines, sources, ldflags, lib, flags = self.read_topsrcdir(reader)
+ self.assertIsInstance(flags, ComputedFlags)
+ self.assertEqual(
+ flags.flags["BASE_INCLUDES"],
+ ["-I%s" % reader.config.topsrcdir, "-I%s" % reader.config.topobjdir],
+ )
+ self.assertEqual(
+ flags.flags["EXTRA_INCLUDES"],
+ ["-I%s/dist/include" % reader.config.topobjdir],
+ )
+ self.assertEqual(
+ flags.flags["LOCAL_INCLUDES"], ["-I%s/subdir" % reader.config.topsrcdir]
+ )
+
+ def test_allow_compiler_warnings(self):
+ reader = self.reader(
+ "allow-compiler-warnings", extra_substs={"WARNINGS_AS_ERRORS": "-Werror"}
+ )
+ sources, ldflags, lib, flags = self.read_topsrcdir(reader)
+ self.assertEqual(flags.flags["WARNINGS_AS_ERRORS"], [])
+
+ def test_disable_compiler_warnings(self):
+ reader = self.reader(
+ "disable-compiler-warnings", extra_substs={"WARNINGS_CFLAGS": "-Wall"}
+ )
+ sources, ldflags, lib, flags = self.read_topsrcdir(reader)
+ self.assertEqual(flags.flags["WARNINGS_CFLAGS"], [])
+
+ def test_use_nasm(self):
+ # When nasm is not available, this should raise.
+ reader = self.reader("use-nasm")
+ with six.assertRaisesRegex(
+ self, SandboxValidationError, "nasm is not available"
+ ):
+ self.read_topsrcdir(reader)
+
+ # When nasm is available, this should work.
+ reader = self.reader(
+ "use-nasm", extra_substs=dict(NASM="nasm", NASM_ASFLAGS="-foo")
+ )
+
+ sources, passthru, ldflags, lib, flags, asflags = self.read_topsrcdir(reader)
+
+ self.assertIsInstance(passthru, VariablePassthru)
+ self.assertIsInstance(ldflags, ComputedFlags)
+ self.assertIsInstance(flags, ComputedFlags)
+ self.assertIsInstance(asflags, ComputedFlags)
+
+ self.assertEqual(asflags.flags["OS"], reader.config.substs["NASM_ASFLAGS"])
+
+ maxDiff = self.maxDiff
+ self.maxDiff = None
+ self.assertEqual(
+ passthru.variables,
+ {"AS": "nasm", "AS_DASH_C_FLAG": "", "ASOUTOPTION": "-o "},
+ )
+ self.maxDiff = maxDiff
+
+ def test_generated_files(self):
+ reader = self.reader("generated-files")
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 3)
+ for o in objs:
+ self.assertIsInstance(o, GeneratedFile)
+ self.assertFalse(o.localized)
+ self.assertFalse(o.force)
+
+ expected = ["bar.c", "foo.c", ("xpidllex.py", "xpidlyacc.py")]
+ for o, f in zip(objs, expected):
+ expected_filename = f if isinstance(f, tuple) else (f,)
+ self.assertEqual(o.outputs, expected_filename)
+ self.assertEqual(o.script, None)
+ self.assertEqual(o.method, None)
+ self.assertEqual(o.inputs, [])
+
+ def test_generated_files_force(self):
+ reader = self.reader("generated-files-force")
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 3)
+ for o in objs:
+ self.assertIsInstance(o, GeneratedFile)
+ self.assertEqual(o.force, "bar.c" in o.outputs)
+
+ def test_localized_generated_files(self):
+ reader = self.reader("localized-generated-files")
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 2)
+ for o in objs:
+ self.assertIsInstance(o, GeneratedFile)
+ self.assertTrue(o.localized)
+
+ expected = ["abc.ini", ("bar", "baz")]
+ for o, f in zip(objs, expected):
+ expected_filename = f if isinstance(f, tuple) else (f,)
+ self.assertEqual(o.outputs, expected_filename)
+ self.assertEqual(o.script, None)
+ self.assertEqual(o.method, None)
+ self.assertEqual(o.inputs, [])
+
+ def test_localized_generated_files_force(self):
+ reader = self.reader("localized-generated-files-force")
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 2)
+ for o in objs:
+ self.assertIsInstance(o, GeneratedFile)
+ self.assertTrue(o.localized)
+ self.assertEqual(o.force, "abc.ini" in o.outputs)
+
+ def test_localized_files_from_generated(self):
+ """Test that using LOCALIZED_GENERATED_FILES and then putting the output in
+ LOCALIZED_FILES as an objdir path works.
+ """
+ reader = self.reader("localized-files-from-generated")
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 2)
+ self.assertIsInstance(objs[0], GeneratedFile)
+ self.assertIsInstance(objs[1], LocalizedFiles)
+
+ def test_localized_files_not_localized_generated(self):
+ """Test that using GENERATED_FILES and then putting the output in
+ LOCALIZED_FILES as an objdir path produces an error.
+ """
+ reader = self.reader("localized-files-not-localized-generated")
+ with six.assertRaisesRegex(
+ self,
+ SandboxValidationError,
+ "Objdir file listed in LOCALIZED_FILES not in LOCALIZED_GENERATED_FILES:",
+ ):
+ self.read_topsrcdir(reader)
+
+ def test_localized_generated_files_final_target_files(self):
+ """Test that using LOCALIZED_GENERATED_FILES and then putting the output in
+ FINAL_TARGET_FILES as an objdir path produces an error.
+ """
+ reader = self.reader("localized-generated-files-final-target-files")
+ with six.assertRaisesRegex(
+ self,
+ SandboxValidationError,
+ "Outputs of LOCALIZED_GENERATED_FILES cannot be used in FINAL_TARGET_FILES:",
+ ):
+ self.read_topsrcdir(reader)
+
+ def test_generated_files_method_names(self):
+ reader = self.reader("generated-files-method-names")
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 2)
+ for o in objs:
+ self.assertIsInstance(o, GeneratedFile)
+
+ expected = ["bar.c", "foo.c"]
+ expected_method_names = ["make_bar", "main"]
+ for o, expected_filename, expected_method in zip(
+ objs, expected, expected_method_names
+ ):
+ self.assertEqual(o.outputs, (expected_filename,))
+ self.assertEqual(o.method, expected_method)
+ self.assertEqual(o.inputs, [])
+
+ def test_generated_files_absolute_script(self):
+ reader = self.reader("generated-files-absolute-script")
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 1)
+
+ o = objs[0]
+ self.assertIsInstance(o, GeneratedFile)
+ self.assertEqual(o.outputs, ("bar.c",))
+ self.assertRegex(o.script, "script.py$")
+ self.assertEqual(o.method, "make_bar")
+ self.assertEqual(o.inputs, [])
+
+ def test_generated_files_no_script(self):
+ reader = self.reader("generated-files-no-script")
+ with six.assertRaisesRegex(
+ self, SandboxValidationError, "Script for generating bar.c does not exist"
+ ):
+ self.read_topsrcdir(reader)
+
+ def test_generated_files_no_inputs(self):
+ reader = self.reader("generated-files-no-inputs")
+ with six.assertRaisesRegex(
+ self, SandboxValidationError, "Input for generating foo.c does not exist"
+ ):
+ self.read_topsrcdir(reader)
+
+ def test_generated_files_no_python_script(self):
+ reader = self.reader("generated-files-no-python-script")
+ with six.assertRaisesRegex(
+ self,
+ SandboxValidationError,
+ "Script for generating bar.c does not end in .py",
+ ):
+ self.read_topsrcdir(reader)
+
+ def test_exports(self):
+ reader = self.reader("exports")
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 1)
+ self.assertIsInstance(objs[0], Exports)
+
+ expected = [
+ ("", ["foo.h", "bar.h", "baz.h"]),
+ ("mozilla", ["mozilla1.h", "mozilla2.h"]),
+ ("mozilla/dom", ["dom1.h", "dom2.h", "dom3.h"]),
+ ("mozilla/gfx", ["gfx.h"]),
+ ("nspr/private", ["pprio.h", "pprthred.h"]),
+ ("vpx", ["mem.h", "mem2.h"]),
+ ]
+ for (expect_path, expect_headers), (actual_path, actual_headers) in zip(
+ expected, [(path, list(seq)) for path, seq in objs[0].files.walk()]
+ ):
+ self.assertEqual(expect_path, actual_path)
+ self.assertEqual(expect_headers, actual_headers)
+
+ def test_exports_missing(self):
+ """
+ Missing files in EXPORTS is an error.
+ """
+ reader = self.reader("exports-missing")
+ with six.assertRaisesRegex(
+ self, SandboxValidationError, "File listed in EXPORTS does not exist:"
+ ):
+ self.read_topsrcdir(reader)
+
+ def test_exports_missing_generated(self):
+ """
+ An objdir file in EXPORTS that is not in GENERATED_FILES is an error.
+ """
+ reader = self.reader("exports-missing-generated")
+ with six.assertRaisesRegex(
+ self,
+ SandboxValidationError,
+ "Objdir file listed in EXPORTS not in GENERATED_FILES:",
+ ):
+ self.read_topsrcdir(reader)
+
+ def test_exports_generated(self):
+ reader = self.reader("exports-generated")
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 2)
+ self.assertIsInstance(objs[0], GeneratedFile)
+ self.assertIsInstance(objs[1], Exports)
+ exports = [(path, list(seq)) for path, seq in objs[1].files.walk()]
+ self.assertEqual(
+ exports, [("", ["foo.h"]), ("mozilla", ["mozilla1.h", "!mozilla2.h"])]
+ )
+ path, files = exports[1]
+ self.assertIsInstance(files[1], ObjDirPath)
+
+ def test_test_harness_files(self):
+ reader = self.reader("test-harness-files")
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 1)
+ self.assertIsInstance(objs[0], TestHarnessFiles)
+
+ expected = {
+ "mochitest": ["runtests.py", "utils.py"],
+ "testing/mochitest": ["mochitest.py", "mochitest.ini"],
+ }
+
+ for path, strings in objs[0].files.walk():
+ self.assertTrue(path in expected)
+ basenames = sorted(mozpath.basename(s) for s in strings)
+ self.assertEqual(sorted(expected[path]), basenames)
+
+ def test_test_harness_files_root(self):
+ reader = self.reader("test-harness-files-root")
+ with six.assertRaisesRegex(
+ self,
+ SandboxValidationError,
+ "Cannot install files to the root of TEST_HARNESS_FILES",
+ ):
+ self.read_topsrcdir(reader)
+
+ def test_program(self):
+ reader = self.reader("program")
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 6)
+ self.assertIsInstance(objs[0], Sources)
+ self.assertIsInstance(objs[1], ComputedFlags)
+ self.assertIsInstance(objs[2], ComputedFlags)
+ self.assertIsInstance(objs[3], Program)
+ self.assertIsInstance(objs[4], SimpleProgram)
+ self.assertIsInstance(objs[5], SimpleProgram)
+
+ self.assertEqual(objs[3].program, "test_program.prog")
+ self.assertEqual(objs[4].program, "test_program1.prog")
+ self.assertEqual(objs[5].program, "test_program2.prog")
+
+ self.assertEqual(objs[3].name, "test_program.prog")
+ self.assertEqual(objs[4].name, "test_program1.prog")
+ self.assertEqual(objs[5].name, "test_program2.prog")
+
+ self.assertEqual(
+ objs[4].objs,
+ [
+ mozpath.join(
+ reader.config.topobjdir,
+ "test_program1.%s" % reader.config.substs["OBJ_SUFFIX"],
+ )
+ ],
+ )
+ self.assertEqual(
+ objs[5].objs,
+ [
+ mozpath.join(
+ reader.config.topobjdir,
+ "test_program2.%s" % reader.config.substs["OBJ_SUFFIX"],
+ )
+ ],
+ )
+
+ def test_program_paths(self):
+ """Various moz.build settings that change the destination of PROGRAM should be
+ accurately reflected in Program.output_path."""
+ reader = self.reader("program-paths")
+ objs = self.read_topsrcdir(reader)
+ prog_paths = [o.output_path for o in objs if isinstance(o, Program)]
+ self.assertEqual(
+ prog_paths,
+ [
+ "!/dist/bin/dist-bin.prog",
+ "!/dist/bin/foo/dist-subdir.prog",
+ "!/final/target/final-target.prog",
+ "!not-installed.prog",
+ ],
+ )
+
+ def test_host_program_paths(self):
+ """The destination of a HOST_PROGRAM (almost always dist/host/bin)
+ should be accurately reflected in Program.output_path."""
+ reader = self.reader("host-program-paths")
+ objs = self.read_topsrcdir(reader)
+ prog_paths = [o.output_path for o in objs if isinstance(o, HostProgram)]
+ self.assertEqual(
+ prog_paths,
+ [
+ "!/dist/host/bin/final-target.hostprog",
+ "!/dist/host/bin/dist-host-bin.hostprog",
+ "!not-installed.hostprog",
+ ],
+ )
+
+ def test_test_manifest_missing_manifest(self):
+ """A missing manifest file should result in an error."""
+ reader = self.reader("test-manifest-missing-manifest")
+
+ with six.assertRaisesRegex(self, BuildReaderError, "Missing files"):
+ self.read_topsrcdir(reader)
+
+ def test_empty_test_manifest_rejected(self):
+ """A test manifest without any entries is rejected."""
+ reader = self.reader("test-manifest-empty")
+
+ with six.assertRaisesRegex(self, SandboxValidationError, "Empty test manifest"):
+ self.read_topsrcdir(reader)
+
+ def test_test_manifest_just_support_files(self):
+ """A test manifest with no tests but support-files is not supported."""
+ reader = self.reader("test-manifest-just-support")
+
+ with six.assertRaisesRegex(self, SandboxValidationError, "Empty test manifest"):
+ self.read_topsrcdir(reader)
+
+ def test_test_manifest_dupe_support_files(self):
+ """A test manifest with dupe support-files in a single test is not
+ supported.
+ """
+ reader = self.reader("test-manifest-dupes")
+
+ with six.assertRaisesRegex(
+ self,
+ SandboxValidationError,
+ "bar.js appears multiple times "
+ "in a test manifest under a support-files field, please omit the duplicate entry.",
+ ):
+ self.read_topsrcdir(reader)
+
+ def test_test_manifest_absolute_support_files(self):
+ """Support files starting with '/' are placed relative to the install root"""
+ reader = self.reader("test-manifest-absolute-support")
+
+ objs = self.read_topsrcdir(reader)
+ self.assertEqual(len(objs), 1)
+ o = objs[0]
+ self.assertEqual(len(o.installs), 3)
+ expected = [
+ mozpath.normpath(mozpath.join(o.install_prefix, "../.well-known/foo.txt")),
+ mozpath.join(o.install_prefix, "absolute-support.ini"),
+ mozpath.join(o.install_prefix, "test_file.js"),
+ ]
+ paths = sorted([v[0] for v in o.installs.values()])
+ self.assertEqual(paths, expected)
+
+ @unittest.skip("Bug 1304316 - Items in the second set but not the first")
+ def test_test_manifest_shared_support_files(self):
+ """Support files starting with '!' are given separate treatment, so their
+ installation can be resolved when running tests.
+ """
+ reader = self.reader("test-manifest-shared-support")
+ supported, child = self.read_topsrcdir(reader)
+
+ expected_deferred_installs = {
+ "!/child/test_sub.js",
+ "!/child/another-file.sjs",
+ "!/child/data/**",
+ }
+
+ self.assertEqual(len(supported.installs), 3)
+ self.assertEqual(set(supported.deferred_installs), expected_deferred_installs)
+ self.assertEqual(len(child.installs), 3)
+ self.assertEqual(len(child.pattern_installs), 1)
+
+ def test_test_manifest_deffered_install_missing(self):
+ """A non-existent shared support file reference produces an error."""
+ reader = self.reader("test-manifest-shared-missing")
+
+ with six.assertRaisesRegex(
+ self,
+ SandboxValidationError,
+ "entry in support-files not present in the srcdir",
+ ):
+ self.read_topsrcdir(reader)
+
+ def test_test_manifest_install_includes(self):
+ """Ensure that any [include:foo.ini] are copied to the objdir."""
+ reader = self.reader("test-manifest-install-includes")
+
+ objs = self.read_topsrcdir(reader)
+ self.assertEqual(len(objs), 1)
+ o = objs[0]
+ self.assertEqual(len(o.installs), 3)
+ self.assertEqual(o.manifest_relpath, "mochitest.ini")
+ self.assertEqual(o.manifest_obj_relpath, "mochitest.ini")
+ expected = [
+ mozpath.normpath(mozpath.join(o.install_prefix, "common.ini")),
+ mozpath.normpath(mozpath.join(o.install_prefix, "mochitest.ini")),
+ mozpath.normpath(mozpath.join(o.install_prefix, "test_foo.html")),
+ ]
+ paths = sorted([v[0] for v in o.installs.values()])
+ self.assertEqual(paths, expected)
+
+ def test_test_manifest_includes(self):
+ """Ensure that manifest objects from the emitter list a correct manifest."""
+ reader = self.reader("test-manifest-emitted-includes")
+ [obj] = self.read_topsrcdir(reader)
+
+ # Expected manifest leafs for our tests.
+ expected_manifests = {
+ "reftest1.html": "reftest.list",
+ "reftest1-ref.html": "reftest.list",
+ "reftest2.html": "included-reftest.list",
+ "reftest2-ref.html": "included-reftest.list",
+ }
+
+ for t in obj.tests:
+ self.assertTrue(t["manifest"].endswith(expected_manifests[t["name"]]))
+
+ def test_test_manifest_keys_extracted(self):
+ """Ensure all metadata from test manifests is extracted."""
+ reader = self.reader("test-manifest-keys-extracted")
+
+ objs = [o for o in self.read_topsrcdir(reader) if isinstance(o, TestManifest)]
+
+ self.assertEqual(len(objs), 8)
+
+ metadata = {
+ "a11y.ini": {
+ "flavor": "a11y",
+ "installs": {"a11y.ini": False, "test_a11y.js": True},
+ "pattern-installs": 1,
+ },
+ "browser.ini": {
+ "flavor": "browser-chrome",
+ "installs": {
+ "browser.ini": False,
+ "test_browser.js": True,
+ "support1": False,
+ "support2": False,
+ },
+ },
+ "mochitest.ini": {
+ "flavor": "mochitest",
+ "installs": {"mochitest.ini": False, "test_mochitest.js": True},
+ "external": {"external1", "external2"},
+ },
+ "chrome.ini": {
+ "flavor": "chrome",
+ "installs": {"chrome.ini": False, "test_chrome.js": True},
+ },
+ "xpcshell.ini": {
+ "flavor": "xpcshell",
+ "dupe": True,
+ "installs": {
+ "xpcshell.ini": False,
+ "test_xpcshell.js": True,
+ "head1": False,
+ "head2": False,
+ },
+ },
+ "reftest.list": {"flavor": "reftest", "installs": {}},
+ "crashtest.list": {"flavor": "crashtest", "installs": {}},
+ "python.ini": {"flavor": "python", "installs": {"python.ini": False}},
+ }
+
+ for o in objs:
+ m = metadata[mozpath.basename(o.manifest_relpath)]
+
+ self.assertTrue(o.path.startswith(o.directory))
+ self.assertEqual(o.flavor, m["flavor"])
+ self.assertEqual(o.dupe_manifest, m.get("dupe", False))
+
+ external_normalized = set(mozpath.basename(p) for p in o.external_installs)
+ self.assertEqual(external_normalized, m.get("external", set()))
+
+ self.assertEqual(len(o.installs), len(m["installs"]))
+ for path in o.installs.keys():
+ self.assertTrue(path.startswith(o.directory))
+ relpath = path[len(o.directory) + 1 :]
+
+ self.assertIn(relpath, m["installs"])
+ self.assertEqual(o.installs[path][1], m["installs"][relpath])
+
+ if "pattern-installs" in m:
+ self.assertEqual(len(o.pattern_installs), m["pattern-installs"])
+
+ def test_test_manifest_unmatched_generated(self):
+ reader = self.reader("test-manifest-unmatched-generated")
+
+ with six.assertRaisesRegex(
+ self,
+ SandboxValidationError,
+ "entry in generated-files not present elsewhere",
+ ):
+ self.read_topsrcdir(reader),
+
+ def test_test_manifest_parent_support_files_dir(self):
+ """support-files referencing a file in a parent directory works."""
+ reader = self.reader("test-manifest-parent-support-files-dir")
+
+ objs = [o for o in self.read_topsrcdir(reader) if isinstance(o, TestManifest)]
+
+ self.assertEqual(len(objs), 1)
+
+ o = objs[0]
+
+ expected = mozpath.join(o.srcdir, "support-file.txt")
+ self.assertIn(expected, o.installs)
+ self.assertEqual(
+ o.installs[expected],
+ ("testing/mochitest/tests/child/support-file.txt", False),
+ )
+
+ def test_test_manifest_missing_test_error(self):
+ """Missing test files should result in error."""
+ reader = self.reader("test-manifest-missing-test-file")
+
+ with six.assertRaisesRegex(
+ self,
+ SandboxValidationError,
+ "lists test that does not exist: test_missing.html",
+ ):
+ self.read_topsrcdir(reader)
+
+ def test_test_manifest_missing_test_error_unfiltered(self):
+ """Missing test files should result in error, even when the test list is not filtered."""
+ reader = self.reader("test-manifest-missing-test-file-unfiltered")
+
+ with six.assertRaisesRegex(
+ self, SandboxValidationError, "lists test that does not exist: missing.js"
+ ):
+ self.read_topsrcdir(reader)
+
+ def test_ipdl_sources(self):
+ reader = self.reader(
+ "ipdl_sources",
+ extra_substs={"IPDL_ROOT": mozpath.abspath("/path/to/topobjdir")},
+ )
+ objs = self.read_topsrcdir(reader)
+ ipdl_collection = objs[0]
+ self.assertIsInstance(ipdl_collection, IPDLCollection)
+
+ ipdls = set(
+ mozpath.relpath(p, ipdl_collection.topsrcdir)
+ for p in ipdl_collection.all_regular_sources()
+ )
+ expected = set(
+ ["bar/bar.ipdl", "bar/bar2.ipdlh", "foo/foo.ipdl", "foo/foo2.ipdlh"]
+ )
+
+ self.assertEqual(ipdls, expected)
+
+ pp_ipdls = set(
+ mozpath.relpath(p, ipdl_collection.topsrcdir)
+ for p in ipdl_collection.all_preprocessed_sources()
+ )
+ expected = set(["bar/bar1.ipdl", "foo/foo1.ipdl"])
+ self.assertEqual(pp_ipdls, expected)
+
+ def test_local_includes(self):
+ """Test that LOCAL_INCLUDES is emitted correctly."""
+ reader = self.reader("local_includes")
+ objs = self.read_topsrcdir(reader)
+
+ local_includes = [o.path for o in objs if isinstance(o, LocalInclude)]
+ expected = ["/bar/baz", "foo"]
+
+ self.assertEqual(local_includes, expected)
+
+ local_includes = [o.path.full_path for o in objs if isinstance(o, LocalInclude)]
+ expected = [
+ mozpath.join(reader.config.topsrcdir, "bar/baz"),
+ mozpath.join(reader.config.topsrcdir, "foo"),
+ ]
+
+ self.assertEqual(local_includes, expected)
+
+ def test_local_includes_invalid(self):
+ """Test that invalid LOCAL_INCLUDES are properly detected."""
+ reader = self.reader("local_includes-invalid/srcdir")
+
+ with six.assertRaisesRegex(
+ self,
+ SandboxValidationError,
+ "Path specified in LOCAL_INCLUDES.*resolves to the "
+ "topsrcdir or topobjdir",
+ ):
+ self.read_topsrcdir(reader)
+
+ reader = self.reader("local_includes-invalid/objdir")
+
+ with six.assertRaisesRegex(
+ self,
+ SandboxValidationError,
+ "Path specified in LOCAL_INCLUDES.*resolves to the "
+ "topsrcdir or topobjdir",
+ ):
+ self.read_topsrcdir(reader)
+
+ def test_local_includes_file(self):
+ """Test that a filename can't be used in LOCAL_INCLUDES."""
+ reader = self.reader("local_includes-filename")
+
+ with six.assertRaisesRegex(
+ self,
+ SandboxValidationError,
+ "Path specified in LOCAL_INCLUDES is a filename",
+ ):
+ self.read_topsrcdir(reader)
+
+ def test_generated_includes(self):
+ """Test that GENERATED_INCLUDES is emitted correctly."""
+ reader = self.reader("generated_includes")
+ objs = self.read_topsrcdir(reader)
+
+ generated_includes = [o.path for o in objs if isinstance(o, LocalInclude)]
+ expected = ["!/bar/baz", "!foo"]
+
+ self.assertEqual(generated_includes, expected)
+
+ generated_includes = [
+ o.path.full_path for o in objs if isinstance(o, LocalInclude)
+ ]
+ expected = [
+ mozpath.join(reader.config.topobjdir, "bar/baz"),
+ mozpath.join(reader.config.topobjdir, "foo"),
+ ]
+
+ self.assertEqual(generated_includes, expected)
+
+ def test_defines(self):
+ reader = self.reader("defines")
+ objs = self.read_topsrcdir(reader)
+
+ defines = {}
+ for o in objs:
+ if isinstance(o, Defines):
+ defines = o.defines
+
+ expected = {
+ "BAR": 7,
+ "BAZ": '"abcd"',
+ "FOO": True,
+ "VALUE": "xyz",
+ "QUX": False,
+ }
+
+ self.assertEqual(defines, expected)
+
+ def test_jar_manifests(self):
+ reader = self.reader("jar-manifests")
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 1)
+ for obj in objs:
+ self.assertIsInstance(obj, JARManifest)
+ self.assertIsInstance(obj.path, Path)
+
+ def test_jar_manifests_multiple_files(self):
+ with six.assertRaisesRegex(
+ self, SandboxValidationError, "limited to one value"
+ ):
+ reader = self.reader("jar-manifests-multiple-files")
+ self.read_topsrcdir(reader)
+
+ def test_xpidl_module_no_sources(self):
+ """XPIDL_MODULE without XPIDL_SOURCES should be rejected."""
+ with six.assertRaisesRegex(
+ self, SandboxValidationError, "XPIDL_MODULE " "cannot be defined"
+ ):
+ reader = self.reader("xpidl-module-no-sources")
+ self.read_topsrcdir(reader)
+
+ def test_xpidl_module_missing_sources(self):
+ """Missing XPIDL_SOURCES should be rejected."""
+ with six.assertRaisesRegex(
+ self, SandboxValidationError, "File .* " "from XPIDL_SOURCES does not exist"
+ ):
+ reader = self.reader("missing-xpidl")
+ self.read_topsrcdir(reader)
+
+ def test_missing_local_includes(self):
+ """LOCAL_INCLUDES containing non-existent directories should be rejected."""
+ with six.assertRaisesRegex(
+ self,
+ SandboxValidationError,
+ "Path specified in " "LOCAL_INCLUDES does not exist",
+ ):
+ reader = self.reader("missing-local-includes")
+ self.read_topsrcdir(reader)
+
+ def test_library_defines(self):
+ """Test that LIBRARY_DEFINES is propagated properly."""
+ reader = self.reader("library-defines")
+ objs = self.read_topsrcdir(reader)
+
+ libraries = [o for o in objs if isinstance(o, StaticLibrary)]
+ library_flags = [
+ o
+ for o in objs
+ if isinstance(o, ComputedFlags) and "LIBRARY_DEFINES" in o.flags
+ ]
+ expected = {
+ "liba": "-DIN_LIBA",
+ "libb": "-DIN_LIBB -DIN_LIBA",
+ "libc": "-DIN_LIBA -DIN_LIBB",
+ "libd": "",
+ }
+ defines = {}
+ for lib in libraries:
+ defines[lib.basename] = " ".join(lib.lib_defines.get_defines())
+ self.assertEqual(expected, defines)
+ defines_in_flags = {}
+ for flags in library_flags:
+ defines_in_flags[flags.relobjdir] = " ".join(
+ flags.flags["LIBRARY_DEFINES"] or []
+ )
+ self.assertEqual(expected, defines_in_flags)
+
+ def test_sources(self):
+ """Test that SOURCES works properly."""
+ reader = self.reader("sources")
+ objs = self.read_topsrcdir(reader)
+
+ as_flags = objs.pop()
+ self.assertIsInstance(as_flags, ComputedFlags)
+ computed_flags = objs.pop()
+ self.assertIsInstance(computed_flags, ComputedFlags)
+ # The third to last object is a Linkable.
+ linkable = objs.pop()
+ self.assertTrue(linkable.cxx_link)
+ ld_flags = objs.pop()
+ self.assertIsInstance(ld_flags, ComputedFlags)
+ self.assertEqual(len(objs), 6)
+ for o in objs:
+ self.assertIsInstance(o, Sources)
+
+ suffix_map = {obj.canonical_suffix: obj for obj in objs}
+ self.assertEqual(len(suffix_map), 6)
+
+ expected = {
+ ".cpp": ["a.cpp", "b.cc", "c.cxx"],
+ ".c": ["d.c"],
+ ".m": ["e.m"],
+ ".mm": ["f.mm"],
+ ".S": ["g.S"],
+ ".s": ["h.s", "i.asm"],
+ }
+ for suffix, files in expected.items():
+ sources = suffix_map[suffix]
+ self.assertEqual(
+ sources.files, [mozpath.join(reader.config.topsrcdir, f) for f in files]
+ )
+
+ for f in files:
+ self.assertIn(
+ mozpath.join(
+ reader.config.topobjdir,
+ "%s.%s"
+ % (mozpath.splitext(f)[0], reader.config.substs["OBJ_SUFFIX"]),
+ ),
+ linkable.objs,
+ )
+
+ def test_sources_just_c(self):
+ """Test that a linkable with no C++ sources doesn't have cxx_link set."""
+ reader = self.reader("sources-just-c")
+ objs = self.read_topsrcdir(reader)
+
+ as_flags = objs.pop()
+ self.assertIsInstance(as_flags, ComputedFlags)
+ flags = objs.pop()
+ self.assertIsInstance(flags, ComputedFlags)
+ # The third to last object is a Linkable.
+ linkable = objs.pop()
+ self.assertFalse(linkable.cxx_link)
+
+ def test_linkables_cxx_link(self):
+ """Test that linkables transitively set cxx_link properly."""
+ reader = self.reader("test-linkables-cxx-link")
+ got_results = 0
+ for obj in self.read_topsrcdir(reader):
+ if isinstance(obj, SharedLibrary):
+ if obj.basename == "cxx_shared":
+ self.assertEqual(
+ obj.name,
+ "%scxx_shared%s"
+ % (reader.config.dll_prefix, reader.config.dll_suffix),
+ )
+ self.assertTrue(obj.cxx_link)
+ got_results += 1
+ elif obj.basename == "just_c_shared":
+ self.assertEqual(
+ obj.name,
+ "%sjust_c_shared%s"
+ % (reader.config.dll_prefix, reader.config.dll_suffix),
+ )
+ self.assertFalse(obj.cxx_link)
+ got_results += 1
+ self.assertEqual(got_results, 2)
+
+ def test_generated_sources(self):
+ """Test that GENERATED_SOURCES works properly."""
+ reader = self.reader("generated-sources")
+ objs = self.read_topsrcdir(reader)
+
+ as_flags = objs.pop()
+ self.assertIsInstance(as_flags, ComputedFlags)
+ flags = objs.pop()
+ self.assertIsInstance(flags, ComputedFlags)
+ # The third to last object is a Linkable.
+ linkable = objs.pop()
+ self.assertTrue(linkable.cxx_link)
+ flags = objs.pop()
+ self.assertIsInstance(flags, ComputedFlags)
+ self.assertEqual(len(objs), 6)
+
+ generated_sources = [
+ o for o in objs if isinstance(o, Sources) and o.generated_files
+ ]
+ self.assertEqual(len(generated_sources), 6)
+
+ suffix_map = {obj.canonical_suffix: obj for obj in generated_sources}
+ self.assertEqual(len(suffix_map), 6)
+
+ expected = {
+ ".cpp": ["a.cpp", "b.cc", "c.cxx"],
+ ".c": ["d.c"],
+ ".m": ["e.m"],
+ ".mm": ["f.mm"],
+ ".S": ["g.S"],
+ ".s": ["h.s", "i.asm"],
+ }
+ for suffix, files in expected.items():
+ sources = suffix_map[suffix]
+ self.assertEqual(
+ sources.generated_files,
+ [mozpath.join(reader.config.topobjdir, f) for f in files],
+ )
+
+ for f in files:
+ self.assertIn(
+ mozpath.join(
+ reader.config.topobjdir,
+ "%s.%s"
+ % (mozpath.splitext(f)[0], reader.config.substs["OBJ_SUFFIX"]),
+ ),
+ linkable.objs,
+ )
+
+ def test_host_sources(self):
+ """Test that HOST_SOURCES works properly."""
+ reader = self.reader("host-sources")
+ objs = self.read_topsrcdir(reader)
+
+ # This objdir will generate target flags.
+ flags = objs.pop()
+ self.assertIsInstance(flags, ComputedFlags)
+ # The second to last object is a Linkable
+ linkable = objs.pop()
+ self.assertTrue(linkable.cxx_link)
+ # This objdir will also generate host flags.
+ host_flags = objs.pop()
+ self.assertIsInstance(host_flags, ComputedFlags)
+ # ...and ldflags.
+ ldflags = objs.pop()
+ self.assertIsInstance(ldflags, ComputedFlags)
+ self.assertEqual(len(objs), 3)
+ for o in objs:
+ self.assertIsInstance(o, HostSources)
+
+ suffix_map = {obj.canonical_suffix: obj for obj in objs}
+ self.assertEqual(len(suffix_map), 3)
+
+ expected = {
+ ".cpp": ["a.cpp", "b.cc", "c.cxx"],
+ ".c": ["d.c"],
+ ".mm": ["e.mm", "f.mm"],
+ }
+ for suffix, files in expected.items():
+ sources = suffix_map[suffix]
+ self.assertEqual(
+ sources.files, [mozpath.join(reader.config.topsrcdir, f) for f in files]
+ )
+
+ for f in files:
+ self.assertIn(
+ mozpath.join(
+ reader.config.topobjdir,
+ "host_%s.%s"
+ % (mozpath.splitext(f)[0], reader.config.substs["OBJ_SUFFIX"]),
+ ),
+ linkable.objs,
+ )
+
+ def test_wasm_sources(self):
+ """Test that WASM_SOURCES works properly."""
+ reader = self.reader(
+ "wasm-sources", extra_substs={"WASM_CC": "clang", "WASM_CXX": "clang++"}
+ )
+ objs = list(self.read_topsrcdir(reader))
+
+ # The second to last object is a linkable.
+ linkable = objs[-2]
+ # Other than that, we only care about the WasmSources objects.
+ objs = objs[:2]
+ for o in objs:
+ self.assertIsInstance(o, WasmSources)
+
+ suffix_map = {obj.canonical_suffix: obj for obj in objs}
+ self.assertEqual(len(suffix_map), 2)
+
+ expected = {".cpp": ["a.cpp", "b.cc", "c.cxx"], ".c": ["d.c"]}
+ for suffix, files in expected.items():
+ sources = suffix_map[suffix]
+ self.assertEqual(
+ sources.files, [mozpath.join(reader.config.topsrcdir, f) for f in files]
+ )
+ for f in files:
+ self.assertIn(
+ mozpath.join(
+ reader.config.topobjdir,
+ "%s.%s"
+ % (
+ mozpath.splitext(f)[0],
+ reader.config.substs["WASM_OBJ_SUFFIX"],
+ ),
+ ),
+ linkable.objs,
+ )
+
+ def test_unified_sources(self):
+ """Test that UNIFIED_SOURCES works properly."""
+ reader = self.reader("unified-sources")
+ objs = self.read_topsrcdir(reader)
+
+ # The last object is a ComputedFlags, the second to last a Linkable,
+ # followed by ldflags, ignore them.
+ linkable = objs[-2]
+ objs = objs[:-3]
+ self.assertEqual(len(objs), 3)
+ for o in objs:
+ self.assertIsInstance(o, UnifiedSources)
+
+ suffix_map = {obj.canonical_suffix: obj for obj in objs}
+ self.assertEqual(len(suffix_map), 3)
+
+ expected = {
+ ".cpp": ["bar.cxx", "foo.cpp", "quux.cc"],
+ ".mm": ["objc1.mm", "objc2.mm"],
+ ".c": ["c1.c", "c2.c"],
+ }
+ for suffix, files in expected.items():
+ sources = suffix_map[suffix]
+ self.assertEqual(
+ sources.files, [mozpath.join(reader.config.topsrcdir, f) for f in files]
+ )
+
+ # Unified sources are not required
+ if sources.have_unified_mapping:
+
+ for f in dict(sources.unified_source_mapping).keys():
+ self.assertIn(
+ mozpath.join(
+ reader.config.topobjdir,
+ "%s.%s"
+ % (
+ mozpath.splitext(f)[0],
+ reader.config.substs["OBJ_SUFFIX"],
+ ),
+ ),
+ linkable.objs,
+ )
+
+ def test_unified_sources_non_unified(self):
+ """Test that UNIFIED_SOURCES with FILES_PER_UNIFIED_FILE=1 works properly."""
+ reader = self.reader("unified-sources-non-unified")
+ objs = self.read_topsrcdir(reader)
+
+ # The last object is a Linkable, the second to last ComputedFlags,
+ # followed by ldflags, ignore them.
+ objs = objs[:-3]
+ self.assertEqual(len(objs), 3)
+ for o in objs:
+ self.assertIsInstance(o, UnifiedSources)
+
+ suffix_map = {obj.canonical_suffix: obj for obj in objs}
+ self.assertEqual(len(suffix_map), 3)
+
+ expected = {
+ ".cpp": ["bar.cxx", "foo.cpp", "quux.cc"],
+ ".mm": ["objc1.mm", "objc2.mm"],
+ ".c": ["c1.c", "c2.c"],
+ }
+ for suffix, files in expected.items():
+ sources = suffix_map[suffix]
+ self.assertEqual(
+ sources.files, [mozpath.join(reader.config.topsrcdir, f) for f in files]
+ )
+ self.assertFalse(sources.have_unified_mapping)
+
+ def test_object_conflicts(self):
+ """Test that object name conflicts are detected."""
+ reader = self.reader("object-conflicts/1")
+ with self.assertRaisesRegex(
+ SandboxValidationError,
+ "Test.cpp from SOURCES would have the same object name as"
+ " Test.c from SOURCES\.",
+ ):
+ self.read_topsrcdir(reader)
+
+ reader = self.reader("object-conflicts/2")
+ with self.assertRaisesRegex(
+ SandboxValidationError,
+ "Test.cpp from SOURCES would have the same object name as"
+ " subdir/Test.cpp from SOURCES\.",
+ ):
+ self.read_topsrcdir(reader)
+
+ reader = self.reader("object-conflicts/3")
+ with self.assertRaisesRegex(
+ SandboxValidationError,
+ "Test.cpp from UNIFIED_SOURCES would have the same object name as"
+ " Test.c from SOURCES in non-unified builds\.",
+ ):
+ self.read_topsrcdir(reader)
+
+ reader = self.reader("object-conflicts/4")
+ with self.assertRaisesRegex(
+ SandboxValidationError,
+ "Test.cpp from UNIFIED_SOURCES would have the same object name as"
+ " Test.c from UNIFIED_SOURCES in non-unified builds\.",
+ ):
+ self.read_topsrcdir(reader)
+
+ def test_final_target_pp_files(self):
+ """Test that FINAL_TARGET_PP_FILES works properly."""
+ reader = self.reader("dist-files")
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 1)
+ self.assertIsInstance(objs[0], FinalTargetPreprocessedFiles)
+
+ # Ideally we'd test hierarchies, but that would just be testing
+ # the HierarchicalStringList class, which we test separately.
+ for path, files in objs[0].files.walk():
+ self.assertEqual(path, "")
+ self.assertEqual(len(files), 2)
+
+ expected = {"install.rdf", "main.js"}
+ for f in files:
+ self.assertTrue(six.text_type(f) in expected)
+
+ def test_missing_final_target_pp_files(self):
+ """Test that FINAL_TARGET_PP_FILES with missing files throws errors."""
+ with six.assertRaisesRegex(
+ self,
+ SandboxValidationError,
+ "File listed in " "FINAL_TARGET_PP_FILES does not exist",
+ ):
+ reader = self.reader("dist-files-missing")
+ self.read_topsrcdir(reader)
+
+ def test_final_target_pp_files_non_srcdir(self):
+ """Test that non-srcdir paths in FINAL_TARGET_PP_FILES throws errors."""
+ reader = self.reader("final-target-pp-files-non-srcdir")
+ with six.assertRaisesRegex(
+ self,
+ SandboxValidationError,
+ "Only source directory paths allowed in FINAL_TARGET_PP_FILES:",
+ ):
+ self.read_topsrcdir(reader)
+
+ def test_localized_files(self):
+ """Test that LOCALIZED_FILES works properly."""
+ reader = self.reader("localized-files")
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 1)
+ self.assertIsInstance(objs[0], LocalizedFiles)
+
+ for path, files in objs[0].files.walk():
+ self.assertEqual(path, "foo")
+ self.assertEqual(len(files), 3)
+
+ expected = {"en-US/bar.ini", "en-US/code/*.js", "en-US/foo.js"}
+ for f in files:
+ self.assertTrue(six.text_type(f) in expected)
+
+ def test_localized_files_no_en_us(self):
+ """Test that LOCALIZED_FILES errors if a path does not start with
+ `en-US/` or contain `locales/en-US/`."""
+ reader = self.reader("localized-files-no-en-us")
+ with six.assertRaisesRegex(
+ self,
+ SandboxValidationError,
+ "LOCALIZED_FILES paths must start with `en-US/` or contain `locales/en-US/`: "
+ "foo.js",
+ ):
+ self.read_topsrcdir(reader)
+
+ def test_localized_pp_files(self):
+ """Test that LOCALIZED_PP_FILES works properly."""
+ reader = self.reader("localized-pp-files")
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 1)
+ self.assertIsInstance(objs[0], LocalizedPreprocessedFiles)
+
+ for path, files in objs[0].files.walk():
+ self.assertEqual(path, "foo")
+ self.assertEqual(len(files), 2)
+
+ expected = {"en-US/bar.ini", "en-US/foo.js"}
+ for f in files:
+ self.assertTrue(six.text_type(f) in expected)
+
+ def test_rust_library_no_cargo_toml(self):
+ """Test that defining a RustLibrary without a Cargo.toml fails."""
+ reader = self.reader("rust-library-no-cargo-toml")
+ with six.assertRaisesRegex(
+ self, SandboxValidationError, "No Cargo.toml file found"
+ ):
+ self.read_topsrcdir(reader)
+
+ def test_rust_library_name_mismatch(self):
+ """Test that defining a RustLibrary that doesn't match Cargo.toml fails."""
+ reader = self.reader("rust-library-name-mismatch")
+ with six.assertRaisesRegex(
+ self,
+ SandboxValidationError,
+ "library.*does not match Cargo.toml-defined package",
+ ):
+ self.read_topsrcdir(reader)
+
+ def test_rust_library_no_lib_section(self):
+ """Test that a RustLibrary Cargo.toml with no [lib] section fails."""
+ reader = self.reader("rust-library-no-lib-section")
+ with six.assertRaisesRegex(
+ self, SandboxValidationError, "Cargo.toml for.* has no \\[lib\\] section"
+ ):
+ self.read_topsrcdir(reader)
+
+ def test_rust_library_invalid_crate_type(self):
+ """Test that a RustLibrary Cargo.toml has a permitted crate-type."""
+ reader = self.reader("rust-library-invalid-crate-type")
+ with six.assertRaisesRegex(
+ self, SandboxValidationError, "crate-type.* is not permitted"
+ ):
+ self.read_topsrcdir(reader)
+
+ def test_rust_library_dash_folding(self):
+ """Test that on-disk names of RustLibrary objects convert dashes to underscores."""
+ reader = self.reader(
+ "rust-library-dash-folding",
+ extra_substs=dict(RUST_TARGET="i686-pc-windows-msvc"),
+ )
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 4)
+ ldflags, host_cflags, lib, cflags = objs
+ self.assertIsInstance(ldflags, ComputedFlags)
+ self.assertIsInstance(cflags, ComputedFlags)
+ self.assertIsInstance(host_cflags, ComputedFlags)
+ self.assertIsInstance(lib, RustLibrary)
+ self.assertRegex(lib.lib_name, "random_crate")
+ self.assertRegex(lib.import_name, "random_crate")
+ self.assertRegex(lib.basename, "random-crate")
+
+ def test_multiple_rust_libraries(self):
+ """Test that linking multiple Rust libraries throws an error"""
+ reader = self.reader(
+ "multiple-rust-libraries",
+ extra_substs=dict(RUST_TARGET="i686-pc-windows-msvc"),
+ )
+ with six.assertRaisesRegex(
+ self, SandboxValidationError, "Cannot link the following Rust libraries"
+ ):
+ self.read_topsrcdir(reader)
+
+ def test_rust_library_features(self):
+ """Test that RustLibrary features are correctly emitted."""
+ reader = self.reader(
+ "rust-library-features",
+ extra_substs=dict(RUST_TARGET="i686-pc-windows-msvc"),
+ )
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 4)
+ ldflags, host_cflags, lib, cflags = objs
+ self.assertIsInstance(ldflags, ComputedFlags)
+ self.assertIsInstance(cflags, ComputedFlags)
+ self.assertIsInstance(host_cflags, ComputedFlags)
+ self.assertIsInstance(lib, RustLibrary)
+ self.assertEqual(lib.features, ["musthave", "cantlivewithout"])
+
+ def test_rust_library_duplicate_features(self):
+ """Test that duplicate RustLibrary features are rejected."""
+ reader = self.reader("rust-library-duplicate-features")
+ with six.assertRaisesRegex(
+ self,
+ SandboxValidationError,
+ "features for .* should not contain duplicates",
+ ):
+ self.read_topsrcdir(reader)
+
+ def test_rust_program_no_cargo_toml(self):
+ """Test that specifying RUST_PROGRAMS without a Cargo.toml fails."""
+ reader = self.reader("rust-program-no-cargo-toml")
+ with six.assertRaisesRegex(
+ self, SandboxValidationError, "No Cargo.toml file found"
+ ):
+ self.read_topsrcdir(reader)
+
+ def test_host_rust_program_no_cargo_toml(self):
+ """Test that specifying HOST_RUST_PROGRAMS without a Cargo.toml fails."""
+ reader = self.reader("host-rust-program-no-cargo-toml")
+ with six.assertRaisesRegex(
+ self, SandboxValidationError, "No Cargo.toml file found"
+ ):
+ self.read_topsrcdir(reader)
+
+ def test_rust_program_nonexistent_name(self):
+ """Test that specifying RUST_PROGRAMS that don't exist in Cargo.toml
+ correctly throws an error."""
+ reader = self.reader("rust-program-nonexistent-name")
+ with six.assertRaisesRegex(
+ self, SandboxValidationError, "Cannot find Cargo.toml definition for"
+ ):
+ self.read_topsrcdir(reader)
+
+ def test_host_rust_program_nonexistent_name(self):
+ """Test that specifying HOST_RUST_PROGRAMS that don't exist in
+ Cargo.toml correctly throws an error."""
+ reader = self.reader("host-rust-program-nonexistent-name")
+ with six.assertRaisesRegex(
+ self, SandboxValidationError, "Cannot find Cargo.toml definition for"
+ ):
+ self.read_topsrcdir(reader)
+
+ def test_rust_programs(self):
+ """Test RUST_PROGRAMS emission."""
+ reader = self.reader(
+ "rust-programs",
+ extra_substs=dict(RUST_TARGET="i686-pc-windows-msvc", BIN_SUFFIX=".exe"),
+ )
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 4)
+ ldflags, host_cflags, cflags, prog = objs
+ self.assertIsInstance(ldflags, ComputedFlags)
+ self.assertIsInstance(cflags, ComputedFlags)
+ self.assertIsInstance(host_cflags, ComputedFlags)
+ self.assertIsInstance(prog, RustProgram)
+ self.assertEqual(prog.name, "some")
+
+ def test_host_rust_programs(self):
+ """Test HOST_RUST_PROGRAMS emission."""
+ reader = self.reader(
+ "host-rust-programs",
+ extra_substs=dict(
+ RUST_HOST_TARGET="i686-pc-windows-msvc", HOST_BIN_SUFFIX=".exe"
+ ),
+ )
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 4)
+ print(objs)
+ ldflags, cflags, hostflags, prog = objs
+ self.assertIsInstance(ldflags, ComputedFlags)
+ self.assertIsInstance(cflags, ComputedFlags)
+ self.assertIsInstance(hostflags, ComputedFlags)
+ self.assertIsInstance(prog, HostRustProgram)
+ self.assertEqual(prog.name, "some")
+
+ def test_host_rust_libraries(self):
+ """Test HOST_RUST_LIBRARIES emission."""
+ reader = self.reader(
+ "host-rust-libraries",
+ extra_substs=dict(
+ RUST_HOST_TARGET="i686-pc-windows-msvc", HOST_BIN_SUFFIX=".exe"
+ ),
+ )
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 4)
+ ldflags, host_cflags, lib, cflags = objs
+ self.assertIsInstance(ldflags, ComputedFlags)
+ self.assertIsInstance(cflags, ComputedFlags)
+ self.assertIsInstance(host_cflags, ComputedFlags)
+ self.assertIsInstance(lib, HostRustLibrary)
+ self.assertRegex(lib.lib_name, "host_lib")
+ self.assertRegex(lib.import_name, "host_lib")
+
+ def test_crate_dependency_path_resolution(self):
+ """Test recursive dependencies resolve with the correct paths."""
+ reader = self.reader(
+ "crate-dependency-path-resolution",
+ extra_substs=dict(RUST_TARGET="i686-pc-windows-msvc"),
+ )
+ objs = self.read_topsrcdir(reader)
+
+ self.assertEqual(len(objs), 4)
+ ldflags, host_cflags, lib, cflags = objs
+ self.assertIsInstance(ldflags, ComputedFlags)
+ self.assertIsInstance(cflags, ComputedFlags)
+ self.assertIsInstance(host_cflags, ComputedFlags)
+ self.assertIsInstance(lib, RustLibrary)
+
+ def test_install_shared_lib(self):
+ """Test that we can install a shared library with TEST_HARNESS_FILES"""
+ reader = self.reader("test-install-shared-lib")
+ objs = self.read_topsrcdir(reader)
+ self.assertIsInstance(objs[0], TestHarnessFiles)
+ self.assertIsInstance(objs[1], VariablePassthru)
+ self.assertIsInstance(objs[2], ComputedFlags)
+ self.assertIsInstance(objs[3], SharedLibrary)
+ self.assertIsInstance(objs[4], ComputedFlags)
+ for path, files in objs[0].files.walk():
+ for f in files:
+ self.assertEqual(str(f), "!libfoo.so")
+ self.assertEqual(path, "foo/bar")
+
+ def test_symbols_file(self):
+ """Test that SYMBOLS_FILE works"""
+ reader = self.reader("test-symbols-file")
+ genfile, ldflags, shlib, flags = self.read_topsrcdir(reader)
+ self.assertIsInstance(genfile, GeneratedFile)
+ self.assertIsInstance(flags, ComputedFlags)
+ self.assertIsInstance(ldflags, ComputedFlags)
+ self.assertIsInstance(shlib, SharedLibrary)
+ # This looks weird but MockConfig sets DLL_{PREFIX,SUFFIX} and
+ # the reader method in this class sets OS_TARGET=WINNT.
+ self.assertEqual(shlib.symbols_file, "libfoo.so.def")
+
+ def test_symbols_file_objdir(self):
+ """Test that a SYMBOLS_FILE in the objdir works"""
+ reader = self.reader("test-symbols-file-objdir")
+ genfile, ldflags, shlib, flags = self.read_topsrcdir(reader)
+ self.assertIsInstance(genfile, GeneratedFile)
+ self.assertEqual(
+ genfile.script, mozpath.join(reader.config.topsrcdir, "foo.py")
+ )
+ self.assertIsInstance(flags, ComputedFlags)
+ self.assertIsInstance(ldflags, ComputedFlags)
+ self.assertIsInstance(shlib, SharedLibrary)
+ self.assertEqual(shlib.symbols_file, "foo.symbols")
+
+ def test_symbols_file_objdir_missing_generated(self):
+ """Test that a SYMBOLS_FILE in the objdir that's missing
+ from GENERATED_FILES is an error.
+ """
+ reader = self.reader("test-symbols-file-objdir-missing-generated")
+ with six.assertRaisesRegex(
+ self,
+ SandboxValidationError,
+ "Objdir file specified in SYMBOLS_FILE not in GENERATED_FILES:",
+ ):
+ self.read_topsrcdir(reader)
+
+ def test_wasm_compile_flags(self):
+ reader = self.reader(
+ "wasm-compile-flags",
+ extra_substs={"WASM_CC": "clang", "WASM_CXX": "clang++"},
+ )
+ flags = list(self.read_topsrcdir(reader))[2]
+ self.assertIsInstance(flags, ComputedFlags)
+ self.assertEqual(
+ flags.flags["WASM_CFLAGS"], reader.config.substs["WASM_CFLAGS"]
+ )
+ self.assertEqual(
+ flags.flags["MOZBUILD_WASM_CFLAGS"], ["-funroll-loops", "-wasm-arg"]
+ )
+ self.assertEqual(
+ set(flags.flags["WASM_DEFINES"]),
+ set(["-DFOO", '-DBAZ="abcd"', "-UQUX", "-DBAR=7", "-DVALUE=xyz"]),
+ )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/frontend/test_namespaces.py b/python/mozbuild/mozbuild/test/frontend/test_namespaces.py
new file mode 100644
index 0000000000..e8c1a3eb00
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/test_namespaces.py
@@ -0,0 +1,225 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+
+import six
+from mozunit import main
+
+from mozbuild.frontend.context import (
+ Context,
+ ContextDerivedTypedList,
+ ContextDerivedTypedListWithItems,
+ ContextDerivedValue,
+)
+from mozbuild.util import (
+ StrictOrderingOnAppendList,
+ StrictOrderingOnAppendListWithFlagsFactory,
+ UnsortedError,
+)
+
+
+class Fuga(object):
+ def __init__(self, value):
+ self.value = value
+
+
+class Piyo(ContextDerivedValue):
+ def __init__(self, context, value):
+ if not isinstance(value, six.text_type):
+ raise ValueError
+ self.context = context
+ self.value = value
+
+ def lower(self):
+ return self.value.lower()
+
+ def __str__(self):
+ return self.value
+
+ def __eq__(self, other):
+ return self.value == six.text_type(other)
+
+ def __lt__(self, other):
+ return self.value < six.text_type(other)
+
+ def __le__(self, other):
+ return self.value <= six.text_type(other)
+
+ def __gt__(self, other):
+ return self.value > six.text_type(other)
+
+ def __ge__(self, other):
+ return self.value >= six.text_type(other)
+
+ def __hash__(self):
+ return hash(self.value)
+
+
+VARIABLES = {
+ "HOGE": (six.text_type, six.text_type, None),
+ "FUGA": (Fuga, six.text_type, None),
+ "PIYO": (Piyo, six.text_type, None),
+ "HOGERA": (ContextDerivedTypedList(Piyo, StrictOrderingOnAppendList), list, None),
+ "HOGEHOGE": (
+ ContextDerivedTypedListWithItems(
+ Piyo,
+ StrictOrderingOnAppendListWithFlagsFactory(
+ {
+ "foo": bool,
+ }
+ ),
+ ),
+ list,
+ None,
+ ),
+}
+
+
+class TestContext(unittest.TestCase):
+ def test_key_rejection(self):
+ # Lowercase keys should be rejected during normal operation.
+ ns = Context(allowed_variables=VARIABLES)
+
+ with self.assertRaises(KeyError) as ke:
+ ns["foo"] = True
+
+ e = ke.exception.args
+ self.assertEqual(e[0], "global_ns")
+ self.assertEqual(e[1], "set_unknown")
+ self.assertEqual(e[2], "foo")
+ self.assertTrue(e[3])
+
+ # Unknown uppercase keys should be rejected.
+ with self.assertRaises(KeyError) as ke:
+ ns["FOO"] = True
+
+ e = ke.exception.args
+ self.assertEqual(e[0], "global_ns")
+ self.assertEqual(e[1], "set_unknown")
+ self.assertEqual(e[2], "FOO")
+ self.assertTrue(e[3])
+
+ def test_allowed_set(self):
+ self.assertIn("HOGE", VARIABLES)
+
+ ns = Context(allowed_variables=VARIABLES)
+
+ ns["HOGE"] = "foo"
+ self.assertEqual(ns["HOGE"], "foo")
+
+ def test_value_checking(self):
+ ns = Context(allowed_variables=VARIABLES)
+
+ # Setting to a non-allowed type should not work.
+ with self.assertRaises(ValueError) as ve:
+ ns["HOGE"] = True
+
+ e = ve.exception.args
+ self.assertEqual(e[0], "global_ns")
+ self.assertEqual(e[1], "set_type")
+ self.assertEqual(e[2], "HOGE")
+ self.assertEqual(e[3], True)
+ self.assertEqual(e[4], six.text_type)
+
+ def test_key_checking(self):
+ # Checking for existence of a key should not populate the key if it
+ # doesn't exist.
+ g = Context(allowed_variables=VARIABLES)
+
+ self.assertFalse("HOGE" in g)
+ self.assertFalse("HOGE" in g)
+
+ def test_coercion(self):
+ ns = Context(allowed_variables=VARIABLES)
+
+ # Setting to a type different from the allowed input type should not
+ # work.
+ with self.assertRaises(ValueError) as ve:
+ ns["FUGA"] = False
+
+ e = ve.exception.args
+ self.assertEqual(e[0], "global_ns")
+ self.assertEqual(e[1], "set_type")
+ self.assertEqual(e[2], "FUGA")
+ self.assertEqual(e[3], False)
+ self.assertEqual(e[4], six.text_type)
+
+ ns["FUGA"] = "fuga"
+ self.assertIsInstance(ns["FUGA"], Fuga)
+ self.assertEqual(ns["FUGA"].value, "fuga")
+
+ ns["FUGA"] = Fuga("hoge")
+ self.assertIsInstance(ns["FUGA"], Fuga)
+ self.assertEqual(ns["FUGA"].value, "hoge")
+
+ def test_context_derived_coercion(self):
+ ns = Context(allowed_variables=VARIABLES)
+
+ # Setting to a type different from the allowed input type should not
+ # work.
+ with self.assertRaises(ValueError) as ve:
+ ns["PIYO"] = False
+
+ e = ve.exception.args
+ self.assertEqual(e[0], "global_ns")
+ self.assertEqual(e[1], "set_type")
+ self.assertEqual(e[2], "PIYO")
+ self.assertEqual(e[3], False)
+ self.assertEqual(e[4], six.text_type)
+
+ ns["PIYO"] = "piyo"
+ self.assertIsInstance(ns["PIYO"], Piyo)
+ self.assertEqual(ns["PIYO"].value, "piyo")
+ self.assertEqual(ns["PIYO"].context, ns)
+
+ ns["PIYO"] = Piyo(ns, "fuga")
+ self.assertIsInstance(ns["PIYO"], Piyo)
+ self.assertEqual(ns["PIYO"].value, "fuga")
+ self.assertEqual(ns["PIYO"].context, ns)
+
+ def test_context_derived_typed_list(self):
+ ns = Context(allowed_variables=VARIABLES)
+
+ # Setting to a type that's rejected by coercion should not work.
+ with self.assertRaises(ValueError):
+ ns["HOGERA"] = [False]
+
+ ns["HOGERA"] += ["a", "b", "c"]
+
+ self.assertIsInstance(ns["HOGERA"], VARIABLES["HOGERA"][0])
+ for n in range(0, 3):
+ self.assertIsInstance(ns["HOGERA"][n], Piyo)
+ self.assertEqual(ns["HOGERA"][n].value, ["a", "b", "c"][n])
+ self.assertEqual(ns["HOGERA"][n].context, ns)
+
+ with self.assertRaises(UnsortedError):
+ ns["HOGERA"] += ["f", "e", "d"]
+
+ def test_context_derived_typed_list_with_items(self):
+ ns = Context(allowed_variables=VARIABLES)
+
+ # Setting to a type that's rejected by coercion should not work.
+ with self.assertRaises(ValueError):
+ ns["HOGEHOGE"] = [False]
+
+ values = ["a", "b", "c"]
+ ns["HOGEHOGE"] += values
+
+ self.assertIsInstance(ns["HOGEHOGE"], VARIABLES["HOGEHOGE"][0])
+ for v in values:
+ ns["HOGEHOGE"][v].foo = True
+
+ for v, item in zip(values, ns["HOGEHOGE"]):
+ self.assertIsInstance(item, Piyo)
+ self.assertEqual(v, item)
+ self.assertEqual(ns["HOGEHOGE"][v].foo, True)
+ self.assertEqual(ns["HOGEHOGE"][item].foo, True)
+
+ with self.assertRaises(UnsortedError):
+ ns["HOGEHOGE"] += ["f", "e", "d"]
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/frontend/test_reader.py b/python/mozbuild/mozbuild/test/frontend/test_reader.py
new file mode 100644
index 0000000000..a15bb15d7e
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/test_reader.py
@@ -0,0 +1,531 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+import unittest
+
+import mozpack.path as mozpath
+from mozunit import main
+
+from mozbuild import schedules
+from mozbuild.frontend.context import BugzillaComponent
+from mozbuild.frontend.reader import BuildReader, BuildReaderError
+from mozbuild.test.common import MockConfig
+
+if sys.version_info.major == 2:
+ text_type = "unicode"
+else:
+ text_type = "str"
+
+data_path = mozpath.abspath(mozpath.dirname(__file__))
+data_path = mozpath.join(data_path, "data")
+
+
+class TestBuildReader(unittest.TestCase):
+ def setUp(self):
+ self._old_env = dict(os.environ)
+ os.environ.pop("MOZ_OBJDIR", None)
+
+ def tearDown(self):
+ os.environ.clear()
+ os.environ.update(self._old_env)
+
+ def config(self, name, **kwargs):
+ path = mozpath.join(data_path, name)
+
+ return MockConfig(path, **kwargs)
+
+ def reader(self, name, enable_tests=False, error_is_fatal=True, **kwargs):
+ extra = {}
+ if enable_tests:
+ extra["ENABLE_TESTS"] = "1"
+ config = self.config(name, extra_substs=extra, error_is_fatal=error_is_fatal)
+
+ return BuildReader(config, **kwargs)
+
+ def file_path(self, name, *args):
+ return mozpath.join(data_path, name, *args)
+
+ def test_dirs_traversal_simple(self):
+ reader = self.reader("traversal-simple")
+
+ contexts = list(reader.read_topsrcdir())
+
+ self.assertEqual(len(contexts), 4)
+
+ def test_dirs_traversal_no_descend(self):
+ reader = self.reader("traversal-simple")
+
+ path = mozpath.join(reader.config.topsrcdir, "moz.build")
+ self.assertTrue(os.path.exists(path))
+
+ contexts = list(reader.read_mozbuild(path, reader.config, descend=False))
+
+ self.assertEqual(len(contexts), 1)
+
+ def test_dirs_traversal_all_variables(self):
+ reader = self.reader("traversal-all-vars")
+
+ contexts = list(reader.read_topsrcdir())
+ self.assertEqual(len(contexts), 2)
+
+ reader = self.reader("traversal-all-vars", enable_tests=True)
+
+ contexts = list(reader.read_topsrcdir())
+ self.assertEqual(len(contexts), 3)
+
+ def test_relative_dirs(self):
+ # Ensure relative directories are traversed.
+ reader = self.reader("traversal-relative-dirs")
+
+ contexts = list(reader.read_topsrcdir())
+ self.assertEqual(len(contexts), 3)
+
+ def test_repeated_dirs_ignored(self):
+ # Ensure repeated directories are ignored.
+ reader = self.reader("traversal-repeated-dirs")
+
+ contexts = list(reader.read_topsrcdir())
+ self.assertEqual(len(contexts), 3)
+
+ def test_outside_topsrcdir(self):
+ # References to directories outside the topsrcdir should fail.
+ reader = self.reader("traversal-outside-topsrcdir")
+
+ with self.assertRaises(Exception):
+ list(reader.read_topsrcdir())
+
+ def test_error_basic(self):
+ reader = self.reader("reader-error-basic")
+
+ with self.assertRaises(BuildReaderError) as bre:
+ list(reader.read_topsrcdir())
+
+ e = bre.exception
+ self.assertEqual(
+ e.actual_file, self.file_path("reader-error-basic", "moz.build")
+ )
+
+ self.assertIn("The error occurred while processing the", str(e))
+
+ def test_error_included_from(self):
+ reader = self.reader("reader-error-included-from")
+
+ with self.assertRaises(BuildReaderError) as bre:
+ list(reader.read_topsrcdir())
+
+ e = bre.exception
+ self.assertEqual(
+ e.actual_file, self.file_path("reader-error-included-from", "child.build")
+ )
+ self.assertEqual(
+ e.main_file, self.file_path("reader-error-included-from", "moz.build")
+ )
+
+ self.assertIn("This file was included as part of processing", str(e))
+
+ def test_error_syntax_error(self):
+ reader = self.reader("reader-error-syntax")
+
+ with self.assertRaises(BuildReaderError) as bre:
+ list(reader.read_topsrcdir())
+
+ e = bre.exception
+ self.assertIn("Python syntax error on line 5", str(e))
+ self.assertIn(" foo =", str(e))
+ self.assertIn(" ^", str(e))
+
+ def test_error_read_unknown_global(self):
+ reader = self.reader("reader-error-read-unknown-global")
+
+ with self.assertRaises(BuildReaderError) as bre:
+ list(reader.read_topsrcdir())
+
+ e = bre.exception
+ self.assertIn("The error was triggered on line 5", str(e))
+ self.assertIn("The underlying problem is an attempt to read", str(e))
+ self.assertIn(" FOO", str(e))
+
+ def test_error_write_unknown_global(self):
+ reader = self.reader("reader-error-write-unknown-global")
+
+ with self.assertRaises(BuildReaderError) as bre:
+ list(reader.read_topsrcdir())
+
+ e = bre.exception
+ self.assertIn("The error was triggered on line 7", str(e))
+ self.assertIn("The underlying problem is an attempt to write", str(e))
+ self.assertIn(" FOO", str(e))
+
+ def test_error_write_bad_value(self):
+ reader = self.reader("reader-error-write-bad-value")
+
+ with self.assertRaises(BuildReaderError) as bre:
+ list(reader.read_topsrcdir())
+
+ e = bre.exception
+ self.assertIn("The error was triggered on line 5", str(e))
+ self.assertIn("is an attempt to write an illegal value to a special", str(e))
+
+ self.assertIn("variable whose value was rejected is:\n\n DIRS", str(e))
+
+ self.assertIn(
+ "written to it was of the following type:\n\n %s" % text_type, str(e)
+ )
+
+ self.assertIn("expects the following type(s):\n\n list", str(e))
+
+ def test_error_illegal_path(self):
+ reader = self.reader("reader-error-outside-topsrcdir")
+
+ with self.assertRaises(BuildReaderError) as bre:
+ list(reader.read_topsrcdir())
+
+ e = bre.exception
+ self.assertIn("The underlying problem is an illegal file access", str(e))
+
+ def test_error_missing_include_path(self):
+ reader = self.reader("reader-error-missing-include")
+
+ with self.assertRaises(BuildReaderError) as bre:
+ list(reader.read_topsrcdir())
+
+ e = bre.exception
+ self.assertIn("we referenced a path that does not exist", str(e))
+
+ def test_error_script_error(self):
+ reader = self.reader("reader-error-script-error")
+
+ with self.assertRaises(BuildReaderError) as bre:
+ list(reader.read_topsrcdir())
+
+ e = bre.exception
+ self.assertIn("The error appears to be the fault of the script", str(e))
+ self.assertIn(' ["TypeError: unsupported operand', str(e))
+
+ def test_error_bad_dir(self):
+ reader = self.reader("reader-error-bad-dir")
+
+ with self.assertRaises(BuildReaderError) as bre:
+ list(reader.read_topsrcdir())
+
+ e = bre.exception
+ self.assertIn("we referenced a path that does not exist", str(e))
+
+ def test_error_repeated_dir(self):
+ reader = self.reader("reader-error-repeated-dir")
+
+ with self.assertRaises(BuildReaderError) as bre:
+ list(reader.read_topsrcdir())
+
+ e = bre.exception
+ self.assertIn("Directory (foo) registered multiple times", str(e))
+
+ def test_error_error_func(self):
+ reader = self.reader("reader-error-error-func")
+
+ with self.assertRaises(BuildReaderError) as bre:
+ list(reader.read_topsrcdir())
+
+ e = bre.exception
+ self.assertIn("A moz.build file called the error() function.", str(e))
+ self.assertIn(" Some error.", str(e))
+
+ def test_error_error_func_ok(self):
+ reader = self.reader("reader-error-error-func", error_is_fatal=False)
+
+ list(reader.read_topsrcdir())
+
+ def test_error_empty_list(self):
+ reader = self.reader("reader-error-empty-list")
+
+ with self.assertRaises(BuildReaderError) as bre:
+ list(reader.read_topsrcdir())
+
+ e = bre.exception
+ self.assertIn("Variable DIRS assigned an empty value.", str(e))
+
+ def test_inheriting_variables(self):
+ reader = self.reader("inheriting-variables")
+
+ contexts = list(reader.read_topsrcdir())
+
+ self.assertEqual(len(contexts), 4)
+ self.assertEqual(
+ [context.relsrcdir for context in contexts], ["", "foo", "foo/baz", "bar"]
+ )
+ self.assertEqual(
+ [context["XPIDL_MODULE"] for context in contexts],
+ ["foobar", "foobar", "baz", "foobar"],
+ )
+
+ def test_find_relevant_mozbuilds(self):
+ reader = self.reader("reader-relevant-mozbuild")
+
+ # Absolute paths outside topsrcdir are rejected.
+ with self.assertRaises(Exception):
+ reader._find_relevant_mozbuilds(["/foo"])
+
+ # File in root directory.
+ paths = reader._find_relevant_mozbuilds(["file"])
+ self.assertEqual(paths, {"file": ["moz.build"]})
+
+ # File in child directory.
+ paths = reader._find_relevant_mozbuilds(["d1/file1"])
+ self.assertEqual(paths, {"d1/file1": ["moz.build", "d1/moz.build"]})
+
+ # Multiple files in same directory.
+ paths = reader._find_relevant_mozbuilds(["d1/file1", "d1/file2"])
+ self.assertEqual(
+ paths,
+ {
+ "d1/file1": ["moz.build", "d1/moz.build"],
+ "d1/file2": ["moz.build", "d1/moz.build"],
+ },
+ )
+
+ # Missing moz.build from missing intermediate directory.
+ paths = reader._find_relevant_mozbuilds(
+ ["d1/no-intermediate-moz-build/child/file"]
+ )
+ self.assertEqual(
+ paths,
+ {
+ "d1/no-intermediate-moz-build/child/file": [
+ "moz.build",
+ "d1/moz.build",
+ "d1/no-intermediate-moz-build/child/moz.build",
+ ]
+ },
+ )
+
+ # Lots of empty directories.
+ paths = reader._find_relevant_mozbuilds(
+ ["d1/parent-is-far/dir1/dir2/dir3/file"]
+ )
+ self.assertEqual(
+ paths,
+ {
+ "d1/parent-is-far/dir1/dir2/dir3/file": [
+ "moz.build",
+ "d1/moz.build",
+ "d1/parent-is-far/moz.build",
+ ]
+ },
+ )
+
+ # Lots of levels.
+ paths = reader._find_relevant_mozbuilds(
+ ["d1/every-level/a/file", "d1/every-level/b/file"]
+ )
+ self.assertEqual(
+ paths,
+ {
+ "d1/every-level/a/file": [
+ "moz.build",
+ "d1/moz.build",
+ "d1/every-level/moz.build",
+ "d1/every-level/a/moz.build",
+ ],
+ "d1/every-level/b/file": [
+ "moz.build",
+ "d1/moz.build",
+ "d1/every-level/moz.build",
+ "d1/every-level/b/moz.build",
+ ],
+ },
+ )
+
+ # Different root directories.
+ paths = reader._find_relevant_mozbuilds(["d1/file", "d2/file", "file"])
+ self.assertEqual(
+ paths,
+ {
+ "file": ["moz.build"],
+ "d1/file": ["moz.build", "d1/moz.build"],
+ "d2/file": ["moz.build", "d2/moz.build"],
+ },
+ )
+
+ def test_read_relevant_mozbuilds(self):
+ reader = self.reader("reader-relevant-mozbuild")
+
+ paths, contexts = reader.read_relevant_mozbuilds(
+ ["d1/every-level/a/file", "d1/every-level/b/file", "d2/file"]
+ )
+ self.assertEqual(len(paths), 3)
+ self.assertEqual(len(contexts), 6)
+
+ self.assertEqual(
+ [ctx.relsrcdir for ctx in paths["d1/every-level/a/file"]],
+ ["", "d1", "d1/every-level", "d1/every-level/a"],
+ )
+ self.assertEqual(
+ [ctx.relsrcdir for ctx in paths["d1/every-level/b/file"]],
+ ["", "d1", "d1/every-level", "d1/every-level/b"],
+ )
+ self.assertEqual([ctx.relsrcdir for ctx in paths["d2/file"]], ["", "d2"])
+
+ def test_all_mozbuild_paths(self):
+ reader = self.reader("reader-relevant-mozbuild")
+
+ paths = list(reader.all_mozbuild_paths())
+ # Ensure no duplicate paths.
+ self.assertEqual(sorted(paths), sorted(set(paths)))
+ self.assertEqual(len(paths), 10)
+
+ def test_files_bad_bug_component(self):
+ reader = self.reader("files-info")
+
+ with self.assertRaises(BuildReaderError):
+ reader.files_info(["bug_component/bad-assignment/moz.build"])
+
+ def test_files_bug_component_static(self):
+ reader = self.reader("files-info")
+
+ v = reader.files_info(
+ [
+ "bug_component/static/foo",
+ "bug_component/static/bar",
+ "bug_component/static/foo/baz",
+ ]
+ )
+ self.assertEqual(len(v), 3)
+ self.assertEqual(
+ v["bug_component/static/foo"]["BUG_COMPONENT"],
+ BugzillaComponent("FooProduct", "FooComponent"),
+ )
+ self.assertEqual(
+ v["bug_component/static/bar"]["BUG_COMPONENT"],
+ BugzillaComponent("BarProduct", "BarComponent"),
+ )
+ self.assertEqual(
+ v["bug_component/static/foo/baz"]["BUG_COMPONENT"],
+ BugzillaComponent("default_product", "default_component"),
+ )
+
+ def test_files_bug_component_simple(self):
+ reader = self.reader("files-info")
+
+ v = reader.files_info(["bug_component/simple/moz.build"])
+ self.assertEqual(len(v), 1)
+ flags = v["bug_component/simple/moz.build"]
+ self.assertEqual(flags["BUG_COMPONENT"].product, "Firefox Build System")
+ self.assertEqual(flags["BUG_COMPONENT"].component, "General")
+
+ def test_files_bug_component_different_matchers(self):
+ reader = self.reader("files-info")
+
+ v = reader.files_info(
+ [
+ "bug_component/different-matchers/foo.jsm",
+ "bug_component/different-matchers/bar.cpp",
+ "bug_component/different-matchers/baz.misc",
+ ]
+ )
+ self.assertEqual(len(v), 3)
+
+ js_flags = v["bug_component/different-matchers/foo.jsm"]
+ cpp_flags = v["bug_component/different-matchers/bar.cpp"]
+ misc_flags = v["bug_component/different-matchers/baz.misc"]
+
+ self.assertEqual(js_flags["BUG_COMPONENT"], BugzillaComponent("Firefox", "JS"))
+ self.assertEqual(
+ cpp_flags["BUG_COMPONENT"], BugzillaComponent("Firefox", "C++")
+ )
+ self.assertEqual(
+ misc_flags["BUG_COMPONENT"],
+ BugzillaComponent("default_product", "default_component"),
+ )
+
+ def test_files_bug_component_final(self):
+ reader = self.reader("files-info")
+
+ v = reader.files_info(
+ [
+ "bug_component/final/foo",
+ "bug_component/final/Makefile.in",
+ "bug_component/final/subcomponent/Makefile.in",
+ "bug_component/final/subcomponent/bar",
+ ]
+ )
+
+ self.assertEqual(
+ v["bug_component/final/foo"]["BUG_COMPONENT"],
+ BugzillaComponent("default_product", "default_component"),
+ )
+ self.assertEqual(
+ v["bug_component/final/Makefile.in"]["BUG_COMPONENT"],
+ BugzillaComponent("Firefox Build System", "General"),
+ )
+ self.assertEqual(
+ v["bug_component/final/subcomponent/Makefile.in"]["BUG_COMPONENT"],
+ BugzillaComponent("Firefox Build System", "General"),
+ )
+ self.assertEqual(
+ v["bug_component/final/subcomponent/bar"]["BUG_COMPONENT"],
+ BugzillaComponent("Another", "Component"),
+ )
+
+ def test_invalid_flavor(self):
+ reader = self.reader("invalid-files-flavor")
+
+ with self.assertRaises(BuildReaderError):
+ reader.files_info(["foo.js"])
+
+ def test_schedules(self):
+ reader = self.reader("schedules")
+ info = reader.files_info(
+ [
+ "win.and.osx",
+ "somefile",
+ "foo.win",
+ "foo.osx",
+ "subd/aa.py",
+ "subd/yaml.py",
+ "subd/win.js",
+ ]
+ )
+ # default: all exclusive, no inclusive
+ self.assertEqual(info["somefile"]["SCHEDULES"].inclusive, [])
+ self.assertEqual(
+ info["somefile"]["SCHEDULES"].exclusive, schedules.EXCLUSIVE_COMPONENTS
+ )
+ # windows-only
+ self.assertEqual(info["foo.win"]["SCHEDULES"].inclusive, [])
+ self.assertEqual(info["foo.win"]["SCHEDULES"].exclusive, ["windows"])
+ # osx-only
+ self.assertEqual(info["foo.osx"]["SCHEDULES"].inclusive, [])
+ self.assertEqual(info["foo.osx"]["SCHEDULES"].exclusive, ["macosx"])
+ # top-level moz.build specifies subd/**.py with an inclusive option
+ self.assertEqual(info["subd/aa.py"]["SCHEDULES"].inclusive, ["py-lint"])
+ self.assertEqual(
+ info["subd/aa.py"]["SCHEDULES"].exclusive, schedules.EXCLUSIVE_COMPONENTS
+ )
+ # Files('yaml.py') in subd/moz.build combines with Files('subdir/**.py')
+ self.assertEqual(
+ info["subd/yaml.py"]["SCHEDULES"].inclusive, ["py-lint", "yaml-lint"]
+ )
+ self.assertEqual(
+ info["subd/yaml.py"]["SCHEDULES"].exclusive, schedules.EXCLUSIVE_COMPONENTS
+ )
+ # .. but exclusive does not override inclusive
+ self.assertEqual(info["subd/win.js"]["SCHEDULES"].inclusive, ["js-lint"])
+ self.assertEqual(info["subd/win.js"]["SCHEDULES"].exclusive, ["windows"])
+
+ self.assertEqual(
+ set(info["subd/yaml.py"]["SCHEDULES"].components),
+ set(schedules.EXCLUSIVE_COMPONENTS + ["py-lint", "yaml-lint"]),
+ )
+
+ # win.and.osx is defined explicitly, and matches *.osx, and the two have
+ # conflicting SCHEDULES.exclusive settings, so the later one is used
+ self.assertEqual(
+ set(info["win.and.osx"]["SCHEDULES"].exclusive), set(["macosx", "windows"])
+ )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/frontend/test_sandbox.py b/python/mozbuild/mozbuild/test/frontend/test_sandbox.py
new file mode 100644
index 0000000000..017de1ce9c
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/frontend/test_sandbox.py
@@ -0,0 +1,536 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+
+import mozpack.path as mozpath
+from mozunit import main
+
+from mozbuild.frontend.context import (
+ FUNCTIONS,
+ SPECIAL_VARIABLES,
+ VARIABLES,
+ Context,
+ SourcePath,
+)
+from mozbuild.frontend.reader import MozbuildSandbox, SandboxCalledError
+from mozbuild.frontend.sandbox import Sandbox, SandboxExecutionError, SandboxLoadError
+from mozbuild.test.common import MockConfig
+
+test_data_path = mozpath.abspath(mozpath.dirname(__file__))
+test_data_path = mozpath.join(test_data_path, "data")
+
+
+class TestSandbox(unittest.TestCase):
+ def sandbox(self):
+ return Sandbox(
+ Context(
+ {
+ "DIRS": (list, list, None),
+ }
+ )
+ )
+
+ def test_exec_source_success(self):
+ sandbox = self.sandbox()
+ context = sandbox._context
+
+ sandbox.exec_source("foo = True", mozpath.abspath("foo.py"))
+
+ self.assertNotIn("foo", context)
+ self.assertEqual(context.main_path, mozpath.abspath("foo.py"))
+ self.assertEqual(context.all_paths, set([mozpath.abspath("foo.py")]))
+
+ def test_exec_compile_error(self):
+ sandbox = self.sandbox()
+
+ with self.assertRaises(SandboxExecutionError) as se:
+ sandbox.exec_source("2f23;k;asfj", mozpath.abspath("foo.py"))
+
+ self.assertEqual(se.exception.file_stack, [mozpath.abspath("foo.py")])
+ self.assertIsInstance(se.exception.exc_value, SyntaxError)
+ self.assertEqual(sandbox._context.main_path, mozpath.abspath("foo.py"))
+
+ def test_exec_import_denied(self):
+ sandbox = self.sandbox()
+
+ with self.assertRaises(SandboxExecutionError) as se:
+ sandbox.exec_source("import sys")
+
+ self.assertIsInstance(se.exception, SandboxExecutionError)
+ self.assertEqual(se.exception.exc_type, ImportError)
+
+ def test_exec_source_multiple(self):
+ sandbox = self.sandbox()
+
+ sandbox.exec_source('DIRS = ["foo"]')
+ sandbox.exec_source('DIRS += ["bar"]')
+
+ self.assertEqual(sandbox["DIRS"], ["foo", "bar"])
+
+ def test_exec_source_illegal_key_set(self):
+ sandbox = self.sandbox()
+
+ with self.assertRaises(SandboxExecutionError) as se:
+ sandbox.exec_source("ILLEGAL = True")
+
+ e = se.exception
+ self.assertIsInstance(e.exc_value, KeyError)
+
+ e = se.exception.exc_value
+ self.assertEqual(e.args[0], "global_ns")
+ self.assertEqual(e.args[1], "set_unknown")
+
+ def test_exec_source_reassign(self):
+ sandbox = self.sandbox()
+
+ sandbox.exec_source('DIRS = ["foo"]')
+ with self.assertRaises(SandboxExecutionError) as se:
+ sandbox.exec_source('DIRS = ["bar"]')
+
+ self.assertEqual(sandbox["DIRS"], ["foo"])
+ e = se.exception
+ self.assertIsInstance(e.exc_value, KeyError)
+
+ e = se.exception.exc_value
+ self.assertEqual(e.args[0], "global_ns")
+ self.assertEqual(e.args[1], "reassign")
+ self.assertEqual(e.args[2], "DIRS")
+
+ def test_exec_source_reassign_builtin(self):
+ sandbox = self.sandbox()
+
+ with self.assertRaises(SandboxExecutionError) as se:
+ sandbox.exec_source("sorted = 1")
+
+ e = se.exception
+ self.assertIsInstance(e.exc_value, KeyError)
+
+ e = se.exception.exc_value
+ self.assertEqual(e.args[0], "Cannot reassign builtins")
+
+
+class TestedSandbox(MozbuildSandbox):
+ """Version of MozbuildSandbox with a little more convenience for testing.
+
+ It automatically normalizes paths given to exec_file and exec_source. This
+ helps simplify the test code.
+ """
+
+ def normalize_path(self, path):
+ return mozpath.normpath(mozpath.join(self._context.config.topsrcdir, path))
+
+ def source_path(self, path):
+ return SourcePath(self._context, path)
+
+ def exec_file(self, path):
+ super(TestedSandbox, self).exec_file(self.normalize_path(path))
+
+ def exec_source(self, source, path=""):
+ super(TestedSandbox, self).exec_source(
+ source, self.normalize_path(path) if path else ""
+ )
+
+
+class TestMozbuildSandbox(unittest.TestCase):
+ def sandbox(self, data_path=None, metadata={}):
+ config = None
+
+ if data_path is not None:
+ config = MockConfig(mozpath.join(test_data_path, data_path))
+ else:
+ config = MockConfig()
+
+ return TestedSandbox(Context(VARIABLES, config), metadata)
+
+ def test_default_state(self):
+ sandbox = self.sandbox()
+ sandbox._context.add_source(sandbox.normalize_path("moz.build"))
+ config = sandbox._context.config
+
+ self.assertEqual(sandbox["TOPSRCDIR"], config.topsrcdir)
+ self.assertEqual(sandbox["TOPOBJDIR"], config.topobjdir)
+ self.assertEqual(sandbox["RELATIVEDIR"], "")
+ self.assertEqual(sandbox["SRCDIR"], config.topsrcdir)
+ self.assertEqual(sandbox["OBJDIR"], config.topobjdir)
+
+ def test_symbol_presence(self):
+ # Ensure no discrepancies between the master symbol table and what's in
+ # the sandbox.
+ sandbox = self.sandbox()
+ sandbox._context.add_source(sandbox.normalize_path("moz.build"))
+
+ all_symbols = set()
+ all_symbols |= set(FUNCTIONS.keys())
+ all_symbols |= set(SPECIAL_VARIABLES.keys())
+
+ for symbol in all_symbols:
+ self.assertIsNotNone(sandbox[symbol])
+
+ def test_path_calculation(self):
+ sandbox = self.sandbox()
+ sandbox._context.add_source(sandbox.normalize_path("foo/bar/moz.build"))
+ config = sandbox._context.config
+
+ self.assertEqual(sandbox["TOPSRCDIR"], config.topsrcdir)
+ self.assertEqual(sandbox["TOPOBJDIR"], config.topobjdir)
+ self.assertEqual(sandbox["RELATIVEDIR"], "foo/bar")
+ self.assertEqual(sandbox["SRCDIR"], mozpath.join(config.topsrcdir, "foo/bar"))
+ self.assertEqual(sandbox["OBJDIR"], mozpath.join(config.topobjdir, "foo/bar"))
+
+ def test_config_access(self):
+ sandbox = self.sandbox()
+ config = sandbox._context.config
+
+ self.assertEqual(sandbox["CONFIG"]["MOZ_TRUE"], "1")
+ self.assertEqual(sandbox["CONFIG"]["MOZ_FOO"], config.substs["MOZ_FOO"])
+
+ # Access to an undefined substitution should return None.
+ self.assertNotIn("MISSING", sandbox["CONFIG"])
+ self.assertIsNone(sandbox["CONFIG"]["MISSING"])
+
+ # We shouldn't be allowed to assign to the config.
+ with self.assertRaises(Exception):
+ sandbox["CONFIG"]["FOO"] = ""
+
+ def test_special_variables(self):
+ sandbox = self.sandbox()
+ sandbox._context.add_source(sandbox.normalize_path("moz.build"))
+
+ for k in SPECIAL_VARIABLES:
+ with self.assertRaises(KeyError):
+ sandbox[k] = 0
+
+ def test_exec_source_reassign_exported(self):
+ template_sandbox = self.sandbox(data_path="templates")
+
+ # Templates need to be defined in actual files because of
+ # inspect.getsourcelines.
+ template_sandbox.exec_file("templates.mozbuild")
+
+ config = MockConfig()
+
+ exports = {"DIST_SUBDIR": "browser"}
+
+ sandbox = TestedSandbox(
+ Context(VARIABLES, config),
+ metadata={
+ "exports": exports,
+ "templates": template_sandbox.templates,
+ },
+ )
+
+ self.assertEqual(sandbox["DIST_SUBDIR"], "browser")
+
+ # Templates should not interfere
+ sandbox.exec_source("Template([])", "foo.mozbuild")
+
+ sandbox.exec_source('DIST_SUBDIR = "foo"')
+ with self.assertRaises(SandboxExecutionError) as se:
+ sandbox.exec_source('DIST_SUBDIR = "bar"')
+
+ self.assertEqual(sandbox["DIST_SUBDIR"], "foo")
+ e = se.exception
+ self.assertIsInstance(e.exc_value, KeyError)
+
+ e = se.exception.exc_value
+ self.assertEqual(e.args[0], "global_ns")
+ self.assertEqual(e.args[1], "reassign")
+ self.assertEqual(e.args[2], "DIST_SUBDIR")
+
+ def test_include_basic(self):
+ sandbox = self.sandbox(data_path="include-basic")
+
+ sandbox.exec_file("moz.build")
+
+ self.assertEqual(
+ sandbox["DIRS"],
+ [
+ sandbox.source_path("foo"),
+ sandbox.source_path("bar"),
+ ],
+ )
+ self.assertEqual(
+ sandbox._context.main_path, sandbox.normalize_path("moz.build")
+ )
+ self.assertEqual(len(sandbox._context.all_paths), 2)
+
+ def test_include_outside_topsrcdir(self):
+ sandbox = self.sandbox(data_path="include-outside-topsrcdir")
+
+ with self.assertRaises(SandboxLoadError) as se:
+ sandbox.exec_file("relative.build")
+
+ self.assertEqual(
+ se.exception.illegal_path, sandbox.normalize_path("../moz.build")
+ )
+
+ def test_include_error_stack(self):
+ # Ensure the path stack is reported properly in exceptions.
+ sandbox = self.sandbox(data_path="include-file-stack")
+
+ with self.assertRaises(SandboxExecutionError) as se:
+ sandbox.exec_file("moz.build")
+
+ e = se.exception
+ self.assertIsInstance(e.exc_value, KeyError)
+
+ args = e.exc_value.args
+ self.assertEqual(args[0], "global_ns")
+ self.assertEqual(args[1], "set_unknown")
+ self.assertEqual(args[2], "ILLEGAL")
+
+ expected_stack = [
+ mozpath.join(sandbox._context.config.topsrcdir, p)
+ for p in ["moz.build", "included-1.build", "included-2.build"]
+ ]
+
+ self.assertEqual(e.file_stack, expected_stack)
+
+ def test_include_missing(self):
+ sandbox = self.sandbox(data_path="include-missing")
+
+ with self.assertRaises(SandboxLoadError) as sle:
+ sandbox.exec_file("moz.build")
+
+ self.assertIsNotNone(sle.exception.read_error)
+
+ def test_include_relative_from_child_dir(self):
+ # A relative path from a subdirectory should be relative from that
+ # child directory.
+ sandbox = self.sandbox(data_path="include-relative-from-child")
+ sandbox.exec_file("child/child.build")
+ self.assertEqual(sandbox["DIRS"], [sandbox.source_path("../foo")])
+
+ sandbox = self.sandbox(data_path="include-relative-from-child")
+ sandbox.exec_file("child/child2.build")
+ self.assertEqual(sandbox["DIRS"], [sandbox.source_path("../foo")])
+
+ def test_include_topsrcdir_relative(self):
+ # An absolute path for include() is relative to topsrcdir.
+
+ sandbox = self.sandbox(data_path="include-topsrcdir-relative")
+ sandbox.exec_file("moz.build")
+
+ self.assertEqual(sandbox["DIRS"], [sandbox.source_path("foo")])
+
+ def test_error(self):
+ sandbox = self.sandbox()
+
+ with self.assertRaises(SandboxCalledError) as sce:
+ sandbox.exec_source('error("This is an error.")')
+
+ e = sce.exception.message
+ self.assertIn("This is an error.", str(e))
+
+ def test_substitute_config_files(self):
+ sandbox = self.sandbox()
+ sandbox._context.add_source(sandbox.normalize_path("moz.build"))
+
+ sandbox.exec_source('CONFIGURE_SUBST_FILES += ["bar", "foo"]')
+ self.assertEqual(sandbox["CONFIGURE_SUBST_FILES"], ["bar", "foo"])
+ for item in sandbox["CONFIGURE_SUBST_FILES"]:
+ self.assertIsInstance(item, SourcePath)
+
+ def test_invalid_exports_set_base(self):
+ sandbox = self.sandbox()
+
+ with self.assertRaises(SandboxExecutionError) as se:
+ sandbox.exec_source('EXPORTS = "foo.h"')
+
+ self.assertEqual(se.exception.exc_type, ValueError)
+
+ def test_templates(self):
+ sandbox = self.sandbox(data_path="templates")
+
+ # Templates need to be defined in actual files because of
+ # inspect.getsourcelines.
+ sandbox.exec_file("templates.mozbuild")
+
+ sandbox2 = self.sandbox(metadata={"templates": sandbox.templates})
+ source = """
+Template([
+ 'foo.cpp',
+])
+"""
+ sandbox2.exec_source(source, "foo.mozbuild")
+
+ self.assertEqual(
+ sandbox2._context,
+ {
+ "SOURCES": ["foo.cpp"],
+ "DIRS": [],
+ },
+ )
+
+ sandbox2 = self.sandbox(metadata={"templates": sandbox.templates})
+ source = """
+SOURCES += ['qux.cpp']
+Template([
+ 'bar.cpp',
+ 'foo.cpp',
+],[
+ 'foo',
+])
+SOURCES += ['hoge.cpp']
+"""
+ sandbox2.exec_source(source, "foo.mozbuild")
+
+ self.assertEqual(
+ sandbox2._context,
+ {
+ "SOURCES": ["qux.cpp", "bar.cpp", "foo.cpp", "hoge.cpp"],
+ "DIRS": [sandbox2.source_path("foo")],
+ },
+ )
+
+ sandbox2 = self.sandbox(metadata={"templates": sandbox.templates})
+ source = """
+TemplateError([
+ 'foo.cpp',
+])
+"""
+ with self.assertRaises(SandboxExecutionError) as se:
+ sandbox2.exec_source(source, "foo.mozbuild")
+
+ e = se.exception
+ self.assertIsInstance(e.exc_value, KeyError)
+
+ e = se.exception.exc_value
+ self.assertEqual(e.args[0], "global_ns")
+ self.assertEqual(e.args[1], "set_unknown")
+
+ # TemplateGlobalVariable tries to access 'illegal' but that is expected
+ # to throw.
+ sandbox2 = self.sandbox(metadata={"templates": sandbox.templates})
+ source = """
+illegal = True
+TemplateGlobalVariable()
+"""
+ with self.assertRaises(SandboxExecutionError) as se:
+ sandbox2.exec_source(source, "foo.mozbuild")
+
+ e = se.exception
+ self.assertIsInstance(e.exc_value, NameError)
+
+ # TemplateGlobalUPPERVariable sets SOURCES with DIRS, but the context
+ # used when running the template is not expected to access variables
+ # from the global context.
+ sandbox2 = self.sandbox(metadata={"templates": sandbox.templates})
+ source = """
+DIRS += ['foo']
+TemplateGlobalUPPERVariable()
+"""
+ sandbox2.exec_source(source, "foo.mozbuild")
+ self.assertEqual(
+ sandbox2._context,
+ {
+ "SOURCES": [],
+ "DIRS": [sandbox2.source_path("foo")],
+ },
+ )
+
+ # However, the result of the template is mixed with the global
+ # context.
+ sandbox2 = self.sandbox(metadata={"templates": sandbox.templates})
+ source = """
+SOURCES += ['qux.cpp']
+TemplateInherit([
+ 'bar.cpp',
+ 'foo.cpp',
+])
+SOURCES += ['hoge.cpp']
+"""
+ sandbox2.exec_source(source, "foo.mozbuild")
+
+ self.assertEqual(
+ sandbox2._context,
+ {
+ "SOURCES": ["qux.cpp", "bar.cpp", "foo.cpp", "hoge.cpp"],
+ "USE_LIBS": ["foo"],
+ "DIRS": [],
+ },
+ )
+
+ # Template names must be CamelCase. Here, we can define the template
+ # inline because the error happens before inspect.getsourcelines.
+ sandbox2 = self.sandbox(metadata={"templates": sandbox.templates})
+ source = """
+@template
+def foo():
+ pass
+"""
+
+ with self.assertRaises(SandboxExecutionError) as se:
+ sandbox2.exec_source(source, "foo.mozbuild")
+
+ e = se.exception
+ self.assertIsInstance(e.exc_value, NameError)
+
+ e = se.exception.exc_value
+ self.assertIn("Template function names must be CamelCase.", str(e))
+
+ # Template names must not already be registered.
+ sandbox2 = self.sandbox(metadata={"templates": sandbox.templates})
+ source = """
+@template
+def Template():
+ pass
+"""
+ with self.assertRaises(SandboxExecutionError) as se:
+ sandbox2.exec_source(source, "foo.mozbuild")
+
+ e = se.exception
+ self.assertIsInstance(e.exc_value, KeyError)
+
+ e = se.exception.exc_value
+ self.assertIn(
+ 'A template named "Template" was already declared in %s.'
+ % sandbox.normalize_path("templates.mozbuild"),
+ str(e),
+ )
+
+ def test_function_args(self):
+ class Foo(int):
+ pass
+
+ def foo(a, b):
+ return type(a), type(b)
+
+ FUNCTIONS.update(
+ {
+ "foo": (lambda self: foo, (Foo, int), ""),
+ }
+ )
+
+ try:
+ sandbox = self.sandbox()
+ source = 'foo("a", "b")'
+
+ with self.assertRaises(SandboxExecutionError) as se:
+ sandbox.exec_source(source, "foo.mozbuild")
+
+ e = se.exception
+ self.assertIsInstance(e.exc_value, ValueError)
+
+ sandbox = self.sandbox()
+ source = 'foo(1, "b")'
+
+ with self.assertRaises(SandboxExecutionError) as se:
+ sandbox.exec_source(source, "foo.mozbuild")
+
+ e = se.exception
+ self.assertIsInstance(e.exc_value, ValueError)
+
+ sandbox = self.sandbox()
+ source = "a = foo(1, 2)"
+ sandbox.exec_source(source, "foo.mozbuild")
+
+ self.assertEqual(sandbox["a"], (Foo, int))
+ finally:
+ del FUNCTIONS["foo"]
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/python.ini b/python/mozbuild/mozbuild/test/python.ini
new file mode 100644
index 0000000000..b55612c43b
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/python.ini
@@ -0,0 +1,64 @@
+[DEFAULT]
+subsuite = mozbuild
+
+[action/test_buildlist.py]
+[action/test_html_fragment_preprocessor.py]
+[action/test_langpack_manifest.py]
+[action/test_node.py]
+[action/test_process_install_manifest.py]
+[backend/test_fastermake.py]
+[backend/test_recursivemake.py]
+[backend/test_build.py]
+[backend/test_database.py]
+[backend/test_configenvironment.py]
+[backend/test_partialconfigenvironment.py]
+[backend/test_test_manifest.py]
+[backend/test_visualstudio.py]
+[code_analysis/test_mach_commands.py]
+[codecoverage/test_lcov_rewrite.py]
+[compilation/test_warnings.py]
+[configure/lint.py]
+[configure/test_bootstrap.py]
+[configure/test_checks_configure.py]
+[configure/test_compile_checks.py]
+[configure/test_configure.py]
+[configure/test_lint.py]
+[configure/test_moz_configure.py]
+[configure/test_options.py]
+[configure/test_toolchain_configure.py]
+[configure/test_toolchain_helpers.py]
+[configure/test_toolkit_moz_configure.py]
+[configure/test_util.py]
+[controller/test_ccachestats.py]
+[controller/test_clobber.py]
+[frontend/test_context.py]
+[frontend/test_emitter.py]
+[frontend/test_namespaces.py]
+[frontend/test_reader.py]
+[frontend/test_sandbox.py]
+[repackaging/test_deb.py]
+[test_artifact_cache.py]
+[test_artifacts.py]
+[test_base.py]
+[test_containers.py]
+[test_dotproperties.py]
+[test_expression.py]
+[test_jarmaker.py]
+[test_licenses.py]
+[test_line_endings.py]
+[test_makeutil.py]
+[test_manifest.py]
+[test_mozconfig.py]
+[test_mozinfo.py]
+[test_preprocessor.py]
+[test_pythonutil.py]
+[test_rewrite_mozbuild.py]
+[test_telemetry.py]
+[test_telemetry_settings.py]
+[test_util.py]
+[test_util_fileavoidwrite.py]
+[test_vendor.py]
+skip-if = true # Bug 1765416
+requirements = python/mozbuild/mozbuild/test/vendor_requirements.txt
+[test_vendor_tools.py]
+skip-if = os == "win" # Windows doesn't have the same path separator as Linux, and we just don't need to run it there
diff --git a/python/mozbuild/mozbuild/test/repackaging/test_deb.py b/python/mozbuild/mozbuild/test/repackaging/test_deb.py
new file mode 100644
index 0000000000..477f7ea346
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/repackaging/test_deb.py
@@ -0,0 +1,551 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import datetime
+import json
+import logging
+import os
+import tarfile
+import tempfile
+import zipfile
+from contextlib import nullcontext as does_not_raise
+from io import StringIO
+from unittest.mock import MagicMock, Mock, call
+
+import mozpack.path as mozpath
+import mozunit
+import pytest
+
+from mozbuild.repackaging import deb
+
+_APPLICATION_INI_CONTENT = """[App]
+Vendor=Mozilla
+Name=Firefox
+RemotingName=firefox-nightly-try
+CodeName=Firefox Nightly
+BuildID=20230222000000
+"""
+
+_APPLICATION_INI_CONTENT_DATA = {
+ "name": "Firefox",
+ "display_name": "Firefox Nightly",
+ "vendor": "Mozilla",
+ "remoting_name": "firefox-nightly-try",
+ "build_id": "20230222000000",
+ "timestamp": datetime.datetime(2023, 2, 22),
+}
+
+
+@pytest.mark.parametrize(
+ "number_of_application_ini_files, expectaction, expected_result",
+ (
+ (0, pytest.raises(ValueError), None),
+ (1, does_not_raise(), _APPLICATION_INI_CONTENT_DATA),
+ (2, pytest.raises(ValueError), None),
+ ),
+)
+def test_extract_application_ini_data(
+ number_of_application_ini_files, expectaction, expected_result
+):
+ with tempfile.TemporaryDirectory() as d:
+ tar_path = os.path.join(d, "input.tar")
+ with tarfile.open(tar_path, "w") as tar:
+ application_ini_path = os.path.join(d, "application.ini")
+ with open(application_ini_path, "w") as application_ini_file:
+ application_ini_file.write(_APPLICATION_INI_CONTENT)
+
+ for i in range(number_of_application_ini_files):
+ tar.add(application_ini_path, f"{i}/application.ini")
+
+ with expectaction:
+ assert deb._extract_application_ini_data(tar_path) == expected_result
+
+
+def test_extract_application_ini_data_from_directory():
+ with tempfile.TemporaryDirectory() as d:
+ with open(os.path.join(d, "application.ini"), "w") as f:
+ f.write(_APPLICATION_INI_CONTENT)
+
+ assert (
+ deb._extract_application_ini_data_from_directory(d)
+ == _APPLICATION_INI_CONTENT_DATA
+ )
+
+
+@pytest.mark.parametrize(
+ "version, build_number, package_name_suffix, description_suffix, expected",
+ (
+ (
+ "112.0a1",
+ 1,
+ "",
+ "",
+ {
+ "DEB_DESCRIPTION": "Mozilla Firefox",
+ "DEB_PKG_INSTALL_PATH": "usr/lib/firefox-nightly-try",
+ "DEB_PKG_NAME": "firefox-nightly-try",
+ "DEB_PKG_VERSION": "112.0a1~20230222000000",
+ },
+ ),
+ (
+ "112.0a1",
+ 1,
+ "-l10n-fr",
+ " - Language pack for Firefox Nightly for fr",
+ {
+ "DEB_DESCRIPTION": "Mozilla Firefox - Language pack for Firefox Nightly for fr",
+ "DEB_PKG_INSTALL_PATH": "usr/lib/firefox-nightly-try",
+ "DEB_PKG_NAME": "firefox-nightly-try-l10n-fr",
+ "DEB_PKG_VERSION": "112.0a1~20230222000000",
+ },
+ ),
+ (
+ "112.0b1",
+ 1,
+ "",
+ "",
+ {
+ "DEB_DESCRIPTION": "Mozilla Firefox",
+ "DEB_PKG_INSTALL_PATH": "usr/lib/firefox-nightly-try",
+ "DEB_PKG_NAME": "firefox-nightly-try",
+ "DEB_PKG_VERSION": "112.0b1~build1",
+ },
+ ),
+ (
+ "112.0",
+ 2,
+ "",
+ "",
+ {
+ "DEB_DESCRIPTION": "Mozilla Firefox",
+ "DEB_PKG_INSTALL_PATH": "usr/lib/firefox-nightly-try",
+ "DEB_PKG_NAME": "firefox-nightly-try",
+ "DEB_PKG_VERSION": "112.0~build2",
+ },
+ ),
+ ),
+)
+def test_get_build_variables(
+ version, build_number, package_name_suffix, description_suffix, expected
+):
+ application_ini_data = {
+ "name": "Firefox",
+ "display_name": "Firefox",
+ "vendor": "Mozilla",
+ "remoting_name": "firefox-nightly-try",
+ "build_id": "20230222000000",
+ "timestamp": datetime.datetime(2023, 2, 22),
+ }
+ assert deb._get_build_variables(
+ application_ini_data,
+ "x86",
+ version,
+ build_number,
+ depends="${shlibs:Depends},",
+ package_name_suffix=package_name_suffix,
+ description_suffix=description_suffix,
+ ) == {
+ **{
+ "DEB_CHANGELOG_DATE": "Wed, 22 Feb 2023 00:00:00 -0000",
+ "DEB_ARCH_NAME": "i386",
+ "DEB_DEPENDS": "${shlibs:Depends},",
+ },
+ **expected,
+ }
+
+
+def test_copy_plain_deb_config(monkeypatch):
+ def mock_listdir(dir):
+ assert dir == "/template_dir"
+ return [
+ "/template_dir/debian_file1.in",
+ "/template_dir/debian_file2.in",
+ "/template_dir/debian_file3",
+ "/template_dir/debian_file4",
+ ]
+
+ monkeypatch.setattr(deb.os, "listdir", mock_listdir)
+
+ def mock_makedirs(dir, exist_ok):
+ assert dir == "/source_dir/debian"
+ assert exist_ok is True
+
+ monkeypatch.setattr(deb.os, "makedirs", mock_makedirs)
+
+ mock_copy = MagicMock()
+ monkeypatch.setattr(deb.shutil, "copy", mock_copy)
+
+ deb._copy_plain_deb_config("/template_dir", "/source_dir")
+ assert mock_copy.call_args_list == [
+ call("/template_dir/debian_file3", "/source_dir/debian/debian_file3"),
+ call("/template_dir/debian_file4", "/source_dir/debian/debian_file4"),
+ ]
+
+
+def test_render_deb_templates():
+ with tempfile.TemporaryDirectory() as template_dir, tempfile.TemporaryDirectory() as source_dir:
+ with open(os.path.join(template_dir, "debian_file1.in"), "w") as f:
+ f.write("${some_build_variable}")
+
+ with open(os.path.join(template_dir, "debian_file2.in"), "w") as f:
+ f.write("Some hardcoded value")
+
+ with open(os.path.join(template_dir, "ignored_file.in"), "w") as f:
+ f.write("Must not be copied")
+
+ deb._render_deb_templates(
+ template_dir,
+ source_dir,
+ {"some_build_variable": "some_value"},
+ exclude_file_names=["ignored_file.in"],
+ )
+
+ with open(os.path.join(source_dir, "debian", "debian_file1")) as f:
+ assert f.read() == "some_value"
+
+ with open(os.path.join(source_dir, "debian", "debian_file2")) as f:
+ assert f.read() == "Some hardcoded value"
+
+ assert not os.path.exists(os.path.join(source_dir, "debian", "ignored_file"))
+ assert not os.path.exists(os.path.join(source_dir, "debian", "ignored_file.in"))
+
+
+def test_inject_deb_distribution_folder(monkeypatch):
+ def mock_check_call(command):
+ global clone_dir
+ clone_dir = command[-1]
+ os.makedirs(os.path.join(clone_dir, "desktop/deb/distribution"))
+
+ monkeypatch.setattr(deb.subprocess, "check_call", mock_check_call)
+
+ def mock_copytree(source_tree, destination_tree):
+ global clone_dir
+ assert source_tree == mozpath.join(clone_dir, "desktop/deb/distribution")
+ assert destination_tree == "/source_dir/firefox/distribution"
+
+ monkeypatch.setattr(deb.shutil, "copytree", mock_copytree)
+
+ deb._inject_deb_distribution_folder("/source_dir", "Firefox")
+
+
+ZH_TW_FTL = """\
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+# These messages are used by the Firefox ".desktop" file on Linux.
+# https://specifications.freedesktop.org/desktop-entry-spec/desktop-entry-spec-latest.html
+
+# The entry name is the label on the desktop icon, among other things.
+desktop-entry-name = { -brand-shortcut-name }
+# The comment usually appears as a tooltip when hovering over application menu entry.
+desktop-entry-comment = ç€è¦½å…¨çƒè³‡è¨Šç¶²
+desktop-entry-generic-name = 網é ç€è¦½å™¨
+# Keywords are search terms used to find this application.
+# The string is a list of keywords separated by semicolons:
+# - Do NOT replace semicolons with other punctuation signs.
+# - The list MUST end with a semicolon.
+desktop-entry-keywords = 網際網路;網路;ç€è¦½å™¨;網é ;上網;Internet;WWW;Browser;Web;Explorer;
+
+## Actions are visible in a context menu after right clicking the
+## taskbar icon, possibly other places depending on the environment.
+
+desktop-action-new-window-name = 開新視窗
+desktop-action-new-private-window-name = é–‹æ–°éš±ç§è¦–窗
+"""
+
+DESKTOP_ENTRY_FILE_TEXT = """\
+[Desktop Entry]
+Version=1.0
+Type=Application
+Exec=firefox-nightly %u
+Terminal=false
+X-MultipleArgs=false
+Icon=firefox-nightly
+StartupWMClass=firefox-nightly
+Categories=GNOME;GTK;Network;WebBrowser;
+MimeType=application/json;application/pdf;application/rdf+xml;application/rss+xml;application/x-xpinstall;application/xhtml+xml;application/xml;audio/flac;audio/ogg;audio/webm;image/avif;image/gif;image/jpeg;image/png;image/svg+xml;image/webp;text/html;text/xml;video/ogg;video/webm;x-scheme-handler/chrome;x-scheme-handler/http;x-scheme-handler/https;
+StartupNotify=true
+Actions=new-window;new-private-window;open-profile-manager;
+Name=en-US-desktop-entry-name
+Name[zh_TW]=zh-TW-desktop-entry-name
+Comment=en-US-desktop-entry-comment
+Comment[zh_TW]=zh-TW-desktop-entry-comment
+GenericName=en-US-desktop-entry-generic-name
+GenericName[zh_TW]=zh-TW-desktop-entry-generic-name
+Keywords=en-US-desktop-entry-keywords
+Keywords[zh_TW]=zh-TW-desktop-entry-keywords
+X-GNOME-FullName=en-US-desktop-entry-x-gnome-full-name
+X-GNOME-FullName[zh_TW]=zh-TW-desktop-entry-x-gnome-full-name
+
+[Desktop Action new-window]
+Exec=firefox-nightly --new-window %u
+Name=en-US-desktop-action-new-window-name
+Name[zh_TW]=zh-TW-desktop-action-new-window-name
+
+[Desktop Action new-private-window]
+Exec=firefox-nightly --private-window %u
+Name=en-US-desktop-action-new-private-window-name
+Name[zh_TW]=zh-TW-desktop-action-new-private-window-name
+
+[Desktop Action open-profile-manager]
+Exec=firefox-nightly --ProfileManager
+Name=en-US-desktop-action-open-profile-manager
+Name[zh_TW]=zh-TW-desktop-action-open-profile-manager
+"""
+
+
+def test_generate_deb_desktop_entry_file_text(monkeypatch):
+ def responsive(url):
+ if "zh-TW" in url:
+ return Mock(
+ **{
+ "status_code": 200,
+ "text": ZH_TW_FTL,
+ }
+ )
+ return Mock(**{"status_code": 404})
+
+ monkeypatch.setattr(deb.requests, "get", responsive)
+
+ output_stream = StringIO()
+ logger = logging.getLogger("mozbuild:test:repackaging")
+ logger.setLevel(logging.DEBUG)
+ stream_handler = logging.StreamHandler(output_stream)
+ logger.addHandler(stream_handler)
+
+ def log(level, action, params, format_str):
+ logger.log(
+ level,
+ format_str.format(**params),
+ extra={"action": action, "params": params},
+ )
+
+ build_variables = {
+ "DEB_PKG_NAME": "firefox-nightly",
+ }
+ release_product = "firefox"
+ release_type = "nightly"
+
+ def fluent_localization(locales, resources, loader):
+ def format_value(resource):
+ return f"{locales[0]}-{resource}"
+
+ return Mock(**{"format_value": format_value})
+
+ fluent_resource_loader = Mock()
+
+ desktop_entry_file_text = deb._generate_browser_desktop_entry_file_text(
+ log,
+ build_variables,
+ release_product,
+ release_type,
+ fluent_localization,
+ fluent_resource_loader,
+ )
+
+ assert desktop_entry_file_text == DESKTOP_ENTRY_FILE_TEXT
+
+ def outage(url):
+ return Mock(**{"status_code": 500})
+
+ monkeypatch.setattr(deb.requests, "get", outage)
+
+ with pytest.raises(deb.HgServerError):
+ desktop_entry_file_text = deb._generate_browser_desktop_entry_file_text(
+ log,
+ build_variables,
+ release_product,
+ release_type,
+ fluent_localization,
+ fluent_resource_loader,
+ )
+
+
+@pytest.mark.parametrize(
+ "does_path_exits, expectation",
+ (
+ (True, does_not_raise()),
+ (False, pytest.raises(deb.NoDebPackageFound)),
+ ),
+)
+def test_generate_deb_archive(
+ monkeypatch,
+ does_path_exits,
+ expectation,
+):
+ monkeypatch.setattr(deb, "_get_command", lambda _: ["mock_command"])
+ monkeypatch.setattr(deb.subprocess, "check_call", lambda *_, **__: None)
+
+ def mock_exists(path):
+ assert path == "/target_dir/firefox_111.0_amd64.deb"
+ return does_path_exits
+
+ monkeypatch.setattr(deb.os.path, "exists", mock_exists)
+
+ def mock_move(source_path, destination_path):
+ assert source_path == "/target_dir/firefox_111.0_amd64.deb"
+ assert destination_path == "/output/target.deb"
+
+ monkeypatch.setattr(deb.shutil, "move", mock_move)
+
+ with expectation:
+ deb._generate_deb_archive(
+ source_dir="/source_dir",
+ target_dir="/target_dir",
+ output_file_path="/output/target.deb",
+ build_variables={
+ "DEB_PKG_NAME": "firefox",
+ "DEB_PKG_VERSION": "111.0",
+ },
+ arch="x86_64",
+ )
+
+
+@pytest.mark.parametrize(
+ "arch, is_chroot_available, expected",
+ (
+ (
+ "all",
+ True,
+ [
+ "chroot",
+ "/srv/jessie-amd64",
+ "bash",
+ "-c",
+ "cd /tmp/*/source; dpkg-buildpackage -us -uc -b",
+ ],
+ ),
+ ("all", False, ["dpkg-buildpackage", "-us", "-uc", "-b"]),
+ (
+ "x86",
+ True,
+ [
+ "chroot",
+ "/srv/jessie-i386",
+ "bash",
+ "-c",
+ "cd /tmp/*/source; dpkg-buildpackage -us -uc -b --host-arch=i386",
+ ],
+ ),
+ ("x86", False, ["dpkg-buildpackage", "-us", "-uc", "-b", "--host-arch=i386"]),
+ (
+ "x86_64",
+ True,
+ [
+ "chroot",
+ "/srv/jessie-amd64",
+ "bash",
+ "-c",
+ "cd /tmp/*/source; dpkg-buildpackage -us -uc -b --host-arch=amd64",
+ ],
+ ),
+ (
+ "x86_64",
+ False,
+ ["dpkg-buildpackage", "-us", "-uc", "-b", "--host-arch=amd64"],
+ ),
+ ),
+)
+def test_get_command(monkeypatch, arch, is_chroot_available, expected):
+ monkeypatch.setattr(deb, "_is_chroot_available", lambda _: is_chroot_available)
+ assert deb._get_command(arch) == expected
+
+
+@pytest.mark.parametrize(
+ "arch, does_dir_exist, expected_path, expected_result",
+ (
+ ("all", False, "/srv/jessie-amd64", False),
+ ("all", True, "/srv/jessie-amd64", True),
+ ("x86", False, "/srv/jessie-i386", False),
+ ("x86_64", False, "/srv/jessie-amd64", False),
+ ("x86", True, "/srv/jessie-i386", True),
+ ("x86_64", True, "/srv/jessie-amd64", True),
+ ),
+)
+def test_is_chroot_available(
+ monkeypatch, arch, does_dir_exist, expected_path, expected_result
+):
+ def _mock_is_dir(path):
+ assert path == expected_path
+ return does_dir_exist
+
+ monkeypatch.setattr(deb.os.path, "isdir", _mock_is_dir)
+ assert deb._is_chroot_available(arch) == expected_result
+
+
+@pytest.mark.parametrize(
+ "arch, expected",
+ (
+ ("all", "/srv/jessie-amd64"),
+ ("x86", "/srv/jessie-i386"),
+ ("x86_64", "/srv/jessie-amd64"),
+ ),
+)
+def test_get_chroot_path(arch, expected):
+ assert deb._get_chroot_path(arch) == expected
+
+
+_MANIFEST_JSON_DATA = {
+ "langpack_id": "fr",
+ "manifest_version": 2,
+ "browser_specific_settings": {
+ "gecko": {
+ "id": "langpack-fr@devedition.mozilla.org",
+ "strict_min_version": "112.0a1",
+ "strict_max_version": "112.0a1",
+ }
+ },
+ "name": "Language: Français (French)",
+ "description": "Firefox Developer Edition Language Pack for Français (fr) – French",
+ "version": "112.0.20230227.181253",
+ "languages": {
+ "fr": {
+ "version": "20230223164410",
+ "chrome_resources": {
+ "app-marketplace-icons": "browser/chrome/browser/locale/fr/app-marketplace-icons/",
+ "branding": "browser/chrome/fr/locale/branding/",
+ "browser": "browser/chrome/fr/locale/browser/",
+ "browser-region": "browser/chrome/fr/locale/browser-region/",
+ "devtools": "browser/chrome/fr/locale/fr/devtools/client/",
+ "devtools-shared": "browser/chrome/fr/locale/fr/devtools/shared/",
+ "formautofill": "browser/features/formautofill@mozilla.org/fr/locale/fr/",
+ "report-site-issue": "browser/features/webcompat-reporter@mozilla.org/fr/locale/fr/",
+ "alerts": "chrome/fr/locale/fr/alerts/",
+ "autoconfig": "chrome/fr/locale/fr/autoconfig/",
+ "global": "chrome/fr/locale/fr/global/",
+ "global-platform": {
+ "macosx": "chrome/fr/locale/fr/global-platform/mac/",
+ "linux": "chrome/fr/locale/fr/global-platform/unix/",
+ "android": "chrome/fr/locale/fr/global-platform/unix/",
+ "win": "chrome/fr/locale/fr/global-platform/win/",
+ },
+ "mozapps": "chrome/fr/locale/fr/mozapps/",
+ "necko": "chrome/fr/locale/fr/necko/",
+ "passwordmgr": "chrome/fr/locale/fr/passwordmgr/",
+ "pdf.js": "chrome/fr/locale/pdfviewer/",
+ "pipnss": "chrome/fr/locale/fr/pipnss/",
+ "pippki": "chrome/fr/locale/fr/pippki/",
+ "places": "chrome/fr/locale/fr/places/",
+ "weave": "chrome/fr/locale/fr/services/",
+ },
+ }
+ },
+ "sources": {"browser": {"base_path": "browser/"}},
+ "author": "mozfr.org (contributors: L’équipe francophone)",
+}
+
+
+def test_extract_langpack_metadata():
+ with tempfile.TemporaryDirectory() as d:
+ langpack_path = os.path.join(d, "langpack.xpi")
+ with zipfile.ZipFile(langpack_path, "w") as zip:
+ zip.writestr("manifest.json", json.dumps(_MANIFEST_JSON_DATA))
+
+ assert deb._extract_langpack_metadata(langpack_path) == _MANIFEST_JSON_DATA
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozbuild/test/test_android_version_code.py b/python/mozbuild/mozbuild/test/test_android_version_code.py
new file mode 100644
index 0000000000..7600ebe0d8
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_android_version_code.py
@@ -0,0 +1,111 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+
+from mozunit import main
+
+from mozbuild.android_version_code import (
+ android_version_code_v0,
+ android_version_code_v1,
+)
+
+
+class TestAndroidVersionCode(unittest.TestCase):
+ def test_android_version_code_v0(self):
+ # From https://treeherder.mozilla.org/#/jobs?repo=mozilla-central&revision=e25de9972a77.
+ buildid = "20150708104620"
+ arm_api9 = 2015070819
+ arm_api11 = 2015070821
+ x86_api9 = 2015070822
+ self.assertEqual(
+ android_version_code_v0(
+ buildid, cpu_arch="armeabi", min_sdk=9, max_sdk=None
+ ),
+ arm_api9,
+ )
+ self.assertEqual(
+ android_version_code_v0(
+ buildid, cpu_arch="armeabi-v7a", min_sdk=11, max_sdk=None
+ ),
+ arm_api11,
+ )
+ self.assertEqual(
+ android_version_code_v0(buildid, cpu_arch="x86", min_sdk=9, max_sdk=None),
+ x86_api9,
+ )
+
+ def test_android_version_code_v1(self):
+ buildid = "20150825141628"
+ arm_api16 = 0b01111000001000000001001001110001
+ arm64_api21 = 0b01111000001000000001001001110100
+ x86_api9 = 0b01111000001000000001001001110100
+ self.assertEqual(
+ android_version_code_v1(
+ buildid, cpu_arch="armeabi-v7a", min_sdk=16, max_sdk=None
+ ),
+ arm_api16,
+ )
+ self.assertEqual(
+ android_version_code_v1(
+ buildid, cpu_arch="arm64-v8a", min_sdk=21, max_sdk=None
+ ),
+ arm64_api21,
+ )
+ self.assertEqual(
+ android_version_code_v1(buildid, cpu_arch="x86", min_sdk=9, max_sdk=None),
+ x86_api9,
+ )
+
+ def test_android_version_code_v1_underflow(self):
+ """Verify that it is an error to ask for v1 codes predating the cutoff."""
+ buildid = "201508010000" # Earliest possible.
+ arm_api9 = 0b01111000001000000000000000000000
+ self.assertEqual(
+ android_version_code_v1(
+ buildid, cpu_arch="armeabi", min_sdk=9, max_sdk=None
+ ),
+ arm_api9,
+ )
+ with self.assertRaises(ValueError) as cm:
+ underflow = "201507310000" # Latest possible (valid) underflowing date.
+ android_version_code_v1(
+ underflow, cpu_arch="armeabi", min_sdk=9, max_sdk=None
+ )
+        self.assertTrue("underflow" in str(cm.exception))
+
+ def test_android_version_code_v1_running_low(self):
+ """Verify there is an informative message if one asks for v1
+ codes that are close to overflow."""
+ with self.assertRaises(ValueError) as cm:
+ overflow = "20290801000000"
+ android_version_code_v1(
+ overflow, cpu_arch="armeabi", min_sdk=9, max_sdk=None
+ )
+        self.assertTrue("Running out of low order bits" in str(cm.exception))
+
+ def test_android_version_code_v1_overflow(self):
+ """Verify that it is an error to ask for v1 codes that actually does overflow."""
+ with self.assertRaises(ValueError) as cm:
+ overflow = "20310801000000"
+ android_version_code_v1(
+ overflow, cpu_arch="armeabi", min_sdk=9, max_sdk=None
+ )
+        self.assertTrue("overflow" in str(cm.exception))
+
+ def test_android_version_code_v0_relative_v1(self):
+ """Verify that the first v1 code is greater than the equivalent v0 code."""
+ buildid = "20150801000000"
+ self.assertGreater(
+ android_version_code_v1(
+ buildid, cpu_arch="armeabi", min_sdk=9, max_sdk=None
+ ),
+ android_version_code_v0(
+ buildid, cpu_arch="armeabi", min_sdk=9, max_sdk=None
+ ),
+ )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/test_artifact_cache.py b/python/mozbuild/mozbuild/test/test_artifact_cache.py
new file mode 100644
index 0000000000..d12d150183
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_artifact_cache.py
@@ -0,0 +1,145 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import time
+import unittest
+from shutil import rmtree
+from tempfile import mkdtemp
+
+import mozunit
+
+from mozbuild import artifact_cache
+from mozbuild.artifact_cache import ArtifactCache
+
+CONTENTS = {
+ "http://server/foo": b"foo",
+ "http://server/bar": b"bar" * 400,
+ "http://server/qux": b"qux" * 400,
+ "http://server/fuga": b"fuga" * 300,
+ "http://server/hoge": b"hoge" * 300,
+ "http://server/larger": b"larger" * 3000,
+}
+
+
+class FakeResponse(object):
+ def __init__(self, content):
+ self._content = content
+
+ @property
+ def headers(self):
+ return {"Content-length": str(len(self._content))}
+
+ def iter_content(self, chunk_size):
+ content = memoryview(self._content)
+ while content:
+ yield content[:chunk_size]
+ content = content[chunk_size:]
+
+ def raise_for_status(self):
+ pass
+
+ def close(self):
+ pass
+
+
+class FakeSession(object):
+ def get(self, url, stream=True):
+ assert stream is True
+ return FakeResponse(CONTENTS[url])
+
+
+class TestArtifactCache(unittest.TestCase):
+ def setUp(self):
+ self.min_cached_artifacts = artifact_cache.MIN_CACHED_ARTIFACTS
+ self.max_cached_artifacts_size = artifact_cache.MAX_CACHED_ARTIFACTS_SIZE
+ artifact_cache.MIN_CACHED_ARTIFACTS = 2
+ artifact_cache.MAX_CACHED_ARTIFACTS_SIZE = 4096
+
+ self._real_utime = os.utime
+ os.utime = self.utime
+ self.timestamp = time.time() - 86400
+
+ self.tmpdir = mkdtemp()
+
+ def tearDown(self):
+ rmtree(self.tmpdir)
+ artifact_cache.MIN_CACHED_ARTIFACTS = self.min_cached_artifacts
+ artifact_cache.MAX_CACHED_ARTIFACTS_SIZE = self.max_cached_artifacts_size
+ os.utime = self._real_utime
+
+ def utime(self, path, times):
+ if times is None:
+ # Ensure all downloaded files have a different timestamp
+ times = (self.timestamp, self.timestamp)
+ self.timestamp += 2
+ self._real_utime(path, times)
+
+ def listtmpdir(self):
+ return [p for p in os.listdir(self.tmpdir) if p != ".metadata_never_index"]
+
+ def test_artifact_cache_persistence(self):
+ cache = ArtifactCache(self.tmpdir)
+ cache._download_manager.session = FakeSession()
+
+ path = cache.fetch("http://server/foo")
+ expected = [os.path.basename(path)]
+ self.assertEqual(self.listtmpdir(), expected)
+
+ path = cache.fetch("http://server/bar")
+ expected.append(os.path.basename(path))
+ self.assertEqual(sorted(self.listtmpdir()), sorted(expected))
+
+ # We're downloading more than the cache allows us, but since it's all
+ # in the same session, no purge happens.
+ path = cache.fetch("http://server/qux")
+ expected.append(os.path.basename(path))
+ self.assertEqual(sorted(self.listtmpdir()), sorted(expected))
+
+ path = cache.fetch("http://server/fuga")
+ expected.append(os.path.basename(path))
+ self.assertEqual(sorted(self.listtmpdir()), sorted(expected))
+
+ cache = ArtifactCache(self.tmpdir)
+ cache._download_manager.session = FakeSession()
+
+ # Downloading a new file in a new session purges the oldest files in
+ # the cache.
+ path = cache.fetch("http://server/hoge")
+ expected.append(os.path.basename(path))
+ expected = expected[2:]
+ self.assertEqual(sorted(self.listtmpdir()), sorted(expected))
+
+ # Downloading a file already in the cache leaves the cache untouched
+ cache = ArtifactCache(self.tmpdir)
+ cache._download_manager.session = FakeSession()
+
+ path = cache.fetch("http://server/qux")
+ self.assertEqual(sorted(self.listtmpdir()), sorted(expected))
+
+ # bar was purged earlier, re-downloading it should purge the oldest
+ # downloaded file, which at this point would be qux, but we also
+ # re-downloaded it in the mean time, so the next one (fuga) should be
+ # the purged one.
+ cache = ArtifactCache(self.tmpdir)
+ cache._download_manager.session = FakeSession()
+
+ path = cache.fetch("http://server/bar")
+ expected.append(os.path.basename(path))
+ expected = [p for p in expected if "fuga" not in p]
+ self.assertEqual(sorted(self.listtmpdir()), sorted(expected))
+
+ # Downloading one file larger than the cache size should still leave
+ # MIN_CACHED_ARTIFACTS files.
+ cache = ArtifactCache(self.tmpdir)
+ cache._download_manager.session = FakeSession()
+
+ path = cache.fetch("http://server/larger")
+ expected.append(os.path.basename(path))
+ expected = expected[-2:]
+ self.assertEqual(sorted(self.listtmpdir()), sorted(expected))
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozbuild/test/test_artifacts.py b/python/mozbuild/mozbuild/test/test_artifacts.py
new file mode 100644
index 0000000000..397b6dbdb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_artifacts.py
@@ -0,0 +1,115 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from unittest import TestCase
+
+import buildconfig
+import mozunit
+
+from mozbuild.artifacts import ArtifactJob, ThunderbirdMixin
+
+
+class FakeArtifactJob(ArtifactJob):
+ package_re = r""
+
+
+class TestArtifactJob(TestCase):
+ def _assert_candidate_trees(self, version_display, expected_trees):
+ buildconfig.substs["MOZ_APP_VERSION_DISPLAY"] = version_display
+
+ job = FakeArtifactJob()
+ self.assertGreater(len(job.candidate_trees), 0)
+ self.assertEqual(job.candidate_trees, expected_trees)
+
+ def test_candidate_trees_with_empty_file(self):
+ self._assert_candidate_trees(
+ version_display="", expected_trees=ArtifactJob.default_candidate_trees
+ )
+
+ def test_candidate_trees_with_beta_version(self):
+ self._assert_candidate_trees(
+ version_display="92.1b2", expected_trees=ArtifactJob.beta_candidate_trees
+ )
+
+ def test_candidate_trees_with_esr_version(self):
+ self._assert_candidate_trees(
+ version_display="91.3.0esr", expected_trees=ArtifactJob.esr_candidate_trees
+ )
+
+ def test_candidate_trees_with_nightly_version(self):
+ self._assert_candidate_trees(
+ version_display="95.0a1", expected_trees=ArtifactJob.nightly_candidate_trees
+ )
+
+ def test_candidate_trees_with_release_version(self):
+ self._assert_candidate_trees(
+ version_display="93.0.1", expected_trees=ArtifactJob.default_candidate_trees
+ )
+
+ def test_candidate_trees_with_newline_before_version(self):
+ self._assert_candidate_trees(
+ version_display="\n\n91.3.0esr",
+ expected_trees=ArtifactJob.esr_candidate_trees,
+ )
+
+ def test_property_is_cached(self):
+ job = FakeArtifactJob()
+ expected_trees = ArtifactJob.esr_candidate_trees
+
+ buildconfig.substs["MOZ_APP_VERSION_DISPLAY"] = "91.3.0.esr"
+ self.assertEqual(job.candidate_trees, expected_trees)
+ # Because the property is cached, changing the
+ # `MOZ_APP_VERSION_DISPLAY` won't have any impact.
+ buildconfig.substs["MOZ_APP_VERSION_DISPLAY"] = ""
+ self.assertEqual(job.candidate_trees, expected_trees)
+
+
+class FakeThunderbirdJob(ThunderbirdMixin, FakeArtifactJob):
+ pass
+
+
+class TestThunderbirdMixin(TestCase):
+ def _assert_candidate_trees(self, version_display, source_repo, expected_trees):
+ buildconfig.substs["MOZ_APP_VERSION_DISPLAY"] = version_display
+ buildconfig.substs["MOZ_SOURCE_REPO"] = source_repo
+
+ job = FakeThunderbirdJob()
+ self.assertGreater(len(job.candidate_trees), 0)
+ self.assertEqual(job.candidate_trees, expected_trees)
+
+ def test_candidate_trees_with_beta_version(self):
+ self._assert_candidate_trees(
+ version_display="92.1b2",
+ source_repo="https://hg.mozilla.org/releases/comm-beta",
+ expected_trees=ThunderbirdMixin.beta_candidate_trees,
+ )
+
+ def test_candidate_trees_with_esr_version(self):
+ self._assert_candidate_trees(
+ version_display="91.3.0",
+ source_repo="https://hg.mozilla.org/releases/comm-esr91",
+ expected_trees=ThunderbirdMixin.esr_candidate_trees,
+ )
+
+ def test_candidate_trees_with_nightly_version(self):
+ self._assert_candidate_trees(
+ version_display="95.0a1",
+ source_repo="https://hg.mozilla.org/comm-central",
+ expected_trees=ThunderbirdMixin.nightly_candidate_trees,
+ )
+
+ def test_property_is_cached(self):
+ job = FakeThunderbirdJob()
+ expected_trees = ThunderbirdMixin.esr_candidate_trees
+
+ buildconfig.substs["MOZ_APP_VERSION_DISPLAY"] = "91.3.0.esr"
+ self.assertEqual(job.candidate_trees, expected_trees)
+ # Because the property is cached, changing the
+ # `MOZ_APP_VERSION_DISPLAY` won't have any impact.
+ buildconfig.substs["MOZ_APP_VERSION_DISPLAY"] = ""
+ self.assertEqual(job.candidate_trees, expected_trees)
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozbuild/test/test_base.py b/python/mozbuild/mozbuild/test/test_base.py
new file mode 100644
index 0000000000..c75a71ef5d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_base.py
@@ -0,0 +1,446 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+import os
+import shutil
+import sys
+import tempfile
+import unittest
+
+import mozpack.path as mozpath
+from buildconfig import topobjdir, topsrcdir
+from mach.logging import LoggingManager
+from mozfile.mozfile import NamedTemporaryFile
+from mozunit import main
+from six import StringIO
+
+from mozbuild.backend.configenvironment import ConfigEnvironment
+from mozbuild.base import (
+ BadEnvironmentException,
+ MachCommandBase,
+ MozbuildObject,
+ PathArgument,
+)
+from mozbuild.test.common import prepare_tmp_topsrcdir
+
+curdir = os.path.dirname(__file__)
+log_manager = LoggingManager()
+
+
+class TestMozbuildObject(unittest.TestCase):
+ def setUp(self):
+ self._old_cwd = os.getcwd()
+ self._old_env = dict(os.environ)
+ os.environ.pop("MOZCONFIG", None)
+ os.environ.pop("MOZ_OBJDIR", None)
+
+ def tearDown(self):
+ os.chdir(self._old_cwd)
+ os.environ.clear()
+ os.environ.update(self._old_env)
+
+ def get_base(self, topobjdir=None):
+ return MozbuildObject(topsrcdir, None, log_manager, topobjdir=topobjdir)
+
+ def test_objdir_config_guess(self):
+ base = self.get_base()
+
+ with NamedTemporaryFile(mode="wt") as mozconfig:
+ os.environ["MOZCONFIG"] = mozconfig.name
+
+ self.assertIsNotNone(base.topobjdir)
+ self.assertEqual(len(base.topobjdir.split()), 1)
+ config_guess = base.resolve_config_guess()
+ self.assertTrue(base.topobjdir.endswith(config_guess))
+ self.assertTrue(os.path.isabs(base.topobjdir))
+ self.assertTrue(base.topobjdir.startswith(base.topsrcdir))
+
+ def test_objdir_trailing_slash(self):
+ """Trailing slashes in topobjdir should be removed."""
+ base = self.get_base()
+
+ with NamedTemporaryFile(mode="wt") as mozconfig:
+ mozconfig.write("mk_add_options MOZ_OBJDIR=@TOPSRCDIR@/foo/")
+ mozconfig.flush()
+ os.environ["MOZCONFIG"] = mozconfig.name
+
+ self.assertEqual(base.topobjdir, mozpath.join(base.topsrcdir, "foo"))
+ self.assertTrue(base.topobjdir.endswith("foo"))
+
+ def test_objdir_config_status(self):
+ """Ensure @CONFIG_GUESS@ is handled when loading mozconfig."""
+ base = self.get_base()
+ guess = base.resolve_config_guess()
+
+ # There may be symlinks involved, so we use real paths to ensure
+ # path consistency.
+ d = os.path.realpath(tempfile.mkdtemp())
+ try:
+ mozconfig = os.path.join(d, "mozconfig")
+ with open(mozconfig, "wt") as fh:
+ fh.write("mk_add_options MOZ_OBJDIR=@TOPSRCDIR@/foo/@CONFIG_GUESS@")
+ print("Wrote mozconfig %s" % mozconfig)
+
+ topobjdir = os.path.join(d, "foo", guess)
+ os.makedirs(topobjdir)
+
+ # Create a fake topsrcdir.
+ prepare_tmp_topsrcdir(d)
+
+ mozinfo = os.path.join(topobjdir, "mozinfo.json")
+ with open(mozinfo, "wt") as fh:
+ json.dump(
+ dict(
+ topsrcdir=d,
+ mozconfig=mozconfig,
+ ),
+ fh,
+ )
+
+ os.environ["MOZCONFIG"] = mozconfig
+ os.chdir(topobjdir)
+
+ obj = MozbuildObject.from_environment(detect_virtualenv_mozinfo=False)
+
+ self.assertEqual(obj.topobjdir, mozpath.normsep(topobjdir))
+ finally:
+ os.chdir(self._old_cwd)
+ shutil.rmtree(d)
+
+ def test_relative_objdir(self):
+ """Relative defined objdirs are loaded properly."""
+ d = os.path.realpath(tempfile.mkdtemp())
+ try:
+ mozconfig = os.path.join(d, "mozconfig")
+ with open(mozconfig, "wt") as fh:
+ fh.write("mk_add_options MOZ_OBJDIR=./objdir")
+
+ topobjdir = mozpath.join(d, "objdir")
+ os.mkdir(topobjdir)
+
+ mozinfo = os.path.join(topobjdir, "mozinfo.json")
+ with open(mozinfo, "wt") as fh:
+ json.dump(
+ dict(
+ topsrcdir=d,
+ mozconfig=mozconfig,
+ ),
+ fh,
+ )
+
+ os.environ["MOZCONFIG"] = mozconfig
+ child = os.path.join(topobjdir, "foo", "bar")
+ os.makedirs(child)
+ os.chdir(child)
+
+ obj = MozbuildObject.from_environment(detect_virtualenv_mozinfo=False)
+
+ self.assertEqual(obj.topobjdir, topobjdir)
+
+ finally:
+ os.chdir(self._old_cwd)
+ shutil.rmtree(d)
+
+ @unittest.skipIf(
+ not hasattr(os, "symlink") or os.name == "nt", "symlinks not available."
+ )
+ def test_symlink_objdir(self):
+ """Objdir that is a symlink is loaded properly."""
+ d = os.path.realpath(tempfile.mkdtemp())
+ try:
+ topobjdir_real = os.path.join(d, "objdir")
+ topobjdir_link = os.path.join(d, "objlink")
+
+ os.mkdir(topobjdir_real)
+ os.symlink(topobjdir_real, topobjdir_link)
+
+ mozconfig = os.path.join(d, "mozconfig")
+ with open(mozconfig, "wt") as fh:
+ fh.write("mk_add_options MOZ_OBJDIR=%s" % topobjdir_link)
+
+ mozinfo = os.path.join(topobjdir_real, "mozinfo.json")
+ with open(mozinfo, "wt") as fh:
+ json.dump(
+ dict(
+ topsrcdir=d,
+ mozconfig=mozconfig,
+ ),
+ fh,
+ )
+
+ os.chdir(topobjdir_link)
+ obj = MozbuildObject.from_environment(detect_virtualenv_mozinfo=False)
+ self.assertEqual(obj.topobjdir, topobjdir_real)
+
+ os.chdir(topobjdir_real)
+ obj = MozbuildObject.from_environment(detect_virtualenv_mozinfo=False)
+ self.assertEqual(obj.topobjdir, topobjdir_real)
+
+ finally:
+ os.chdir(self._old_cwd)
+ shutil.rmtree(d)
+
+ def test_mach_command_base_inside_objdir(self):
+ """Ensure a MachCommandBase constructed from inside the objdir works."""
+
+ d = os.path.realpath(tempfile.mkdtemp())
+
+ try:
+ topobjdir = os.path.join(d, "objdir")
+ os.makedirs(topobjdir)
+
+ topsrcdir = os.path.join(d, "srcdir")
+ prepare_tmp_topsrcdir(topsrcdir)
+
+ mozinfo = os.path.join(topobjdir, "mozinfo.json")
+ with open(mozinfo, "wt") as fh:
+ json.dump(
+ dict(
+ topsrcdir=topsrcdir,
+ ),
+ fh,
+ )
+
+ os.chdir(topobjdir)
+
+ class MockMachContext(object):
+ pass
+
+ context = MockMachContext()
+ context.cwd = topobjdir
+ context.topdir = topsrcdir
+ context.settings = None
+ context.log_manager = None
+ context.detect_virtualenv_mozinfo = False
+
+ o = MachCommandBase(context, None)
+
+ self.assertEqual(o.topobjdir, mozpath.normsep(topobjdir))
+ self.assertEqual(o.topsrcdir, mozpath.normsep(topsrcdir))
+
+ finally:
+ os.chdir(self._old_cwd)
+ shutil.rmtree(d)
+
+ def test_objdir_is_srcdir_rejected(self):
+ """Ensure the srcdir configurations are rejected."""
+ d = os.path.realpath(tempfile.mkdtemp())
+
+ try:
+ # The easiest way to do this is to create a mozinfo.json with data
+ # that will never happen.
+ mozinfo = os.path.join(d, "mozinfo.json")
+ with open(mozinfo, "wt") as fh:
+ json.dump({"topsrcdir": d}, fh)
+
+ os.chdir(d)
+
+ with self.assertRaises(BadEnvironmentException):
+ MozbuildObject.from_environment(detect_virtualenv_mozinfo=False)
+
+ finally:
+ os.chdir(self._old_cwd)
+ shutil.rmtree(d)
+
+ def test_objdir_mismatch(self):
+ """Ensure MachCommandBase throwing on objdir mismatch."""
+ d = os.path.realpath(tempfile.mkdtemp())
+
+ try:
+ real_topobjdir = os.path.join(d, "real-objdir")
+ os.makedirs(real_topobjdir)
+
+ topobjdir = os.path.join(d, "objdir")
+ os.makedirs(topobjdir)
+
+ topsrcdir = os.path.join(d, "srcdir")
+ prepare_tmp_topsrcdir(topsrcdir)
+
+ mozconfig = os.path.join(d, "mozconfig")
+ with open(mozconfig, "wt") as fh:
+ fh.write(
+ "mk_add_options MOZ_OBJDIR=%s" % real_topobjdir.replace("\\", "/")
+ )
+
+ mozinfo = os.path.join(topobjdir, "mozinfo.json")
+ with open(mozinfo, "wt") as fh:
+ json.dump(
+ dict(
+ topsrcdir=topsrcdir,
+ mozconfig=mozconfig,
+ ),
+ fh,
+ )
+
+ os.chdir(topobjdir)
+
+ class MockMachContext(object):
+ pass
+
+ context = MockMachContext()
+ context.cwd = topobjdir
+ context.topdir = topsrcdir
+ context.settings = None
+ context.log_manager = None
+ context.detect_virtualenv_mozinfo = False
+
+ stdout = sys.stdout
+ sys.stdout = StringIO()
+ try:
+ with self.assertRaises(SystemExit):
+ MachCommandBase(context, None)
+
+ self.assertTrue(
+ sys.stdout.getvalue().startswith(
+ "Ambiguous object directory detected."
+ )
+ )
+ finally:
+ sys.stdout = stdout
+
+ finally:
+ os.chdir(self._old_cwd)
+ shutil.rmtree(d)
+
+ def test_config_environment(self):
+ d = os.path.realpath(tempfile.mkdtemp())
+
+ try:
+ with open(os.path.join(d, "config.status"), "w") as fh:
+ fh.write("# coding=utf-8\n")
+ fh.write("from __future__ import unicode_literals\n")
+ fh.write("topobjdir = '%s'\n" % mozpath.normsep(d))
+ fh.write("topsrcdir = '%s'\n" % topsrcdir)
+ fh.write("mozconfig = None\n")
+ fh.write("defines = { 'FOO': 'foo' }\n")
+ fh.write("substs = { 'QUX': 'qux' }\n")
+ fh.write(
+ "__all__ = ['topobjdir', 'topsrcdir', 'defines', "
+ "'substs', 'mozconfig']"
+ )
+
+ base = self.get_base(topobjdir=d)
+
+ ce = base.config_environment
+ self.assertIsInstance(ce, ConfigEnvironment)
+
+ self.assertEqual(base.defines, ce.defines)
+ self.assertEqual(base.substs, ce.substs)
+
+ self.assertEqual(base.defines, {"FOO": "foo"})
+ self.assertEqual(
+ base.substs,
+ {
+ "ACDEFINES": "-DFOO=foo",
+ "ALLEMPTYSUBSTS": "",
+ "ALLSUBSTS": "ACDEFINES = -DFOO=foo\nQUX = qux",
+ "QUX": "qux",
+ },
+ )
+ finally:
+ shutil.rmtree(d)
+
+ def test_get_binary_path(self):
+ base = self.get_base(topobjdir=topobjdir)
+
+ platform = sys.platform
+
+ # We should ideally use the config.status from the build. Let's install
+ # a fake one.
+ substs = [
+ ("MOZ_APP_NAME", "awesomeapp"),
+ ("MOZ_BUILD_APP", "awesomeapp"),
+ ]
+ if sys.platform.startswith("darwin"):
+ substs.append(("OS_ARCH", "Darwin"))
+ substs.append(("BIN_SUFFIX", ""))
+ substs.append(("MOZ_MACBUNDLE_NAME", "Nightly.app"))
+ elif sys.platform.startswith(("win32", "cygwin")):
+ substs.append(("OS_ARCH", "WINNT"))
+ substs.append(("BIN_SUFFIX", ".exe"))
+ else:
+ substs.append(("OS_ARCH", "something"))
+ substs.append(("BIN_SUFFIX", ""))
+
+ base._config_environment = ConfigEnvironment(
+ base.topsrcdir, base.topobjdir, substs=substs
+ )
+
+ p = base.get_binary_path("xpcshell", False)
+ if platform.startswith("darwin"):
+ self.assertTrue(p.endswith("Contents/MacOS/xpcshell"))
+ elif platform.startswith(("win32", "cygwin")):
+ self.assertTrue(p.endswith("xpcshell.exe"))
+ else:
+ self.assertTrue(p.endswith("dist/bin/xpcshell"))
+
+ p = base.get_binary_path(validate_exists=False)
+ if platform.startswith("darwin"):
+ self.assertTrue(p.endswith("Contents/MacOS/awesomeapp"))
+ elif platform.startswith(("win32", "cygwin")):
+ self.assertTrue(p.endswith("awesomeapp.exe"))
+ else:
+ self.assertTrue(p.endswith("dist/bin/awesomeapp"))
+
+ p = base.get_binary_path(validate_exists=False, where="staged-package")
+ if platform.startswith("darwin"):
+ self.assertTrue(
+ p.endswith("awesomeapp/Nightly.app/Contents/MacOS/awesomeapp")
+ )
+ elif platform.startswith(("win32", "cygwin")):
+ self.assertTrue(p.endswith("awesomeapp\\awesomeapp.exe"))
+ else:
+ self.assertTrue(p.endswith("awesomeapp/awesomeapp"))
+
+ self.assertRaises(Exception, base.get_binary_path, where="somewhere")
+
+ p = base.get_binary_path("foobar", validate_exists=False)
+ if platform.startswith("win32"):
+ self.assertTrue(p.endswith("foobar.exe"))
+ else:
+ self.assertTrue(p.endswith("foobar"))
+
+
+class TestPathArgument(unittest.TestCase):
+ def test_path_argument(self):
+ # Absolute path
+ p = PathArgument("/obj/foo", "/src", "/obj", "/src")
+ self.assertEqual(p.relpath(), "foo")
+ self.assertEqual(p.srcdir_path(), "/src/foo")
+ self.assertEqual(p.objdir_path(), "/obj/foo")
+
+ # Relative path within srcdir
+ p = PathArgument("foo", "/src", "/obj", "/src")
+ self.assertEqual(p.relpath(), "foo")
+ self.assertEqual(p.srcdir_path(), "/src/foo")
+ self.assertEqual(p.objdir_path(), "/obj/foo")
+
+ # Relative path within subdirectory
+ p = PathArgument("bar", "/src", "/obj", "/src/foo")
+ self.assertEqual(p.relpath(), "foo/bar")
+ self.assertEqual(p.srcdir_path(), "/src/foo/bar")
+ self.assertEqual(p.objdir_path(), "/obj/foo/bar")
+
+ # Relative path within objdir
+ p = PathArgument("foo", "/src", "/obj", "/obj")
+ self.assertEqual(p.relpath(), "foo")
+ self.assertEqual(p.srcdir_path(), "/src/foo")
+ self.assertEqual(p.objdir_path(), "/obj/foo")
+
+ # "." path
+ p = PathArgument(".", "/src", "/obj", "/src/foo")
+ self.assertEqual(p.relpath(), "foo")
+ self.assertEqual(p.srcdir_path(), "/src/foo")
+ self.assertEqual(p.objdir_path(), "/obj/foo")
+
+ # Nested src/obj directories
+ p = PathArgument("bar", "/src", "/src/obj", "/src/obj/foo")
+ self.assertEqual(p.relpath(), "foo/bar")
+ self.assertEqual(p.srcdir_path(), "/src/foo/bar")
+ self.assertEqual(p.objdir_path(), "/src/obj/foo/bar")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/test_containers.py b/python/mozbuild/mozbuild/test/test_containers.py
new file mode 100644
index 0000000000..50dd0a4088
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_containers.py
@@ -0,0 +1,224 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+from collections import OrderedDict
+
+from mozunit import main
+
+from mozbuild.util import (
+ KeyedDefaultDict,
+ List,
+ OrderedDefaultDict,
+ ReadOnlyDefaultDict,
+ ReadOnlyDict,
+ ReadOnlyKeyedDefaultDict,
+ ReadOnlyNamespace,
+)
+
+
class TestReadOnlyNamespace(unittest.TestCase):
    def test_basic(self):
        """ReadOnlyNamespace exposes attributes but rejects mutation."""
        ns = ReadOnlyNamespace(foo=1, bar=2)

        # Attribute reads work for declared members.
        self.assertEqual(ns.foo, 1)
        self.assertEqual(ns.bar, 2)

        # dir() reflects exactly the declared (non-dunder) members.
        public = sorted(name for name in dir(ns) if not name.startswith("__"))
        self.assertEqual(public, ["bar", "foo"])

        # Unknown attributes raise, as with a regular object.
        with self.assertRaises(AttributeError):
            ns.missing

        # Both assignment and deletion are rejected.
        with self.assertRaises(Exception):
            ns.foo = 2
        with self.assertRaises(Exception):
            del ns.foo

        # Equality is structural: same keys with equal (type-sensitive) values.
        self.assertEqual(ns, ns)
        self.assertEqual(ns, ReadOnlyNamespace(foo=1, bar=2))
        self.assertNotEqual(ns, ReadOnlyNamespace(foo="1", bar=2))
        self.assertNotEqual(ns, ReadOnlyNamespace(foo=1, bar=2, qux=3))
        self.assertNotEqual(ns, ReadOnlyNamespace(foo=1, qux=3))
        self.assertNotEqual(ns, ReadOnlyNamespace(foo=3, bar="42"))
+
+
class TestReadOnlyDict(unittest.TestCase):
    def _fixture(self):
        """Return (backing dict, ReadOnlyDict built from equal contents)."""
        backing = {"foo": 1, "bar": 2}
        return backing, ReadOnlyDict(backing)

    def test_basic(self):
        backing, ro = self._fixture()

        self.assertEqual(backing, ro)
        self.assertEqual(ro["foo"], 1)

        # Missing keys still raise, like a normal dict.
        with self.assertRaises(KeyError):
            ro["missing"]

        # Item assignment is rejected.
        with self.assertRaises(Exception):
            ro["baz"] = True

    def test_update(self):
        backing, ro = self._fixture()

        with self.assertRaises(Exception):
            ro.update(foo=2)

        # A failed update must leave the contents untouched.
        self.assertEqual(backing, ro)

    def test_del(self):
        backing, ro = self._fixture()

        with self.assertRaises(Exception):
            del ro["foo"]

        # A failed delete must leave the contents untouched.
        self.assertEqual(backing, ro)
+
+
class TestReadOnlyDefaultDict(unittest.TestCase):
    def test_simple(self):
        """Construction preserves the initial mapping; lookups work."""
        seed = {"foo": 1, "bar": 2}
        d = ReadOnlyDefaultDict(bool, seed)

        self.assertEqual(seed, d)
        self.assertEqual(d["foo"], 1)

    def test_assignment(self):
        """Writes are rejected, even on an initially empty dict."""
        d = ReadOnlyDefaultDict(bool, {})
        with self.assertRaises(Exception):
            d["foo"] = True

    def test_defaults(self):
        """Missing keys yield the factory default instead of raising."""
        d = ReadOnlyDefaultDict(bool, {"foo": 1})

        self.assertEqual(d["foo"], 1)
        self.assertEqual(d["qux"], False)
+
+
class TestList(unittest.TestCase):
    def test_add_list(self):
        """Both += and + with a plain list extend and preserve the type."""
        lst = List([1, 2, 3])

        lst += [4, 5, 6]
        self.assertIsInstance(lst, List)
        self.assertEqual(lst, [1, 2, 3, 4, 5, 6])

        lst = lst + [7, 8]
        self.assertIsInstance(lst, List)
        self.assertEqual(lst, [1, 2, 3, 4, 5, 6, 7, 8])

    def test_add_string(self):
        """A string is rejected rather than iterated character-by-character."""
        lst = List([1, 2, 3])
        with self.assertRaises(ValueError):
            lst += "string"

    def test_none(self):
        """As a special exception, we allow None to be treated as an empty
        list."""
        lst = List([1, 2, 3])

        lst += None
        self.assertEqual(lst, [1, 2, 3])

        lst = lst + None
        self.assertIsInstance(lst, List)
        self.assertEqual(lst, [1, 2, 3])

        # Other falsy non-list values are still rejected, in both forms.
        with self.assertRaises(ValueError):
            lst += False
        with self.assertRaises(ValueError):
            lst = lst + False
+
+
class TestOrderedDefaultDict(unittest.TestCase):
    def test_simple(self):
        """Initial contents are preserved, including insertion order."""
        seed = OrderedDict(foo=1, bar=2)
        d = OrderedDefaultDict(bool, seed)

        self.assertEqual(seed, d)
        self.assertEqual(d["foo"], 1)
        self.assertEqual(list(d), ["foo", "bar"])

    def test_defaults(self):
        """Defaulted keys are materialized, ordered after existing ones."""
        d = OrderedDefaultDict(bool, {"foo": 1})

        self.assertEqual(d["foo"], 1)
        self.assertEqual(d["qux"], False)
        self.assertEqual(list(d), ["foo", "qux"])
+
+
class TestKeyedDefaultDict(unittest.TestCase):
    def test_simple(self):
        """Existing entries are returned as-is."""
        seed = {"foo": 1, "bar": 2}
        d = KeyedDefaultDict(lambda k: k, seed)

        self.assertEqual(seed, d)
        self.assertEqual(d["foo"], 1)

    def test_defaults(self):
        """Missing keys default to factory(key); explicit writes win."""
        d = KeyedDefaultDict(lambda k: k, {"foo": 1})

        self.assertEqual(d["foo"], 1)
        self.assertEqual(d["qux"], "qux")
        self.assertEqual(d["bar"], "bar")

        # Unlike the read-only variant, assignment is allowed and
        # overrides both existing and defaulted values.
        d["foo"] = 2
        d["qux"] = None
        d["baz"] = "foo"

        self.assertEqual(d["foo"], 2)
        self.assertEqual(d["qux"], None)
        self.assertEqual(d["baz"], "foo")
+
+
class TestReadOnlyKeyedDefaultDict(unittest.TestCase):
    def test_defaults(self):
        """Defaults are computed from the key, but all writes are rejected."""
        d = ReadOnlyKeyedDefaultDict(lambda k: k, {"foo": 1})

        # Reads materialize defaults for missing keys.
        self.assertEqual(d["foo"], 1)
        self.assertEqual(d["qux"], "qux")
        self.assertEqual(d["bar"], "bar")

        snapshot = dict(d)

        # Every flavor of assignment must fail...
        for key, value in (("foo", 2), ("qux", None), ("baz", "foo")):
            with self.assertRaises(Exception):
                d[key] = value

        # ...and leave the contents exactly as they were.
        self.assertEqual(d, snapshot)
        self.assertEqual(len(d), 3)
+
+
# Allow running this test file directly.
if __name__ == "__main__":
    main()
diff --git a/python/mozbuild/mozbuild/test/test_dotproperties.py b/python/mozbuild/mozbuild/test/test_dotproperties.py
new file mode 100644
index 0000000000..4e7a437799
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_dotproperties.py
@@ -0,0 +1,183 @@
+# -*- coding: utf-8 -*-
+
+import os
+import unittest
+
+import mozpack.path as mozpath
+from mozunit import main
+from six import StringIO
+
+from mozbuild.dotproperties import DotProperties
+
# Directory holding the fixture .properties files used by the tests below.
test_data_path = mozpath.abspath(mozpath.dirname(__file__))
test_data_path = mozpath.join(test_data_path, "data")
+
+
class TestDotProperties(unittest.TestCase):
    """Unit tests for the DotProperties .properties file parser."""

    def test_get(self):
        """get() returns the value, or a default for missing keys."""
        contents = StringIO(
            """
key=value
"""
        )
        p = DotProperties(contents)
        self.assertEqual(p.get("missing"), None)
        self.assertEqual(p.get("missing", "default"), "default")
        self.assertEqual(p.get("key"), "value")

    def test_update(self):
        """update() overlays new definitions on top of existing ones."""
        contents = StringIO(
            """
old=old value
key=value
"""
        )
        p = DotProperties(contents)
        self.assertEqual(p.get("old"), "old value")
        self.assertEqual(p.get("key"), "value")

        new_contents = StringIO(
            """
key=new value
"""
        )
        p.update(new_contents)
        # Untouched keys survive; redefined keys take the new value.
        self.assertEqual(p.get("old"), "old value")
        self.assertEqual(p.get("key"), "new value")

    def test_get_list(self):
        """get_list() collects prefix.N keys, ordered by numeric suffix."""
        contents = StringIO(
            """
list.0=A
list.1=B
list.2=C

order.1=B
order.0=A
order.2=C
"""
        )
        p = DotProperties(contents)
        self.assertEqual(p.get_list("missing"), [])
        self.assertEqual(p.get_list("list"), ["A", "B", "C"])
        # Ordering follows the numeric suffix, not file order.
        self.assertEqual(p.get_list("order"), ["A", "B", "C"])

    def test_get_list_with_shared_prefix(self):
        """Sub-lists with a longer dotted prefix do not leak into parents."""
        contents = StringIO(
            """
list.0=A
list.1=B
list.2=C

list.sublist.1=E
list.sublist.0=D
list.sublist.2=F

list.sublist.second.0=G

list.other.0=H
"""
        )
        p = DotProperties(contents)
        self.assertEqual(p.get_list("list"), ["A", "B", "C"])
        self.assertEqual(p.get_list("list.sublist"), ["D", "E", "F"])
        self.assertEqual(p.get_list("list.sublist.second"), ["G"])
        self.assertEqual(p.get_list("list.other"), ["H"])

    def test_get_dict(self):
        """get_dict() collects prefix.key pairs; required_keys enforces
        presence and raises ValueError when a key is absent."""
        contents = StringIO(
            """
A.title=title A

B.title=title B
B.url=url B

C=value
"""
        )
        p = DotProperties(contents)
        self.assertEqual(p.get_dict("missing"), {})
        self.assertEqual(p.get_dict("A"), {"title": "title A"})
        self.assertEqual(p.get_dict("B"), {"title": "title B", "url": "url B"})
        with self.assertRaises(ValueError):
            p.get_dict("A", required_keys=["title", "url"])
        with self.assertRaises(ValueError):
            p.get_dict("missing", required_keys=["key"])
        # A key=value pair is considered to root an empty dict.
        self.assertEqual(p.get_dict("C"), {})
        with self.assertRaises(ValueError):
            p.get_dict("C", required_keys=["missing_key"])

    def test_get_dict_with_shared_prefix(self):
        """Nested dotted prefixes are scoped; parents don't absorb children."""
        contents = StringIO(
            """
A.title=title A
A.subdict.title=title A subdict

B.title=title B
B.url=url B
B.subdict.title=title B subdict
B.subdict.url=url B subdict
"""
        )
        p = DotProperties(contents)
        self.assertEqual(p.get_dict("A"), {"title": "title A"})
        self.assertEqual(p.get_dict("B"), {"title": "title B", "url": "url B"})
        self.assertEqual(p.get_dict("A.subdict"), {"title": "title A subdict"})
        self.assertEqual(
            p.get_dict("B.subdict"),
            {"title": "title B subdict", "url": "url B subdict"},
        )

    def test_get_dict_with_value_prefix(self):
        """A prefix can carry both its own value and one level of children."""
        contents = StringIO(
            """
A.default=A
A.default.B=B
A.default.B.ignored=B ignored
A.default.C=C
A.default.C.ignored=C ignored
"""
        )
        p = DotProperties(contents)
        self.assertEqual(p.get("A.default"), "A")
        # This enumerates the properties.
        self.assertEqual(p.get_dict("A.default"), {"B": "B", "C": "C"})
        # They can still be fetched directly.
        self.assertEqual(p.get("A.default.B"), "B")
        self.assertEqual(p.get("A.default.C"), "C")

    def test_unicode(self):
        """Non-ASCII values are parsed as text, not bytes."""
        contents = StringIO(
            """
# Danish.
# #### ~~ Søren Munk Skrøder, sskroeder - 2009-05-30 @ #mozmae

# Korean.
A.title=한메일

# Russian.
list.0 = test
list.1 = Яндекс
"""
        )
        p = DotProperties(contents)
        self.assertEqual(p.get_dict("A"), {"title": "한메일"})
        self.assertEqual(p.get_list("list"), ["test", "Яндекс"])

    def test_valid_unicode_from_file(self):
        # The contents of valid.properties is identical to the contents of the
        # test above. This specifically exercises reading from a file.
        p = DotProperties(os.path.join(test_data_path, "valid.properties"))
        self.assertEqual(p.get_dict("A"), {"title": "한메일"})
        self.assertEqual(p.get_list("list"), ["test", "Яндекс"])

    def test_bad_unicode_from_file(self):
        # The contents of bad.properties is not valid Unicode; see the comments
        # in the file itself for details.
        with self.assertRaises(UnicodeDecodeError):
            DotProperties(os.path.join(test_data_path, "bad.properties"))
+
+
# Allow running this test file directly.
if __name__ == "__main__":
    main()
diff --git a/python/mozbuild/mozbuild/test/test_expression.py b/python/mozbuild/mozbuild/test/test_expression.py
new file mode 100644
index 0000000000..535e62bf43
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_expression.py
@@ -0,0 +1,88 @@
+import unittest
+
+import mozunit
+
+from mozbuild.preprocessor import Context, Expression
+
+
class TestContext(unittest.TestCase):
    """Unit tests for the preprocessor Context class."""

    def setUp(self):
        # One defined variable; lookups of anything else fall through
        # to the literal key.
        self.c = Context()
        self.c["FAIL"] = "PASS"

    def test_string_literal(self):
        """test string literal, fall-through for undefined var in a Context"""
        self.assertEqual("PASS", self.c["PASS"])

    def test_variable(self):
        """A defined variable evaluates to its stored value."""
        self.assertEqual("PASS", self.c["FAIL"])

    def test_in(self):
        """Membership reflects only explicitly defined variables, not the
        fall-through behavior."""
        self.assertIn("FAIL", self.c)
        self.assertNotIn("PASS", self.c)
+
+
class TestExpression(unittest.TestCase):
    """
    Unit tests for the Expression class.  Every expression is evaluated
    against a context mapping FAIL -> 'PASS'.
    """

    def setUp(self):
        self.c = Context()
        self.c["FAIL"] = "PASS"

    def _eval(self, source):
        """Evaluate a preprocessor expression against the fixture context."""
        return Expression(source).evaluate(self.c)

    def test_string_literal(self):
        """A bare string literal evaluates to itself."""
        self.assertEqual(self._eval("PASS"), "PASS")

    def test_variable(self):
        """A defined variable evaluates to its context value."""
        self.assertEqual(self._eval("FAIL"), "PASS")

    def test_not(self):
        """Test for the ! operator."""
        self.assertTrue(self._eval("!0"))
        self.assertFalse(self._eval("!1"))

    def test_equals(self):
        """Test for the == operator."""
        self.assertTrue(self._eval("FAIL == PASS"))

    def test_notequals(self):
        """Test for the != operator."""
        self.assertTrue(self._eval("FAIL != 1"))

    def test_logical_and(self):
        """Test for the && operator."""
        self.assertTrue(self._eval("PASS == PASS && PASS != NOTPASS"))

    def test_logical_or(self):
        """Test for the || operator."""
        self.assertTrue(self._eval("PASS == NOTPASS || PASS != NOTPASS"))

    def test_logical_ops(self):
        """&& must bind tighter than ||; this evaluates to False if the
        precedence is wrong."""
        self.assertTrue(
            self._eval("PASS == PASS || PASS != NOTPASS && PASS == NOTPASS")
        )

    def test_defined(self):
        """Test for the defined() predicate."""
        self.assertTrue(self._eval("defined(FAIL)"))
        self.assertTrue(self._eval("!defined(PASS)"))
+
+
# Allow running this test file directly.
if __name__ == "__main__":
    mozunit.main()
diff --git a/python/mozbuild/mozbuild/test/test_jarmaker.py b/python/mozbuild/mozbuild/test/test_jarmaker.py
new file mode 100644
index 0000000000..24a8c7694a
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_jarmaker.py
@@ -0,0 +1,493 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import os.path
+import sys
+import unittest
+from filecmp import dircmp
+from shutil import copy2, rmtree
+from tempfile import mkdtemp
+from zipfile import ZipFile
+
+import mozunit
+import six
+from six import StringIO
+
+from mozbuild.jar import JarMaker
+
if sys.platform == "win32":
    # On Windows, symlink support depends on the filesystem, so we need raw
    # Win32 APIs (bound here via ctypes) to inspect volumes and files.
    import ctypes
    from ctypes import POINTER, WinError

    DWORD = ctypes.c_ulong
    LPDWORD = POINTER(DWORD)
    HANDLE = ctypes.c_void_p
    GENERIC_READ = 0x80000000
    FILE_SHARE_READ = 0x00000001
    OPEN_EXISTING = 3
    MAX_PATH = 260

    class FILETIME(ctypes.Structure):
        # Win32 FILETIME: a 64-bit timestamp split into two 32-bit halves.
        _fields_ = [("dwLowDateTime", DWORD), ("dwHighDateTime", DWORD)]

    class BY_HANDLE_FILE_INFORMATION(ctypes.Structure):
        # Output of GetFileInformationByHandle; dwVolumeSerialNumber plus
        # nFileIndexHigh/Low uniquely identify a file on a volume.
        _fields_ = [
            ("dwFileAttributes", DWORD),
            ("ftCreationTime", FILETIME),
            ("ftLastAccessTime", FILETIME),
            ("ftLastWriteTime", FILETIME),
            ("dwVolumeSerialNumber", DWORD),
            ("nFileSizeHigh", DWORD),
            ("nFileSizeLow", DWORD),
            ("nNumberOfLinks", DWORD),
            ("nFileIndexHigh", DWORD),
            ("nFileIndexLow", DWORD),
        ]

    # Note these are the ANSI ("A") variants, hence the bytes paths below.
    # http://msdn.microsoft.com/en-us/library/aa363858
    CreateFile = ctypes.windll.kernel32.CreateFileA
    CreateFile.argtypes = [
        ctypes.c_char_p,
        DWORD,
        DWORD,
        ctypes.c_void_p,
        DWORD,
        DWORD,
        HANDLE,
    ]
    CreateFile.restype = HANDLE

    # http://msdn.microsoft.com/en-us/library/aa364952
    GetFileInformationByHandle = ctypes.windll.kernel32.GetFileInformationByHandle
    GetFileInformationByHandle.argtypes = [HANDLE, POINTER(BY_HANDLE_FILE_INFORMATION)]
    GetFileInformationByHandle.restype = ctypes.c_int

    # http://msdn.microsoft.com/en-us/library/aa364996
    GetVolumePathName = ctypes.windll.kernel32.GetVolumePathNameA
    GetVolumePathName.argtypes = [ctypes.c_char_p, ctypes.c_char_p, DWORD]
    GetVolumePathName.restype = ctypes.c_int

    # http://msdn.microsoft.com/en-us/library/aa364993
    GetVolumeInformation = ctypes.windll.kernel32.GetVolumeInformationA
    GetVolumeInformation.argtypes = [
        ctypes.c_char_p,
        ctypes.c_char_p,
        DWORD,
        LPDWORD,
        LPDWORD,
        LPDWORD,
        ctypes.c_char_p,
        DWORD,
    ]
    GetVolumeInformation.restype = ctypes.c_int
+
+
def symlinks_supported(path):
    """Return True if the filesystem containing *path* supports symlinks.

    On Windows, only NTFS volumes are considered to support them; every
    other platform is assumed to support symlinks.  Raises OSError (via
    WinError) if the Win32 volume queries fail.
    """
    if sys.platform == "win32":
        # Add 1 for a trailing backslash if necessary, and 1 for the terminating
        # null character.
        volpath = ctypes.create_string_buffer(len(path) + 2)
        rv = GetVolumePathName(six.ensure_binary(path), volpath, len(volpath))
        if rv == 0:
            raise WinError()

        fsname = ctypes.create_string_buffer(MAX_PATH + 1)
        rv = GetVolumeInformation(
            volpath, None, 0, None, None, None, fsname, len(fsname)
        )
        if rv == 0:
            raise WinError()

        # ctypes string buffers hold bytes on Python 3, so compare against a
        # bytes literal; comparing against the str "NTFS" was always False.
        return fsname.value == b"NTFS"
    else:
        return True
+
+
def _getfileinfo(path):
    """Return information for the given file. This only works on Windows.

    Returns a BY_HANDLE_FILE_INFORMATION structure for *path*; raises
    OSError (via WinError) if the file cannot be opened or queried.
    """
    # Open a read-only handle; CreateFileA takes a bytes path.
    fh = CreateFile(
        six.ensure_binary(path),
        GENERIC_READ,
        FILE_SHARE_READ,
        None,
        OPEN_EXISTING,
        0,
        None,
    )
    if fh is None:
        raise WinError()
    info = BY_HANDLE_FILE_INFORMATION()
    rv = GetFileInformationByHandle(fh, info)
    if rv == 0:
        raise WinError()
    # NOTE(review): the handle is never closed (no CloseHandle binding);
    # acceptable for short-lived test processes, but worth confirming.
    return info
+
+
def is_symlink_to(dest, src):
    """Return True if *dest* is a symlink that resolves to the file *src*."""
    if sys.platform != "win32":
        # POSIX: dest must itself be a link whose target matches src.
        if not os.path.islink(dest):
            return False
        return os.path.abspath(os.readlink(dest)) == os.path.abspath(src)

    # Windows: compare file identities instead — both names must live on the
    # same volume and carry the same file index.
    dest_info = _getfileinfo(dest)
    src_info = _getfileinfo(src)
    return (
        dest_info.dwVolumeSerialNumber == src_info.dwVolumeSerialNumber
        and dest_info.nFileIndexHigh == src_info.nFileIndexHigh
        and dest_info.nFileIndexLow == src_info.nFileIndexLow
    )
+
+
+class _TreeDiff(dircmp):
+ """Helper to report rich results on difference between two directories."""
+
+ def _fillDiff(self, dc, rv, basepath="{0}"):
+ rv["right_only"] += map(lambda l: basepath.format(l), dc.right_only)
+ rv["left_only"] += map(lambda l: basepath.format(l), dc.left_only)
+ rv["diff_files"] += map(lambda l: basepath.format(l), dc.diff_files)
+ rv["funny"] += map(lambda l: basepath.format(l), dc.common_funny)
+ rv["funny"] += map(lambda l: basepath.format(l), dc.funny_files)
+ for subdir, _dc in six.iteritems(dc.subdirs):
+ self._fillDiff(_dc, rv, basepath.format(subdir + "/{0}"))
+
+ def allResults(self, left, right):
+ rv = {"right_only": [], "left_only": [], "diff_files": [], "funny": []}
+ self._fillDiff(self, rv)
+ chunks = []
+ if rv["right_only"]:
+ chunks.append("{0} only in {1}".format(", ".join(rv["right_only"]), right))
+ if rv["left_only"]:
+ chunks.append("{0} only in {1}".format(", ".join(rv["left_only"]), left))
+ if rv["diff_files"]:
+ chunks.append("{0} differ".format(", ".join(rv["diff_files"])))
+ if rv["funny"]:
+ chunks.append("{0} don't compare".format(", ".join(rv["funny"])))
+ return "; ".join(chunks)
+
+
class TestJarMaker(unittest.TestCase):
    """
    Unit tests for JarMaker.py
    """

    debug = False  # set to True to debug failing tests on disk

    def setUp(self):
        # Scratch layout: src (jar.mn + inputs), build (JarMaker output),
        # ref (hand-built expected tree), stage (build expanded for diffing).
        self.tmpdir = mkdtemp()
        self.srcdir = os.path.join(self.tmpdir, "src")
        os.mkdir(self.srcdir)
        self.builddir = os.path.join(self.tmpdir, "build")
        os.mkdir(self.builddir)
        self.refdir = os.path.join(self.tmpdir, "ref")
        os.mkdir(self.refdir)
        self.stagedir = os.path.join(self.tmpdir, "stage")
        os.mkdir(self.stagedir)

    def tearDown(self):
        if self.debug:
            print(self.tmpdir)
        elif sys.platform != "win32":
            # can't clean up on windows
            rmtree(self.tmpdir)

    def _jar_and_compare(self, infile, **kwargs):
        """Run JarMaker in jar mode on infile, expand the jars it produced
        into the stage dir, and diff stage against the reference tree.

        Returns the (string) difference summary; empty means a match.
        """
        jm = JarMaker(outputFormat="jar")
        if "topsourcedir" not in kwargs:
            kwargs["topsourcedir"] = self.srcdir
        for attr in ("topsourcedir", "sourcedirs"):
            if attr in kwargs:
                setattr(jm, attr, kwargs[attr])
        jm.makeJar(infile, self.builddir)
        cwd = os.getcwd()
        os.chdir(self.builddir)
        try:
            # expand build to stage
            for path, dirs, files in os.walk("."):
                stagedir = os.path.join(self.stagedir, path)
                if not os.path.isdir(stagedir):
                    os.mkdir(stagedir)
                for file in files:
                    if file.endswith(".jar"):
                        # expand jar
                        stagepath = os.path.join(stagedir, file)
                        os.mkdir(stagepath)
                        zf = ZipFile(os.path.join(path, file))
                        # extractall is only in 2.6, do this manually :-(
                        for entry_name in zf.namelist():
                            segs = entry_name.split("/")
                            fname = segs.pop()
                            dname = os.path.join(stagepath, *segs)
                            if not os.path.isdir(dname):
                                os.makedirs(dname)
                            if not fname:
                                # directory, we're done
                                continue
                            _c = zf.read(entry_name)
                            open(os.path.join(dname, fname), "wb").write(_c)
                        zf.close()
                    else:
                        copy2(os.path.join(path, file), stagedir)
            # compare both dirs
            os.chdir("..")
            td = _TreeDiff("ref", "stage")
            return td.allResults("reference", "build")
        finally:
            os.chdir(cwd)

    def _create_simple_setup(self):
        """Write a one-entry jar.mn plus its input and reference tree."""
        # create src content
        jarf = open(os.path.join(self.srcdir, "jar.mn"), "w")
        jarf.write(
            """test.jar:
        dir/foo (bar)
"""
        )
        jarf.close()
        open(os.path.join(self.srcdir, "bar"), "w").write("content\n")
        # create reference
        refpath = os.path.join(self.refdir, "chrome", "test.jar", "dir")
        os.makedirs(refpath)
        open(os.path.join(refpath, "foo"), "w").write("content\n")

    def test_a_simple_jar(self):
        """Test a simple jar.mn"""
        self._create_simple_setup()
        # call JarMaker
        rv = self._jar_and_compare(
            os.path.join(self.srcdir, "jar.mn"), sourcedirs=[self.srcdir]
        )
        self.assertTrue(not rv, rv)

    def test_a_simple_symlink(self):
        """Test a simple jar.mn with a symlink"""
        if not symlinks_supported(self.srcdir):
            raise unittest.SkipTest("symlinks not supported")

        self._create_simple_setup()
        jm = JarMaker(outputFormat="symlink")
        jm.sourcedirs = [self.srcdir]
        jm.topsourcedir = self.srcdir
        jm.makeJar(os.path.join(self.srcdir, "jar.mn"), self.builddir)
        # All we do is check that srcdir/bar points to builddir/chrome/test/dir/foo
        srcbar = os.path.join(self.srcdir, "bar")
        destfoo = os.path.join(self.builddir, "chrome", "test", "dir", "foo")
        self.assertTrue(
            is_symlink_to(destfoo, srcbar),
            "{0} is not a symlink to {1}".format(destfoo, srcbar),
        )

    def _create_wildcard_setup(self):
        """Write a jar.mn using wildcard entries plus matching inputs and
        the expected reference tree."""
        # create src content
        jarf = open(os.path.join(self.srcdir, "jar.mn"), "w")
        jarf.write(
            """test.jar:
        dir/bar (*.js)
        dir/hoge (qux/*)
"""
        )
        jarf.close()
        open(os.path.join(self.srcdir, "foo.js"), "w").write("foo.js\n")
        open(os.path.join(self.srcdir, "bar.js"), "w").write("bar.js\n")
        os.makedirs(os.path.join(self.srcdir, "qux", "foo"))
        open(os.path.join(self.srcdir, "qux", "foo", "1"), "w").write("1\n")
        open(os.path.join(self.srcdir, "qux", "foo", "2"), "w").write("2\n")
        open(os.path.join(self.srcdir, "qux", "baz"), "w").write("baz\n")
        # create reference
        refpath = os.path.join(self.refdir, "chrome", "test.jar", "dir")
        os.makedirs(os.path.join(refpath, "bar"))
        os.makedirs(os.path.join(refpath, "hoge", "foo"))
        open(os.path.join(refpath, "bar", "foo.js"), "w").write("foo.js\n")
        open(os.path.join(refpath, "bar", "bar.js"), "w").write("bar.js\n")
        open(os.path.join(refpath, "hoge", "foo", "1"), "w").write("1\n")
        open(os.path.join(refpath, "hoge", "foo", "2"), "w").write("2\n")
        open(os.path.join(refpath, "hoge", "baz"), "w").write("baz\n")

    def test_a_wildcard_jar(self):
        """Test a wildcard in jar.mn"""
        self._create_wildcard_setup()
        # call JarMaker
        rv = self._jar_and_compare(
            os.path.join(self.srcdir, "jar.mn"), sourcedirs=[self.srcdir]
        )
        self.assertTrue(not rv, rv)

    def test_a_wildcard_symlink(self):
        """Test a wildcard in jar.mn with symlinks"""
        if not symlinks_supported(self.srcdir):
            raise unittest.SkipTest("symlinks not supported")

        self._create_wildcard_setup()
        jm = JarMaker(outputFormat="symlink")
        jm.sourcedirs = [self.srcdir]
        jm.topsourcedir = self.srcdir
        jm.makeJar(os.path.join(self.srcdir, "jar.mn"), self.builddir)

        # Map of build-dir path segments to the source path segments each
        # symlink is expected to point at.
        expected_symlinks = {
            ("bar", "foo.js"): ("foo.js",),
            ("bar", "bar.js"): ("bar.js",),
            ("hoge", "foo", "1"): ("qux", "foo", "1"),
            ("hoge", "foo", "2"): ("qux", "foo", "2"),
            ("hoge", "baz"): ("qux", "baz"),
        }
        for dest, src in six.iteritems(expected_symlinks):
            srcpath = os.path.join(self.srcdir, *src)
            destpath = os.path.join(self.builddir, "chrome", "test", "dir", *dest)
            self.assertTrue(
                is_symlink_to(destpath, srcpath),
                "{0} is not a symlink to {1}".format(destpath, srcpath),
            )
+
+
class Test_relativesrcdir(unittest.TestCase):
    """Tests for how JarMaker derives localedirs from relativesrcdir."""

    def setUp(self):
        self.jm = JarMaker()
        self.jm.topsourcedir = "/TOPSOURCEDIR"
        self.jm.relativesrcdir = "browser/locales"
        self.fake_empty_file = StringIO()
        self.fake_empty_file.name = "fake_empty_file"

    def tearDown(self):
        del self.jm
        del self.fake_empty_file

    def test_en_US(self):
        """Without an l10n base, locales come from the source tree."""
        self.jm.makeJar(self.fake_empty_file, "/NO_OUTPUT_REQUIRED")
        expected = os.path.join(
            os.path.abspath("/TOPSOURCEDIR"), "browser/locales", "en-US"
        )
        self.assertEqual(self.jm.localedirs, [expected])

    def test_l10n_no_merge(self):
        """With an l10n base set, the locale dir comes from that base."""
        self.jm.l10nbase = "/L10N_BASE"
        self.jm.makeJar(self.fake_empty_file, "/NO_OUTPUT_REQUIRED")
        self.assertEqual(self.jm.localedirs, [os.path.join("/L10N_BASE", "browser")])

    def test_l10n_merge(self):
        """A merge base behaves like any other l10n base here."""
        self.jm.l10nbase = "/L10N_MERGE"
        self.jm.makeJar(self.fake_empty_file, "/NO_OUTPUT_REQUIRED")
        self.assertEqual(self.jm.localedirs, [os.path.join("/L10N_MERGE", "browser")])

    def test_override(self):
        """A relativesrcdir directive in jar.mn overrides the attribute."""
        self.jm.outputFormat = "flat"  # doesn't touch chrome dir without files
        jarcontents = StringIO(
            """en-US.jar:
relativesrcdir dom/locales:
"""
        )
        jarcontents.name = "override.mn"
        self.jm.makeJar(jarcontents, "/NO_OUTPUT_REQUIRED")
        expected = os.path.join(
            os.path.abspath("/TOPSOURCEDIR"), "dom/locales", "en-US"
        )
        self.assertEqual(self.jm.localedirs, [expected])

    def test_override_l10n(self):
        """The directive override also applies when an l10n base is set."""
        self.jm.l10nbase = "/L10N_BASE"
        self.jm.outputFormat = "flat"  # doesn't touch chrome dir without files
        jarcontents = StringIO(
            """en-US.jar:
relativesrcdir dom/locales:
"""
        )
        jarcontents.name = "override.mn"
        self.jm.makeJar(jarcontents, "/NO_OUTPUT_REQUIRED")
        self.assertEqual(self.jm.localedirs, [os.path.join("/L10N_BASE", "dom")])
+
+
class Test_fluent(unittest.TestCase):
    """
    Unit tests for JarMaker interaction with Fluent
    """

    debug = False  # set to True to debug failing tests on disk

    def setUp(self):
        # Scratch layout: source tree, build output, l10n base, merge dir.
        self.tmpdir = mkdtemp()
        self.srcdir = os.path.join(self.tmpdir, "src")
        os.mkdir(self.srcdir)
        self.builddir = os.path.join(self.tmpdir, "build")
        os.mkdir(self.builddir)
        self.l10nbase = os.path.join(self.tmpdir, "l10n-base")
        os.mkdir(self.l10nbase)
        self.l10nmerge = os.path.join(self.tmpdir, "l10n-merge")
        os.mkdir(self.l10nmerge)

    def tearDown(self):
        if self.debug:
            print(self.tmpdir)
        elif sys.platform != "win32":
            # can't clean up on windows
            rmtree(self.tmpdir)

    def _create_fluent_setup(self):
        """Create a [localization] jar.mn, two en-US .ftl files, and an
        l10n tree that localizes only one of them."""
        # create src content
        jarf = open(os.path.join(self.srcdir, "jar.mn"), "w")
        jarf.write(
            """[localization] test.jar:
  app (%app/**/*.ftl)
"""
        )
        jarf.close()
        appdir = os.path.join(self.srcdir, "app", "locales", "en-US", "app")
        os.makedirs(appdir)
        open(os.path.join(appdir, "test.ftl"), "w").write("id = Value")
        open(os.path.join(appdir, "test2.ftl"), "w").write("id2 = Value 2")

        # The localized tree provides test.ftl only, not test2.ftl.
        l10ndir = os.path.join(self.l10nbase, "app", "app")
        os.makedirs(l10ndir)
        open(os.path.join(l10ndir, "test.ftl"), "w").write("id = L10n Value")

    def test_l10n_not_merge_ftl(self):
        """Test that JarMaker doesn't merge source .ftl files"""
        self._create_fluent_setup()
        jm = JarMaker(outputFormat="symlink")
        jm.sourcedirs = [self.srcdir]
        jm.topsourcedir = self.srcdir
        jm.l10nbase = self.l10nbase
        jm.l10nmerge = self.l10nmerge
        jm.relativesrcdir = "app/locales"
        jm.makeJar(os.path.join(self.srcdir, "jar.mn"), self.builddir)

        # test.ftl should be taken from the l10ndir, since it is present there
        destpath = os.path.join(
            self.builddir, "localization", "test", "app", "test.ftl"
        )
        srcpath = os.path.join(self.l10nbase, "app", "app", "test.ftl")
        self.assertTrue(
            is_symlink_to(destpath, srcpath),
            "{0} should be a symlink to {1}".format(destpath, srcpath),
        )

        # test2.ftl on the other hand, is only present in en-US dir, and should
        # not be linked from the build dir
        destpath = os.path.join(
            self.builddir, "localization", "test", "app", "test2.ftl"
        )
        self.assertFalse(
            os.path.isfile(destpath), "test2.ftl should not be taken from en-US"
        )
+
+
# Allow running this test file directly.
if __name__ == "__main__":
    mozunit.main()
diff --git a/python/mozbuild/mozbuild/test/test_licenses.py b/python/mozbuild/mozbuild/test/test_licenses.py
new file mode 100644
index 0000000000..9f3f12d423
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_licenses.py
@@ -0,0 +1,33 @@
+import unittest
+
+import mozunit
+
+from mozbuild.vendor.vendor_rust import VendorRust
+
+
class TestLicenses(unittest.TestCase):
    """
    Unit tests for the Rust vendoring license whitelist logic.
    """

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def testLicense(self):
        # (license expression, whether it is acceptable at runtime)
        cases = [
            ("Apache-2.0", True),
            ("MIT", True),
            ("GPL", False),
            # '/' acts as OR: one acceptable side suffices.
            ("MIT /GPL", True),
            ("GPL/ Proprietary", False),
            # AND requires every side to be acceptable.
            ("GPL AND MIT", False),
            ("ISC\tAND\tMIT", False),
            ("GPL OR MIT", True),
            # Unknown license names are rejected outright.
            ("ALLIGATOR MIT", False),
        ]
        for license_string, expected in cases:
            self.assertEqual(
                VendorRust.runtime_license("", license_string), expected
            )
+
+
# Allow running this test file directly.
if __name__ == "__main__":
    mozunit.main()
diff --git a/python/mozbuild/mozbuild/test/test_line_endings.py b/python/mozbuild/mozbuild/test/test_line_endings.py
new file mode 100644
index 0000000000..f8cdd89174
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_line_endings.py
@@ -0,0 +1,45 @@
+import unittest
+
+import mozunit
+from mozfile import NamedTemporaryFile
+from six import StringIO
+
+from mozbuild.preprocessor import Preprocessor
+
+
class TestLineEndings(unittest.TestCase):
    """Preprocessor output uses Unix newlines whatever the input endings."""

    def setUp(self):
        self.pp = Preprocessor()
        self.pp.out = StringIO()
        self.f = NamedTemporaryFile(mode="wb")

    def tearDown(self):
        self.f.close()

    def createFile(self, lineendings):
        """Write three fixture lines using the given per-line endings."""
        for line, ending in zip([b"a", b"#literal b", b"c"], lineendings):
            self.f.write(line + ending)
        self.f.flush()

    def _preprocess_with_ending(self, ending):
        # All three lines share one ending style; the preprocessor must
        # normalize the output to "\n" in every case.
        self.createFile([ending] * 3)
        self.pp.do_include(self.f.name)
        self.assertEqual(self.pp.out.getvalue(), "a\nb\nc\n")

    def testMac(self):
        self._preprocess_with_ending(b"\x0D")

    def testUnix(self):
        self._preprocess_with_ending(b"\x0A")

    def testWindows(self):
        self._preprocess_with_ending(b"\x0D\x0A")
+
+
# Allow running this test file directly.
if __name__ == "__main__":
    mozunit.main()
diff --git a/python/mozbuild/mozbuild/test/test_makeutil.py b/python/mozbuild/mozbuild/test/test_makeutil.py
new file mode 100644
index 0000000000..524851bfbd
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_makeutil.py
@@ -0,0 +1,164 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import unittest
+
+from mozunit import main
+from six import StringIO
+
+from mozbuild.makeutil import Makefile, Rule, read_dep_makefile, write_dep_makefile
+
+
+class TestMakefile(unittest.TestCase):
+ def test_rule(self):
+ out = StringIO()
+ rule = Rule()
+ rule.dump(out)
+ self.assertEqual(out.getvalue(), "")
+
+ out = StringIO()
+ rule.add_targets(["foo", "bar"])
+ rule.dump(out)
+ self.assertEqual(out.getvalue(), "foo bar:\n")
+
+ out = StringIO()
+ rule.add_targets(["baz"])
+ rule.add_dependencies(["qux", "hoge", "piyo"])
+ rule.dump(out)
+ self.assertEqual(out.getvalue(), "foo bar baz: qux hoge piyo\n")
+
+ out = StringIO()
+ rule = Rule(["foo", "bar"])
+ rule.add_dependencies(["baz"])
+ rule.add_commands(["echo $@"])
+ rule.add_commands(["$(BAZ) -o $@ $<", "$(TOUCH) $@"])
+ rule.dump(out)
+ self.assertEqual(
+ out.getvalue(),
+ "foo bar: baz\n"
+ + "\techo $@\n"
+ + "\t$(BAZ) -o $@ $<\n"
+ + "\t$(TOUCH) $@\n",
+ )
+
+ out = StringIO()
+ rule = Rule(["foo"])
+ rule.add_dependencies(["bar", "foo", "baz"])
+ rule.dump(out)
+ self.assertEqual(out.getvalue(), "foo: bar baz\n")
+
+ out = StringIO()
+ rule.add_targets(["bar"])
+ rule.dump(out)
+ self.assertEqual(out.getvalue(), "foo bar: baz\n")
+
+ out = StringIO()
+ rule.add_targets(["bar"])
+ rule.dump(out)
+ self.assertEqual(out.getvalue(), "foo bar: baz\n")
+
+ out = StringIO()
+ rule.add_dependencies(["bar"])
+ rule.dump(out)
+ self.assertEqual(out.getvalue(), "foo bar: baz\n")
+
+ out = StringIO()
+ rule.add_dependencies(["qux"])
+ rule.dump(out)
+ self.assertEqual(out.getvalue(), "foo bar: baz qux\n")
+
+ out = StringIO()
+ rule.add_dependencies(["qux"])
+ rule.dump(out)
+ self.assertEqual(out.getvalue(), "foo bar: baz qux\n")
+
+ out = StringIO()
+ rule.add_dependencies(["hoge", "hoge"])
+ rule.dump(out)
+ self.assertEqual(out.getvalue(), "foo bar: baz qux hoge\n")
+
+ out = StringIO()
+ rule.add_targets(["fuga", "fuga"])
+ rule.dump(out)
+ self.assertEqual(out.getvalue(), "foo bar fuga: baz qux hoge\n")
+
+ def test_makefile(self):
+ out = StringIO()
+ mk = Makefile()
+ rule = mk.create_rule(["foo"])
+ rule.add_dependencies(["bar", "baz", "qux"])
+ rule.add_commands(["echo foo"])
+ rule = mk.create_rule().add_targets(["bar", "baz"])
+ rule.add_dependencies(["hoge"])
+ rule.add_commands(["echo $@"])
+ mk.dump(out, removal_guard=False)
+ self.assertEqual(
+ out.getvalue(),
+ "foo: bar baz qux\n" + "\techo foo\n" + "bar baz: hoge\n" + "\techo $@\n",
+ )
+
+ out = StringIO()
+ mk.dump(out)
+ self.assertEqual(
+ out.getvalue(),
+ "foo: bar baz qux\n"
+ + "\techo foo\n"
+ + "bar baz: hoge\n"
+ + "\techo $@\n"
+ + "hoge qux:\n",
+ )
+
+ def test_statement(self):
+ out = StringIO()
+ mk = Makefile()
+ mk.create_rule(["foo"]).add_dependencies(["bar"]).add_commands(["echo foo"])
+ mk.add_statement("BAR = bar")
+ mk.create_rule(["$(BAR)"]).add_commands(["echo $@"])
+ mk.dump(out, removal_guard=False)
+ self.assertEqual(
+ out.getvalue(),
+ "foo: bar\n" + "\techo foo\n" + "BAR = bar\n" + "$(BAR):\n" + "\techo $@\n",
+ )
+
+ @unittest.skipIf(os.name != "nt", "Test only applicable on Windows.")
+ def test_path_normalization(self):
+ out = StringIO()
+ mk = Makefile()
+ rule = mk.create_rule(["c:\\foo"])
+ rule.add_dependencies(["c:\\bar", "c:\\baz\\qux"])
+ rule.add_commands(["echo c:\\foo"])
+ mk.dump(out)
+ self.assertEqual(
+ out.getvalue(),
+ "c:/foo: c:/bar c:/baz/qux\n" + "\techo c:\\foo\n" + "c:/bar c:/baz/qux:\n",
+ )
+
+ def test_read_dep_makefile(self):
+ input = StringIO(
+ os.path.abspath("foo")
+ + ": bar\n"
+ + "baz qux: \\ \n"
+ + "hoge \\\n"
+ + "piyo \\\n"
+ + "fuga\n"
+ + "fuga:\n"
+ )
+ result = list(read_dep_makefile(input))
+ self.assertEqual(len(result), 2)
+ self.assertEqual(
+ list(result[0].targets()), [os.path.abspath("foo").replace(os.sep, "/")]
+ )
+ self.assertEqual(list(result[0].dependencies()), ["bar"])
+ self.assertEqual(list(result[1].targets()), ["baz", "qux"])
+ self.assertEqual(list(result[1].dependencies()), ["hoge", "piyo", "fuga"])
+
+ def test_write_dep_makefile(self):
+ out = StringIO()
+ write_dep_makefile(out, "target", ["b", "c", "a"])
+ self.assertEqual(out.getvalue(), "target: b c a\n" + "a b c:\n")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/test_manifest.py b/python/mozbuild/mozbuild/test/test_manifest.py
new file mode 100644
index 0000000000..e5675aba36
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_manifest.py
@@ -0,0 +1,2081 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+
+import mozfile
+from mozunit import main
+
+from mozbuild.vendor.moz_yaml import MozYamlVerifyError, load_moz_yaml
+
+
+class TestManifest(unittest.TestCase):
+ def process_test_vectors(self, test_vectors):
+ index = 0
+ for vector in test_vectors:
+ print("Testing index", index)
+ expected, yaml = vector
+ with mozfile.NamedTemporaryFile() as tf:
+ tf.write(yaml)
+ tf.flush()
+ if expected == "exception":
+ with self.assertRaises(MozYamlVerifyError):
+ load_moz_yaml(tf.name, require_license_file=False)
+ else:
+ self.assertDictEqual(
+ load_moz_yaml(tf.name, require_license_file=False), expected
+ )
+ index += 1
+
+ # ===========================================================================================
+ def test_simple(self):
+ simple_dict = {
+ "schema": "1",
+ "origin": {
+ "description": "2D Graphics Library",
+ "license": ["MPL-1.1", "LGPL-2.1"],
+ "name": "cairo",
+ "release": "version 1.6.4",
+ "revision": "AA001122334455",
+ "url": "https://www.cairographics.org/",
+ },
+ "bugzilla": {"component": "Graphics", "product": "Core"},
+ }
+
+ self.process_test_vectors(
+ [
+ (
+ simple_dict,
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+bugzilla:
+ product: Core
+ component: Graphics
+ """.strip(),
+ ),
+ (
+ simple_dict,
+ b"""
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+bugzilla:
+ product: Core
+ component: Graphics
+ """.strip(),
+ ),
+ ]
+ )
+
+ # ===========================================================================================
+ def test_updatebot(self):
+ self.process_test_vectors(
+ [
+ (
+ {
+ "schema": "1",
+ "origin": {
+ "description": "2D Graphics Library",
+ "license": ["MPL-1.1", "LGPL-2.1"],
+ "name": "cairo",
+ "release": "version 1.6.4",
+ "revision": "AA001122334455",
+ "url": "https://www.cairographics.org/",
+ },
+ "bugzilla": {"component": "Graphics", "product": "Core"},
+ "updatebot": {
+ "maintainer-phab": "tjr",
+ "maintainer-bz": "a@example.com",
+ },
+ },
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+bugzilla:
+ product: Core
+ component: Graphics
+updatebot:
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ {
+ "schema": "1",
+ "origin": {
+ "description": "2D Graphics Library",
+ "license": ["MPL-1.1", "LGPL-2.1"],
+ "name": "cairo",
+ "release": "version 1.6.4",
+ "revision": "001122334455",
+ "url": "https://www.cairographics.org/",
+ },
+ "bugzilla": {"component": "Graphics", "product": "Core"},
+ "updatebot": {
+ "maintainer-phab": "tjr",
+ "maintainer-bz": "a@example.com",
+ },
+ },
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: 001122334455
+bugzilla:
+ product: Core
+ component: Graphics
+updatebot:
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ {
+ "schema": "1",
+ "origin": {
+ "description": "2D Graphics Library",
+ "license": ["MPL-1.1", "LGPL-2.1"],
+ "name": "cairo",
+ "release": "version 1.6.4",
+ "revision": "001122334455",
+ "url": "https://www.cairographics.org/",
+ },
+ "bugzilla": {"component": "Graphics", "product": "Core"},
+ "updatebot": {
+ "try-preset": "foo",
+ "maintainer-phab": "tjr",
+ "maintainer-bz": "a@example.com",
+ },
+ },
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: 001122334455
+bugzilla:
+ product: Core
+ component: Graphics
+updatebot:
+ try-preset: foo
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ {
+ "schema": "1",
+ "origin": {
+ "description": "2D Graphics Library",
+ "license": ["MPL-1.1", "LGPL-2.1"],
+ "name": "cairo",
+ "release": "version 1.6.4",
+ "revision": "AA001122334455",
+ "url": "https://www.cairographics.org/",
+ },
+ "bugzilla": {"component": "Graphics", "product": "Core"},
+ "vendoring": {
+ "url": "https://example.com",
+ "source-hosting": "gitlab",
+ },
+ "updatebot": {
+ "maintainer-phab": "tjr",
+ "maintainer-bz": "a@example.com",
+ "fuzzy-query": "!linux64",
+ "tasks": [{"type": "commit-alert"}],
+ },
+ },
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+bugzilla:
+ product: Core
+ component: Graphics
+updatebot:
+ fuzzy-query: "!linux64"
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ tasks:
+ - type: commit-alert
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ "exception",
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+bugzilla:
+ product: Core
+ component: Graphics
+updatebot:
+ try-preset: foo
+ fuzzy-query: "!linux64"
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ tasks:
+ - type: commit-alert
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ {
+ "schema": "1",
+ "origin": {
+ "description": "2D Graphics Library",
+ "license": ["MPL-1.1", "LGPL-2.1"],
+ "name": "cairo",
+ "release": "version 1.6.4",
+ "revision": "AA001122334455",
+ "url": "https://www.cairographics.org/",
+ },
+ "bugzilla": {"component": "Graphics", "product": "Core"},
+ "vendoring": {
+ "url": "https://example.com",
+ "source-hosting": "gitlab",
+ },
+ "updatebot": {
+ "maintainer-phab": "tjr",
+ "maintainer-bz": "a@example.com",
+ "fuzzy-paths": ["dir1/", "dir2"],
+ "tasks": [{"type": "commit-alert"}],
+ },
+ },
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+bugzilla:
+ product: Core
+ component: Graphics
+updatebot:
+ fuzzy-paths:
+ - dir1/
+ - dir2
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ tasks:
+ - type: commit-alert
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ {
+ "schema": "1",
+ "origin": {
+ "description": "2D Graphics Library",
+ "license": ["MPL-1.1", "LGPL-2.1"],
+ "name": "cairo",
+ "release": "version 1.6.4",
+ "revision": "AA001122334455",
+ "url": "https://www.cairographics.org/",
+ },
+ "bugzilla": {"component": "Graphics", "product": "Core"},
+ "vendoring": {
+ "url": "https://example.com",
+ "source-hosting": "gitlab",
+ },
+ "updatebot": {
+ "maintainer-phab": "tjr",
+ "maintainer-bz": "a@example.com",
+ "fuzzy-paths": ["dir1/"],
+ "tasks": [{"type": "commit-alert"}],
+ },
+ },
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+bugzilla:
+ product: Core
+ component: Graphics
+updatebot:
+ fuzzy-paths: ['dir1/']
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ tasks:
+ - type: commit-alert
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ {
+ "schema": "1",
+ "origin": {
+ "description": "2D Graphics Library",
+ "license": ["MPL-1.1", "LGPL-2.1"],
+ "name": "cairo",
+ "release": "version 1.6.4",
+ "revision": "AA001122334455",
+ "url": "https://www.cairographics.org/",
+ },
+ "bugzilla": {"component": "Graphics", "product": "Core"},
+ "vendoring": {
+ "url": "https://example.com",
+ "source-hosting": "gitlab",
+ "tracking": "commit",
+ "flavor": "rust",
+ },
+ "updatebot": {
+ "maintainer-phab": "tjr",
+ "maintainer-bz": "a@example.com",
+ "tasks": [
+ {"type": "commit-alert", "frequency": "release"},
+ {
+ "type": "vendoring",
+ "enabled": False,
+ "cc": ["b@example.com"],
+ "needinfo": ["c@example.com"],
+ "frequency": "1 weeks",
+ "platform": "windows",
+ },
+ ],
+ },
+ },
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ tracking: commit
+ source-hosting: gitlab
+ flavor: rust
+bugzilla:
+ product: Core
+ component: Graphics
+updatebot:
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ tasks:
+ - type: commit-alert
+ frequency: release
+ - type: vendoring
+ enabled: False
+ cc: ["b@example.com"]
+ needinfo: ["c@example.com"]
+ frequency: 1 weeks
+ platform: windows
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ {
+ "schema": "1",
+ "origin": {
+ "description": "2D Graphics Library",
+ "license": ["MPL-1.1", "LGPL-2.1"],
+ "name": "cairo",
+ "release": "version 1.6.4",
+ "revision": "AA001122334455",
+ "url": "https://www.cairographics.org/",
+ },
+ "bugzilla": {"component": "Graphics", "product": "Core"},
+ "vendoring": {
+ "url": "https://example.com",
+ "source-hosting": "gitlab",
+ "tracking": "tag",
+ "flavor": "rust",
+ },
+ "updatebot": {
+ "maintainer-phab": "tjr",
+ "maintainer-bz": "a@example.com",
+ "tasks": [
+ {"type": "commit-alert", "frequency": "release"},
+ {
+ "type": "vendoring",
+ "enabled": False,
+ "cc": ["b@example.com"],
+ "needinfo": ["c@example.com"],
+ "frequency": "1 weeks, 4 commits",
+ "platform": "windows",
+ },
+ ],
+ },
+ },
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ tracking: tag
+ source-hosting: gitlab
+ flavor: rust
+bugzilla:
+ product: Core
+ component: Graphics
+updatebot:
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ tasks:
+ - type: commit-alert
+ frequency: release
+ - type: vendoring
+ enabled: False
+ cc: ["b@example.com"]
+ needinfo: ["c@example.com"]
+ frequency: 1 weeks, 4 commits
+ platform: windows
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ "exception", # rust flavor cannot use update-actions
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ tracking: tag
+ source-hosting: gitlab
+ flavor: rust
+ update-actions:
+ - action: move-file
+ from: foo
+ to: bar
+bugzilla:
+ product: Core
+ component: Graphics
+updatebot:
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ tasks:
+ - type: commit-alert
+ frequency: release
+ - type: vendoring
+ enabled: False
+ cc: ["b@example.com"]
+ needinfo: ["c@example.com"]
+ frequency: 1 weeks, 4 commits
+ platform: windows
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ {
+ "schema": "1",
+ "origin": {
+ "description": "2D Graphics Library",
+ "license": ["MPL-1.1", "LGPL-2.1"],
+ "name": "cairo",
+ "release": "version 1.6.4",
+ "revision": "AA001122334455",
+ "url": "https://www.cairographics.org/",
+ },
+ "bugzilla": {"component": "Graphics", "product": "Core"},
+ "vendoring": {
+ "url": "https://example.com",
+ "source-hosting": "gitlab",
+ },
+ "updatebot": {
+ "maintainer-phab": "tjr",
+ "maintainer-bz": "a@example.com",
+ "tasks": [
+ {
+ "type": "vendoring",
+ "enabled": False,
+ "cc": ["b@example.com", "c@example.com"],
+ "needinfo": ["d@example.com", "e@example.com"],
+ "frequency": "every",
+ },
+ {
+ "type": "commit-alert",
+ "filter": "none",
+ "source-extensions": [".c", ".cpp"],
+ "frequency": "2 weeks",
+ "platform": "linux",
+ },
+ ],
+ },
+ },
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+bugzilla:
+ product: Core
+ component: Graphics
+updatebot:
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ tasks:
+ - type: vendoring
+ enabled: False
+ cc:
+ - b@example.com
+ - c@example.com
+ needinfo:
+ - d@example.com
+ - e@example.com
+ frequency: every
+ - type: commit-alert
+ filter: none
+ frequency: 2 weeks
+ platform: linux
+ source-extensions:
+ - .c
+ - .cpp
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ {
+ "schema": "1",
+ "origin": {
+ "description": "2D Graphics Library",
+ "license": ["MPL-1.1", "LGPL-2.1"],
+ "name": "cairo",
+ "release": "version 1.6.4",
+ "revision": "AA001122334455",
+ "url": "https://www.cairographics.org/",
+ },
+ "bugzilla": {"component": "Graphics", "product": "Core"},
+ "vendoring": {
+ "url": "https://example.com",
+ "source-hosting": "gitlab",
+ },
+ "updatebot": {
+ "maintainer-phab": "tjr",
+ "maintainer-bz": "a@example.com",
+ "tasks": [
+ {
+ "type": "vendoring",
+ "enabled": False,
+ "cc": ["b@example.com", "c@example.com"],
+ "needinfo": ["d@example.com", "e@example.com"],
+ "frequency": "every",
+ },
+ {
+ "type": "commit-alert",
+ "filter": "none",
+ "source-extensions": [".c", ".cpp"],
+ "frequency": "2 commits",
+ "platform": "linux",
+ },
+ ],
+ },
+ },
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+bugzilla:
+ product: Core
+ component: Graphics
+updatebot:
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ tasks:
+ - type: vendoring
+ enabled: False
+ cc:
+ - b@example.com
+ - c@example.com
+ needinfo:
+ - d@example.com
+ - e@example.com
+ frequency: every
+ - type: commit-alert
+ filter: none
+ frequency: 2 commits
+ platform: linux
+ source-extensions:
+ - .c
+ - .cpp
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ {
+ "schema": "1",
+ "origin": {
+ "description": "2D Graphics Library",
+ "license": ["MPL-1.1", "LGPL-2.1"],
+ "name": "cairo",
+ "release": "version 1.6.4",
+ "revision": "AA001122334455",
+ "url": "https://www.cairographics.org/",
+ },
+ "bugzilla": {"component": "Graphics", "product": "Core"},
+ "vendoring": {
+ "url": "https://example.com",
+ "source-hosting": "gitlab",
+ },
+ "updatebot": {
+ "maintainer-phab": "tjr",
+ "maintainer-bz": "a@example.com",
+ "tasks": [
+ {
+ "type": "vendoring",
+ "enabled": False,
+ "cc": ["b@example.com", "c@example.com"],
+ "needinfo": ["d@example.com", "e@example.com"],
+ "frequency": "every",
+ "blocking": "1234",
+ },
+ {
+ "type": "commit-alert",
+ "filter": "none",
+ "source-extensions": [".c", ".cpp"],
+ "frequency": "2 commits",
+ "platform": "linux",
+ },
+ ],
+ },
+ },
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+bugzilla:
+ product: Core
+ component: Graphics
+updatebot:
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ tasks:
+ - type: vendoring
+ enabled: False
+ cc:
+ - b@example.com
+ - c@example.com
+ needinfo:
+ - d@example.com
+ - e@example.com
+ frequency: every
+ blocking: 1234
+ - type: commit-alert
+ filter: none
+ frequency: 2 commits
+ platform: linux
+ source-extensions:
+ - .c
+ - .cpp
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ "exception",
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+bugzilla:
+ product: Core
+ component: Graphics
+updatebot:
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ tasks:
+ - type: vendoring
+ branch: foo
+ enabled: False
+ cc:
+ - b@example.com
+ - c@example.com
+ needinfo:
+ - d@example.com
+ - e@example.com
+ frequency: every
+ blocking: 1234
+ - type: commit-alert
+ filter: none
+ frequency: 2 commits
+ platform: linux
+ source-extensions:
+ - .c
+ - .cpp
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ {
+ "schema": "1",
+ "origin": {
+ "license": ["MPL-1.1", "LGPL-2.1"],
+ "name": "cairo",
+ "description": "2D Graphics Library",
+ "url": "https://www.cairographics.org/",
+ "release": "version 1.6.4",
+ "revision": "AA001122334455",
+ },
+ "bugzilla": {"component": "Graphics", "product": "Core"},
+ "vendoring": {
+ "url": "https://example.com",
+ "source-hosting": "gitlab",
+ "flavor": "individual-files",
+ "individual-files": [
+ {"upstream": "foo", "destination": "bar"}
+ ],
+ },
+ },
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+ flavor: individual-files
+ individual-files:
+ - upstream: foo
+ destination: bar
+bugzilla:
+ product: Core
+ component: Graphics
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ {
+ "schema": "1",
+ "origin": {
+ "license": ["MPL-1.1", "LGPL-2.1"],
+ "name": "cairo",
+ "description": "2D Graphics Library",
+ "url": "https://www.cairographics.org/",
+ "release": "version 1.6.4",
+ "revision": "AA001122334455",
+ },
+ "bugzilla": {"component": "Graphics", "product": "Core"},
+ "vendoring": {
+ "url": "https://example.com",
+ "source-hosting": "gitlab",
+ "flavor": "individual-files",
+ "individual-files": [
+ {"upstream": "foo", "destination": "bar"}
+ ],
+ "update-actions": [
+ {"action": "move-file", "from": "foo", "to": "bar"}
+ ],
+ },
+ },
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+ flavor: individual-files
+ individual-files:
+ - upstream: foo
+ destination: bar
+ update-actions:
+ - action: move-file
+ from: foo
+ to: bar
+bugzilla:
+ product: Core
+ component: Graphics
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ {
+ "schema": "1",
+ "origin": {
+ "license": ["MPL-1.1", "LGPL-2.1"],
+ "name": "cairo",
+ "description": "2D Graphics Library",
+ "url": "https://www.cairographics.org/",
+ "release": "version 1.6.4",
+ "revision": "AA001122334455",
+ },
+ "bugzilla": {"component": "Graphics", "product": "Core"},
+ "vendoring": {
+ "url": "https://example.com",
+ "source-hosting": "gitlab",
+ "flavor": "individual-files",
+ "individual-files-default-destination": "bar",
+ "individual-files-default-upstream": "foo",
+ "individual-files-list": ["foo", "bar"],
+ "update-actions": [
+ {"action": "move-file", "from": "foo", "to": "bar"}
+ ],
+ },
+ },
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+ flavor: individual-files
+ individual-files-default-upstream: foo
+ individual-files-default-destination: bar
+ individual-files-list:
+ - foo
+ - bar
+ update-actions:
+ - action: move-file
+ from: foo
+ to: bar
+bugzilla:
+ product: Core
+ component: Graphics
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ "exception", # can't have both types of individual-files list
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+ flavor: individual-files
+ individual-files-list:
+ - foo
+ individual-files:
+ - upstream: foo
+ destination: bar
+ update-actions:
+ - action: move-file
+ from: foo
+ to: bar
+bugzilla:
+ product: Core
+ component: Graphics
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ "exception", # can't have indidivudal-files-default-upstream
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+ flavor: individual-files
+ indidivudal-files-default-upstream: foo
+ individual-files:
+ - upstream: foo
+ destination: bar
+ update-actions:
+ - action: move-file
+ from: foo
+ to: bar
+bugzilla:
+ product: Core
+ component: Graphics
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ "exception", # must have indidivudal-files-default-upstream
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+ flavor: individual-files
+ indidivudal-files-default-destination: foo
+ individual-files-list:
+ - foo
+ - bar
+ update-actions:
+ - action: move-file
+ from: foo
+ to: bar
+bugzilla:
+ product: Core
+ component: Graphics
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ "exception",
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+ tracking: tag
+ flavor: individual-files
+ individual-files:
+ - upstream-src: foo
+ dst: bar
+bugzilla:
+ product: Core
+ component: Graphics
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ "exception",
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+ flavor: individual-files
+bugzilla:
+ product: Core
+ component: Graphics
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ "exception",
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+ flavor: rust
+ individual-files:
+ - upstream: foo
+ destination: bar
+bugzilla:
+ product: Core
+ component: Graphics
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ "exception",
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+ flavor: rust
+ include:
+ - foo
+bugzilla:
+ product: Core
+ component: Graphics
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ "exception",
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+bugzilla:
+ product: Core
+ component: Graphics
+updatebot:
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ tasks:
+ - type: vendoring
+ enabled: False
+ cc:
+ - b@example.com
+ - c@example.com
+ needinfo:
+ - d@example.com
+ - e@example.com
+ frequency: every
+ blocking: foo
+ - type: commit-alert
+ filter: none
+ frequency: 2 commits
+ platform: linux
+ source-extensions:
+ - .c
+ - .cpp
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ "exception",
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+bugzilla:
+ product: Core
+ component: Graphics
+updatebot:
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ fuzzy-paths: "must-be-array"
+ tasks:
+ - type: vendoring
+ enabled: False
+ cc:
+ - b@example.com
+ - c@example.com
+ needinfo:
+ - d@example.com
+ - e@example.com
+ frequency: every
+ - type: commit-alert
+ filter: none
+ frequency: 2 commits
+ platform: linux
+ source-extensions:
+ - .c
+ - .cpp
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ "exception",
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+bugzilla:
+ product: Core
+ component: Graphics
+updatebot:
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ tasks:
+ - type: vendoring
+ enabled: False
+ cc:
+ - b@example.com
+ - c@example.com
+ needinfo:
+ - d@example.com
+ - e@example.com
+ frequency: every
+ - type: commit-alert
+ filter: none
+ frequency: 2 commits, 4 weeks
+ platform: linux
+ source-extensions:
+ - .c
+ - .cpp
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ "exception",
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+bugzilla:
+ product: Core
+ component: Graphics
+updatebot:
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ tasks:
+ - type: vendoring
+ enabled: False
+ cc:
+ - b@example.com
+ - c@example.com
+ needinfo:
+ - d@example.com
+ - e@example.com
+ frequency: every
+ - type: commit-alert
+ filter: none
+ frequency: 4 weeks, 2 commits, 3 weeks
+ platform: linux
+ source-extensions:
+ - .c
+ - .cpp
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ "exception",
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+ flavor: chocolate
+bugzilla:
+ product: Core
+ component: Graphics
+updatebot:
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ tasks:
+ - type: vendoring
+ enabled: False
+ cc:
+ - b@example.com
+ - c@example.com
+ needinfo:
+ - d@example.com
+ - e@example.com
+ frequency: every
+ - type: commit-alert
+ filter: none
+ frequency: 2 weeks
+ platform: linux
+ source-extensions:
+ - .c
+ - .cpp
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ "exception",
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+ flavor: chocolate
+bugzilla:
+ product: Core
+ component: Graphics
+updatebot:
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ tasks:
+ - type: vendoring
+ enabled: False
+ cc:
+ - b@example.com
+ - c@example.com
+ needinfo:
+ - d@example.com
+ - e@example.com
+ frequency: every
+ - type: commit-alert
+ filter: none
+ frequency: 01 commits
+ platform: linux
+ source-extensions:
+ - .c
+ - .cpp
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ "exception",
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+bugzilla:
+ product: Core
+ component: Graphics
+updatebot:
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ tasks:
+ - type: vendoring
+ enabled: False
+ cc:
+ - b@example.com
+ - c@example.com
+ needinfo:
+ - d@example.com
+ - e@example.com
+ frequency: every
+ - type: commit-alert
+ filter: none
+ frequency: 2 weeks
+ platform: mac
+ source-extensions:
+ - .c
+ - .cpp
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ "exception",
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+bugzilla:
+ product: Core
+ component: Graphics
+updatebot:
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ tasks:
+ - type: vendoring
+ enabled: False
+ cc:
+ - b@example.com
+ - c@example.com
+ - type: commit-alert
+ filter: none
+ source-extensions:
+ - .c
+ - .cpp
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ "exception",
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+bugzilla:
+ product: Core
+ component: Graphics
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+updatebot:
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ tasks:
+ - type: vendoring
+ enabled: False
+ cc:
+ - b@example.com
+ - c@example.com
+ - type: commit-alert
+ filter: none
+ source-extensions:
+ - .c
+ - .cpp
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ "exception",
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+bugzilla:
+ product: Core
+ component: Graphics
+updatebot:
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ tasks:
+ - type: vendoring
+ filter: none
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ "exception",
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+bugzilla:
+ product: Core
+ component: Graphics
+updatebot:
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ tasks:
+ - type: foo
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ "exception",
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+bugzilla:
+ product: Core
+ component: Graphics
+updatebot:
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ tasks:
+ - type: vendoring
+ source-extensions:
+ - .c
+ - .cpp
+ """.strip(),
+ ),
+ # -------------------------------------------------
+ (
+ "exception",
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+bugzilla:
+ product: Core
+ component: Graphics
+updatebot:
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ tasks:
+ - type: commit-alert
+ filter: hogwash
+ """.strip(),
+ ),
+ (
+ "exception",
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+bugzilla:
+ product: Core
+ component: Graphics
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+updatebot:
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ tasks:
+ - type: vendoring
+ enabled: False
+ cc:
+ - b@example.com
+ - c@example.com
+ - type: commit-alert
+ - type: commit-alert
+ filter: none
+ source-extensions:
+ - .c
+ - .cpp""".strip(),
+ ),
+ (
+ "exception",
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+bugzilla:
+ product: Core
+ component: Graphics
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+updatebot:
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ tasks:
+ - type: vendoring
+ enabled: False
+ cc:
+ - b@example.com
+ - c@example.com
+ - type: vendoring
+ - type: commit-alert
+ filter: none
+ source-extensions:
+ - .c
+ - .cpp""".strip(),
+ ),
+ (
+ "exception",
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+bugzilla:
+ product: Core
+ component: Graphics
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+updatebot:
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ tasks:
+ - type: vendoring
+ enabled: False
+ cc:
+ - b@example.com
+ - c@example.com
+ - type: commit-alert
+ frequency: every-release
+ filter: none
+ source-extensions:
+ - .c
+ - .cpp""".strip(),
+ ),
+ (
+ "exception",
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+bugzilla:
+ product: Core
+ component: Graphics
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+updatebot:
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ tasks:
+ - type: vendoring
+ enabled: False
+ cc:
+ - b@example.com
+ - c@example.com
+ frequency: 2 months
+ - type: commit-alert
+ filter: none
+ source-extensions:
+ - .c
+ - .cpp""".strip(),
+ ),
+ (
+ "exception",
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+bugzilla:
+ product: Core
+ component: Graphics
+updatebot:
+ maintainer-phab: tjr
+ maintainer-bz: a@example.com
+ tasks:
+ - type: commit-alert
+ frequency: 0 weeks
+ """.strip(),
+ ),
+ ]
+ )
+
+ # ===========================================================================================
+ def test_malformed(self):
+ with mozfile.NamedTemporaryFile() as tf:
+ tf.write(b"blah")
+ tf.flush()
+ with self.assertRaises(MozYamlVerifyError):
+ load_moz_yaml(tf.name, require_license_file=False)
+
+ def test_schema(self):
+ with mozfile.NamedTemporaryFile() as tf:
+ tf.write(b"schema: 99")
+ tf.flush()
+ with self.assertRaises(MozYamlVerifyError):
+ load_moz_yaml(tf.name, require_license_file=False)
+
+ def test_json(self):
+ with mozfile.NamedTemporaryFile() as tf:
+ tf.write(
+ b'{"origin": {"release": "version 1.6.4", "url": "https://w'
+ b'ww.cairographics.org/", "description": "2D Graphics Libra'
+ b'ry", "license": ["MPL-1.1", "LGPL-2.1"], "name": "cairo"}'
+ b', "bugzilla": {"product": "Core", "component": "Graphics"'
+ b'}, "schema": 1}'
+ )
+ tf.flush()
+ with self.assertRaises(MozYamlVerifyError):
+ load_moz_yaml(tf.name, require_license_file=False)
+
+ def test_revision(self):
+ self.process_test_vectors(
+ [
+ (
+ {
+ "schema": "1",
+ "origin": {
+ "description": "2D Graphics Library",
+ "license": ["MPL-1.1", "LGPL-2.1"],
+ "name": "cairo",
+ "release": "version 1.6.4",
+ "revision": "v1.6.37",
+ "url": "https://www.cairographics.org/",
+ },
+ "bugzilla": {"component": "Graphics", "product": "Core"},
+ },
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: v1.6.37
+bugzilla:
+ product: Core
+ component: Graphics""".strip(),
+ ),
+ (
+ "exception",
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: 4.0.0.
+bugzilla:
+ product: Core
+ component: Graphics""".strip(),
+ ),
+ (
+ "exception",
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: 4.^.0
+bugzilla:
+ product: Core
+ component: Graphics""".strip(),
+ ),
+ (
+ "exception",
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: " "
+bugzilla:
+ product: Core
+ component: Graphics""".strip(),
+ ),
+ (
+ "exception",
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: ???
+bugzilla:
+ product: Core
+ component: Graphics""".strip(),
+ ),
+ (
+ "exception",
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: ]
+bugzilla:
+ product: Core
+ component: Graphics""".strip(),
+ ),
+ (
+ "exception",
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+bugzilla:
+ product: Core
+ component: Graphics
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+ update-actions:
+ - action: run-script
+ cwd: '{cwd}'
+ script: 'script.py'
+ args: ['hi']
+ pattern: 'hi'
+""".strip(),
+ ),
+ (
+ "exception",
+ b"""
+---
+schema: 1
+origin:
+ name: cairo
+ description: 2D Graphics Library
+ url: https://www.cairographics.org/
+ release: version 1.6.4
+ license:
+ - MPL-1.1
+ - LGPL-2.1
+ revision: AA001122334455
+bugzilla:
+ product: Core
+ component: Graphics
+vendoring:
+ url: https://example.com
+ source-hosting: gitlab
+ update-actions:
+ - action: run-script
+ cwd: '{cwd}'
+ args: ['hi']
+""".strip(),
+ ),
+ ]
+ )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/test_mozconfig.py b/python/mozbuild/mozbuild/test/test_mozconfig.py
new file mode 100644
index 0000000000..20827d7f29
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_mozconfig.py
@@ -0,0 +1,275 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import unittest
+from shutil import rmtree
+from tempfile import mkdtemp
+
+from mozfile.mozfile import NamedTemporaryFile
+from mozunit import main
+
+from mozbuild.mozconfig import MozconfigLoader, MozconfigLoadException
+
+
+class TestMozconfigLoader(unittest.TestCase):
+ def setUp(self):
+ self._old_env = dict(os.environ)
+ os.environ.pop("MOZCONFIG", None)
+ os.environ.pop("MOZ_OBJDIR", None)
+ os.environ.pop("CC", None)
+ os.environ.pop("CXX", None)
+ self._temp_dirs = set()
+
+ def tearDown(self):
+ os.environ.clear()
+ os.environ.update(self._old_env)
+
+ for d in self._temp_dirs:
+ rmtree(d)
+
+ def get_loader(self):
+ return MozconfigLoader(self.get_temp_dir())
+
+ def get_temp_dir(self):
+ d = mkdtemp()
+ self._temp_dirs.add(d)
+
+ return d
+
+ def test_read_no_mozconfig(self):
+ # This is basically to ensure changes to defaults incur a test failure.
+ result = self.get_loader().read_mozconfig()
+
+ self.assertEqual(
+ result,
+ {
+ "path": None,
+ "topobjdir": None,
+ "configure_args": None,
+ "make_flags": None,
+ "make_extra": None,
+ "env": None,
+ "vars": None,
+ },
+ )
+
+ def test_read_empty_mozconfig(self):
+ with NamedTemporaryFile(mode="w") as mozconfig:
+ result = self.get_loader().read_mozconfig(mozconfig.name)
+
+ self.assertEqual(result["path"], mozconfig.name)
+ self.assertIsNone(result["topobjdir"])
+ self.assertEqual(result["configure_args"], [])
+ self.assertEqual(result["make_flags"], [])
+ self.assertEqual(result["make_extra"], [])
+
+ for f in ("added", "removed", "modified"):
+ self.assertEqual(len(result["vars"][f]), 0)
+ self.assertEqual(len(result["env"][f]), 0)
+
+ self.assertEqual(result["env"]["unmodified"], {})
+
+ def test_read_capture_ac_options(self):
+ """Ensures ac_add_options calls are captured."""
+ with NamedTemporaryFile(mode="w") as mozconfig:
+ mozconfig.write("ac_add_options --enable-debug\n")
+ mozconfig.write("ac_add_options --disable-tests --enable-foo\n")
+ mozconfig.write('ac_add_options --foo="bar baz"\n')
+ mozconfig.flush()
+
+ result = self.get_loader().read_mozconfig(mozconfig.name)
+ self.assertEqual(
+ result["configure_args"],
+ ["--enable-debug", "--disable-tests", "--enable-foo", "--foo=bar baz"],
+ )
+
+ def test_read_ac_options_substitution(self):
+ """Ensure ac_add_options values are substituted."""
+ with NamedTemporaryFile(mode="w") as mozconfig:
+ mozconfig.write("ac_add_options --foo=@TOPSRCDIR@\n")
+ mozconfig.flush()
+
+ loader = self.get_loader()
+ result = loader.read_mozconfig(mozconfig.name)
+ self.assertEqual(result["configure_args"], ["--foo=%s" % loader.topsrcdir])
+
+ def test_read_capture_mk_options(self):
+ """Ensures mk_add_options calls are captured."""
+ with NamedTemporaryFile(mode="w") as mozconfig:
+ mozconfig.write("mk_add_options MOZ_OBJDIR=/foo/bar\n")
+ mozconfig.write('mk_add_options MOZ_MAKE_FLAGS="-j8 -s"\n')
+ mozconfig.write('mk_add_options FOO="BAR BAZ"\n')
+ mozconfig.write("mk_add_options BIZ=1\n")
+ mozconfig.flush()
+
+ result = self.get_loader().read_mozconfig(mozconfig.name)
+ self.assertEqual(result["topobjdir"], "/foo/bar")
+ self.assertEqual(result["make_flags"], ["-j8", "-s"])
+ self.assertEqual(result["make_extra"], ["FOO=BAR BAZ", "BIZ=1"])
+
+ def test_read_no_mozconfig_objdir_environ(self):
+ os.environ["MOZ_OBJDIR"] = "obj-firefox"
+ result = self.get_loader().read_mozconfig()
+ self.assertEqual(result["topobjdir"], "obj-firefox")
+
+ def test_read_empty_mozconfig_objdir_environ(self):
+ os.environ["MOZ_OBJDIR"] = "obj-firefox"
+ with NamedTemporaryFile(mode="w") as mozconfig:
+ result = self.get_loader().read_mozconfig(mozconfig.name)
+ self.assertEqual(result["topobjdir"], "obj-firefox")
+
+ def test_read_capture_mk_options_objdir_environ(self):
+ """Ensures mk_add_options calls are captured and override the environ."""
+ os.environ["MOZ_OBJDIR"] = "obj-firefox"
+ with NamedTemporaryFile(mode="w") as mozconfig:
+ mozconfig.write("mk_add_options MOZ_OBJDIR=/foo/bar\n")
+ mozconfig.flush()
+
+ result = self.get_loader().read_mozconfig(mozconfig.name)
+ self.assertEqual(result["topobjdir"], "/foo/bar")
+
+ def test_read_moz_objdir_substitution(self):
+ """Ensure @TOPSRCDIR@ substitution is recognized in MOZ_OBJDIR."""
+ with NamedTemporaryFile(mode="w") as mozconfig:
+ mozconfig.write("mk_add_options MOZ_OBJDIR=@TOPSRCDIR@/some-objdir")
+ mozconfig.flush()
+
+ loader = self.get_loader()
+ result = loader.read_mozconfig(mozconfig.name)
+
+ self.assertEqual(result["topobjdir"], "%s/some-objdir" % loader.topsrcdir)
+
+ def test_read_new_variables(self):
+ """New variables declared in mozconfig file are detected."""
+ with NamedTemporaryFile(mode="w") as mozconfig:
+ mozconfig.write("CC=/usr/local/bin/clang\n")
+ mozconfig.write("CXX=/usr/local/bin/clang++\n")
+ mozconfig.flush()
+
+ result = self.get_loader().read_mozconfig(mozconfig.name)
+
+ self.assertEqual(
+ result["vars"]["added"],
+ {"CC": "/usr/local/bin/clang", "CXX": "/usr/local/bin/clang++"},
+ )
+ self.assertEqual(result["env"]["added"], {})
+
+ def test_read_exported_variables(self):
+ """Exported variables are caught as new variables."""
+ with NamedTemporaryFile(mode="w") as mozconfig:
+ mozconfig.write("export MY_EXPORTED=woot\n")
+ mozconfig.flush()
+
+ result = self.get_loader().read_mozconfig(mozconfig.name)
+
+ self.assertEqual(result["vars"]["added"], {})
+ self.assertEqual(result["env"]["added"], {"MY_EXPORTED": "woot"})
+
+ def test_read_modify_variables(self):
+ """Variables modified by mozconfig are detected."""
+ old_path = os.path.realpath("/usr/bin/gcc")
+ new_path = os.path.realpath("/usr/local/bin/clang")
+ os.environ["CC"] = old_path
+
+ with NamedTemporaryFile(mode="w") as mozconfig:
+ mozconfig.write('CC="%s"\n' % new_path)
+ mozconfig.flush()
+
+ result = self.get_loader().read_mozconfig(mozconfig.name)
+
+ self.assertEqual(result["vars"]["modified"], {})
+ self.assertEqual(result["env"]["modified"], {"CC": (old_path, new_path)})
+
+ def test_read_unmodified_variables(self):
+ """Variables modified by mozconfig are detected."""
+ cc_path = os.path.realpath("/usr/bin/gcc")
+ os.environ["CC"] = cc_path
+
+ with NamedTemporaryFile(mode="w") as mozconfig:
+ mozconfig.flush()
+
+ result = self.get_loader().read_mozconfig(mozconfig.name)
+
+ self.assertEqual(result["vars"]["unmodified"], {})
+ self.assertEqual(result["env"]["unmodified"], {"CC": cc_path})
+
+ def test_read_removed_variables(self):
+ """Variables unset by the mozconfig are detected."""
+ cc_path = os.path.realpath("/usr/bin/clang")
+ os.environ["CC"] = cc_path
+
+ with NamedTemporaryFile(mode="w") as mozconfig:
+ mozconfig.write("unset CC\n")
+ mozconfig.flush()
+
+ result = self.get_loader().read_mozconfig(mozconfig.name)
+
+ self.assertEqual(result["vars"]["removed"], {})
+ self.assertEqual(result["env"]["removed"], {"CC": cc_path})
+
+ def test_read_multiline_variables(self):
+ """Ensure multi-line variables are captured properly."""
+ with NamedTemporaryFile(mode="w") as mozconfig:
+ mozconfig.write('multi="foo\nbar"\n')
+ mozconfig.write("single=1\n")
+ mozconfig.flush()
+
+ result = self.get_loader().read_mozconfig(mozconfig.name)
+
+ self.assertEqual(
+ result["vars"]["added"], {"multi": "foo\nbar", "single": "1"}
+ )
+ self.assertEqual(result["env"]["added"], {})
+
+ def test_read_topsrcdir_defined(self):
+ """Ensure $topsrcdir references work as expected."""
+ with NamedTemporaryFile(mode="w") as mozconfig:
+ mozconfig.write("TEST=$topsrcdir")
+ mozconfig.flush()
+
+ loader = self.get_loader()
+ result = loader.read_mozconfig(mozconfig.name)
+
+ self.assertEqual(
+ result["vars"]["added"]["TEST"], loader.topsrcdir.replace(os.sep, "/")
+ )
+ self.assertEqual(result["env"]["added"], {})
+
+ def test_read_empty_variable_value(self):
+ """Ensure empty variable values are parsed properly."""
+ with NamedTemporaryFile(mode="w") as mozconfig:
+ mozconfig.write("EMPTY=\n")
+ mozconfig.write("export EXPORT_EMPTY=\n")
+ mozconfig.flush()
+
+ result = self.get_loader().read_mozconfig(mozconfig.name)
+
+ self.assertEqual(
+ result["vars"]["added"],
+ {
+ "EMPTY": "",
+ },
+ )
+ self.assertEqual(result["env"]["added"], {"EXPORT_EMPTY": ""})
+
+ def test_read_load_exception(self):
+ """Ensure non-0 exit codes in mozconfigs are handled properly."""
+ with NamedTemporaryFile(mode="w") as mozconfig:
+ mozconfig.write('echo "hello world"\n')
+ mozconfig.write("exit 1\n")
+ mozconfig.flush()
+
+ with self.assertRaises(MozconfigLoadException) as e:
+ self.get_loader().read_mozconfig(mozconfig.name)
+
+ self.assertIn(
+ "Evaluation of your mozconfig exited with an error", str(e.exception)
+ )
+ self.assertEqual(e.exception.path, mozconfig.name.replace(os.sep, "/"))
+ self.assertEqual(e.exception.output, ["hello world"])
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/test_mozinfo.py b/python/mozbuild/mozbuild/test/test_mozinfo.py
new file mode 100755
index 0000000000..0d966b3dcc
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_mozinfo.py
@@ -0,0 +1,318 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+import os
+import tempfile
+import unittest
+
+import mozunit
+import six
+from mozfile.mozfile import NamedTemporaryFile
+from six import StringIO
+
+from mozbuild.backend.configenvironment import ConfigEnvironment
+from mozbuild.mozinfo import build_dict, write_mozinfo
+
+
+class Base(object):
+ def _config(self, substs={}):
+ d = os.path.dirname(__file__)
+ return ConfigEnvironment(d, d, substs=substs)
+
+
+class TestBuildDict(unittest.TestCase, Base):
+ def test_missing(self):
+ """
+ Test that missing required values raises.
+ """
+
+ with self.assertRaises(Exception):
+ build_dict(self._config(substs=dict(OS_TARGET="foo")))
+
+ with self.assertRaises(Exception):
+ build_dict(self._config(substs=dict(TARGET_CPU="foo")))
+
+ with self.assertRaises(Exception):
+ build_dict(self._config(substs=dict(MOZ_WIDGET_TOOLKIT="foo")))
+
+ def test_win(self):
+ d = build_dict(
+ self._config(
+ dict(
+ OS_TARGET="WINNT",
+ TARGET_CPU="i386",
+ MOZ_WIDGET_TOOLKIT="windows",
+ )
+ )
+ )
+ self.assertEqual("win", d["os"])
+ self.assertEqual("x86", d["processor"])
+ self.assertEqual("windows", d["toolkit"])
+ self.assertEqual(32, d["bits"])
+
+ def test_linux(self):
+ d = build_dict(
+ self._config(
+ dict(
+ OS_TARGET="Linux",
+ TARGET_CPU="i386",
+ MOZ_WIDGET_TOOLKIT="gtk",
+ )
+ )
+ )
+ self.assertEqual("linux", d["os"])
+ self.assertEqual("x86", d["processor"])
+ self.assertEqual("gtk", d["toolkit"])
+ self.assertEqual(32, d["bits"])
+
+ d = build_dict(
+ self._config(
+ dict(
+ OS_TARGET="Linux",
+ TARGET_CPU="x86_64",
+ MOZ_WIDGET_TOOLKIT="gtk",
+ )
+ )
+ )
+ self.assertEqual("linux", d["os"])
+ self.assertEqual("x86_64", d["processor"])
+ self.assertEqual("gtk", d["toolkit"])
+ self.assertEqual(64, d["bits"])
+
+ def test_mac(self):
+ d = build_dict(
+ self._config(
+ dict(
+ OS_TARGET="Darwin",
+ TARGET_CPU="i386",
+ MOZ_WIDGET_TOOLKIT="cocoa",
+ )
+ )
+ )
+ self.assertEqual("mac", d["os"])
+ self.assertEqual("x86", d["processor"])
+ self.assertEqual("cocoa", d["toolkit"])
+ self.assertEqual(32, d["bits"])
+
+ d = build_dict(
+ self._config(
+ dict(
+ OS_TARGET="Darwin",
+ TARGET_CPU="x86_64",
+ MOZ_WIDGET_TOOLKIT="cocoa",
+ )
+ )
+ )
+ self.assertEqual("mac", d["os"])
+ self.assertEqual("x86_64", d["processor"])
+ self.assertEqual("cocoa", d["toolkit"])
+ self.assertEqual(64, d["bits"])
+
+ def test_android(self):
+ d = build_dict(
+ self._config(
+ dict(
+ OS_TARGET="Android",
+ TARGET_CPU="arm",
+ MOZ_WIDGET_TOOLKIT="android",
+ )
+ )
+ )
+ self.assertEqual("android", d["os"])
+ self.assertEqual("arm", d["processor"])
+ self.assertEqual("android", d["toolkit"])
+ self.assertEqual(32, d["bits"])
+
+ def test_x86(self):
+ """
+ Test that various i?86 values => x86.
+ """
+ d = build_dict(
+ self._config(
+ dict(
+ OS_TARGET="WINNT",
+ TARGET_CPU="i486",
+ MOZ_WIDGET_TOOLKIT="windows",
+ )
+ )
+ )
+ self.assertEqual("x86", d["processor"])
+
+ d = build_dict(
+ self._config(
+ dict(
+ OS_TARGET="WINNT",
+ TARGET_CPU="i686",
+ MOZ_WIDGET_TOOLKIT="windows",
+ )
+ )
+ )
+ self.assertEqual("x86", d["processor"])
+
+ def test_arm(self):
+ """
+ Test that all arm CPU architectures => arm.
+ """
+ d = build_dict(
+ self._config(
+ dict(
+ OS_TARGET="Linux",
+ TARGET_CPU="arm",
+ MOZ_WIDGET_TOOLKIT="gtk",
+ )
+ )
+ )
+ self.assertEqual("arm", d["processor"])
+
+ d = build_dict(
+ self._config(
+ dict(
+ OS_TARGET="Linux",
+ TARGET_CPU="armv7",
+ MOZ_WIDGET_TOOLKIT="gtk",
+ )
+ )
+ )
+ self.assertEqual("arm", d["processor"])
+
+ def test_unknown(self):
+ """
+ Test that unknown values pass through okay.
+ """
+ d = build_dict(
+ self._config(
+ dict(
+ OS_TARGET="RandOS",
+ TARGET_CPU="cptwo",
+ MOZ_WIDGET_TOOLKIT="foobar",
+ )
+ )
+ )
+ self.assertEqual("randos", d["os"])
+ self.assertEqual("cptwo", d["processor"])
+ self.assertEqual("foobar", d["toolkit"])
+ # unknown CPUs should not get a bits value
+ self.assertFalse("bits" in d)
+
+ def test_debug(self):
+ """
+ Test that debug values are properly detected.
+ """
+ d = build_dict(
+ self._config(
+ dict(
+ OS_TARGET="Linux",
+ TARGET_CPU="i386",
+ MOZ_WIDGET_TOOLKIT="gtk",
+ )
+ )
+ )
+ self.assertEqual(False, d["debug"])
+
+ d = build_dict(
+ self._config(
+ dict(
+ OS_TARGET="Linux",
+ TARGET_CPU="i386",
+ MOZ_WIDGET_TOOLKIT="gtk",
+ MOZ_DEBUG="1",
+ )
+ )
+ )
+ self.assertEqual(True, d["debug"])
+
+ def test_crashreporter(self):
+ """
+ Test that crashreporter values are properly detected.
+ """
+ d = build_dict(
+ self._config(
+ dict(
+ OS_TARGET="Linux",
+ TARGET_CPU="i386",
+ MOZ_WIDGET_TOOLKIT="gtk",
+ )
+ )
+ )
+ self.assertEqual(False, d["crashreporter"])
+
+ d = build_dict(
+ self._config(
+ dict(
+ OS_TARGET="Linux",
+ TARGET_CPU="i386",
+ MOZ_WIDGET_TOOLKIT="gtk",
+ MOZ_CRASHREPORTER="1",
+ )
+ )
+ )
+ self.assertEqual(True, d["crashreporter"])
+
+
+class TestWriteMozinfo(unittest.TestCase, Base):
+ """
+ Test the write_mozinfo function.
+ """
+
+ def setUp(self):
+ fd, f = tempfile.mkstemp()
+ self.f = six.ensure_text(f)
+ os.close(fd)
+
+ def tearDown(self):
+ os.unlink(self.f)
+
+ def test_basic(self):
+ """
+ Test that writing to a file produces correct output.
+ """
+ c = self._config(
+ dict(
+ OS_TARGET="WINNT",
+ TARGET_CPU="i386",
+ MOZ_WIDGET_TOOLKIT="windows",
+ )
+ )
+ tempdir = tempfile.gettempdir()
+ c.topsrcdir = tempdir
+ with NamedTemporaryFile(
+ dir=os.path.normpath(c.topsrcdir), mode="wt"
+ ) as mozconfig:
+ mozconfig.write("unused contents")
+ mozconfig.flush()
+ c.mozconfig = mozconfig.name
+ write_mozinfo(self.f, c)
+ with open(self.f) as f:
+ d = json.load(f)
+ self.assertEqual("win", d["os"])
+ self.assertEqual("x86", d["processor"])
+ self.assertEqual("windows", d["toolkit"])
+ self.assertEqual(tempdir, d["topsrcdir"])
+ self.assertEqual(mozconfig.name, d["mozconfig"])
+ self.assertEqual(32, d["bits"])
+
+ def test_fileobj(self):
+ """
+ Test that writing to a file-like object produces correct output.
+ """
+ s = StringIO()
+ c = self._config(
+ dict(
+ OS_TARGET="WINNT",
+ TARGET_CPU="i386",
+ MOZ_WIDGET_TOOLKIT="windows",
+ )
+ )
+ write_mozinfo(s, c)
+ d = json.loads(s.getvalue())
+ self.assertEqual("win", d["os"])
+ self.assertEqual("x86", d["processor"])
+ self.assertEqual("windows", d["toolkit"])
+ self.assertEqual(32, d["bits"])
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozbuild/test/test_preprocessor.py b/python/mozbuild/mozbuild/test/test_preprocessor.py
new file mode 100644
index 0000000000..82039c2bd7
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_preprocessor.py
@@ -0,0 +1,832 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import shutil
+import unittest
+from tempfile import mkdtemp
+
+from mozunit import MockedOpen, main
+from six import StringIO
+
+from mozbuild.preprocessor import Preprocessor
+
+
+class TestPreprocessor(unittest.TestCase):
+ """
+ Unit tests for the Context class
+ """
+
+ def setUp(self):
+ self.pp = Preprocessor()
+ self.pp.out = StringIO()
+
+ def do_include_compare(self, content_lines, expected_lines):
+ content = "%s" % "\n".join(content_lines)
+ expected = "%s".rstrip() % "\n".join(expected_lines)
+
+ with MockedOpen({"dummy": content}):
+ self.pp.do_include("dummy")
+ self.assertEqual(self.pp.out.getvalue().rstrip("\n"), expected)
+
+ def do_include_pass(self, content_lines):
+ self.do_include_compare(content_lines, ["PASS"])
+
+ def test_conditional_if_0(self):
+ self.do_include_pass(
+ [
+ "#if 0",
+ "FAIL",
+ "#else",
+ "PASS",
+ "#endif",
+ ]
+ )
+
+ def test_no_marker(self):
+ lines = [
+ "#if 0",
+ "PASS",
+ "#endif",
+ ]
+ self.pp.setMarker(None)
+ self.do_include_compare(lines, lines)
+
+ def test_string_value(self):
+ self.do_include_compare(
+ [
+ "#define FOO STRING",
+ "#if FOO",
+ "string value is true",
+ "#else",
+ "string value is false",
+ "#endif",
+ ],
+ ["string value is false"],
+ )
+
+ def test_number_value(self):
+ self.do_include_compare(
+ [
+ "#define FOO 1",
+ "#if FOO",
+ "number value is true",
+ "#else",
+ "number value is false",
+ "#endif",
+ ],
+ ["number value is true"],
+ )
+
+ def test_conditional_if_0_elif_1(self):
+ self.do_include_pass(
+ [
+ "#if 0",
+ "#elif 1",
+ "PASS",
+ "#else",
+ "FAIL",
+ "#endif",
+ ]
+ )
+
+ def test_conditional_if_1(self):
+ self.do_include_pass(
+ [
+ "#if 1",
+ "PASS",
+ "#else",
+ "FAIL",
+ "#endif",
+ ]
+ )
+
+ def test_conditional_if_0_or_1(self):
+ self.do_include_pass(
+ [
+ "#if 0 || 1",
+ "PASS",
+ "#else",
+ "FAIL",
+ "#endif",
+ ]
+ )
+
+ def test_conditional_if_1_elif_1_else(self):
+ self.do_include_pass(
+ [
+ "#if 1",
+ "PASS",
+ "#elif 1",
+ "FAIL",
+ "#else",
+ "FAIL",
+ "#endif",
+ ]
+ )
+
+ def test_conditional_if_1_if_1(self):
+ self.do_include_pass(
+ [
+ "#if 1",
+ "#if 1",
+ "PASS",
+ "#else",
+ "FAIL",
+ "#endif",
+ "#else",
+ "FAIL",
+ "#endif",
+ ]
+ )
+
+ def test_conditional_not_0(self):
+ self.do_include_pass(
+ [
+ "#if !0",
+ "PASS",
+ "#else",
+ "FAIL",
+ "#endif",
+ ]
+ )
+
+ def test_conditional_not_0_and_1(self):
+ self.do_include_pass(
+ [
+ "#if !0 && !1",
+ "FAIL",
+ "#else",
+ "PASS",
+ "#endif",
+ ]
+ )
+
+ def test_conditional_not_1(self):
+ self.do_include_pass(
+ [
+ "#if !1",
+ "FAIL",
+ "#else",
+ "PASS",
+ "#endif",
+ ]
+ )
+
+ def test_conditional_not_emptyval(self):
+ self.do_include_compare(
+ [
+ "#define EMPTYVAL",
+ "#ifndef EMPTYVAL",
+ "FAIL",
+ "#else",
+ "PASS",
+ "#endif",
+ "#ifdef EMPTYVAL",
+ "PASS",
+ "#else",
+ "FAIL",
+ "#endif",
+ ],
+ ["PASS", "PASS"],
+ )
+
+ def test_conditional_not_nullval(self):
+ self.do_include_pass(
+ [
+ "#define NULLVAL 0",
+ "#if !NULLVAL",
+ "PASS",
+ "#else",
+ "FAIL",
+ "#endif",
+ ]
+ )
+
+ def test_indentation(self):
+ self.do_include_pass(
+ [
+ " #define NULLVAL 0",
+ " #if !NULLVAL",
+ "PASS",
+ " #else",
+ "FAIL",
+ " #endif",
+ ]
+ )
+
+ def test_expand(self):
+ self.do_include_pass(
+ [
+ "#define ASVAR AS",
+ "#expand P__ASVAR__S",
+ ]
+ )
+
+ def test_undef_defined(self):
+ self.do_include_compare(
+ [
+ "#define BAR",
+ "#undef BAR",
+ "BAR",
+ ],
+ ["BAR"],
+ )
+
+ def test_undef_undefined(self):
+ self.do_include_compare(
+ [
+ "#undef BAR",
+ ],
+ [],
+ )
+
+ def test_filter_attemptSubstitution(self):
+ self.do_include_compare(
+ [
+ "#filter attemptSubstitution",
+ "@PASS@",
+ "#unfilter attemptSubstitution",
+ ],
+ ["@PASS@"],
+ )
+
+ def test_filter_emptyLines(self):
+ self.do_include_compare(
+ [
+ "lines with a",
+ "",
+ "blank line",
+ "#filter emptyLines",
+ "lines with",
+ "",
+ "no blank lines",
+ "#unfilter emptyLines",
+ "yet more lines with",
+ "",
+ "blank lines",
+ ],
+ [
+ "lines with a",
+ "",
+ "blank line",
+ "lines with",
+ "no blank lines",
+ "yet more lines with",
+ "",
+ "blank lines",
+ ],
+ )
+
+ def test_filter_dumbComments(self):
+ self.do_include_compare(
+ [
+ "#filter dumbComments",
+ "PASS//PASS // PASS",
+ " //FAIL",
+ "// FAIL",
+ "PASS //",
+ "PASS // FAIL",
+ "//",
+ "",
+ "#unfilter dumbComments",
+ "// PASS",
+ ],
+ [
+ "PASS//PASS // PASS",
+ "",
+ "",
+ "PASS //",
+ "PASS // FAIL",
+ "",
+ "",
+ "// PASS",
+ ],
+ )
+
+ def test_filter_dumbComments_and_emptyLines(self):
+ self.do_include_compare(
+ [
+ "#filter dumbComments emptyLines",
+ "PASS//PASS // PASS",
+ " //FAIL",
+ "// FAIL",
+ "PASS //",
+ "PASS // FAIL",
+ "//",
+ "",
+ "#unfilter dumbComments emptyLines",
+ "",
+ "// PASS",
+ ],
+ [
+ "PASS//PASS // PASS",
+ "PASS //",
+ "PASS // FAIL",
+ "",
+ "// PASS",
+ ],
+ )
+
+ def test_filter_substitution(self):
+ self.do_include_pass(
+ [
+ "#define VAR ASS",
+ "#filter substitution",
+ "P@VAR@",
+ "#unfilter substitution",
+ ]
+ )
+
+ def test_error(self):
+ with MockedOpen({"f": "#error spit this message out\n"}):
+ with self.assertRaises(Preprocessor.Error) as e:
+ self.pp.do_include("f")
+ self.assertEqual(e.args[0][-1], "spit this message out")
+
+ def test_ambigous_command(self):
+ comment = "# if I tell you a joke\n"
+ with MockedOpen({"f": comment}):
+ with self.assertRaises(Preprocessor.Error) as e:
+ self.pp.do_include("f")
+ the_exception = e.exception
+ self.assertEqual(the_exception.args[0][-1], comment)
+
+ def test_javascript_line(self):
+ # The preprocessor is reading the filename from somewhere not caught
+ # by MockedOpen.
+ tmpdir = mkdtemp()
+ try:
+ full = os.path.join(tmpdir, "javascript_line.js.in")
+ with open(full, "w") as fh:
+ fh.write(
+ "\n".join(
+ [
+ "// Line 1",
+ "#if 0",
+ "// line 3",
+ "#endif",
+ "// line 5",
+ "# comment",
+ "// line 7",
+ "// line 8",
+ "// line 9",
+ "# another comment",
+ "// line 11",
+ "#define LINE 1",
+ "// line 13, given line number overwritten with 2",
+ "",
+ ]
+ )
+ )
+
+ self.pp.do_include(full)
+ out = "\n".join(
+ [
+ "// Line 1",
+ '//@line 5 "CWDjavascript_line.js.in"',
+ "// line 5",
+ '//@line 7 "CWDjavascript_line.js.in"',
+ "// line 7",
+ "// line 8",
+ "// line 9",
+ '//@line 11 "CWDjavascript_line.js.in"',
+ "// line 11",
+ '//@line 2 "CWDjavascript_line.js.in"',
+ "// line 13, given line number overwritten with 2",
+ "",
+ ]
+ )
+ out = out.replace("CWD", tmpdir + os.path.sep)
+ self.assertEqual(self.pp.out.getvalue(), out)
+ finally:
+ shutil.rmtree(tmpdir)
+
+ def test_literal(self):
+ self.do_include_pass(
+ [
+ "#literal PASS",
+ ]
+ )
+
+ def test_var_directory(self):
+ self.do_include_pass(
+ [
+ "#ifdef DIRECTORY",
+ "PASS",
+ "#else",
+ "FAIL",
+ "#endif",
+ ]
+ )
+
+ def test_var_file(self):
+ self.do_include_pass(
+ [
+ "#ifdef FILE",
+ "PASS",
+ "#else",
+ "FAIL",
+ "#endif",
+ ]
+ )
+
+ def test_var_if_0(self):
+ self.do_include_pass(
+ [
+ "#define VAR 0",
+ "#if VAR",
+ "FAIL",
+ "#else",
+ "PASS",
+ "#endif",
+ ]
+ )
+
+ def test_var_if_0_elifdef(self):
+ self.do_include_pass(
+ [
+ "#if 0",
+ "#elifdef FILE",
+ "PASS",
+ "#else",
+ "FAIL",
+ "#endif",
+ ]
+ )
+
+ def test_var_if_0_elifndef(self):
+ self.do_include_pass(
+ [
+ "#if 0",
+ "#elifndef VAR",
+ "PASS",
+ "#else",
+ "FAIL",
+ "#endif",
+ ]
+ )
+
+ def test_var_ifdef_0(self):
+ self.do_include_pass(
+ [
+ "#define VAR 0",
+ "#ifdef VAR",
+ "PASS",
+ "#else",
+ "FAIL",
+ "#endif",
+ ]
+ )
+
+ def test_var_ifdef_1_or_undef(self):
+ self.do_include_pass(
+ [
+ "#define FOO 1",
+ "#if defined(FOO) || defined(BAR)",
+ "PASS",
+ "#else",
+ "FAIL",
+ "#endif",
+ ]
+ )
+
+ def test_var_ifdef_undef(self):
+ self.do_include_pass(
+ [
+ "#define VAR 0",
+ "#undef VAR",
+ "#ifdef VAR",
+ "FAIL",
+ "#else",
+ "PASS",
+ "#endif",
+ ]
+ )
+
+ def test_var_ifndef_0(self):
+ self.do_include_pass(
+ [
+ "#define VAR 0",
+ "#ifndef VAR",
+ "FAIL",
+ "#else",
+ "PASS",
+ "#endif",
+ ]
+ )
+
+ def test_var_ifndef_0_and_undef(self):
+ self.do_include_pass(
+ [
+ "#define FOO 0",
+ "#if !defined(FOO) && !defined(BAR)",
+ "FAIL",
+ "#else",
+ "PASS",
+ "#endif",
+ ]
+ )
+
+ def test_var_ifndef_undef(self):
+ self.do_include_pass(
+ [
+ "#define VAR 0",
+ "#undef VAR",
+ "#ifndef VAR",
+ "PASS",
+ "#else",
+ "FAIL",
+ "#endif",
+ ]
+ )
+
+ def test_var_line(self):
+ self.do_include_pass(
+ [
+ "#ifdef LINE",
+ "PASS",
+ "#else",
+ "FAIL",
+ "#endif",
+ ]
+ )
+
+ def test_filterDefine(self):
+ self.do_include_pass(
+ [
+ "#filter substitution",
+ "#define VAR AS",
+ "#define VAR2 P@VAR@",
+ "@VAR2@S",
+ ]
+ )
+
+ def test_number_value_equals(self):
+ self.do_include_pass(
+ [
+ "#define FOO 1000",
+ "#if FOO == 1000",
+ "PASS",
+ "#else",
+ "FAIL",
+ "#endif",
+ ]
+ )
+
+ def test_default_defines(self):
+ self.pp.handleCommandLine(["-DFOO"])
+ self.do_include_pass(
+ [
+ "#if FOO == 1",
+ "PASS",
+ "#else",
+ "FAIL",
+ ]
+ )
+
+ def test_number_value_equals_defines(self):
+ self.pp.handleCommandLine(["-DFOO=1000"])
+ self.do_include_pass(
+ [
+ "#if FOO == 1000",
+ "PASS",
+ "#else",
+ "FAIL",
+ ]
+ )
+
+ def test_octal_value_equals(self):
+ self.do_include_pass(
+ [
+ "#define FOO 0100",
+ "#if FOO == 0100",
+ "PASS",
+ "#else",
+ "FAIL",
+ "#endif",
+ ]
+ )
+
+ def test_octal_value_equals_defines(self):
+ self.pp.handleCommandLine(["-DFOO=0100"])
+ self.do_include_pass(
+ [
+ "#if FOO == 0100",
+ "PASS",
+ "#else",
+ "FAIL",
+ "#endif",
+ ]
+ )
+
+ def test_value_quoted_expansion(self):
+ """
+        Quoted values on the command line don't currently have quotes stripped.
+        Pike says this is for compatibility reasons.
+ """
+ self.pp.handleCommandLine(['-DFOO="ABCD"'])
+ self.do_include_compare(
+ [
+ "#filter substitution",
+ "@FOO@",
+ ],
+ ['"ABCD"'],
+ )
+
+ def test_octal_value_quoted_expansion(self):
+ self.pp.handleCommandLine(['-DFOO="0100"'])
+ self.do_include_compare(
+ [
+ "#filter substitution",
+ "@FOO@",
+ ],
+ ['"0100"'],
+ )
+
+ def test_number_value_not_equals_quoted_defines(self):
+ self.pp.handleCommandLine(['-DFOO="1000"'])
+ self.do_include_pass(
+ [
+ "#if FOO == 1000",
+ "FAIL",
+ "#else",
+ "PASS",
+ "#endif",
+ ]
+ )
+
+ def test_octal_value_not_equals_quoted_defines(self):
+ self.pp.handleCommandLine(['-DFOO="0100"'])
+ self.do_include_pass(
+ [
+ "#if FOO == 0100",
+ "FAIL",
+ "#else",
+ "PASS",
+ "#endif",
+ ]
+ )
+
+ def test_undefined_variable(self):
+ with MockedOpen({"f": "#filter substitution\n@foo@"}):
+ with self.assertRaises(Preprocessor.Error) as e:
+ self.pp.do_include("f")
+ self.assertEqual(e.key, "UNDEFINED_VAR")
+
+ def test_include(self):
+ files = {
+ "foo/test": "\n".join(
+ [
+ "#define foo foobarbaz",
+ "#include @inc@",
+ "@bar@",
+ "",
+ ]
+ ),
+ "bar": "\n".join(
+ [
+ "#define bar barfoobaz",
+ "@foo@",
+ "",
+ ]
+ ),
+ "f": "\n".join(
+ [
+ "#filter substitution",
+ "#define inc ../bar",
+ "#include foo/test",
+ "",
+ ]
+ ),
+ }
+
+ with MockedOpen(files):
+ self.pp.do_include("f")
+ self.assertEqual(self.pp.out.getvalue(), "foobarbaz\nbarfoobaz\n")
+
+ def test_include_line(self):
+ files = {
+ "srcdir/test.js": "\n".join(
+ [
+ "#define foo foobarbaz",
+ "#include @inc@",
+ "@bar@",
+ "",
+ ]
+ ),
+ "srcdir/bar.js": "\n".join(
+ [
+ "#define bar barfoobaz",
+ "@foo@",
+ "",
+ ]
+ ),
+ "srcdir/foo.js": "\n".join(
+ [
+ "bazfoobar",
+ "#include bar.js",
+ "bazbarfoo",
+ "",
+ ]
+ ),
+ "objdir/baz.js": "baz\n",
+ "srcdir/f.js": "\n".join(
+ [
+ "#include foo.js",
+ "#filter substitution",
+ "#define inc bar.js",
+ "#include test.js",
+ "#include ../objdir/baz.js",
+ "fin",
+ "",
+ ]
+ ),
+ }
+
+ preprocessed = (
+ '//@line 1 "$SRCDIR/foo.js"\n'
+ "bazfoobar\n"
+ '//@line 2 "$SRCDIR/bar.js"\n'
+ "@foo@\n"
+ '//@line 3 "$SRCDIR/foo.js"\n'
+ "bazbarfoo\n"
+ '//@line 2 "$SRCDIR/bar.js"\n'
+ "foobarbaz\n"
+ '//@line 3 "$SRCDIR/test.js"\n'
+ "barfoobaz\n"
+ '//@line 1 "$OBJDIR/baz.js"\n'
+ "baz\n"
+ '//@line 6 "$SRCDIR/f.js"\n'
+ "fin\n"
+ )
+
+ # Try with separate srcdir/objdir
+ with MockedOpen(files):
+ self.pp.topsrcdir = os.path.abspath("srcdir")
+ self.pp.topobjdir = os.path.abspath("objdir")
+ self.pp.do_include("srcdir/f.js")
+ self.assertEqual(self.pp.out.getvalue(), preprocessed)
+
+ # Try again with relative objdir
+ self.setUp()
+ files["srcdir/objdir/baz.js"] = files["objdir/baz.js"]
+ del files["objdir/baz.js"]
+ files["srcdir/f.js"] = files["srcdir/f.js"].replace("../", "")
+ with MockedOpen(files):
+ self.pp.topsrcdir = os.path.abspath("srcdir")
+ self.pp.topobjdir = os.path.abspath("srcdir/objdir")
+ self.pp.do_include("srcdir/f.js")
+ self.assertEqual(self.pp.out.getvalue(), preprocessed)
+
+ def test_include_missing_file(self):
+ with MockedOpen({"f": "#include foo\n"}):
+ with self.assertRaises(Preprocessor.Error) as e:
+ self.pp.do_include("f")
+ self.assertEqual(e.exception.key, "FILE_NOT_FOUND")
+
+ def test_include_undefined_variable(self):
+ with MockedOpen({"f": "#filter substitution\n#include @foo@\n"}):
+ with self.assertRaises(Preprocessor.Error) as e:
+ self.pp.do_include("f")
+ self.assertEqual(e.exception.key, "UNDEFINED_VAR")
+
+ def test_include_literal_at(self):
+ files = {
+ "@foo@": "#define foo foobarbaz\n",
+ "f": "#include @foo@\n#filter substitution\n@foo@\n",
+ }
+
+ with MockedOpen(files):
+ self.pp.do_include("f")
+ self.assertEqual(self.pp.out.getvalue(), "foobarbaz\n")
+
+ def test_command_line_literal_at(self):
+ with MockedOpen({"@foo@.in": "@foo@\n"}):
+ self.pp.handleCommandLine(["-Fsubstitution", "-Dfoo=foobarbaz", "@foo@.in"])
+ self.assertEqual(self.pp.out.getvalue(), "foobarbaz\n")
+
+ def test_invalid_ifdef(self):
+ with MockedOpen({"dummy": "#ifdef FOO == BAR\nPASS\n#endif"}):
+ with self.assertRaises(Preprocessor.Error) as e:
+ self.pp.do_include("dummy")
+ self.assertEqual(e.exception.key, "INVALID_VAR")
+
+ with MockedOpen({"dummy": "#ifndef FOO == BAR\nPASS\n#endif"}):
+ with self.assertRaises(Preprocessor.Error) as e:
+ self.pp.do_include("dummy")
+ self.assertEqual(e.exception.key, "INVALID_VAR")
+
+        # Trailing whitespace, while not nice, shouldn't be an error.
+ self.do_include_pass(
+ [
+ "#ifndef FOO ",
+ "PASS",
+ "#endif",
+ ]
+ )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/test_pythonutil.py b/python/mozbuild/mozbuild/test/test_pythonutil.py
new file mode 100644
index 0000000000..6ebb5cc46e
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_pythonutil.py
@@ -0,0 +1,24 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+
+from mozunit import main
+
+from mozbuild.pythonutil import iter_modules_in_path
+
+
+def test_iter_modules_in_path():
+ tests_path = os.path.normcase(os.path.dirname(__file__))
+ paths = list(iter_modules_in_path(tests_path))
+ assert set(paths) == set(
+ [
+ os.path.join(os.path.abspath(tests_path), "__init__.py"),
+ os.path.join(os.path.abspath(tests_path), "test_pythonutil.py"),
+ ]
+ )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/test_rewrite_mozbuild.py b/python/mozbuild/mozbuild/test/test_rewrite_mozbuild.py
new file mode 100644
index 0000000000..467295c9e9
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_rewrite_mozbuild.py
@@ -0,0 +1,515 @@
+# coding: utf-8
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import tempfile
+import unittest
+
+from mozunit import main
+
+import mozbuild.vendor.rewrite_mozbuild as mu
+
+SAMPLE_PIXMAN_MOZBUILD = """
+if CONFIG['OS_ARCH'] != 'Darwin' and CONFIG['CC_TYPE'] in ('clang', 'gcc'):
+ if CONFIG['HAVE_ARM_NEON']:
+ SOURCES += [
+ "pixman-arm-neon-asm-bilinear.S",
+ "pixman-arm-neon-asm.S",
+ ]
+ if CONFIG['HAVE_ARM_SIMD']:
+ SOURCES += [
+ 'pixman-arm-simd-asm-scaled.S',
+ 'pixman-arm-simd-asm.S']
+
+SOURCES += ['pixman-region32.c',
+ 'pixman-solid-fill.c',
+ 'pixman-trap.c',
+ 'pixman-utils.c',
+ 'pixman-x86.c',
+ 'pixman.c',
+]
+
+if use_sse2:
+ DEFINES['USE_SSE'] = True
+ DEFINES['USE_SSE2'] = True
+ SOURCES += ['pixman-sse2.c']
+ SOURCES['pixman-sse2.c'].flags += CONFIG['SSE_FLAGS'] + CONFIG['SSE2_FLAGS']
+ if CONFIG['CC_TYPE'] in ('clang', 'gcc'):
+ SOURCES['pixman-sse2.c'].flags += ['-Winline']
+"""
+
+SAMPLE_DAV1D_MOZBUILD = """
+SOURCES += [
+ '../../third_party/dav1d/src/cdf.c',
+ '../../third_party/dav1d/src/cpu.c',
+ ]
+EXPORTS = [
+ '../../third_party/dav1d/src/header1.h',
+ '../../third_party/dav1d/src/header2.h',
+ ]
+"""
+
+
+SAMPLE_JPEGXL_MOZBUILD = """
+SOURCES += [
+ "/third_party/jpeg-xl/lib/jxl/ac_strategy.cc",
+ "/third_party/jpeg-xl/lib/jxl/alpha.cc",
+ "/third_party/jpeg-xl/lib/jxl/ans_common.cc",
+ "/third_party/jpeg-xl/lib/jxl/aux_out.cc",
+ ]
+EXPORTS.bob.carol = [
+ "/third_party/jpeg-xl/lib/jxl/header1.hpp",
+ "/third_party/jpeg-xl/lib/jxl/header2.h",
+]
+"""
+
+
+def _make_mozbuild_directory_structure(mozbuild_path, contents):
+ d = tempfile.TemporaryDirectory()
+ os.makedirs(os.path.join(d.name, os.path.split(mozbuild_path)[0]))
+
+ arcconfig = open(os.path.join(d.name, ".arcconfig"), mode="w")
+ arcconfig.close()
+
+ mozbuild = open(os.path.join(d.name, mozbuild_path), mode="w")
+ mozbuild.write(contents)
+ mozbuild.close()
+
+ return d
+
+
+class TestUtils(unittest.TestCase):
+ def test_normalize_filename(self):
+ self.assertEqual(mu.normalize_filename("foo/bar/moz.build", "/"), "/")
+ self.assertEqual(
+ mu.normalize_filename("foo/bar/moz.build", "a.c"), "foo/bar/a.c"
+ )
+ self.assertEqual(
+ mu.normalize_filename("foo/bar/moz.build", "baz/a.c"), "foo/bar/baz/a.c"
+ )
+ self.assertEqual(mu.normalize_filename("foo/bar/moz.build", "/a.c"), "/a.c")
+
+ def test_unnormalize_filename(self):
+ test_vectors = [
+ ("foo/bar/moz.build", "/"),
+ ("foo/bar/moz.build", "a.c"),
+ ("foo/bar/moz.build", "baz/a.c"),
+ ("foo/bar/moz.build", "/a.c"),
+ ]
+
+ for vector in test_vectors:
+ mozbuild, file = vector
+ self.assertEqual(
+ mu.unnormalize_filename(
+ mozbuild, mu.normalize_filename(mozbuild, file)
+ ),
+ file,
+ )
+
+ def test_find_all_posible_assignments_from_filename(self):
+ test_vectors = [
+ # (
+ # target_filename_normalized
+ # source_assignments
+ # expected
+ # )
+ (
+ "root/dir/asm/blah.S",
+ {
+ "> SOURCES": ["root/dir/main.c"],
+ "> if conditional > SOURCES": ["root/dir/asm/blah.S"],
+ },
+ {"> if conditional > SOURCES": ["root/dir/asm/blah.S"]},
+ ),
+ (
+ "root/dir/dostuff.c",
+ {
+ "> SOURCES": ["root/dir/main.c"],
+ "> if conditional > SOURCES": ["root/dir/asm/blah.S"],
+ },
+ {"> SOURCES": ["root/dir/main.c"]},
+ ),
+ ]
+
+ for vector in test_vectors:
+ target_filename_normalized, source_assignments, expected = vector
+ actual = mu.find_all_posible_assignments_from_filename(
+ source_assignments, target_filename_normalized
+ )
+ self.assertEqual(actual, expected)
+
+ def test_filenames_directory_is_in_filename_list(self):
+ test_vectors = [
+ # (
+ # normalized filename
+ # list of normalized_filenames
+ # expected
+ # )
+ ("foo/bar/a.c", ["foo/b.c"], False),
+ ("foo/bar/a.c", ["foo/b.c", "foo/bar/c.c"], True),
+ ("foo/bar/a.c", ["foo/b.c", "foo/bar/baz/d.c"], False),
+ ]
+ for vector in test_vectors:
+ normalized_filename, list_of_normalized_filesnames, expected = vector
+ actual = mu.filenames_directory_is_in_filename_list(
+ normalized_filename, list_of_normalized_filesnames
+ )
+ self.assertEqual(actual, expected)
+
+ def test_guess_best_assignment(self):
+ test_vectors = [
+ # (
+ # filename_normalized
+ # source_assignments
+ # expected
+ # )
+ (
+ "foo/asm_arm.c",
+ {
+ "> SOURCES": ["foo/main.c", "foo/all_utility.c"],
+ "> if ASM > SOURCES": ["foo/asm_x86.c"],
+ },
+ "> if ASM > SOURCES",
+ )
+ ]
+ for vector in test_vectors:
+ normalized_filename, source_assignments, expected = vector
+ actual, _ = mu.guess_best_assignment(
+ source_assignments, normalized_filename
+ )
+ self.assertEqual(actual, expected)
+
+ def test_mozbuild_removing(self):
+ test_vectors = [
+ (
+ "media/dav1d/moz.build",
+ SAMPLE_DAV1D_MOZBUILD,
+ "third_party/dav1d/src/cdf.c",
+ "media/dav1d/",
+ "third-party/dav1d/",
+ " '../../third_party/dav1d/src/cdf.c',\n",
+ ),
+ (
+ "media/dav1d/moz.build",
+ SAMPLE_DAV1D_MOZBUILD,
+ "third_party/dav1d/src/header1.h",
+ "media/dav1d/",
+ "third-party/dav1d/",
+ " '../../third_party/dav1d/src/header1.h',\n",
+ ),
+ (
+ "media/jxl/moz.build",
+ SAMPLE_JPEGXL_MOZBUILD,
+ "third_party/jpeg-xl/lib/jxl/alpha.cc",
+ "media/jxl/",
+ "third-party/jpeg-xl/",
+ ' "/third_party/jpeg-xl/lib/jxl/alpha.cc",\n',
+ ),
+ (
+ "media/jxl/moz.build",
+ SAMPLE_JPEGXL_MOZBUILD,
+ "third_party/jpeg-xl/lib/jxl/header1.hpp",
+ "media/jxl/",
+ "third-party/jpeg-xl/",
+ ' "/third_party/jpeg-xl/lib/jxl/header1.hpp",\n',
+ ),
+ ]
+
+ for vector in test_vectors:
+ (
+ mozbuild_path,
+ mozbuild_contents,
+ file_to_remove,
+ moz_yaml_dir,
+ vendoring_dir,
+ replace_str,
+ ) = vector
+
+ startdir = os.getcwd()
+ try:
+ mozbuild_dir = _make_mozbuild_directory_structure(
+ mozbuild_path, mozbuild_contents
+ )
+ os.chdir(mozbuild_dir.name)
+
+ mu.remove_file_from_moz_build_file(
+ file_to_remove,
+ moz_yaml_dir=moz_yaml_dir,
+ vendoring_dir=vendoring_dir,
+ )
+
+ with open(os.path.join(mozbuild_dir.name, mozbuild_path)) as file:
+ contents = file.read()
+
+ expected_output = mozbuild_contents.replace(replace_str, "")
+ if contents != expected_output:
+ print("File to remove:", file_to_remove)
+ print("Contents:")
+ print("-------------------")
+ print(contents)
+ print("-------------------")
+ print("Expected:")
+ print("-------------------")
+ print(expected_output)
+ print("-------------------")
+ self.assertEqual(contents, expected_output)
+ finally:
+ os.chdir(startdir)
+
+ def test_mozbuild_adding(self):
+ test_vectors = [
+ (
+ "media/dav1d/moz.build",
+ SAMPLE_DAV1D_MOZBUILD,
+ "third_party/dav1d/src/cdf2.c",
+ "media/dav1d/",
+ "third-party/dav1d/",
+ "cdf.c',\n",
+ "cdf.c',\n '../../third_party/dav1d/src/cdf2.c',\n",
+ ),
+ (
+ "media/dav1d/moz.build",
+ SAMPLE_DAV1D_MOZBUILD,
+ "third_party/dav1d/src/header3.h",
+ "media/dav1d/",
+ "third-party/dav1d/",
+ "header2.h',\n",
+ "header2.h',\n '../../third_party/dav1d/src/header3.h',\n",
+ ),
+ (
+ "media/jxl/moz.build",
+ SAMPLE_JPEGXL_MOZBUILD,
+ "third_party/jpeg-xl/lib/jxl/alpha2.cc",
+ "media/jxl/",
+ "third-party/jpeg-xl/",
+ 'alpha.cc",\n',
+ 'alpha.cc",\n "/third_party/jpeg-xl/lib/jxl/alpha2.cc",\n',
+ ),
+ (
+ "media/jxl/moz.build",
+ SAMPLE_JPEGXL_MOZBUILD,
+ "third_party/jpeg-xl/lib/jxl/header3.hpp",
+ "media/jxl/",
+ "third-party/jpeg-xl/",
+ 'header2.h",\n',
+ 'header2.h",\n "/third_party/jpeg-xl/lib/jxl/header3.hpp",\n',
+ ),
+ ]
+
+ for vector in test_vectors:
+ (
+ mozbuild_path,
+ mozbuild_contents,
+ file_to_add,
+ moz_yaml_dir,
+ vendoring_dir,
+ search_str,
+ replace_str,
+ ) = vector
+
+ startdir = os.getcwd()
+ try:
+ mozbuild_dir = _make_mozbuild_directory_structure(
+ mozbuild_path, mozbuild_contents
+ )
+ os.chdir(mozbuild_dir.name)
+
+ mu.add_file_to_moz_build_file(
+ file_to_add, moz_yaml_dir=moz_yaml_dir, vendoring_dir=vendoring_dir
+ )
+
+ with open(os.path.join(mozbuild_dir.name, mozbuild_path)) as file:
+ contents = file.read()
+
+ expected_output = mozbuild_contents.replace(search_str, replace_str)
+ if contents != expected_output:
+ print("File to add:", file_to_add)
+ print("Contents:")
+ print("-------------------")
+ print(contents)
+ print("-------------------")
+ print("Expected:")
+ print("-------------------")
+ print(expected_output)
+ print("-------------------")
+ self.assertEqual(contents, expected_output)
+ finally:
+ os.chdir(startdir)
+
+ # This test is legacy. I'm keeping it around, but new test vectors should be added to the
+ # non-internal test to exercise the public API.
+ def test_mozbuild_adding_internal(self):
+ test_vectors = [
+ # (
+ # mozbuild_contents
+ # unnormalized_filename_to_add,
+ # unnormalized_list_of_files
+ # expected_output
+ # )
+ (
+ SAMPLE_PIXMAN_MOZBUILD,
+ "pixman-sse2-more.c",
+ ["pixman-sse2.c"],
+ SAMPLE_PIXMAN_MOZBUILD.replace(
+ "SOURCES += ['pixman-sse2.c']",
+ "SOURCES += ['pixman-sse2-more.c','pixman-sse2.c']",
+ ),
+ ),
+ (
+ SAMPLE_PIXMAN_MOZBUILD,
+ "pixman-trap-more.c",
+ [
+ "pixman-region32.c",
+ "pixman-solid-fill.c",
+ "pixman-trap.c",
+ "pixman-utils.c",
+ "pixman-x86.c",
+ "pixman.c",
+ ],
+ SAMPLE_PIXMAN_MOZBUILD.replace(
+ "'pixman-trap.c',", "'pixman-trap-more.c',\n 'pixman-trap.c',"
+ ),
+ ),
+ (
+ SAMPLE_PIXMAN_MOZBUILD,
+ "pixman-arm-neon-asm-more.S",
+ ["pixman-arm-neon-asm-bilinear.S", "pixman-arm-neon-asm.S"],
+ SAMPLE_PIXMAN_MOZBUILD.replace(
+ '"pixman-arm-neon-asm.S"',
+ '"pixman-arm-neon-asm-more.S",\n "pixman-arm-neon-asm.S"',
+ ),
+ ),
+ (
+ SAMPLE_PIXMAN_MOZBUILD,
+ "pixman-arm-simd-asm-smore.S",
+ ["pixman-arm-simd-asm-scaled.S", "pixman-arm-simd-asm.S"],
+ SAMPLE_PIXMAN_MOZBUILD.replace(
+ "'pixman-arm-simd-asm.S'",
+ "'pixman-arm-simd-asm-smore.S',\n 'pixman-arm-simd-asm.S'",
+ ),
+ ),
+ (
+ SAMPLE_PIXMAN_MOZBUILD,
+ "pixman-arm-simd-asn.S",
+ ["pixman-arm-simd-asm-scaled.S", "pixman-arm-simd-asm.S"],
+ SAMPLE_PIXMAN_MOZBUILD.replace(
+ "'pixman-arm-simd-asm.S'",
+ "'pixman-arm-simd-asm.S',\n 'pixman-arm-simd-asn.S'",
+ ),
+ ),
+ ]
+
+ for vector in test_vectors:
+ (
+ mozbuild_contents,
+ unnormalized_filename_to_add,
+ unnormalized_list_of_files,
+ expected_output,
+ ) = vector
+
+ fd, filename = tempfile.mkstemp(text=True)
+ os.close(fd)
+ file = open(filename, mode="w")
+ file.write(mozbuild_contents)
+ file.close()
+
+ mu.edit_moz_build_file_to_add_file(
+ filename, unnormalized_filename_to_add, unnormalized_list_of_files
+ )
+
+ with open(filename) as file:
+ contents = file.read()
+ os.remove(filename)
+
+ if contents != expected_output:
+ print("File to add:", unnormalized_filename_to_add)
+ print("Contents:")
+ print("-------------------")
+ print(contents)
+ print("-------------------")
+ print("Expected:")
+ print("-------------------")
+ print(expected_output)
+ print("-------------------")
+ self.assertEqual(contents, expected_output)
+
+ # This test is legacy. I'm keeping it around, but new test vectors should be added to the
+ # non-internal test to exercise the public API.
+ def test_mozbuild_removing_internal(self):
+ test_vectors = [
+ # (
+ # mozbuild_contents
+ # unnormalized_filename_to_add
+ # expected_output
+ # )
+ (
+ SAMPLE_PIXMAN_MOZBUILD,
+ "pixman-sse2.c",
+ SAMPLE_PIXMAN_MOZBUILD.replace(
+ "SOURCES += ['pixman-sse2.c']", "SOURCES += []"
+ ),
+ ),
+ (
+ SAMPLE_PIXMAN_MOZBUILD,
+ "pixman-trap.c",
+ SAMPLE_PIXMAN_MOZBUILD.replace(" 'pixman-trap.c',\n", ""),
+ ),
+ (
+ SAMPLE_PIXMAN_MOZBUILD,
+ "pixman-arm-neon-asm.S",
+ SAMPLE_PIXMAN_MOZBUILD.replace(
+ ' "pixman-arm-neon-asm.S",\n', ""
+ ),
+ ),
+ (
+ SAMPLE_PIXMAN_MOZBUILD,
+ "pixman-arm-simd-asm.S",
+ SAMPLE_PIXMAN_MOZBUILD.replace(
+ " 'pixman-arm-simd-asm.S'", " "
+ ),
+ ),
+ (
+ SAMPLE_PIXMAN_MOZBUILD,
+ "pixman-region32.c",
+ SAMPLE_PIXMAN_MOZBUILD.replace("'pixman-region32.c',", ""),
+ ),
+ ]
+
+ for vector in test_vectors:
+ (
+ mozbuild_contents,
+ unnormalized_filename_to_remove,
+ expected_output,
+ ) = vector
+
+ fd, filename = tempfile.mkstemp(text=True)
+ os.close(fd)
+ file = open(filename, mode="w")
+ file.write(mozbuild_contents)
+ file.close()
+
+ mu.edit_moz_build_file_to_remove_file(
+ filename, unnormalized_filename_to_remove
+ )
+
+ with open(filename) as file:
+ contents = file.read()
+ os.remove(filename)
+
+ if contents != expected_output:
+ print("File to remove:", unnormalized_filename_to_remove)
+ print("Contents:")
+ print("-------------------")
+ print(contents)
+ print("-------------------")
+ print("Expected:")
+ print("-------------------")
+ print(expected_output)
+ print("-------------------")
+ self.assertEqual(contents, expected_output)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/test_telemetry.py b/python/mozbuild/mozbuild/test/test_telemetry.py
new file mode 100644
index 0000000000..894e32ee2d
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_telemetry.py
@@ -0,0 +1,102 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+import os
+
+import buildconfig
+import mozunit
+
+from mozbuild.telemetry import filter_args
+
+TELEMETRY_LOAD_ERROR = """
+Error loading telemetry. mach output:
+=========================================================
+%s
+=========================================================
+"""
+
+
+def test_path_filtering():
+ srcdir_path = os.path.join(buildconfig.topsrcdir, "a")
+ srcdir_path_2 = os.path.join(buildconfig.topsrcdir, "a/b/c")
+ objdir_path = os.path.join(buildconfig.topobjdir, "x")
+ objdir_path_2 = os.path.join(buildconfig.topobjdir, "x/y/z")
+ home_path = os.path.join(os.path.expanduser("~"), "something_in_home")
+ other_path = "/other/path"
+ args = filter_args(
+ "pass",
+ [
+ "python",
+ "-c",
+ "pass",
+ srcdir_path,
+ srcdir_path_2,
+ objdir_path,
+ objdir_path_2,
+ home_path,
+ other_path,
+ ],
+ buildconfig.topsrcdir,
+ buildconfig.topobjdir,
+ cwd=buildconfig.topsrcdir,
+ )
+
+ expected = [
+ "a",
+ "a/b/c",
+ "$topobjdir/x",
+ "$topobjdir/x/y/z",
+ "$HOME/something_in_home",
+ "<path omitted>",
+ ]
+ assert args == expected
+
+
+def test_path_filtering_in_objdir():
+ srcdir_path = os.path.join(buildconfig.topsrcdir, "a")
+ srcdir_path_2 = os.path.join(buildconfig.topsrcdir, "a/b/c")
+ objdir_path = os.path.join(buildconfig.topobjdir, "x")
+ objdir_path_2 = os.path.join(buildconfig.topobjdir, "x/y/z")
+ other_path = "/other/path"
+ args = filter_args(
+ "pass",
+ [
+ "python",
+ "-c",
+ "pass",
+ srcdir_path,
+ srcdir_path_2,
+ objdir_path,
+ objdir_path_2,
+ other_path,
+ ],
+ buildconfig.topsrcdir,
+ buildconfig.topobjdir,
+ cwd=buildconfig.topobjdir,
+ )
+ expected = ["$topsrcdir/a", "$topsrcdir/a/b/c", "x", "x/y/z", "<path omitted>"]
+ assert args == expected
+
+
+def test_path_filtering_other_cwd(tmpdir):
+ srcdir_path = os.path.join(buildconfig.topsrcdir, "a")
+ srcdir_path_2 = os.path.join(buildconfig.topsrcdir, "a/b/c")
+ other_path = str(tmpdir.join("other"))
+ args = filter_args(
+ "pass",
+ ["python", "-c", "pass", srcdir_path, srcdir_path_2, other_path],
+ buildconfig.topsrcdir,
+ buildconfig.topobjdir,
+ cwd=str(tmpdir),
+ )
+ expected = [
+ "$topsrcdir/a",
+ "$topsrcdir/a/b/c",
+ # cwd-relative paths should be relativized
+ "other",
+ ]
+ assert args == expected
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozbuild/test/test_telemetry_settings.py b/python/mozbuild/mozbuild/test/test_telemetry_settings.py
new file mode 100644
index 0000000000..2d50141a15
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_telemetry_settings.py
@@ -0,0 +1,174 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+from unittest import mock
+from unittest.mock import Mock
+
+import mozunit
+import pytest
+import requests
+from mach.config import ConfigSettings
+from mach.decorators import SettingsProvider
+from mach.telemetry import (
+ initialize_telemetry_setting,
+ record_telemetry_settings,
+ resolve_is_employee,
+)
+
+from mozbuild.settings import TelemetrySettings
+
+
+@SettingsProvider
+class OtherSettings:
+ config_settings = [("foo.bar", "int", "", 1), ("build.abc", "string", "", "")]
+
+
+def record_enabled_telemetry(mozbuild_path, settings):
+ record_telemetry_settings(settings, mozbuild_path, True)
+
+
+@pytest.fixture
+def settings():
+ s = ConfigSettings()
+ s.register_provider(TelemetrySettings)
+ s.register_provider(OtherSettings)
+ return s
+
+
+def load_settings_file(mozbuild_path, settings):
+ settings.load_file(os.path.join(mozbuild_path, "machrc"))
+
+
+def write_config(mozbuild_path, contents):
+ with open(os.path.join(mozbuild_path, "machrc"), "w") as f:
+ f.write(contents)
+
+
+def test_nonexistent(tmpdir, settings):
+ record_enabled_telemetry(tmpdir, settings)
+ load_settings_file(tmpdir, settings)
+ assert settings.mach_telemetry.is_enabled
+
+
+def test_file_exists_no_build_section(tmpdir, settings):
+ write_config(
+ tmpdir,
+ """[foo]
+bar = 2
+""",
+ )
+ record_enabled_telemetry(tmpdir, settings)
+ load_settings_file(tmpdir, settings)
+ assert settings.mach_telemetry.is_enabled
+ assert settings.foo.bar == 2
+
+
+def test_existing_build_section(tmpdir, settings):
+ write_config(
+ tmpdir,
+ """[foo]
+bar = 2
+
+[build]
+abc = xyz
+""",
+ )
+ record_enabled_telemetry(tmpdir, settings)
+ load_settings_file(tmpdir, settings)
+ assert settings.mach_telemetry.is_enabled
+ assert settings.build.abc == "xyz"
+ assert settings.foo.bar == 2
+
+
+def test_malformed_file(tmpdir, settings):
+ """Ensure that a malformed config file doesn't cause breakage."""
+ write_config(
+ tmpdir,
+ """[foo
+bar = 1
+""",
+ )
+ record_enabled_telemetry(tmpdir, settings)
+    # Can't load the settings file here: it will not have been written!
+
+
+def _initialize_telemetry(settings, is_employee, contributor_prompt_response=None):
+ with mock.patch(
+ "mach.telemetry.resolve_is_employee", return_value=is_employee
+ ), mock.patch(
+ "mach.telemetry.prompt_telemetry_message_contributor",
+ return_value=contributor_prompt_response,
+ ) as prompt_mock, mock.patch(
+ "subprocess.run", return_value=Mock(returncode=0)
+ ), mock.patch(
+ "mach.config.ConfigSettings"
+ ):
+ initialize_telemetry_setting(settings, "", "")
+ return prompt_mock.call_count == 1
+
+
+def test_initialize_new_contributor_deny_telemetry(settings):
+ did_prompt = _initialize_telemetry(settings, False, False)
+ assert did_prompt
+ assert not settings.mach_telemetry.is_enabled
+ assert settings.mach_telemetry.is_set_up
+ assert settings.mach_telemetry.is_done_first_time_setup
+
+
+def test_initialize_new_contributor_allow_telemetry(settings):
+ did_prompt = _initialize_telemetry(settings, False, True)
+ assert did_prompt
+ assert settings.mach_telemetry.is_enabled
+ assert settings.mach_telemetry.is_set_up
+ assert settings.mach_telemetry.is_done_first_time_setup
+
+
+def test_initialize_new_employee(settings):
+ did_prompt = _initialize_telemetry(settings, True)
+ assert not did_prompt
+ assert settings.mach_telemetry.is_enabled
+ assert settings.mach_telemetry.is_set_up
+ assert settings.mach_telemetry.is_done_first_time_setup
+
+
+def test_initialize_noop_when_telemetry_disabled_env(monkeypatch):
+ monkeypatch.setenv("DISABLE_TELEMETRY", "1")
+ with mock.patch("mach.telemetry.record_telemetry_settings") as record_mock:
+ did_prompt = _initialize_telemetry(None, False)
+ assert record_mock.call_count == 0
+ assert not did_prompt
+
+
+def test_initialize_noop_when_request_error(settings):
+ with mock.patch(
+ "mach.telemetry.resolve_is_employee",
+ side_effect=requests.exceptions.RequestException("Unlucky"),
+ ), mock.patch("mach.telemetry.record_telemetry_settings") as record_mock:
+ initialize_telemetry_setting(None, None, None)
+ assert record_mock.call_count == 0
+
+
+def test_resolve_is_employee():
+ def mock_and_run(is_employee_bugzilla, is_employee_vcs):
+ with mock.patch(
+ "mach.telemetry.resolve_is_employee_by_credentials",
+ return_value=is_employee_bugzilla,
+ ), mock.patch(
+ "mach.telemetry.resolve_is_employee_by_vcs", return_value=is_employee_vcs
+ ):
+ return resolve_is_employee(None)
+
+ assert not mock_and_run(False, False)
+ assert not mock_and_run(False, True)
+ assert not mock_and_run(False, None)
+ assert mock_and_run(True, False)
+ assert mock_and_run(True, True)
+ assert mock_and_run(True, None)
+ assert not mock_and_run(None, False)
+ assert mock_and_run(None, True)
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozbuild/test/test_util.py b/python/mozbuild/mozbuild/test/test_util.py
new file mode 100644
index 0000000000..9931b338b9
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_util.py
@@ -0,0 +1,889 @@
+# coding: utf-8
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import copy
+import hashlib
+import itertools
+import os
+import string
+import sys
+import unittest
+
+import pytest
+import six
+from mozfile.mozfile import NamedTemporaryFile
+from mozunit import main
+
+from mozbuild.util import (
+ EnumString,
+ EnumStringComparisonError,
+ HierarchicalStringList,
+ MozbuildDeletionError,
+ ReadOnlyDict,
+ StrictOrderingOnAppendList,
+ StrictOrderingOnAppendListWithAction,
+ StrictOrderingOnAppendListWithFlagsFactory,
+ TypedList,
+ TypedNamedTuple,
+ UnsortedError,
+ expand_variables,
+ group_unified_files,
+ hash_file,
+ hexdump,
+ memoize,
+ memoized_property,
+ pair,
+ resolve_target_to_make,
+)
+
+if sys.version_info[0] == 3:
+ str_type = "str"
+else:
+ str_type = "unicode"
+
+data_path = os.path.abspath(os.path.dirname(__file__))
+data_path = os.path.join(data_path, "data")
+
+
+class TestHashing(unittest.TestCase):
+ def test_hash_file_known_hash(self):
+ """Ensure a known hash value is recreated."""
+ data = b"The quick brown fox jumps over the lazy cog"
+ expected = "de9f2c7fd25e1b3afad3e85a0bd17d9b100db4b3"
+
+ temp = NamedTemporaryFile()
+ temp.write(data)
+ temp.flush()
+
+ actual = hash_file(temp.name)
+
+ self.assertEqual(actual, expected)
+
+ def test_hash_file_large(self):
+ """Ensure that hash_file seems to work with a large file."""
+ data = b"x" * 1048576
+
+ hasher = hashlib.sha1()
+ hasher.update(data)
+ expected = hasher.hexdigest()
+
+ temp = NamedTemporaryFile()
+ temp.write(data)
+ temp.flush()
+
+ actual = hash_file(temp.name)
+
+ self.assertEqual(actual, expected)
+
+
+class TestResolveTargetToMake(unittest.TestCase):
+ def setUp(self):
+ self.topobjdir = data_path
+
+ def assertResolve(self, path, expected):
+ # Handle Windows path separators.
+ (reldir, target) = resolve_target_to_make(self.topobjdir, path)
+ if reldir is not None:
+ reldir = reldir.replace(os.sep, "/")
+ if target is not None:
+ target = target.replace(os.sep, "/")
+ self.assertEqual((reldir, target), expected)
+
+ def test_root_path(self):
+ self.assertResolve("/test-dir", ("test-dir", None))
+ self.assertResolve("/test-dir/with", ("test-dir/with", None))
+ self.assertResolve("/test-dir/without", ("test-dir", None))
+ self.assertResolve("/test-dir/without/with", ("test-dir/without/with", None))
+
+ def test_dir(self):
+ self.assertResolve("test-dir", ("test-dir", None))
+ self.assertResolve("test-dir/with", ("test-dir/with", None))
+ self.assertResolve("test-dir/with", ("test-dir/with", None))
+ self.assertResolve("test-dir/without", ("test-dir", None))
+ self.assertResolve("test-dir/without/with", ("test-dir/without/with", None))
+
+ def test_top_level(self):
+ self.assertResolve("package", (None, "package"))
+ # Makefile handling shouldn't affect top-level targets.
+ self.assertResolve("Makefile", (None, "Makefile"))
+
+ def test_regular_file(self):
+ self.assertResolve("test-dir/with/file", ("test-dir/with", "file"))
+ self.assertResolve(
+ "test-dir/with/without/file", ("test-dir/with", "without/file")
+ )
+ self.assertResolve(
+ "test-dir/with/without/with/file", ("test-dir/with/without/with", "file")
+ )
+
+ self.assertResolve("test-dir/without/file", ("test-dir", "without/file"))
+ self.assertResolve(
+ "test-dir/without/with/file", ("test-dir/without/with", "file")
+ )
+ self.assertResolve(
+ "test-dir/without/with/without/file",
+ ("test-dir/without/with", "without/file"),
+ )
+
+ def test_Makefile(self):
+ self.assertResolve("test-dir/with/Makefile", ("test-dir", "with/Makefile"))
+ self.assertResolve(
+ "test-dir/with/without/Makefile", ("test-dir/with", "without/Makefile")
+ )
+ self.assertResolve(
+ "test-dir/with/without/with/Makefile",
+ ("test-dir/with", "without/with/Makefile"),
+ )
+
+ self.assertResolve(
+ "test-dir/without/Makefile", ("test-dir", "without/Makefile")
+ )
+ self.assertResolve(
+ "test-dir/without/with/Makefile", ("test-dir", "without/with/Makefile")
+ )
+ self.assertResolve(
+ "test-dir/without/with/without/Makefile",
+ ("test-dir/without/with", "without/Makefile"),
+ )
+
+
+class TestHierarchicalStringList(unittest.TestCase):
+ def setUp(self):
+ self.EXPORTS = HierarchicalStringList()
+
+ def test_exports_append(self):
+ self.assertEqual(self.EXPORTS._strings, [])
+ self.EXPORTS += ["foo.h"]
+ self.assertEqual(self.EXPORTS._strings, ["foo.h"])
+ self.EXPORTS += ["bar.h"]
+ self.assertEqual(self.EXPORTS._strings, ["foo.h", "bar.h"])
+
+ def test_exports_subdir(self):
+ self.assertEqual(self.EXPORTS._children, {})
+ self.EXPORTS.foo += ["foo.h"]
+ six.assertCountEqual(self, self.EXPORTS._children, {"foo": True})
+ self.assertEqual(self.EXPORTS.foo._strings, ["foo.h"])
+ self.EXPORTS.bar += ["bar.h"]
+ six.assertCountEqual(self, self.EXPORTS._children, {"foo": True, "bar": True})
+ self.assertEqual(self.EXPORTS.foo._strings, ["foo.h"])
+ self.assertEqual(self.EXPORTS.bar._strings, ["bar.h"])
+
+ def test_exports_multiple_subdir(self):
+ self.EXPORTS.foo.bar = ["foobar.h"]
+ six.assertCountEqual(self, self.EXPORTS._children, {"foo": True})
+ six.assertCountEqual(self, self.EXPORTS.foo._children, {"bar": True})
+ six.assertCountEqual(self, self.EXPORTS.foo.bar._children, {})
+ self.assertEqual(self.EXPORTS._strings, [])
+ self.assertEqual(self.EXPORTS.foo._strings, [])
+ self.assertEqual(self.EXPORTS.foo.bar._strings, ["foobar.h"])
+
+ def test_invalid_exports_append(self):
+ with self.assertRaises(ValueError) as ve:
+ self.EXPORTS += "foo.h"
+ six.assertRegex(
+ self,
+ str(ve.exception),
+ "Expected a list of strings, not <(?:type|class) '%s'>" % str_type,
+ )
+
+ def test_invalid_exports_set(self):
+ with self.assertRaises(ValueError) as ve:
+ self.EXPORTS.foo = "foo.h"
+
+ six.assertRegex(
+ self,
+ str(ve.exception),
+ "Expected a list of strings, not <(?:type|class) '%s'>" % str_type,
+ )
+
+ def test_invalid_exports_append_base(self):
+ with self.assertRaises(ValueError) as ve:
+ self.EXPORTS += "foo.h"
+
+ six.assertRegex(
+ self,
+ str(ve.exception),
+ "Expected a list of strings, not <(?:type|class) '%s'>" % str_type,
+ )
+
+ def test_invalid_exports_bool(self):
+ with self.assertRaises(ValueError) as ve:
+ self.EXPORTS += [True]
+
+ six.assertRegex(
+ self,
+ str(ve.exception),
+ "Expected a list of strings, not an element of " "<(?:type|class) 'bool'>",
+ )
+
+ def test_del_exports(self):
+ with self.assertRaises(MozbuildDeletionError):
+ self.EXPORTS.foo += ["bar.h"]
+ del self.EXPORTS.foo
+
+ def test_unsorted(self):
+ with self.assertRaises(UnsortedError):
+ self.EXPORTS += ["foo.h", "bar.h"]
+
+ with self.assertRaises(UnsortedError):
+ self.EXPORTS.foo = ["foo.h", "bar.h"]
+
+ with self.assertRaises(UnsortedError):
+ self.EXPORTS.foo += ["foo.h", "bar.h"]
+
+ def test_reassign(self):
+ self.EXPORTS.foo = ["foo.h"]
+
+ with self.assertRaises(KeyError):
+ self.EXPORTS.foo = ["bar.h"]
+
+ def test_walk(self):
+ l = HierarchicalStringList()
+ l += ["root1", "root2", "root3"]
+ l.child1 += ["child11", "child12", "child13"]
+ l.child1.grandchild1 += ["grandchild111", "grandchild112"]
+ l.child1.grandchild2 += ["grandchild121", "grandchild122"]
+ l.child2.grandchild1 += ["grandchild211", "grandchild212"]
+ l.child2.grandchild1 += ["grandchild213", "grandchild214"]
+
+ els = list((path, list(seq)) for path, seq in l.walk())
+ self.assertEqual(
+ els,
+ [
+ ("", ["root1", "root2", "root3"]),
+ ("child1", ["child11", "child12", "child13"]),
+ ("child1/grandchild1", ["grandchild111", "grandchild112"]),
+ ("child1/grandchild2", ["grandchild121", "grandchild122"]),
+ (
+ "child2/grandchild1",
+ [
+ "grandchild211",
+ "grandchild212",
+ "grandchild213",
+ "grandchild214",
+ ],
+ ),
+ ],
+ )
+
+ def test_merge(self):
+ l1 = HierarchicalStringList()
+ l1 += ["root1", "root2", "root3"]
+ l1.child1 += ["child11", "child12", "child13"]
+ l1.child1.grandchild1 += ["grandchild111", "grandchild112"]
+ l1.child1.grandchild2 += ["grandchild121", "grandchild122"]
+ l1.child2.grandchild1 += ["grandchild211", "grandchild212"]
+ l1.child2.grandchild1 += ["grandchild213", "grandchild214"]
+ l2 = HierarchicalStringList()
+ l2.child1 += ["child14", "child15"]
+ l2.child1.grandchild2 += ["grandchild123"]
+ l2.child3 += ["child31", "child32"]
+
+ l1 += l2
+ els = list((path, list(seq)) for path, seq in l1.walk())
+ self.assertEqual(
+ els,
+ [
+ ("", ["root1", "root2", "root3"]),
+ ("child1", ["child11", "child12", "child13", "child14", "child15"]),
+ ("child1/grandchild1", ["grandchild111", "grandchild112"]),
+ (
+ "child1/grandchild2",
+ ["grandchild121", "grandchild122", "grandchild123"],
+ ),
+ (
+ "child2/grandchild1",
+ [
+ "grandchild211",
+ "grandchild212",
+ "grandchild213",
+ "grandchild214",
+ ],
+ ),
+ ("child3", ["child31", "child32"]),
+ ],
+ )
+
+
+class TestStrictOrderingOnAppendList(unittest.TestCase):
+ def test_init(self):
+ l = StrictOrderingOnAppendList()
+ self.assertEqual(len(l), 0)
+
+ l = StrictOrderingOnAppendList(["a", "b", "c"])
+ self.assertEqual(len(l), 3)
+
+ with self.assertRaises(UnsortedError):
+ StrictOrderingOnAppendList(["c", "b", "a"])
+
+ self.assertEqual(len(l), 3)
+
+ def test_extend(self):
+ l = StrictOrderingOnAppendList()
+ l.extend(["a", "b"])
+ self.assertEqual(len(l), 2)
+ self.assertIsInstance(l, StrictOrderingOnAppendList)
+
+ with self.assertRaises(UnsortedError):
+ l.extend(["d", "c"])
+
+ self.assertEqual(len(l), 2)
+
+ def test_slicing(self):
+ l = StrictOrderingOnAppendList()
+ l[:] = ["a", "b"]
+ self.assertEqual(len(l), 2)
+ self.assertIsInstance(l, StrictOrderingOnAppendList)
+
+ with self.assertRaises(UnsortedError):
+ l[:] = ["b", "a"]
+
+ self.assertEqual(len(l), 2)
+
+ def test_add(self):
+ l = StrictOrderingOnAppendList()
+ l2 = l + ["a", "b"]
+ self.assertEqual(len(l), 0)
+ self.assertEqual(len(l2), 2)
+ self.assertIsInstance(l2, StrictOrderingOnAppendList)
+
+ with self.assertRaises(UnsortedError):
+ l2 = l + ["b", "a"]
+
+ self.assertEqual(len(l), 0)
+
+ def test_iadd(self):
+ l = StrictOrderingOnAppendList()
+ l += ["a", "b"]
+ self.assertEqual(len(l), 2)
+ self.assertIsInstance(l, StrictOrderingOnAppendList)
+
+ with self.assertRaises(UnsortedError):
+ l += ["b", "a"]
+
+ self.assertEqual(len(l), 2)
+
+ def test_add_after_iadd(self):
+ l = StrictOrderingOnAppendList(["b"])
+ l += ["a"]
+ l2 = l + ["c", "d"]
+ self.assertEqual(len(l), 2)
+ self.assertEqual(len(l2), 4)
+ self.assertIsInstance(l2, StrictOrderingOnAppendList)
+ with self.assertRaises(UnsortedError):
+ l2 = l + ["d", "c"]
+
+ self.assertEqual(len(l), 2)
+
+ def test_add_StrictOrderingOnAppendList(self):
+ l = StrictOrderingOnAppendList()
+ l += ["c", "d"]
+ l += ["a", "b"]
+ l2 = StrictOrderingOnAppendList()
+ with self.assertRaises(UnsortedError):
+ l2 += list(l)
+ # Adding a StrictOrderingOnAppendList to another shouldn't throw
+ l2 += l
+
+
+class TestStrictOrderingOnAppendListWithAction(unittest.TestCase):
+ def setUp(self):
+ self.action = lambda a: (a, id(a))
+
+ def assertSameList(self, expected, actual):
+ self.assertEqual(len(expected), len(actual))
+ for idx, item in enumerate(actual):
+ self.assertEqual(item, expected[idx])
+
+ def test_init(self):
+ l = StrictOrderingOnAppendListWithAction(action=self.action)
+ self.assertEqual(len(l), 0)
+ original = ["a", "b", "c"]
+ l = StrictOrderingOnAppendListWithAction(["a", "b", "c"], action=self.action)
+ expected = [self.action(i) for i in original]
+ self.assertSameList(expected, l)
+
+ with self.assertRaises(ValueError):
+ StrictOrderingOnAppendListWithAction("abc", action=self.action)
+
+ with self.assertRaises(ValueError):
+ StrictOrderingOnAppendListWithAction()
+
+ def test_extend(self):
+ l = StrictOrderingOnAppendListWithAction(action=self.action)
+ original = ["a", "b"]
+ l.extend(original)
+ expected = [self.action(i) for i in original]
+ self.assertSameList(expected, l)
+
+ with self.assertRaises(ValueError):
+ l.extend("ab")
+
+ def test_slicing(self):
+ l = StrictOrderingOnAppendListWithAction(action=self.action)
+ original = ["a", "b"]
+ l[:] = original
+ expected = [self.action(i) for i in original]
+ self.assertSameList(expected, l)
+
+ with self.assertRaises(ValueError):
+ l[:] = "ab"
+
+ def test_add(self):
+ l = StrictOrderingOnAppendListWithAction(action=self.action)
+ original = ["a", "b"]
+ l2 = l + original
+ expected = [self.action(i) for i in original]
+ self.assertSameList(expected, l2)
+
+ with self.assertRaises(ValueError):
+ l + "abc"
+
+ def test_iadd(self):
+ l = StrictOrderingOnAppendListWithAction(action=self.action)
+ original = ["a", "b"]
+ l += original
+ expected = [self.action(i) for i in original]
+ self.assertSameList(expected, l)
+
+ with self.assertRaises(ValueError):
+ l += "abc"
+
+
+class TestStrictOrderingOnAppendListWithFlagsFactory(unittest.TestCase):
+ def test_strict_ordering_on_append_list_with_flags_factory(self):
+ cls = StrictOrderingOnAppendListWithFlagsFactory(
+ {
+ "foo": bool,
+ "bar": int,
+ }
+ )
+
+ l = cls()
+ l += ["a", "b"]
+
+ with self.assertRaises(Exception):
+ l["a"] = "foo"
+
+ with self.assertRaises(Exception):
+ l["c"]
+
+ self.assertEqual(l["a"].foo, False)
+ l["a"].foo = True
+ self.assertEqual(l["a"].foo, True)
+
+ with self.assertRaises(TypeError):
+ l["a"].bar = "bar"
+
+ self.assertEqual(l["a"].bar, 0)
+ l["a"].bar = 42
+ self.assertEqual(l["a"].bar, 42)
+
+ l["b"].foo = True
+ self.assertEqual(l["b"].foo, True)
+
+ with self.assertRaises(AttributeError):
+ l["b"].baz = False
+
+ l["b"].update(foo=False, bar=12)
+ self.assertEqual(l["b"].foo, False)
+ self.assertEqual(l["b"].bar, 12)
+
+ with self.assertRaises(AttributeError):
+ l["b"].update(xyz=1)
+
+ def test_strict_ordering_on_append_list_with_flags_factory_extend(self):
+ FooList = StrictOrderingOnAppendListWithFlagsFactory(
+ {"foo": bool, "bar": six.text_type}
+ )
+ foo = FooList(["a", "b", "c"])
+ foo["a"].foo = True
+ foo["b"].bar = "bar"
+
+ # Don't allow extending lists with different flag definitions.
+ BarList = StrictOrderingOnAppendListWithFlagsFactory(
+ {"foo": six.text_type, "baz": bool}
+ )
+ bar = BarList(["d", "e", "f"])
+ bar["d"].foo = "foo"
+ bar["e"].baz = True
+ with self.assertRaises(ValueError):
+ foo + bar
+ with self.assertRaises(ValueError):
+ bar + foo
+
+ # It's not obvious what to do with duplicate list items with possibly
+ # different flag values, so don't allow that case.
+ with self.assertRaises(ValueError):
+ foo + foo
+
+ def assertExtended(l):
+ self.assertEqual(len(l), 6)
+ self.assertEqual(l["a"].foo, True)
+ self.assertEqual(l["b"].bar, "bar")
+ self.assertTrue("c" in l)
+ self.assertEqual(l["d"].foo, True)
+ self.assertEqual(l["e"].bar, "bar")
+ self.assertTrue("f" in l)
+
+ # Test extend.
+ zot = FooList(["d", "e", "f"])
+ zot["d"].foo = True
+ zot["e"].bar = "bar"
+ zot.extend(foo)
+ assertExtended(zot)
+
+ # Test __add__.
+ zot = FooList(["d", "e", "f"])
+ zot["d"].foo = True
+ zot["e"].bar = "bar"
+ assertExtended(foo + zot)
+ assertExtended(zot + foo)
+
+ # Test __iadd__.
+ foo += zot
+ assertExtended(foo)
+
+ # Test __setitem__.
+ foo[3:] = []
+ self.assertEqual(len(foo), 3)
+ foo[3:] = zot
+ assertExtended(foo)
+
+
+class TestMemoize(unittest.TestCase):
+ def test_memoize(self):
+ self._count = 0
+
+ @memoize
+ def wrapped(a, b):
+ self._count += 1
+ return a + b
+
+ self.assertEqual(self._count, 0)
+ self.assertEqual(wrapped(1, 1), 2)
+ self.assertEqual(self._count, 1)
+ self.assertEqual(wrapped(1, 1), 2)
+ self.assertEqual(self._count, 1)
+ self.assertEqual(wrapped(2, 1), 3)
+ self.assertEqual(self._count, 2)
+ self.assertEqual(wrapped(1, 2), 3)
+ self.assertEqual(self._count, 3)
+ self.assertEqual(wrapped(1, 2), 3)
+ self.assertEqual(self._count, 3)
+ self.assertEqual(wrapped(1, 1), 2)
+ self.assertEqual(self._count, 3)
+
+ def test_memoize_method(self):
+ class foo(object):
+ def __init__(self):
+ self._count = 0
+
+ @memoize
+ def wrapped(self, a, b):
+ self._count += 1
+ return a + b
+
+ instance = foo()
+ refcount = sys.getrefcount(instance)
+ self.assertEqual(instance._count, 0)
+ self.assertEqual(instance.wrapped(1, 1), 2)
+ self.assertEqual(instance._count, 1)
+ self.assertEqual(instance.wrapped(1, 1), 2)
+ self.assertEqual(instance._count, 1)
+ self.assertEqual(instance.wrapped(2, 1), 3)
+ self.assertEqual(instance._count, 2)
+ self.assertEqual(instance.wrapped(1, 2), 3)
+ self.assertEqual(instance._count, 3)
+ self.assertEqual(instance.wrapped(1, 2), 3)
+ self.assertEqual(instance._count, 3)
+ self.assertEqual(instance.wrapped(1, 1), 2)
+ self.assertEqual(instance._count, 3)
+
+ # Memoization of methods is expected to not keep references to
+ # instances, so the refcount shouldn't have changed after executing the
+ # memoized method.
+ self.assertEqual(refcount, sys.getrefcount(instance))
+
+ def test_memoized_property(self):
+ class foo(object):
+ def __init__(self):
+ self._count = 0
+
+ @memoized_property
+ def wrapped(self):
+ self._count += 1
+ return 42
+
+ instance = foo()
+ self.assertEqual(instance._count, 0)
+ self.assertEqual(instance.wrapped, 42)
+ self.assertEqual(instance._count, 1)
+ self.assertEqual(instance.wrapped, 42)
+ self.assertEqual(instance._count, 1)
+
+
+class TestTypedList(unittest.TestCase):
+ def test_init(self):
+ cls = TypedList(int)
+ l = cls()
+ self.assertEqual(len(l), 0)
+
+ l = cls([1, 2, 3])
+ self.assertEqual(len(l), 3)
+
+ with self.assertRaises(ValueError):
+ cls([1, 2, "c"])
+
+ def test_extend(self):
+ cls = TypedList(int)
+ l = cls()
+ l.extend([1, 2])
+ self.assertEqual(len(l), 2)
+ self.assertIsInstance(l, cls)
+
+ with self.assertRaises(ValueError):
+ l.extend([3, "c"])
+
+ self.assertEqual(len(l), 2)
+
+ def test_slicing(self):
+ cls = TypedList(int)
+ l = cls()
+ l[:] = [1, 2]
+ self.assertEqual(len(l), 2)
+ self.assertIsInstance(l, cls)
+
+ with self.assertRaises(ValueError):
+ l[:] = [3, "c"]
+
+ self.assertEqual(len(l), 2)
+
+ def test_add(self):
+ cls = TypedList(int)
+ l = cls()
+ l2 = l + [1, 2]
+ self.assertEqual(len(l), 0)
+ self.assertEqual(len(l2), 2)
+ self.assertIsInstance(l2, cls)
+
+ with self.assertRaises(ValueError):
+ l2 = l + [3, "c"]
+
+ self.assertEqual(len(l), 0)
+
+ def test_iadd(self):
+ cls = TypedList(int)
+ l = cls()
+ l += [1, 2]
+ self.assertEqual(len(l), 2)
+ self.assertIsInstance(l, cls)
+
+ with self.assertRaises(ValueError):
+ l += [3, "c"]
+
+ self.assertEqual(len(l), 2)
+
+ def test_add_coercion(self):
+ objs = []
+
+ class Foo(object):
+ def __init__(self, obj):
+ objs.append(obj)
+
+ cls = TypedList(Foo)
+ l = cls()
+ l += [1, 2]
+ self.assertEqual(len(objs), 2)
+ self.assertEqual(type(l[0]), Foo)
+ self.assertEqual(type(l[1]), Foo)
+
+ # Adding a TypedList to a TypedList shouldn't trigger coercion again
+ l2 = cls()
+ l2 += l
+ self.assertEqual(len(objs), 2)
+ self.assertEqual(type(l2[0]), Foo)
+ self.assertEqual(type(l2[1]), Foo)
+
+ # Adding a TypedList to a TypedList shouldn't even trigger the code
+ # that does coercion at all.
+ l2 = cls()
+ list.__setitem__(l, slice(0, -1), [1, 2])
+ l2 += l
+ self.assertEqual(len(objs), 2)
+ self.assertEqual(type(l2[0]), int)
+ self.assertEqual(type(l2[1]), int)
+
+ def test_memoized(self):
+ cls = TypedList(int)
+ cls2 = TypedList(str)
+ self.assertEqual(TypedList(int), cls)
+ self.assertNotEqual(cls, cls2)
+
+
+class TypedTestStrictOrderingOnAppendList(unittest.TestCase):
+ def test_init(self):
+ class Unicode(six.text_type):
+ def __new__(cls, other):
+ if not isinstance(other, six.text_type):
+ raise ValueError()
+ return six.text_type.__new__(cls, other)
+
+ cls = TypedList(Unicode, StrictOrderingOnAppendList)
+ l = cls()
+ self.assertEqual(len(l), 0)
+
+ l = cls(["a", "b", "c"])
+ self.assertEqual(len(l), 3)
+
+ with self.assertRaises(UnsortedError):
+ cls(["c", "b", "a"])
+
+ with self.assertRaises(ValueError):
+ cls(["a", "b", 3])
+
+ self.assertEqual(len(l), 3)
+
+
+class TestTypedNamedTuple(unittest.TestCase):
+ def test_simple(self):
+ FooBar = TypedNamedTuple("FooBar", [("foo", six.text_type), ("bar", int)])
+
+ t = FooBar(foo="foo", bar=2)
+ self.assertEqual(type(t), FooBar)
+ self.assertEqual(t.foo, "foo")
+ self.assertEqual(t.bar, 2)
+ self.assertEqual(t[0], "foo")
+ self.assertEqual(t[1], 2)
+
+ FooBar("foo", 2)
+
+ with self.assertRaises(TypeError):
+ FooBar("foo", "not integer")
+ with self.assertRaises(TypeError):
+ FooBar(2, 4)
+
+ # Passing a tuple as the first argument is the same as passing multiple
+ # arguments.
+ t1 = ("foo", 3)
+ t2 = FooBar(t1)
+ self.assertEqual(type(t2), FooBar)
+ self.assertEqual(FooBar(t1), FooBar("foo", 3))
+
+
+class TestGroupUnifiedFiles(unittest.TestCase):
+ FILES = ["%s.cpp" % letter for letter in string.ascii_lowercase]
+
+ def test_multiple_files(self):
+ mapping = list(group_unified_files(self.FILES, "Unified", "cpp", 5))
+
+ def check_mapping(index, expected_num_source_files):
+ (unified_file, source_files) = mapping[index]
+
+ self.assertEqual(unified_file, "Unified%d.cpp" % index)
+ self.assertEqual(len(source_files), expected_num_source_files)
+
+ all_files = list(itertools.chain(*[files for (_, files) in mapping]))
+ self.assertEqual(len(all_files), len(self.FILES))
+ self.assertEqual(set(all_files), set(self.FILES))
+
+ expected_amounts = [5, 5, 5, 5, 5, 1]
+ for i, amount in enumerate(expected_amounts):
+ check_mapping(i, amount)
+
+
+class TestMisc(unittest.TestCase):
+ def test_pair(self):
+ self.assertEqual(list(pair([1, 2, 3, 4, 5, 6])), [(1, 2), (3, 4), (5, 6)])
+
+ self.assertEqual(
+ list(pair([1, 2, 3, 4, 5, 6, 7])), [(1, 2), (3, 4), (5, 6), (7, None)]
+ )
+
+ def test_expand_variables(self):
+ self.assertEqual(expand_variables("$(var)", {"var": "value"}), "value")
+
+ self.assertEqual(
+ expand_variables("$(a) and $(b)", {"a": "1", "b": "2"}), "1 and 2"
+ )
+
+ self.assertEqual(
+ expand_variables("$(a) and $(undefined)", {"a": "1", "b": "2"}), "1 and "
+ )
+
+ self.assertEqual(
+ expand_variables(
+ "before $(string) between $(list) after",
+ {"string": "abc", "list": ["a", "b", "c"]},
+ ),
+ "before abc between a b c after",
+ )
+
+
+class TestEnumString(unittest.TestCase):
+ def test_string(self):
+ CompilerType = EnumString.subclass("gcc", "clang", "clang-cl")
+
+ type = CompilerType("gcc")
+ self.assertEqual(type, "gcc")
+ self.assertNotEqual(type, "clang")
+ self.assertNotEqual(type, "clang-cl")
+ self.assertIn(type, ("gcc", "clang-cl"))
+ self.assertNotIn(type, ("clang", "clang-cl"))
+
+ with self.assertRaises(EnumStringComparisonError):
+ self.assertEqual(type, "foo")
+
+ with self.assertRaises(EnumStringComparisonError):
+ self.assertNotEqual(type, "foo")
+
+ with self.assertRaises(EnumStringComparisonError):
+ self.assertIn(type, ("foo", "gcc"))
+
+ with self.assertRaises(ValueError):
+ type = CompilerType("foo")
+
+
+class TestHexDump(unittest.TestCase):
+ @unittest.skipUnless(six.PY3, "requires Python 3")
+ def test_hexdump(self):
+ self.assertEqual(
+ hexdump("abcdef123💩ZYXWVU".encode("utf-8")),
+ [
+ "00 61 62 63 64 65 66 31 32 33 f0 9f 92 a9 5a 59 58 |abcdef123....ZYX|\n",
+ "10 57 56 55 |WVU |\n",
+ ],
+ )
+
+
+def test_read_only_dict():
+ d = ReadOnlyDict(foo="bar")
+ with pytest.raises(Exception):
+ d["foo"] = "baz"
+
+ with pytest.raises(Exception):
+ d.update({"foo": "baz"})
+
+ with pytest.raises(Exception):
+ del d["foo"]
+
+ # ensure copy still works
+ d_copy = d.copy()
+ assert d == d_copy
+ # TODO Returning a dict here feels like a bug, but there are places in-tree
+ # relying on this behaviour.
+ assert isinstance(d_copy, dict)
+
+ d_copy = copy.copy(d)
+ assert d == d_copy
+ assert isinstance(d_copy, ReadOnlyDict)
+
+ d_copy = copy.deepcopy(d)
+ assert d == d_copy
+ assert isinstance(d_copy, ReadOnlyDict)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/test_util_fileavoidwrite.py b/python/mozbuild/mozbuild/test/test_util_fileavoidwrite.py
new file mode 100644
index 0000000000..38c8941562
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_util_fileavoidwrite.py
@@ -0,0 +1,110 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""Tests for the FileAvoidWrite object."""
+
+import locale
+import pathlib
+
+import pytest
+from mozunit import main
+
+from mozbuild.util import FileAvoidWrite
+
+
+@pytest.fixture
+def tmp_path(tmpdir):
+ """Backport of the tmp_path fixture from pytest 3.9.1."""
+ return pathlib.Path(str(tmpdir))
+
+
+def test_overwrite_contents(tmp_path):
+ file = tmp_path / "file.txt"
+ file.write_text("abc")
+
+ faw = FileAvoidWrite(str(file))
+ faw.write("bazqux")
+
+ assert faw.close() == (True, True)
+ assert file.read_text() == "bazqux"
+
+
+def test_store_new_contents(tmp_path):
+ file = tmp_path / "file.txt"
+
+ faw = FileAvoidWrite(str(file))
+ faw.write("content")
+
+ assert faw.close() == (False, True)
+ assert file.read_text() == "content"
+
+
+def test_change_binary_file_contents(tmp_path):
+ file = tmp_path / "file.dat"
+ file.write_bytes(b"\0")
+
+ faw = FileAvoidWrite(str(file), readmode="rb")
+ faw.write(b"\0\0\0")
+
+ assert faw.close() == (True, True)
+ assert file.read_bytes() == b"\0\0\0"
+
+
+def test_obj_as_context_manager(tmp_path):
+ file = tmp_path / "file.txt"
+
+ with FileAvoidWrite(str(file)) as fh:
+ fh.write("foobar")
+
+ assert file.read_text() == "foobar"
+
+
+def test_no_write_happens_if_file_contents_same(tmp_path):
+ file = tmp_path / "file.txt"
+ file.write_text("content")
+ original_write_time = file.stat().st_mtime
+
+ faw = FileAvoidWrite(str(file))
+ faw.write("content")
+
+ assert faw.close() == (True, False)
+ assert file.stat().st_mtime == original_write_time
+
+
+def test_diff_not_created_by_default(tmp_path):
+ file = tmp_path / "file.txt"
+ faw = FileAvoidWrite(str(file))
+ faw.write("dummy")
+ faw.close()
+ assert faw.diff is None
+
+
+def test_diff_update(tmp_path):
+ file = tmp_path / "diffable.txt"
+ file.write_text("old")
+
+ faw = FileAvoidWrite(str(file), capture_diff=True)
+ faw.write("new")
+ faw.close()
+
+ diff = "\n".join(faw.diff)
+ assert "-old" in diff
+ assert "+new" in diff
+
+
+@pytest.mark.skipif(
+ locale.getdefaultlocale()[1] == "cp1252",
+ reason="Fails on win32 terminals with cp1252 encoding",
+)
+def test_write_unicode(tmp_path):
+ # Unicode grinning face :D
+ binary_emoji = b"\xf0\x9f\x98\x80"
+
+ file = tmp_path / "file.dat"
+ faw = FileAvoidWrite(str(file))
+ faw.write(binary_emoji)
+ faw.close()
+
+
+if __name__ == "__main__":
+ main()
diff --git a/python/mozbuild/mozbuild/test/test_vendor.py b/python/mozbuild/mozbuild/test/test_vendor.py
new file mode 100644
index 0000000000..07ba088337
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_vendor.py
@@ -0,0 +1,48 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import shutil
+import subprocess
+import tempfile
+from unittest.mock import Mock
+
+import mozunit
+from buildconfig import topsrcdir
+
+from mozbuild.vendor.vendor_python import VendorPython
+
+
+def test_up_to_date_vendor():
+ with tempfile.TemporaryDirectory() as work_dir:
+ subprocess.check_call(["hg", "init", work_dir])
+ os.makedirs(os.path.join(work_dir, "third_party"))
+ shutil.copytree(
+ os.path.join(topsrcdir, os.path.join("third_party", "python")),
+ os.path.join(work_dir, os.path.join("third_party", "python")),
+ )
+
+ # Run the vendoring process
+ vendor = VendorPython(
+ work_dir, None, Mock(), topobjdir=os.path.join(work_dir, "obj")
+ )
+ vendor.vendor()
+
+ # Verify that re-vendoring did not cause file changes.
+ # Note that we don't want hg-ignored generated files
+ # to bust the diff, so we exclude them (pycache, egg-info).
+ subprocess.check_call(
+ [
+ "diff",
+ "-r",
+ os.path.join(topsrcdir, os.path.join("third_party", "python")),
+ os.path.join(work_dir, os.path.join("third_party", "python")),
+ "--exclude=__pycache__",
+ "--strip-trailing-cr",
+ ]
+ )
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozbuild/test/test_vendor_tools.py b/python/mozbuild/mozbuild/test/test_vendor_tools.py
new file mode 100644
index 0000000000..271be6d7da
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/test_vendor_tools.py
@@ -0,0 +1,90 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mozunit
+
+from mozbuild.vendor.vendor_manifest import list_of_paths_to_readable_string
+
+
+def test_list_of_paths_to_readable_string():
+ paths = ["/tmp/a", "/tmp/b"]
+ s = list_of_paths_to_readable_string(paths)
+ assert not s.endswith(", ]")
+ assert s.endswith("]")
+ assert "/tmp/a" in s
+ assert "/tmp/b" in s
+
+ paths = ["/tmp/a", "/tmp/b", "/tmp/c", "/tmp/d"]
+ s = list_of_paths_to_readable_string(paths)
+ assert not s.endswith(", ")
+ assert s.endswith("]")
+ assert "/tmp/a" not in s
+ assert "/tmp/b" not in s
+ assert "4 items in /tmp" in s
+
+ paths = [
+ "/tmp/a",
+ "/tmp/b",
+ "/tmp/c",
+ "/tmp/d",
+ "/tmp/d",
+ "/tmp/d",
+ "/tmp/d",
+ "/tmp/d",
+ "/tmp/d",
+ "/tmp/d",
+ ]
+ s = list_of_paths_to_readable_string(paths)
+ assert not s.endswith(", ")
+ assert s.endswith("]")
+ assert "/tmp/a" not in s
+ assert " a" not in s
+ assert "/tmp/b" not in s
+ assert "10 (omitted) items in /tmp" in s
+
+ paths = ["/tmp", "/foo"]
+ s = list_of_paths_to_readable_string(paths)
+ assert not s.endswith(", ")
+ assert s.endswith("]")
+ assert "/tmp" in s
+ assert "/foo" in s
+
+ paths = [
+ "/tmp/a",
+ "/tmp/b",
+ "/tmp/c",
+ "/tmp/d",
+ "/tmp/d",
+ "/tmp/d",
+ "/tmp/d",
+ "/tmp/d",
+ "/tmp/d",
+ "/tmp/d",
+ ]
+ paths.extend(["/foo/w", "/foo/x", "/foo/y", "/foo/z"])
+ paths.extend(["/bar/m", "/bar/n"])
+ paths.extend(["/etc"])
+ s = list_of_paths_to_readable_string(paths)
+ assert not s.endswith(", ")
+ assert s.endswith("]")
+ assert "/tmp/a" not in s
+ assert " d" not in s
+ assert "/tmp/b" not in s
+ assert "10 (omitted) items in /tmp" in s
+
+ assert "/foo/w" not in s
+ assert "/foo/x" not in s
+ assert "4 items in /foo" in s
+ assert " w" in s
+
+ assert "/bar/m" in s
+ assert "/bar/n" in s
+
+ assert "/etc" in s
+
+ assert len(s) < len(str(paths))
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozbuild/test/vendor_requirements.in b/python/mozbuild/mozbuild/test/vendor_requirements.in
new file mode 100644
index 0000000000..852826fc1a
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/vendor_requirements.in
@@ -0,0 +1,5 @@
+# Until bug 1724273 lands, python-testing code that uses a site is not possible. Work around
+# this by representing the "vendor" site's dependency as a separate "requirements.txt" file,
+# which can be used by python-test's "requirements" feature.
+poetry==1.4
+poetry-core==1.5.1
diff --git a/python/mozbuild/mozbuild/test/vendor_requirements.txt b/python/mozbuild/mozbuild/test/vendor_requirements.txt
new file mode 100644
index 0000000000..10a32a524c
--- /dev/null
+++ b/python/mozbuild/mozbuild/test/vendor_requirements.txt
@@ -0,0 +1,416 @@
+#
+# This file is autogenerated by pip-compile with Python 3.9
+# by the following command:
+#
+# pip-compile --generate-hashes --output-file=python/mozbuild/mozbuild/test/vendor_requirements.txt python/mozbuild/mozbuild/test/vendor_requirements.in
+#
+appdirs==1.4.4 \
+ --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41 \
+ --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128
+ # via virtualenv
+attrs==22.2.0 \
+ --hash=sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836 \
+ --hash=sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99
+ # via jsonschema
+build==0.10.0 \
+ --hash=sha256:af266720050a66c893a6096a2f410989eeac74ff9a68ba194b3f6473e8e26171 \
+ --hash=sha256:d5b71264afdb5951d6704482aac78de887c80691c52b88a9ad195983ca2c9269
+ # via poetry
+cachecontrol[filecache]==0.12.10 \
+ --hash=sha256:b0d43d8f71948ef5ebdee5fe236b86c6ffc7799370453dccb0e894c20dfa487c \
+ --hash=sha256:d8aca75b82eec92d84b5d6eb8c8f66ea16f09d2adb09dbca27fe2d5fc8d3732d
+ # via poetry
+certifi==2021.10.8 \
+ --hash=sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872 \
+ --hash=sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569
+ # via requests
+charset-normalizer==2.0.12 \
+ --hash=sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597 \
+ --hash=sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df
+ # via requests
+cleo==2.0.1 \
+ --hash=sha256:6eb133670a3ed1f3b052d53789017b6e50fca66d1287e6e6696285f4cb8ea448 \
+ --hash=sha256:eb4b2e1f3063c11085cebe489a6e9124163c226575a3c3be69b2e51af4a15ec5
+ # via poetry
+colorama==0.4.6 \
+ --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \
+ --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6
+ # via build
+crashtest==0.4.1 \
+ --hash=sha256:80d7b1f316ebfbd429f648076d6275c877ba30ba48979de4191714a75266f0ce \
+ --hash=sha256:8d23eac5fa660409f57472e3851dab7ac18aba459a8d19cbbba86d3d5aecd2a5
+ # via
+ # cleo
+ # poetry
+distlib==0.3.4 \
+ --hash=sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b \
+ --hash=sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579
+ # via virtualenv
+dulwich==0.21.3 \
+ --hash=sha256:026427b5ef0f1fe138ed22078e49b00175b58b11e5c18e2be00f06ee0782603b \
+ --hash=sha256:03ed9448f2944166e28aa8d3f4c8feeceb5c6880e9ffe5ab274869d45abd9589 \
+ --hash=sha256:058aaba18aefe18fcd84b216fd34d032ad453967dcf3dee263278951cd43e2d4 \
+ --hash=sha256:075c8e9d2694ff16fc6e8a5ec0c771b7c33be12e4ebecc346fd74315d3d84605 \
+ --hash=sha256:08ee426b609dab552839b5c7394ae9af2112c164bb727b7f85a69980eced9251 \
+ --hash=sha256:092829f27a2c87cdf6b6523216822859ecf01d281ddfae0e58cad1f44adafff6 \
+ --hash=sha256:0b541bd58426a30753ab12cc024ba29b6699d197d9d0d9f130b9768ab20e0e6a \
+ --hash=sha256:0cd83f84e58aa59fb9d85cf15e74be83a5be876ac5876d5030f60fcce7ab36f1 \
+ --hash=sha256:1799c04bd53ec404ebd2c82c1d66197a31e5f0549c95348bb7d3f57a28c94241 \
+ --hash=sha256:1cf246530b8d574b33a9614da76881b96c190c0fe78f76ab016c88082c0da051 \
+ --hash=sha256:208d01a9cda1bae16c92e8c54e806701a16969346aba44b8d6921c6c227277a9 \
+ --hash=sha256:21ee962211839bb6e52d41f363ce9dbb0638d341a1c02263e163d69012f58b25 \
+ --hash=sha256:250ec581682af846cb85844f8032b7642dd278006b1c3abd5e8e718eba0b1b00 \
+ --hash=sha256:25376efc6ea2ee9daa868a120d4f9c905dcb7774f68931be921fba41a657f58a \
+ --hash=sha256:2bf2be68fddfc0adfe43be99ab31f6b0f16b9ef1e40464679ba831ff615ad4a3 \
+ --hash=sha256:33f73e8f902c6397cc73a727db1f6e75add8ce894bfbb1a15daa2f7a4138a744 \
+ --hash=sha256:3b048f84c94c3284f29bf228f1094ccc48763d76ede5c35632153bd7f697b846 \
+ --hash=sha256:40f8f461eba87ef2e8ce0005ca2c12f1b4fdbbafd3a717b8570060d7cd35ee0c \
+ --hash=sha256:512bb4b04e403a38860f7eb22abeeaefba3c4a9c08bc7beec8885494c5828034 \
+ --hash=sha256:5a1137177b62eec949c0f1564eef73920f842af5ebfc260c20d9cd47e8ecd519 \
+ --hash=sha256:6618e35268d116bffddd6dbec360a40c54b3164f8af0513d95d8698f36e2eacc \
+ --hash=sha256:67dbf4dd7586b2d437f539d5dc930ebceaf74a4150720644d6ea7e5ffc1cb2ff \
+ --hash=sha256:6f8d45f5fcdb52c60c902a951f549faad9979314e7e069f4fa3d14eb409b16a0 \
+ --hash=sha256:73f9feba3da1ae66f0b521d7c2727db7f5025a83facdc73f4f39abe2b6d4f00d \
+ --hash=sha256:7aaf5c4528e83e3176e7dbb01dcec34fb41c93279a8f8527cf33e5df88bfb910 \
+ --hash=sha256:7c69c95d5242171d07396761f759a8a4d566e9a01bf99612f9b9e309e70a80fc \
+ --hash=sha256:7ca3b453d767eb83b3ec58f0cfcdc934875a341cdfdb0dc55c1431c96608cf83 \
+ --hash=sha256:7f2cb11fe789b72feeae7cdf6e27375c33ed6915f8ca5ea7ce81b5e234c75a9e \
+ --hash=sha256:89af4ee347f361338bad5c27b023f9d19e7aed17aa75cb519f28e6cf1658a0ba \
+ --hash=sha256:8ad7de37c9ff817bc5d26f89100f87b7f1a5cc25e5eaaa54f11dc66cca9652e4 \
+ --hash=sha256:8ba1fe3fb415fd34cae5ca090fb82030b6e8423d6eb2c4c9c4fbf50b15c7664c \
+ --hash=sha256:9213a114dd19cfca19715088f12f143e918c5e1b4e26f7acf1a823d7da9e1413 \
+ --hash=sha256:9f08e5cc10143d3da2a2cf735d8b932ef4e4e1d74b0c74ce66c52eab02068be8 \
+ --hash=sha256:a275b3a579dfd923d6330f6e5c2886dbdb5da4e004c5abecb107eb347d301412 \
+ --hash=sha256:a2e6270923bf5ec0e9f720d689579a904f401c62193222d000d8cb8e880684e9 \
+ --hash=sha256:a98989ff1ed20825728495ffb859cd700a120850074184d2e1ec08a0b1ab8ab3 \
+ --hash=sha256:ae38c6d24d7aff003a241c8f1dd268eb1c6f7625d91e3435836ff5a5eed05ce5 \
+ --hash=sha256:af7a417e19068b1abeb9addd3c045a2d6e40d15365af6aa3cbe2d47305b5bb11 \
+ --hash=sha256:b09b6166876d2cba8f331a548932b09e11c9386db0525c9ca15c399b666746fc \
+ --hash=sha256:b9fc609a3d4009ee31212f435f5a75720ef24280f6d23edfd53f77b562a79c5b \
+ --hash=sha256:ba3d42cd83d7f89b9c1b2f76df971e8ab58815f8060da4dc67b9ae9dba1b34cc \
+ --hash=sha256:baf5b3b901272837bee2311ecbd28fdbe960d288a070dc72bdfdf48cfcbb8090 \
+ --hash=sha256:bb54fe45deb55e4caae4ea2c1dba93ee79fb5c377287b14056d4c30fb156920e \
+ --hash=sha256:be0801ae3f9017c6437bcd23a4bf2b2aa88e465f7efeed4b079944d07e3df994 \
+ --hash=sha256:c349431f5c8aa99b8744550d0bb4615f63e73450584202ac5db0e5d7da4d82ff \
+ --hash=sha256:c80ade5cdb0ea447e7f43b32abc2f4a628dcdfa64dc8ee5ab4262987e5e0814f \
+ --hash=sha256:c8d1837c3d2d8e56aacc13a91ec7540b3baadc1b254fbdf225a2d15b72b654c3 \
+ --hash=sha256:c97561c22fc05d0f6ba370d9bd67f86c313c38f31a1793e0ee9acb78ee28e4b8 \
+ --hash=sha256:cf1f6edc968619a4355481c29d5571726723bc12924e2b25bd3348919f9bc992 \
+ --hash=sha256:cf7af6458cf6343a2a0632ae2fc5f04821b2ffefc7b8a27f4eacb726ef89c682 \
+ --hash=sha256:d0ac29adf468a838884e1507d81e872096238c76fe7da7f3325507e4390b6867 \
+ --hash=sha256:d7ad871d044a96f794170f2434e832c6b42804d0b53721377d03f865245cd273 \
+ --hash=sha256:ddb790f2fdc22984fba643866b21d04733c5cf7c3ace2a1e99e0c1c1d2336aab \
+ --hash=sha256:e3b686b49adeb7fc45791dfae96ffcffeba1038e8b7603f369d6661f59e479fc \
+ --hash=sha256:e7b8cb38a93de87b980f882f0dcd19f2e3ad43216f34e06916315cb3a03e6964 \
+ --hash=sha256:f4f8ff776ca38ce272d9c164a7f77db8a54a8cad6d9468124317adf8732be07d
+ # via poetry
+filelock==3.10.0 \
+ --hash=sha256:3199fd0d3faea8b911be52b663dfccceb84c95949dd13179aa21436d1a79c4ce \
+ --hash=sha256:e90b34656470756edf8b19656785c5fea73afa1953f3e1b0d645cef11cab3182
+ # via
+ # poetry
+ # virtualenv
+html5lib==1.1 \
+ --hash=sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d \
+ --hash=sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f
+ # via poetry
+idna==3.3 \
+ --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \
+ --hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d
+ # via requests
+importlib-metadata==6.1.0 \
+ --hash=sha256:43ce9281e097583d758c2c708c4376371261a02c34682491a8e98352365aad20 \
+ --hash=sha256:ff80f3b5394912eb1b108fcfd444dc78b7f1f3e16b16188054bd01cb9cb86f09
+ # via
+ # keyring
+ # poetry
+installer==0.6.0 \
+ --hash=sha256:ae7c62d1d6158b5c096419102ad0d01fdccebf857e784cee57f94165635fe038 \
+ --hash=sha256:f3bd36cd261b440a88a1190b1becca0578fee90b4b62decc796932fdd5ae8839
+ # via poetry
+jaraco-classes==3.2.3 \
+ --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \
+ --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a
+ # via keyring
+jsonschema==4.17.3 \
+ --hash=sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d \
+ --hash=sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6
+ # via poetry
+keyring==23.13.1 \
+ --hash=sha256:771ed2a91909389ed6148631de678f82ddc73737d85a927f382a8a1b157898cd \
+ --hash=sha256:ba2e15a9b35e21908d0aaf4e0a47acc52d6ae33444df0da2b49d41a46ef6d678
+ # via poetry
+lockfile==0.12.2 \
+ --hash=sha256:6aed02de03cba24efabcd600b30540140634fc06cfa603822d508d5361e9f799 \
+ --hash=sha256:6c3cb24f344923d30b2785d5ad75182c8ea7ac1b6171b08657258ec7429d50fa
+ # via
+ # cachecontrol
+ # poetry
+more-itertools==9.1.0 \
+ --hash=sha256:cabaa341ad0389ea83c17a94566a53ae4c9d07349861ecb14dc6d0345cf9ac5d \
+ --hash=sha256:d2bc7f02446e86a68911e58ded76d6561eea00cddfb2a91e7019bbb586c799f3
+ # via jaraco-classes
+msgpack==1.0.3 \
+ --hash=sha256:0d8c332f53ffff01953ad25131272506500b14750c1d0ce8614b17d098252fbc \
+ --hash=sha256:1c58cdec1cb5fcea8c2f1771d7b5fec79307d056874f746690bd2bdd609ab147 \
+ --hash=sha256:2c3ca57c96c8e69c1a0d2926a6acf2d9a522b41dc4253a8945c4c6cd4981a4e3 \
+ --hash=sha256:2f30dd0dc4dfe6231ad253b6f9f7128ac3202ae49edd3f10d311adc358772dba \
+ --hash=sha256:2f97c0f35b3b096a330bb4a1a9247d0bd7e1f3a2eba7ab69795501504b1c2c39 \
+ --hash=sha256:36a64a10b16c2ab31dcd5f32d9787ed41fe68ab23dd66957ca2826c7f10d0b85 \
+ --hash=sha256:3d875631ecab42f65f9dce6f55ce6d736696ced240f2634633188de2f5f21af9 \
+ --hash=sha256:40fb89b4625d12d6027a19f4df18a4de5c64f6f3314325049f219683e07e678a \
+ --hash=sha256:47d733a15ade190540c703de209ffbc42a3367600421b62ac0c09fde594da6ec \
+ --hash=sha256:494471d65b25a8751d19c83f1a482fd411d7ca7a3b9e17d25980a74075ba0e88 \
+ --hash=sha256:51fdc7fb93615286428ee7758cecc2f374d5ff363bdd884c7ea622a7a327a81e \
+ --hash=sha256:6eef0cf8db3857b2b556213d97dd82de76e28a6524853a9beb3264983391dc1a \
+ --hash=sha256:6f4c22717c74d44bcd7af353024ce71c6b55346dad5e2cc1ddc17ce8c4507c6b \
+ --hash=sha256:73a80bd6eb6bcb338c1ec0da273f87420829c266379c8c82fa14c23fb586cfa1 \
+ --hash=sha256:89908aea5f46ee1474cc37fbc146677f8529ac99201bc2faf4ef8edc023c2bf3 \
+ --hash=sha256:8a3a5c4b16e9d0edb823fe54b59b5660cc8d4782d7bf2c214cb4b91a1940a8ef \
+ --hash=sha256:96acc674bb9c9be63fa8b6dabc3248fdc575c4adc005c440ad02f87ca7edd079 \
+ --hash=sha256:973ad69fd7e31159eae8f580f3f707b718b61141838321c6fa4d891c4a2cca52 \
+ --hash=sha256:9b6f2d714c506e79cbead331de9aae6837c8dd36190d02da74cb409b36162e8a \
+ --hash=sha256:9c0903bd93cbd34653dd63bbfcb99d7539c372795201f39d16fdfde4418de43a \
+ --hash=sha256:9fce00156e79af37bb6db4e7587b30d11e7ac6a02cb5bac387f023808cd7d7f4 \
+ --hash=sha256:a598d0685e4ae07a0672b59792d2cc767d09d7a7f39fd9bd37ff84e060b1a996 \
+ --hash=sha256:b0a792c091bac433dfe0a70ac17fc2087d4595ab835b47b89defc8bbabcf5c73 \
+ --hash=sha256:bb87f23ae7d14b7b3c21009c4b1705ec107cb21ee71975992f6aca571fb4a42a \
+ --hash=sha256:bf1e6bfed4860d72106f4e0a1ab519546982b45689937b40257cfd820650b920 \
+ --hash=sha256:c1ba333b4024c17c7591f0f372e2daa3c31db495a9b2af3cf664aef3c14354f7 \
+ --hash=sha256:c2140cf7a3ec475ef0938edb6eb363fa704159e0bf71dde15d953bacc1cf9d7d \
+ --hash=sha256:c7e03b06f2982aa98d4ddd082a210c3db200471da523f9ac197f2828e80e7770 \
+ --hash=sha256:d02cea2252abc3756b2ac31f781f7a98e89ff9759b2e7450a1c7a0d13302ff50 \
+ --hash=sha256:da24375ab4c50e5b7486c115a3198d207954fe10aaa5708f7b65105df09109b2 \
+ --hash=sha256:e4c309a68cb5d6bbd0c50d5c71a25ae81f268c2dc675c6f4ea8ab2feec2ac4e2 \
+ --hash=sha256:f01b26c2290cbd74316990ba84a14ac3d599af9cebefc543d241a66e785cf17d \
+ --hash=sha256:f201d34dc89342fabb2a10ed7c9a9aaaed9b7af0f16a5923f1ae562b31258dea \
+ --hash=sha256:f74da1e5fcf20ade12c6bf1baa17a2dc3604958922de8dc83cbe3eff22e8b611
+ # via cachecontrol
+packaging==20.9 \
+ --hash=sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5 \
+ --hash=sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a
+ # via
+ # build
+ # poetry
+pexpect==4.8.0 \
+ --hash=sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937 \
+ --hash=sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c
+ # via poetry
+pkginfo==1.9.6 \
+ --hash=sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546 \
+ --hash=sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046
+ # via poetry
+platformdirs==2.6.2 \
+ --hash=sha256:83c8f6d04389165de7c9b6f0c682439697887bca0aa2f1c87ef1826be3584490 \
+ --hash=sha256:e1fea1fe471b9ff8332e229df3cb7de4f53eeea4998d3b6bfff542115e998bd2
+ # via poetry
+poetry==1.4.0 \
+ --hash=sha256:151ad741e163a329c8b13ea602dde979b7616fc350cfcff74b604e93263934a8 \
+ --hash=sha256:f88a7a812a5d8c1f5a378e0924f898926b2ac10c3b5c03f7282f2182f90d8507
+ # via
+ # -r python/mozbuild/mozbuild/test/vendor_requirements.in
+ # poetry-plugin-export
+poetry-core==1.5.1 \
+ --hash=sha256:41887261358863f25831fa0ad1fe7e451fc32d1c81fcf7710ba5174cc0047c6d \
+ --hash=sha256:b1900dea81eb18feb7323d404e5f10430205541a4a683a912893f9d2b5807797
+ # via
+ # -r python/mozbuild/mozbuild/test/vendor_requirements.in
+ # poetry
+ # poetry-plugin-export
+poetry-plugin-export==1.3.0 \
+ --hash=sha256:61ae5ec1db233aba947a48e1ce54c6ff66afd0e1c87195d6bce64c73a5ae658c \
+ --hash=sha256:6e5919bf84afcb08cdd419a03f909f490d8671f00633a3c6df8ba09b0820dc2f
+ # via poetry
+ptyprocess==0.7.0 \
+ --hash=sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35 \
+ --hash=sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220
+ # via pexpect
+pyparsing==3.0.8 \
+ --hash=sha256:7bf433498c016c4314268d95df76c81b842a4cb2b276fa3312cfb1e1d85f6954 \
+ --hash=sha256:ef7b523f6356f763771559412c0d7134753f037822dad1b16945b7b846f7ad06
+ # via packaging
+pyproject-hooks==1.0.0 \
+ --hash=sha256:283c11acd6b928d2f6a7c73fa0d01cb2bdc5f07c57a2eeb6e83d5e56b97976f8 \
+ --hash=sha256:f271b298b97f5955d53fb12b72c1fb1948c22c1a6b70b315c54cedaca0264ef5
+ # via
+ # build
+ # poetry
+pyrsistent==0.19.3 \
+ --hash=sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8 \
+ --hash=sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440 \
+ --hash=sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a \
+ --hash=sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c \
+ --hash=sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3 \
+ --hash=sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393 \
+ --hash=sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9 \
+ --hash=sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da \
+ --hash=sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf \
+ --hash=sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64 \
+ --hash=sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a \
+ --hash=sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3 \
+ --hash=sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98 \
+ --hash=sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2 \
+ --hash=sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8 \
+ --hash=sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf \
+ --hash=sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc \
+ --hash=sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7 \
+ --hash=sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28 \
+ --hash=sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2 \
+ --hash=sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b \
+ --hash=sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a \
+ --hash=sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64 \
+ --hash=sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19 \
+ --hash=sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1 \
+ --hash=sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9 \
+ --hash=sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c
+ # via jsonschema
+pywin32-ctypes==0.2.0 \
+ --hash=sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942 \
+ --hash=sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98
+ # via keyring
+rapidfuzz==2.13.7 \
+ --hash=sha256:020858dd89b60ce38811cd6e37875c4c3c8d7fcd8bc20a0ad2ed1f464b34dc4e \
+ --hash=sha256:042644133244bfa7b20de635d500eb9f46af7097f3d90b1724f94866f17cb55e \
+ --hash=sha256:08590905a95ccfa43f4df353dcc5d28c15d70664299c64abcad8721d89adce4f \
+ --hash=sha256:114810491efb25464016fd554fdf1e20d390309cecef62587494fc474d4b926f \
+ --hash=sha256:1333fb3d603d6b1040e365dca4892ba72c7e896df77a54eae27dc07db90906e3 \
+ --hash=sha256:16080c05a63d6042643ae9b6cfec1aefd3e61cef53d0abe0df3069b9d4b72077 \
+ --hash=sha256:16ffad751f43ab61001187b3fb4a9447ec2d1aedeff7c5bac86d3b95f9980cc3 \
+ --hash=sha256:1f50d1227e6e2a0e3ae1fb1c9a2e1c59577d3051af72c7cab2bcc430cb5e18da \
+ --hash=sha256:1fbad8fb28d98980f5bff33c7842efef0315d42f0cd59082108482a7e6b61410 \
+ --hash=sha256:23524635840500ce6f4d25005c9529a97621689c85d2f727c52eed1782839a6a \
+ --hash=sha256:24d3fea10680d085fd0a4d76e581bfb2b1074e66e78fd5964d4559e1fcd2a2d4 \
+ --hash=sha256:24eb6b843492bdc63c79ee4b2f104059b7a2201fef17f25177f585d3be03405a \
+ --hash=sha256:25b4cedf2aa19fb7212894ce5f5219010cce611b60350e9a0a4d492122e7b351 \
+ --hash=sha256:27be9c63215d302ede7d654142a2e21f0d34ea6acba512a4ae4cfd52bbaa5b59 \
+ --hash=sha256:2c836f0f2d33d4614c3fbaf9a1eb5407c0fe23f8876f47fd15b90f78daa64c34 \
+ --hash=sha256:3a9bd02e1679c0fd2ecf69b72d0652dbe2a9844eaf04a36ddf4adfbd70010e95 \
+ --hash=sha256:3d8b081988d0a49c486e4e845a547565fee7c6e7ad8be57ff29c3d7c14c6894c \
+ --hash=sha256:3dcffe1f3cbda0dc32133a2ae2255526561ca594f15f9644384549037b355245 \
+ --hash=sha256:3f11a7eff7bc6301cd6a5d43f309e22a815af07e1f08eeb2182892fca04c86cb \
+ --hash=sha256:42085d4b154a8232767de8296ac39c8af5bccee6b823b0507de35f51c9cbc2d7 \
+ --hash=sha256:424f82c35dbe4f83bdc3b490d7d696a1dc6423b3d911460f5493b7ffae999fd2 \
+ --hash=sha256:43fb8cb030f888c3f076d40d428ed5eb4331f5dd6cf1796cfa39c67bf0f0fc1e \
+ --hash=sha256:460853983ab88f873173e27cc601c5276d469388e6ad6e08c4fd57b2a86f1064 \
+ --hash=sha256:467c1505362823a5af12b10234cb1c4771ccf124c00e3fc9a43696512bd52293 \
+ --hash=sha256:46b9b8aa09998bc48dd800854e8d9b74bc534d7922c1d6e1bbf783e7fa6ac29c \
+ --hash=sha256:53dcae85956853b787c27c1cb06f18bb450e22cf57a4ad3444cf03b8ff31724a \
+ --hash=sha256:585206112c294e335d84de5d5f179c0f932837752d7420e3de21db7fdc476278 \
+ --hash=sha256:5ada0a14c67452358c1ee52ad14b80517a87b944897aaec3e875279371a9cb96 \
+ --hash=sha256:5e2b3d020219baa75f82a4e24b7c8adcb598c62f0e54e763c39361a9e5bad510 \
+ --hash=sha256:6120f2995f5154057454c5de99d86b4ef3b38397899b5da1265467e8980b2f60 \
+ --hash=sha256:68a89bb06d5a331511961f4d3fa7606f8e21237467ba9997cae6f67a1c2c2b9e \
+ --hash=sha256:7496e8779905b02abc0ab4ba2a848e802ab99a6e20756ffc967a0de4900bd3da \
+ --hash=sha256:759a3361711586a29bc753d3d1bdb862983bd9b9f37fbd7f6216c24f7c972554 \
+ --hash=sha256:75c45dcd595f8178412367e302fd022860ea025dc4a78b197b35428081ed33d5 \
+ --hash=sha256:7d005e058d86f2a968a8d28ca6f2052fab1f124a39035aa0523261d6baf21e1f \
+ --hash=sha256:7f7930adf84301797c3f09c94b9c5a9ed90a9e8b8ed19b41d2384937e0f9f5bd \
+ --hash=sha256:8109e0324d21993d5b2d111742bf5958f3516bf8c59f297c5d1cc25a2342eb66 \
+ --hash=sha256:81642a24798851b118f82884205fc1bd9ff70b655c04018c467824b6ecc1fabc \
+ --hash=sha256:8450d15f7765482e86ef9be2ad1a05683cd826f59ad236ef7b9fb606464a56aa \
+ --hash=sha256:875d51b3497439a72e2d76183e1cb5468f3f979ab2ddfc1d1f7dde3b1ecfb42f \
+ --hash=sha256:8b477b43ced896301665183a5e0faec0f5aea2373005648da8bdcb3c4b73f280 \
+ --hash=sha256:8d3e252d4127c79b4d7c2ae47271636cbaca905c8bb46d80c7930ab906cf4b5c \
+ --hash=sha256:916bc2e6cf492c77ad6deb7bcd088f0ce9c607aaeabc543edeb703e1fbc43e31 \
+ --hash=sha256:988f8f6abfba7ee79449f8b50687c174733b079521c3cc121d65ad2d38831846 \
+ --hash=sha256:99a84ab9ac9a823e7e93b4414f86344052a5f3e23b23aa365cda01393ad895bd \
+ --hash=sha256:9be02162af0376d64b840f2fc8ee3366794fc149f1e06d095a6a1d42447d97c5 \
+ --hash=sha256:a5585189b3d90d81ccd62d4f18530d5ac8972021f0aaaa1ffc6af387ff1dce75 \
+ --hash=sha256:ae33a72336059213996fe4baca4e0e4860913905c2efb7c991eab33b95a98a0a \
+ --hash=sha256:af4f7c3c904ca709493eb66ca9080b44190c38e9ecb3b48b96d38825d5672559 \
+ --hash=sha256:b20141fa6cee041917801de0bab503447196d372d4c7ee9a03721b0a8edf5337 \
+ --hash=sha256:b3210869161a864f3831635bb13d24f4708c0aa7208ef5baac1ac4d46e9b4208 \
+ --hash=sha256:b34e8c0e492949ecdd5da46a1cfc856a342e2f0389b379b1a45a3cdcd3176a6e \
+ --hash=sha256:b52ac2626945cd21a2487aeefed794c14ee31514c8ae69b7599170418211e6f6 \
+ --hash=sha256:b5dd713a1734574c2850c566ac4286594bacbc2d60b9170b795bee4b68656625 \
+ --hash=sha256:b5f705652360d520c2de52bee11100c92f59b3e3daca308ebb150cbc58aecdad \
+ --hash=sha256:b6389c50d8d214c9cd11a77f6d501529cb23279a9c9cafe519a3a4b503b5f72a \
+ --hash=sha256:b6bad92de071cbffa2acd4239c1779f66851b60ffbbda0e4f4e8a2e9b17e7eef \
+ --hash=sha256:b75dd0928ce8e216f88660ab3d5c5ffe990f4dd682fd1709dba29d5dafdde6de \
+ --hash=sha256:c2523f8180ebd9796c18d809e9a19075a1060b1a170fde3799e83db940c1b6d5 \
+ --hash=sha256:c31022d9970177f6affc6d5dd757ed22e44a10890212032fabab903fdee3bfe7 \
+ --hash=sha256:c36fd260084bb636b9400bb92016c6bd81fd80e59ed47f2466f85eda1fc9f782 \
+ --hash=sha256:c3741cb0bf9794783028e8b0cf23dab917fa5e37a6093b94c4c2f805f8e36b9f \
+ --hash=sha256:c3fbe449d869ea4d0909fc9d862007fb39a584fb0b73349a6aab336f0d90eaed \
+ --hash=sha256:c66546e30addb04a16cd864f10f5821272a1bfe6462ee5605613b4f1cb6f7b48 \
+ --hash=sha256:c71d9d512b76f05fa00282227c2ae884abb60e09f08b5ca3132b7e7431ac7f0d \
+ --hash=sha256:c8601a66fbfc0052bb7860d2eacd303fcde3c14e87fdde409eceff516d659e77 \
+ --hash=sha256:c88adbcb933f6b8612f6c593384bf824e562bb35fc8a0f55fac690ab5b3486e5 \
+ --hash=sha256:ca00fafd2756bc9649bf80f1cf72c647dce38635f0695d7ce804bc0f759aa756 \
+ --hash=sha256:ca8a23097c1f50e0fdb4de9e427537ca122a18df2eead06ed39c3a0bef6d9d3a \
+ --hash=sha256:cda1e2f66bb4ba7261a0f4c2d052d5d909798fca557cbff68f8a79a87d66a18f \
+ --hash=sha256:cdfc04f7647c29fb48da7a04082c34cdb16f878d3c6d098d62d5715c0ad3000c \
+ --hash=sha256:cf62dacb3f9234f3fddd74e178e6d25c68f2067fde765f1d95f87b1381248f58 \
+ --hash=sha256:d00df2e4a81ffa56a6b1ec4d2bc29afdcb7f565e0b8cd3092fece2290c4c7a79 \
+ --hash=sha256:d248a109699ce9992304e79c1f8735c82cc4c1386cd8e27027329c0549f248a2 \
+ --hash=sha256:d63def9bbc6b35aef4d76dc740301a4185867e8870cbb8719ec9de672212fca8 \
+ --hash=sha256:d82f20c0060ffdaadaf642b88ab0aa52365b56dffae812e188e5bdb998043588 \
+ --hash=sha256:dbcf5371ea704759fcce772c66a07647751d1f5dbdec7818331c9b31ae996c77 \
+ --hash=sha256:e8914dad106dacb0775718e54bf15e528055c4e92fb2677842996f2d52da5069 \
+ --hash=sha256:ebe303cd9839af69dd1f7942acaa80b1ba90bacef2e7ded9347fbed4f1654672 \
+ --hash=sha256:ec55a81ac2b0f41b8d6fb29aad16e55417036c7563bad5568686931aa4ff08f7 \
+ --hash=sha256:effe182767d102cb65dfbbf74192237dbd22d4191928d59415aa7d7c861d8c88 \
+ --hash=sha256:f42b82f268689f429def9ecfb86fa65ceea0eaf3fed408b570fe113311bf5ce7 \
+ --hash=sha256:f6fe570e20e293eb50491ae14ddeef71a6a7e5f59d7e791393ffa99b13f1f8c2 \
+ --hash=sha256:f799d1d6c33d81e983d3682571cc7d993ae7ff772c19b3aabb767039c33f6d1e \
+ --hash=sha256:f891b98f8bc6c9d521785816085e9657212621e93f223917fb8e32f318b2957e \
+ --hash=sha256:fa263135b892686e11d5b84f6a1892523123a00b7e5882eff4fbdabb38667347 \
+ --hash=sha256:fa4c598ed77f74ec973247ca776341200b0f93ec3883e34c222907ce72cb92a4 \
+ --hash=sha256:fe56659ccadbee97908132135de4b875543353351e0c92e736b7c57aee298b5a \
+ --hash=sha256:fe59a0c21a032024edb0c8e43f5dee5623fef0b65a1e3c1281836d9ce199af3b
+ # via cleo
+requests==2.27.1 \
+ --hash=sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61 \
+ --hash=sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d
+ # via
+ # cachecontrol
+ # poetry
+ # requests-toolbelt
+requests-toolbelt==0.9.1 \
+ --hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \
+ --hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0
+ # via poetry
+shellingham==1.5.0.post1 \
+ --hash=sha256:368bf8c00754fd4f55afb7bbb86e272df77e4dc76ac29dbcbb81a59e9fc15744 \
+ --hash=sha256:823bc5fb5c34d60f285b624e7264f4dda254bc803a3774a147bf99c0e3004a28
+ # via poetry
+six==1.16.0 \
+ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
+ --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
+ # via
+ # html5lib
+ # virtualenv
+tomli==2.0.1 \
+ --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \
+ --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f
+ # via
+ # build
+ # poetry
+ # pyproject-hooks
+tomlkit==0.11.6 \
+ --hash=sha256:07de26b0d8cfc18f871aec595fda24d95b08fef89d147caa861939f37230bf4b \
+ --hash=sha256:71b952e5721688937fb02cf9d354dbcf0785066149d2855e44531ebdd2b65d73
+ # via poetry
+trove-classifiers==2023.3.9 \
+ --hash=sha256:06fd10c95d285e7ddebd59e6a4ba299f03d7417d38d369248a4a40c9754a68fa \
+ --hash=sha256:ee42f2f8c1d4bcfe35f746e472f07633570d485fab45407effc0379270a3bb03
+ # via poetry
+urllib3==1.26.9 \
+ --hash=sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14 \
+ --hash=sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e
+ # via
+ # dulwich
+ # poetry
+ # requests
+virtualenv==20.4.4 \
+ --hash=sha256:09c61377ef072f43568207dc8e46ddeac6bcdcaf288d49011bda0e7f4d38c4a2 \
+ --hash=sha256:a935126db63128861987a7d5d30e23e8ec045a73840eeccb467c148514e29535
+ # via poetry
+webencodings==0.5.1 \
+ --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \
+ --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923
+ # via html5lib
+zipp==3.6.0 \
+ --hash=sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832 \
+ --hash=sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc
+ # via importlib-metadata
diff --git a/python/mozbuild/mozbuild/testing.py b/python/mozbuild/mozbuild/testing.py
new file mode 100644
index 0000000000..f951434f97
--- /dev/null
+++ b/python/mozbuild/mozbuild/testing.py
@@ -0,0 +1,266 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+
+import manifestparser
+import mozpack.path as mozpath
+from mozpack.copier import FileCopier
+from mozpack.manifests import InstallManifest
+
+# These definitions provide a single source of truth for modules attempting
+# to get a view of all tests for a build. Used by the emitter to figure out
+# how to read/install manifests and by test dependency annotations in Files()
+# entries to enumerate test flavors.
+
+# While there are multiple test manifests, the behavior is very similar
+# across them. We enforce this by having common handling of all
+# manifests and outputting a single class type with the differences
+# described inside the instance.
+#
+# Keys are variable prefixes and values are tuples describing how these
+# manifests should be handled:
+#
+# (flavor, install_root, install_subdir, package_tests)
+#
+# flavor identifies the flavor of this test.
+# install_root is the path prefix to install the files starting from the root
+# directory and not as specified by the manifest location. (bug 972168)
+# install_subdir is the path of where to install the files in
+# the tests directory.
+# package_tests indicates whether to package test files into the test
+# package; suites that compile the test files should not install
+# them into the test package.
+#
+TEST_MANIFESTS = dict(
+    A11Y=("a11y", "testing/mochitest", "a11y", True),
+    BROWSER_CHROME=("browser-chrome", "testing/mochitest", "browser", True),
+    ANDROID_INSTRUMENTATION=("instrumentation", "instrumentation", ".", False),
+    FIREFOX_UI_FUNCTIONAL=("firefox-ui-functional", "firefox-ui", ".", False),
+    FIREFOX_UI_UPDATE=("firefox-ui-update", "firefox-ui", ".", False),
+    PYTHON_UNITTEST=("python", "python", ".", False),
+    CRAMTEST=("cram", "cram", ".", False),
+    TELEMETRY_TESTS_CLIENT=(
+        "telemetry-tests-client",
+        "toolkit/components/telemetry/tests/marionette/",
+        ".",
+        False,
+    ),
+    # marionette tests are run from the srcdir
+    # TODO(ato): make packaging work as for other test suites
+    # Note: all three MARIONETTE_* variable prefixes map to the same
+    # "marionette" flavor, so that flavor appears multiple times in
+    # all_test_flavors().
+    MARIONETTE=("marionette", "marionette", ".", False),
+    MARIONETTE_UNIT=("marionette", "marionette", ".", False),
+    MARIONETTE_WEBAPI=("marionette", "marionette", ".", False),
+    MOCHITEST=("mochitest", "testing/mochitest", "tests", True),
+    MOCHITEST_CHROME=("chrome", "testing/mochitest", "chrome", True),
+    WEBRTC_SIGNALLING_TEST=("steeplechase", "steeplechase", ".", True),
+    XPCSHELL_TESTS=("xpcshell", "xpcshell", ".", True),
+    PERFTESTS=("perftest", "testing/perf", "perf", True),
+)
+
+# reftests, wpt, and puppeteer all have their own manifest formats
+# and are processed separately
+REFTEST_FLAVORS = ("crashtest", "reftest")
+PUPPETEER_FLAVORS = ("puppeteer",)
+WEB_PLATFORM_TESTS_FLAVORS = ("web-platform-tests",)
+
+
+def all_test_flavors():
+    """Return a list of every known test flavor name.
+
+    Combines the flavor (first element) of each TEST_MANIFESTS entry
+    with the reftest, puppeteer, and web-platform-tests flavors, which
+    use their own manifest formats. The result may contain duplicates
+    (e.g. "marionette" appears once per MARIONETTE_* manifest key).
+    """
+    return (
+        [v[0] for v in TEST_MANIFESTS.values()]
+        + list(REFTEST_FLAVORS)
+        + list(PUPPETEER_FLAVORS)
+        + list(WEB_PLATFORM_TESTS_FLAVORS)
+    )
+
+
+class TestInstallInfo(object):
+    """Accumulates the install operations needed for test support files.
+
+    Instances are produced by SupportFilesConverter.convert_support_files
+    and can be merged with the ``|=`` operator, which extends the install
+    lists and unions the install sets.
+    """
+
+    def __init__(self):
+        # (field, pattern, out_dir) keys already recorded; used to detect
+        # duplicate support-file entries within a single test.
+        self.seen = set()
+        # (base_dir, pattern, dest) tuples for glob ("*") installs.
+        self.pattern_installs = []
+        # (source, dest) path pairs for plain file installs.
+        self.installs = []
+        # Destination paths for "generated-files" entries — files produced
+        # by the build rather than copied from the srcdir.
+        self.external_installs = set()
+        # "!"-prefixed patterns naming support files in other directories;
+        # presumably resolved later by the build backend — see callers.
+        self.deferred_installs = set()
+
+    def __ior__(self, other):
+        # In-place merge of another TestInstallInfo. Note that ``seen``
+        # is deliberately not merged: it only tracks duplicates within
+        # one test object, not across merged infos.
+        self.pattern_installs.extend(other.pattern_installs)
+        self.installs.extend(other.installs)
+        self.external_installs |= other.external_installs
+        self.deferred_installs |= other.deferred_installs
+        return self
+
+
+class SupportFilesConverter(object):
+    """Processes a "support-files" entry from a test object, either from
+    a parsed test manifest or its representation in moz.build, and
+    returns the installs to perform for this test object.
+
+    Processing the same support files multiple times will not have any further
+    effect, and the structure of the parsed objects from manifests will have a
+    lot of repeated entries, so this class takes care of memoizing.
+    """
+
+    def __init__(self):
+        # Per-field sets of (field, pattern, out_dir) keys that have
+        # already been converted; entries shared across tests (e.g. via
+        # manifest defaults) are therefore processed only once globally.
+        self._fields = (
+            ("head", set()),
+            ("support-files", set()),
+            ("generated-files", set()),
+        )
+
+    def convert_support_files(self, test, install_root, manifest_dir, out_dir):
+        """Return a TestInstallInfo describing installs for one test object.
+
+        Arguments:
+        test - The test object to process.
+        install_root - The directory under $objdir/_tests that will contain
+            the tests for this harness (examples are "testing/mochitest",
+            "xpcshell").
+        manifest_dir - Absolute path to the (srcdir) directory containing the
+            manifest that included this test.
+        out_dir - The path relative to $objdir/_tests used as the destination
+            for the test, based on the relative path to the manifest in the
+            srcdir and the install_root.
+
+        Raises ValueError if the same pattern appears more than once under
+        the same field for a single test.
+        """
+        info = TestInstallInfo()
+        for field, seen in self._fields:
+            value = test.get(field, "")
+            for pattern in value.split():
+
+                # We track uniqueness locally (per test) where duplicates are forbidden,
+                # and globally, where they are permitted. If a support file appears multiple
+                # times for a single test, there are unnecessary entries in the manifest. But
+                # many entries will be shared across tests that share defaults.
+                key = field, pattern, out_dir
+                if key in info.seen:
+                    raise ValueError(
+                        "%s appears multiple times in a test manifest under a %s field,"
+                        " please omit the duplicate entry." % (pattern, field)
+                    )
+                info.seen.add(key)
+                if key in seen:
+                    # Already converted for an earlier test; skip the
+                    # redundant work but keep the per-test duplicate check.
+                    continue
+                seen.add(key)
+
+                if field == "generated-files":
+                    info.external_installs.add(
+                        mozpath.normpath(mozpath.join(out_dir, pattern))
+                    )
+                # '!' indicates our syntax for inter-directory support file
+                # dependencies. These receive special handling in the backend.
+                elif pattern[0] == "!":
+                    info.deferred_installs.add(pattern)
+                # We only support globbing on support-files because
+                # the harness doesn't support * for head.
+                elif "*" in pattern and field == "support-files":
+                    info.pattern_installs.append((manifest_dir, pattern, out_dir))
+                # "absolute" paths identify files that are to be
+                # placed in the install_root directory (no globs)
+                elif pattern[0] == "/":
+                    full = mozpath.normpath(
+                        mozpath.join(manifest_dir, mozpath.basename(pattern))
+                    )
+                    info.installs.append(
+                        (full, mozpath.join(install_root, pattern[1:]))
+                    )
+                else:
+                    full = mozpath.normpath(mozpath.join(manifest_dir, pattern))
+                    dest_path = mozpath.join(out_dir, pattern)
+
+                    # If the path resolves to a different directory
+                    # tree, we take special behavior depending on the
+                    # entry type.
+                    if not full.startswith(manifest_dir):
+                        # If it's a support file, we install the file
+                        # into the current destination directory.
+                        # This implementation makes installing things
+                        # with custom prefixes impossible. If this is
+                        # needed, we can add support for that via a
+                        # special syntax later.
+                        if field == "support-files":
+                            dest_path = mozpath.join(out_dir, os.path.basename(pattern))
+                        # If it's not a support file, we ignore it.
+                        # This preserves old behavior so things like
+                        # head files don't get installed multiple
+                        # times.
+                        else:
+                            continue
+                    info.installs.append((full, mozpath.normpath(dest_path)))
+        return info
+
+
def install_test_files(topsrcdir, topobjdir, tests_root):
    """Install the requested test files into the objdir.

    Test runners invoke this so that only the files needed for the tests
    being run are installed, instead of copying tens of thousands of test
    files up front.
    """
    install_manifest_dir = mozpath.join(topobjdir, "_build_manifests", "install")

    combined = InstallManifest(mozpath.join(install_manifest_dir, "_test_files"))

    harness_files_manifest = mozpath.join(install_manifest_dir, tests_root)
    if os.path.isfile(harness_files_manifest):
        # The backend may generate a dedicated install manifest for test
        # harness files. Those are treated as a monolith and installed on
        # every test run; fortunately there are not very many of them.
        combined |= InstallManifest(harness_files_manifest)

    copier = FileCopier()
    combined.populate_registry(copier)
    copier.copy(mozpath.join(topobjdir, tests_root), remove_unaccounted=False)
+
+
# Convenience methods for test manifest reading.
def read_manifestparser_manifest(context, manifest_path):
    """Parse a manifestparser (.ini-style) test manifest rooted at topsrcdir."""
    return manifestparser.TestManifest(
        manifests=[manifest_path.full_path],
        strict=True,
        rootdir=context.config.topsrcdir,
        finder=context._finder,
        handle_defaults=False,
    )
+
+
def read_reftest_manifest(context, manifest_path):
    """Load a reftest manifest via the in-tree reftest module."""
    import reftest

    rv = reftest.ReftestManifest(finder=context._finder)
    rv.load(manifest_path.full_path)
    return rv
+
+
def read_wpt_manifest(context, paths):
    """Load a web-platform-tests manifest.

    Arguments:
        context - moz.build reader context supplying ``srcdir``,
            ``config.topsrcdir`` and the file ``_finder``.
        paths - A ``(manifest_path, tests_root)`` tuple; ``manifest_path``
            is relative to the srcdir.

    Returns the parsed wpt manifest, or an empty Manifest if the committed
    manifest's version does not match the parser's.
    """
    manifest_path, tests_root = paths
    full_path = mozpath.normpath(mozpath.join(context.srcdir, manifest_path))
    old_path = sys.path[:]
    try:
        # Setup sys.path to include all the dependencies required to import
        # the web-platform-tests manifest parser. web-platform-tests provides
        # localpaths.py to do the path manipulation, which we execute,
        # providing the __file__ variable so it can resolve the relative
        # paths correctly.
        paths_file = os.path.join(
            context.config.topsrcdir,
            "testing",
            "web-platform",
            "tests",
            "tools",
            "localpaths.py",
        )
        _globals = {"__file__": paths_file}
        # execfile() was removed in Python 3; read and exec the module source
        # explicitly instead (compile() keeps the real filename in tracebacks).
        with open(paths_file) as fh:
            exec(compile(fh.read(), paths_file, "exec"), _globals)
        import manifest as wptmanifest
    finally:
        # Drop whatever localpaths.py pushed onto sys.path; the modules it
        # made importable stay cached in sys.modules.
        sys.path = old_path
    f = context._finder.get(full_path)
    try:
        rv = wptmanifest.manifest.load(tests_root, f)
    except wptmanifest.manifest.ManifestVersionMismatch:
        # If we accidentally end up with a committed manifest that's the wrong
        # version, then return an empty manifest here just to not break the build
        rv = wptmanifest.manifest.Manifest()
    return rv
diff --git a/python/mozbuild/mozbuild/toolchains.py b/python/mozbuild/mozbuild/toolchains.py
new file mode 100644
index 0000000000..c5418089bb
--- /dev/null
+++ b/python/mozbuild/mozbuild/toolchains.py
@@ -0,0 +1,32 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+
+import six
+
+
def toolchain_task_definitions():
    """Load all toolchain task definitions from taskcluster/ci.

    Returns a dict of task definitions keyed by label, augmented with
    ``toolchain-<alias>`` entries for any task declaring a toolchain-alias
    attribute.
    """
    # Don't import globally to allow this module being imported without
    # the taskgraph module being available (e.g. standalone js)
    import gecko_taskgraph  # noqa: triggers override of the `graph_config_schema`
    from taskgraph.generator import load_tasks_for_kind

    params = {"level": os.environ.get("MOZ_SCM_LEVEL", "3")}
    root_dir = os.path.join(
        os.path.dirname(__file__), "..", "..", "..", "taskcluster", "ci"
    )
    tasks = load_tasks_for_kind(params, "toolchain", root_dir=root_dir)

    by_alias = {}
    for task in tasks.values():
        aliases = task.attributes.get("toolchain-alias") or []
        # A bare string alias is treated as a single-element list.
        if isinstance(aliases, six.text_type):
            aliases = [aliases]
        for alias in aliases:
            by_alias["toolchain-{}".format(alias)] = task
    tasks.update(by_alias)

    return tasks
diff --git a/python/mozbuild/mozbuild/util.py b/python/mozbuild/mozbuild/util.py
new file mode 100644
index 0000000000..c1f24445ea
--- /dev/null
+++ b/python/mozbuild/mozbuild/util.py
@@ -0,0 +1,1407 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This file contains miscellaneous utility functions that don't belong anywhere
+# in particular.
+
+import argparse
+import collections
+import collections.abc
+import copy
+import ctypes
+import difflib
+import errno
+import functools
+import hashlib
+import io
+import itertools
+import os
+import re
+import stat
+import sys
+import time
+from collections import OrderedDict
+from io import BytesIO, StringIO
+from pathlib import Path
+
+import six
+from packaging.version import Version
+
# Absolute path to metrics.yaml, resolved relative to this file (it lives two
# directory levels above mozbuild/util.py).
MOZBUILD_METRICS_PATH = os.path.abspath(
    os.path.join(__file__, "..", "..", "metrics.yaml")
)

if sys.platform == "win32":
    # Handles used by mkdir(not_indexed=True) below to mark directories as
    # excluded from Windows content indexing.
    _kernel32 = ctypes.windll.kernel32
    _FILE_ATTRIBUTE_NOT_CONTENT_INDEXED = 0x2000
    system_encoding = "mbcs"
else:
    system_encoding = "utf-8"
+
+
def exec_(object, globals=None, locals=None):
    """Execute *object* in the given namespaces via the exec() builtin.

    Historically this wrapper existed to dodge spurious Python 2.7
    "unqualified exec is not allowed in function" SyntaxErrors raised when
    exec appeared inside nested functions; it is kept for compatibility
    with existing callers.
    """
    exec(object, globals, locals)
+
+
+def _open(path, mode):
+ if "b" in mode:
+ return io.open(path, mode)
+ return io.open(path, mode, encoding="utf-8", newline="\n")
+
+
def hash_file(path, hasher=None):
    """Hash the contents of the file at *path* and return the hex digest.

    A pre-constructed hash object may be supplied via *hasher*; otherwise
    SHA-1 is used. If the default hashing function changes, this may
    invalidate lots of cached data, so don't change it lightly.
    """
    h = hasher or hashlib.sha1()

    with open(path, "rb") as fh:
        # Read in fixed-size chunks so arbitrarily large files don't have to
        # fit in memory.
        for chunk in iter(lambda: fh.read(8192), b""):
            h.update(chunk)

    return h.hexdigest()
+
+
class EmptyValue(six.text_type):
    """A dummy type behaving like both an empty string and an empty sequence.

    It exists solely to back
    :py:class:`mozbuild.frontend.reader.EmptyConfig` and should likely not
    be used elsewhere.
    """

    def __init__(self):
        super(EmptyValue, self).__init__()
+
+
class ReadOnlyNamespace(object):
    """An object whose attributes are fixed at initialization and immutable."""

    def __init__(self, **kwargs):
        # Go through object.__setattr__ since our own __setattr__ refuses.
        for name, value in six.iteritems(kwargs):
            super(ReadOnlyNamespace, self).__setattr__(name, value)

    def __delattr__(self, key):
        raise Exception("Object does not support deletion.")

    def __setattr__(self, key, value):
        raise Exception("Object does not support assignment.")

    def __eq__(self, other):
        if self is other:
            return True
        return hasattr(other, "__dict__") and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)

    def __repr__(self):
        return "<%s %r>" % (self.__class__.__name__, self.__dict__)
+
+
class ReadOnlyDict(dict):
    """A dictionary that rejects all mutation after construction."""

    def __init__(self, *args, **kwargs):
        dict.__init__(self, *args, **kwargs)

    def __delitem__(self, key):
        raise Exception("Object does not support deletion.")

    def __setitem__(self, key, value):
        raise Exception("Object does not support assignment.")

    def update(self, *args, **kwargs):
        raise Exception("Object does not support update.")

    def __copy__(self, *args, **kwargs):
        return ReadOnlyDict(**dict.copy(self, *args, **kwargs))

    def __deepcopy__(self, memo):
        # Deep-copy the values, then freeze the result again.
        copied = {k: copy.deepcopy(v, memo) for k, v in self.items()}
        return ReadOnlyDict(**copied)
+
+
class undefined_default(object):
    """Sentinel type for "argument value not provided", distinct from None."""


# The shared sentinel instance; compare against it with ``is``.
undefined = undefined_default()
+
+
class ReadOnlyDefaultDict(ReadOnlyDict):
    """A read-only dictionary that lazily fills in default values on lookup."""

    def __init__(self, default_factory, *args, **kwargs):
        ReadOnlyDict.__init__(self, *args, **kwargs)
        self._default_factory = default_factory

    def __missing__(self, key):
        # Cache the default so repeated lookups return the same object.
        # Go through dict.__setitem__ because our own __setitem__ refuses.
        default = self._default_factory()
        dict.__setitem__(self, key, default)
        return default
+
+
def ensureParentDir(path):
    """Ensure the directory containing *path* exists.

    Creates any missing ancestor directories. An already-existing parent is
    not an error; any other OSError (e.g. permissions) propagates.
    """
    d = os.path.dirname(path)
    # A bare filename has no parent to create. The old code stat-ed
    # os.path.exists(path) here — the file itself, not the parent — which
    # only behaved because EEXIST is swallowed below; just attempt creation.
    if d:
        try:
            os.makedirs(d)
        except OSError as error:
            if error.errno != errno.EEXIST:
                raise
+
+
def mkdir(path, not_indexed=False):
    """Ensure a directory exists.

    If ``not_indexed`` is True, an attribute is set that disables content
    indexing on the directory (Windows and macOS only; a no-op elsewhere).
    """
    try:
        os.makedirs(path)
    except OSError as e:
        # An already-existing directory is fine; anything else propagates.
        if e.errno != errno.EEXIST:
            raise

    if not_indexed:
        if sys.platform == "win32":
            # Pick the wide/ANSI kernel32 entry point to match the string type.
            if isinstance(path, six.string_types):
                fn = _kernel32.SetFileAttributesW
            else:
                fn = _kernel32.SetFileAttributesA

            fn(path, _FILE_ATTRIBUTE_NOT_CONTENT_INDEXED)
        elif sys.platform == "darwin":
            # An empty marker file opts the directory out of indexing.
            with open(os.path.join(path, ".metadata_never_index"), "a"):
                pass
+
+
def simple_diff(filename, old_lines, new_lines):
    """Return the unified diff between old_lines and new_lines as an
    iterator of lines.

    old_lines and new_lines are lists of non-newline-terminated lines to
    compare. Passing None for old_lines denotes a file creation; None for
    new_lines denotes a deletion (the missing side is shown as /dev/null).
    """
    from_file = filename if old_lines is not None else "/dev/null"
    to_file = filename if new_lines is not None else "/dev/null"

    return difflib.unified_diff(
        old_lines or [], new_lines or [], from_file, to_file, n=4, lineterm=""
    )
+
+
class FileAvoidWrite(BytesIO):
    """File-like object that buffers output and only writes if content changed.

    We create an instance from an existing filename. New content is written to
    it. When we close the file object, if the content in the in-memory buffer
    differs from what is on disk, then we write out the new content. Otherwise,
    the original file is untouched.

    Instances can optionally capture diffs of file changes. This feature is not
    enabled by default because it a) doesn't make sense for binary files b)
    could add unwanted overhead to calls.

    Additionally, there is dry run mode where the file is not actually written
    out, but reports whether the file was existing and would have been updated
    still occur, as well as diff capture if requested.
    """

    def __init__(self, filename, capture_diff=False, dry_run=False, readmode="r"):
        BytesIO.__init__(self)
        # Path the buffered content will be compared against / written to.
        self.name = filename
        assert type(capture_diff) == bool
        assert type(dry_run) == bool
        assert "r" in readmode
        self._capture_diff = capture_diff
        self._write_to_file = not dry_run
        self.diff = None
        self.mode = readmode
        self._binary_mode = "b" in readmode

    def write(self, buf):
        # Accept str or bytes; the underlying buffer always stores bytes.
        BytesIO.write(self, six.ensure_binary(buf))

    def avoid_writing_to_file(self):
        # Switch to dry-run behavior after construction.
        self._write_to_file = False

    def close(self):
        """Stop accepting writes, compare file contents, and rewrite if needed.

        Returns a tuple of bools indicating what action was performed:

            (file existed, file updated)

        If ``capture_diff`` was specified at construction time and the
        underlying file was changed, ``.diff`` will be populated with the diff
        of the result.
        """
        # Use binary data if the caller explicitly asked for it.
        ensure = six.ensure_binary if self._binary_mode else six.ensure_text
        buf = ensure(self.getvalue())

        BytesIO.close(self)
        existed = False
        old_content = None

        try:
            existing = _open(self.name, self.mode)
            existed = True
        except IOError:
            # The file doesn't exist (or can't be opened); treat this as a
            # fresh write below.
            pass
        else:
            try:
                old_content = existing.read()
                if old_content == buf:
                    # Contents unchanged: leave the file untouched.
                    return True, False
            except IOError:
                pass
            finally:
                existing.close()

        if self._write_to_file:
            ensureParentDir(self.name)
            # Maintain 'b' if specified. 'U' only applies to modes starting with
            # 'r', so it is dropped.
            writemode = "w"
            if self._binary_mode:
                writemode += "b"
                buf = six.ensure_binary(buf)
            else:
                buf = six.ensure_text(buf)
            with _open(self.name, writemode) as file:
                file.write(buf)

        self._generate_diff(buf, old_content)

        return existed, True

    def _generate_diff(self, new_content, old_content):
        """Generate a diff for the changed contents if `capture_diff` is True.

        If the changed contents could not be decoded as utf-8 then generate a
        placeholder message instead of a diff.

        Args:
            new_content: Str or bytes holding the new file contents.
            old_content: Str or bytes holding the original file contents. Should be
                None if no old content is being overwritten.
        """
        if not self._capture_diff:
            return

        try:
            if old_content is None:
                old_lines = None
            else:
                if self._binary_mode:
                    # difflib doesn't work with bytes.
                    old_content = old_content.decode("utf-8")

                old_lines = old_content.splitlines()

            if self._binary_mode:
                # difflib doesn't work with bytes.
                new_content = new_content.decode("utf-8")

            new_lines = new_content.splitlines()

            self.diff = simple_diff(self.name, old_lines, new_lines)
        # FileAvoidWrite isn't unicode/bytes safe. So, files with non-ascii
        # content or opened and written in different modes may involve
        # implicit conversion and this will make Python unhappy. Since
        # diffing isn't a critical feature, we just ignore the failure.
        # This can go away once FileAvoidWrite uses io.BytesIO and
        # io.StringIO. But that will require a lot of work.
        except (UnicodeDecodeError, UnicodeEncodeError):
            self.diff = ["Binary or non-ascii file changed: %s" % self.name]

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        # Idempotent: close() may already have been called explicitly.
        if not self.closed:
            self.close()
+
+
def resolve_target_to_make(topobjdir, target):
    r"""
    Resolve `target` (a target, directory, or file) to a make target.

    `topobjdir` is the object directory; all make targets will be
    rooted at or below the top-level Makefile in this directory.

    Returns a pair `(reldir, target)` where `reldir` is a directory
    relative to `topobjdir` containing a Makefile and `target` is a
    make target (possibly `None`).

    A directory resolves to the nearest directory at or above
    containing a Makefile, and target `None`.

    A regular (non-Makefile) file resolves to the nearest directory at
    or above the file containing a Makefile, and an appropriate
    target.

    A Makefile resolves to the nearest parent strictly above the
    Makefile containing a different Makefile, and an appropriate
    target.
    """

    # Normalize to forward slashes and strip any leading separator so the
    # target is always relative.
    target = target.replace(os.sep, "/").lstrip("/")
    abs_target = os.path.join(topobjdir, target)

    # For directories, run |make -C dir|. If the directory does not
    # contain a Makefile, check parents until we find one. At worst,
    # this will terminate at the root.
    # NOTE(review): "terminate at the root" assumes some ancestor — at worst
    # topobjdir itself — contains a Makefile; nothing below re-checks that.
    if os.path.isdir(abs_target):
        current = abs_target

        while True:
            make_path = os.path.join(current, "Makefile")
            if os.path.exists(make_path):
                # Return the path relative to topobjdir (skip the separator).
                return (current[len(topobjdir) + 1 :], None)

            current = os.path.dirname(current)

    # If it's not in a directory, this is probably a top-level make
    # target. Treat it as such.
    if "/" not in target:
        return (None, target)

    # We have a relative path within the tree. We look for a Makefile
    # as far into the path as possible. Then, we compute the make
    # target as relative to that directory.
    reldir = os.path.dirname(target)
    target = os.path.basename(target)

    while True:
        make_path = os.path.join(topobjdir, reldir, "Makefile")

        # We append to target every iteration, so the check below
        # happens exactly once.
        if target != "Makefile" and os.path.exists(make_path):
            return (reldir, target)

        # Move one level up, folding the directory name into the target.
        target = os.path.join(os.path.basename(reldir), target)
        reldir = os.path.dirname(reldir)
+
+
class List(list):
    """A list subclass specialized for moz.build environments.

    Assignment and append operations only accept list operands, so that
    e.g. appending a bare string — which would otherwise add each character
    individually — fails loudly instead of silently.
    """

    def __init__(self, iterable=None, **kwargs):
        iterable = [] if iterable is None else iterable
        if not isinstance(iterable, list):
            raise ValueError("List can only be created from other list instances.")

        self._kwargs = kwargs
        super(List, self).__init__(iterable)

    def extend(self, l):
        if not isinstance(l, list):
            raise ValueError("List can only be extended with other list instances.")

        return super(List, self).extend(l)

    def __setitem__(self, key, val):
        if isinstance(key, slice):
            if not isinstance(val, list):
                raise ValueError(
                    "List can only be sliced with other list " "instances."
                )
            if key.step:
                raise ValueError("List cannot be sliced with a nonzero step " "value")
            # Python 2 and Python 3 do this differently for some reason.
            if six.PY2:
                return super(List, self).__setslice__(key.start, key.stop, val)
            return super(List, self).__setitem__(key, val)
        return super(List, self).__setitem__(key, val)

    def __setslice__(self, i, j, sequence):
        return self.__setitem__(slice(i, j), sequence)

    def __add__(self, other):
        # None and EmptyValue are tolerated so that references to undefined
        # moz.build variables behave gracefully.
        other = [] if isinstance(other, (type(None), EmptyValue)) else other
        if not isinstance(other, list):
            raise ValueError("Only lists can be appended to lists.")

        combined = self.__class__(self, **self._kwargs)
        combined.extend(other)
        return combined

    def __iadd__(self, other):
        other = [] if isinstance(other, (type(None), EmptyValue)) else other
        if not isinstance(other, list):
            raise ValueError("Only lists can be appended to lists.")

        return super(List, self).__iadd__(other)
+
+
class UnsortedError(Exception):
    """Raised when a sequence that must be sorted arrives out of order.

    Records the sorted form, the original, and ``i`` — the index of the
    first out-of-place element.
    """

    def __init__(self, srtd, original):
        assert len(srtd) == len(original)

        self.sorted = srtd
        self.original = original

        # Find the first position where the two sequences disagree.
        for i, (s, orig) in enumerate(zip(srtd, original)):
            if orig != s:
                self.i = i
                break

    def __str__(self):
        parts = [
            "An attempt was made to add an unsorted sequence to a list. ",
            "The incoming list is unsorted starting at element %d. " % self.i,
            'We expected "%s" but got "%s"'
            % (self.sorted[self.i], self.original[self.i]),
        ]
        return "".join(parts)
+
+
class StrictOrderingOnAppendList(List):
    """A list specialized for moz.build environments.

    All incoming elements must already be in sorted order (case-insensitive,
    tuples keyed on their first element); this enforces cleaner style in
    moz.build files.
    """

    @staticmethod
    def ensure_sorted(l):
        # An existing instance was already validated on the way in.
        if isinstance(l, StrictOrderingOnAppendList):
            return

        def sort_key(e):
            # Tuples sort on their first element.
            first = e[0] if isinstance(e, tuple) else e
            return first.lower()

        srtd = sorted(l, key=sort_key)

        if srtd != l:
            raise UnsortedError(srtd, l)

    def __init__(self, iterable=None, **kwargs):
        iterable = [] if iterable is None else iterable
        StrictOrderingOnAppendList.ensure_sorted(iterable)
        super(StrictOrderingOnAppendList, self).__init__(iterable, **kwargs)

    def extend(self, l):
        StrictOrderingOnAppendList.ensure_sorted(l)
        return super(StrictOrderingOnAppendList, self).extend(l)

    def __setitem__(self, key, val):
        if isinstance(key, slice):
            StrictOrderingOnAppendList.ensure_sorted(val)
        return super(StrictOrderingOnAppendList, self).__setitem__(key, val)

    def __add__(self, other):
        StrictOrderingOnAppendList.ensure_sorted(other)
        return super(StrictOrderingOnAppendList, self).__add__(other)

    def __iadd__(self, other):
        StrictOrderingOnAppendList.ensure_sorted(other)
        return super(StrictOrderingOnAppendList, self).__iadd__(other)
+
+
class ImmutableStrictOrderingOnAppendList(StrictOrderingOnAppendList):
    """A StrictOrderingOnAppendList that additionally forbids all mutation."""

    def append(self, elt):
        raise Exception("cannot use append on this type")

    def extend(self, iterable):
        raise Exception("cannot use extend on this type")

    def __iadd__(self, other):
        raise Exception("cannot use += on this type")

    def __setitem__(self, i, elt):
        raise Exception("cannot assign to indexes on this type")

    def __setslice__(self, i, j, iterable):
        raise Exception("cannot assign to slices on this type")
+
+
class StrictOrderingOnAppendListWithAction(StrictOrderingOnAppendList):
    """An ordered list that applies a callable to each incoming item.

    The callable (action), given at construction, is run on every item of
    input; what is stored is the action's result for each item.
    """

    def __init__(self, iterable=(), action=None):
        if not callable(action):
            raise ValueError(
                "A callable action is required to construct "
                "a StrictOrderingOnAppendListWithAction"
            )

        self._action = action
        if not isinstance(iterable, (tuple, list)):
            raise ValueError(
                "StrictOrderingOnAppendListWithAction can only be initialized "
                "with another list"
            )
        super(StrictOrderingOnAppendListWithAction, self).__init__(
            [self._action(item) for item in iterable], action=action
        )

    def extend(self, l):
        if not isinstance(l, list):
            raise ValueError(
                "StrictOrderingOnAppendListWithAction can only be extended "
                "with another list"
            )
        transformed = [self._action(item) for item in l]
        return super(StrictOrderingOnAppendListWithAction, self).extend(transformed)

    def __setitem__(self, key, val):
        if isinstance(key, slice):
            if not isinstance(val, list):
                raise ValueError(
                    "StrictOrderingOnAppendListWithAction can only be sliced "
                    "with another list"
                )
            val = [self._action(item) for item in val]
        return super(StrictOrderingOnAppendListWithAction, self).__setitem__(key, val)

    def __add__(self, other):
        if not isinstance(other, list):
            raise ValueError(
                "StrictOrderingOnAppendListWithAction can only be added with "
                "another list"
            )
        return super(StrictOrderingOnAppendListWithAction, self).__add__(other)

    def __iadd__(self, other):
        if not isinstance(other, list):
            raise ValueError(
                "StrictOrderingOnAppendListWithAction can only be added with "
                "another list"
            )
        return super(StrictOrderingOnAppendListWithAction, self).__iadd__(
            [self._action(item) for item in other]
        )
+
+
class MozbuildDeletionError(Exception):
    # Raised by the moz.build container types in this module when code
    # attempts to delete one of their attributes.
    pass
+
+
def FlagsFactory(flags):
    """Returns a class which holds optional flags for an item in a list.

    The flags are defined in the dict given as argument, where keys are
    the flag names, and values the type used for the value of that flag.
    Unset flags lazily default to ``type()`` (e.g. False for bool, "" for
    str) on first access.

    The resulting class is used by the various <TypeName>WithFlagsFactory
    functions below.
    """
    assert isinstance(flags, dict)
    assert all(isinstance(v, type) for v in flags.values())

    class Flags(object):
        __slots__ = flags.keys()
        _flags = flags

        def update(self, **kwargs):
            for k, v in kwargs.items():
                setattr(self, k, v)

        def __getattr__(self, name):
            if name not in self.__slots__:
                raise AttributeError(
                    "'%s' object has no attribute '%s'"
                    % (self.__class__.__name__, name)
                )
            # __getattr__ only runs when normal lookup fails, i.e. when the
            # slot has never been assigned: lazily create the typed default.
            # (The previous code called the nonexistent object.__getattr__,
            # which always raised AttributeError and fell through to this
            # same default-creation path.)
            value = self._flags[name]()
            self.__setattr__(name, value)
            return value

        def __setattr__(self, name, value):
            if name not in self.__slots__:
                raise AttributeError(
                    "'%s' object has no attribute '%s'"
                    % (self.__class__.__name__, name)
                )
            if not isinstance(value, self._flags[name]):
                raise TypeError(
                    "'%s' attribute of class '%s' must be '%s'"
                    % (name, self.__class__.__name__, self._flags[name].__name__)
                )
            return object.__setattr__(self, name, value)

        def __delattr__(self, name):
            raise MozbuildDeletionError("Unable to delete attributes for this object")

    return Flags
+
+
class StrictOrderingOnAppendListWithFlags(StrictOrderingOnAppendList):
    """Shared base for sorted moz.build lists whose items carry typed flags.

    Concrete subclasses are produced by
    StrictOrderingOnAppendListWithFlagsFactory below; this common base lets
    callers test for the whole family with a single `isinstance` check.
    """
+
+
def StrictOrderingOnAppendListWithFlagsFactory(flags):
    """Returns a StrictOrderingOnAppendList-like object, with optional
    flags on each item.

    The flags are defined in the dict given as argument, where keys are
    the flag names, and values the type used for the value of that flag.

    Example:

    .. code-block:: python

        FooList = StrictOrderingOnAppendListWithFlagsFactory({
            'foo': bool, 'bar': unicode
        })
        foo = FooList(['a', 'b', 'c'])
        foo['a'].foo = True
        foo['b'].bar = 'bar'
    """

    class StrictOrderingOnAppendListWithFlagsSpecialization(
        StrictOrderingOnAppendListWithFlags
    ):
        def __init__(self, iterable=None):
            if iterable is None:
                iterable = []
            StrictOrderingOnAppendListWithFlags.__init__(self, iterable)
            # Lazily-populated mapping of item -> Flags instance.
            self._flags_type = FlagsFactory(flags)
            self._flags = dict()

        def __getitem__(self, name):
            # Indexing by item returns that item's flags object, creating
            # it on first access; unknown items raise KeyError.
            if name not in self._flags:
                if name not in self:
                    raise KeyError("'%s'" % name)
                self._flags[name] = self._flags_type()
            return self._flags[name]

        def __setitem__(self, name, value):
            # Only whole-slice assignment is supported; single-item
            # assignment would bypass the flag bookkeeping below.
            if not isinstance(name, slice):
                raise TypeError(
                    "'%s' object does not support item assignment"
                    % self.__class__.__name__
                )
            result = super(
                StrictOrderingOnAppendListWithFlagsSpecialization, self
            ).__setitem__(name, value)
            # We may have removed items.
            for k in set(self._flags.keys()) - set(self):
                del self._flags[k]
            if isinstance(value, StrictOrderingOnAppendListWithFlags):
                self._update_flags(value)
            return result

        def _update_flags(self, other):
            if self._flags_type._flags != other._flags_type._flags:
                raise ValueError(
                    "Expected a list of strings with flags like %s, not like %s"
                    % (self._flags_type._flags, other._flags_type._flags)
                )
            intersection = set(self._flags.keys()) & set(other._flags.keys())
            if intersection:
                raise ValueError(
                    "Cannot update flags: both lists of strings with flags configure %s"
                    % intersection
                )
            self._flags.update(other._flags)

        def extend(self, l):
            result = super(
                StrictOrderingOnAppendListWithFlagsSpecialization, self
            ).extend(l)
            if isinstance(l, StrictOrderingOnAppendListWithFlags):
                self._update_flags(l)
            return result

        def __add__(self, other):
            result = super(
                StrictOrderingOnAppendListWithFlagsSpecialization, self
            ).__add__(other)
            if isinstance(other, StrictOrderingOnAppendListWithFlags):
                # Result has flags from other but not from self, since
                # internally we duplicate self and then extend with other, and
                # only extend knows about flags. Since we don't allow updating
                # when the set of flag keys intersect, which we instance we pass
                # to _update_flags here matters. This needs to be correct but
                # is an implementation detail.
                result._update_flags(self)
            return result

        def __iadd__(self, other):
            result = super(
                StrictOrderingOnAppendListWithFlagsSpecialization, self
            ).__iadd__(other)
            if isinstance(other, StrictOrderingOnAppendListWithFlags):
                self._update_flags(other)
            return result

    return StrictOrderingOnAppendListWithFlagsSpecialization
+
+
class HierarchicalStringList(object):
    """A hierarchy of lists of strings.

    Each instance of this object contains a list of strings, which can be set or
    appended to. A sub-level of the hierarchy is also an instance of this class,
    can be added by appending to an attribute instead.

    For example, the moz.build variable EXPORTS is an instance of this class. We
    can do:

    EXPORTS += ['foo.h']
    EXPORTS.mozilla.dom += ['bar.h']

    In this case, we have 3 instances (EXPORTS, EXPORTS.mozilla, and
    EXPORTS.mozilla.dom), and the first and last each have one element in their
    list.
    """

    __slots__ = ("_strings", "_children")

    def __init__(self):
        # Please change ContextDerivedTypedHierarchicalStringList in context.py
        # if you make changes here.
        self._strings = StrictOrderingOnAppendList()
        self._children = {}

    class StringListAdaptor(collections.abc.Sequence):
        # Read-only sequence view over one level's strings; yielded by walk()
        # so consumers can't mutate the underlying list.
        def __init__(self, hsl):
            self._hsl = hsl

        def __getitem__(self, index):
            return self._hsl._strings[index]

        def __len__(self):
            return len(self._hsl._strings)

    def walk(self):
        """Walk over all HierarchicalStringLists in the hierarchy.

        This is a generator of (path, sequence).

        The path is '' for the root level and '/'-delimited strings for
        any descendants. The sequence is a read-only sequence of the
        strings contained at that level.
        """

        if self._strings:
            path_to_here = ""
            yield path_to_here, self.StringListAdaptor(self)

        for k, l in sorted(self._children.items()):
            for p, v in l.walk():
                path_to_there = "%s/%s" % (k, p)
                yield path_to_there.strip("/"), v

    def __setattr__(self, name, value):
        if name in self.__slots__:
            return object.__setattr__(self, name, value)

        # __setattr__ can be called with a list when a simple assignment is
        # used:
        #
        # EXPORTS.foo = ['file.h']
        #
        # In this case, we need to overwrite foo's current list of strings.
        #
        # However, __setattr__ is also called with a HierarchicalStringList
        # to try to actually set the attribute. We want to ignore this case,
        # since we don't actually create an attribute called 'foo', but just add
        # it to our list of children (using _get_exportvariable()).
        self._set_exportvariable(name, value)

    def __getattr__(self, name):
        # Dunder lookups (e.g. copy/pickle protocol probes) must not create
        # child entries.
        if name.startswith("__"):
            return object.__getattr__(self, name)
        return self._get_exportvariable(name)

    def __delattr__(self, name):
        raise MozbuildDeletionError("Unable to delete attributes for this object")

    def __iadd__(self, other):
        if isinstance(other, HierarchicalStringList):
            # Merge: strings at this level, then each child recursively.
            self._strings += other._strings
            for c in other._children:
                self[c] += other[c]
        else:
            self._check_list(other)
            self._strings += other
        return self

    def __getitem__(self, name):
        return self._get_exportvariable(name)

    def __setitem__(self, name, value):
        self._set_exportvariable(name, value)

    def _get_exportvariable(self, name):
        # Please change ContextDerivedTypedHierarchicalStringList in context.py
        # if you make changes here.
        child = self._children.get(name)
        if not child:
            child = self._children[name] = HierarchicalStringList()
        return child

    def _set_exportvariable(self, name, value):
        if name in self._children:
            if value is self._get_exportvariable(name):
                # Re-assignment of the child itself (the += case); ignore.
                return
            raise KeyError("global_ns", "reassign", "<some variable>.%s" % name)

        exports = self._get_exportvariable(name)
        exports._check_list(value)
        exports._strings += value

    def _check_list(self, value):
        if not isinstance(value, list):
            raise ValueError("Expected a list of strings, not %s" % type(value))
        for v in value:
            if not isinstance(v, six.string_types):
                raise ValueError(
                    "Expected a list of strings, not an element of %s" % type(v)
                )
+
+
class LockFile(object):
    """Hold a lock file that is released when this object is destroyed.

    This object should not be used directly, but only through
    the lock_file method below.
    """

    def __init__(self, lockfile):
        # Path of the lock file we own and must remove on release.
        self.lockfile = lockfile

    def __del__(self):
        removed = False
        while not removed:
            try:
                os.remove(self.lockfile)
                removed = True
            except OSError as e:
                if e.errno != errno.EACCES:
                    # Unknown failure: re-raise.
                    raise
                # Another process probably has the file open; retry with a
                # short sleep so the lock is dropped as soon as possible
                # (we just need to let the other process close the file).
                time.sleep(0.1)
+
+
def lock_file(lockfile, max_wait=600):
    """Create and hold a lockfile of the given name, with the given timeout.

    To release the lock, delete the returned object.

    Raises Exception if an existing lock is older than max_wait seconds,
    or if the lock file exists but cannot be inspected.
    """

    # FUTURE This function and object could be written as a context manager.

    while True:
        try:
            fd = os.open(lockfile, os.O_EXCL | os.O_RDWR | os.O_CREAT)
            # We created the lockfile, so we're the owner
            break
        except OSError as e:
            if e.errno == errno.EEXIST or (
                sys.platform == "win32" and e.errno == errno.EACCES
            ):
                pass
            else:
                # Should not occur
                raise

        f = None
        try:
            try:
                # The lock file exists, try to stat it to get its age
                # and read its contents to report the owner PID
                f = open(lockfile, "r")
                s = os.stat(lockfile)
            except EnvironmentError as e:
                if e.errno == errno.ENOENT or e.errno == errno.EACCES:
                    # We didn't create the lockfile, so it did exist, but it's
                    # gone now. Just try again
                    continue

                raise Exception(
                    "{0} exists but stat() failed: {1}".format(lockfile, e.strerror)
                )

            # We didn't create the lockfile and it's still there, check
            # its age
            now = int(time.time())
            if now - s[stat.ST_MTIME] > max_wait:
                pid = f.readline().rstrip()
                raise Exception(
                    "{0} has been locked for more than "
                    "{1} seconds (PID {2})".format(lockfile, max_wait, pid)
                )
        finally:
            # Close the handle on every path; the original leaked it when
            # raising the "locked for more than max_wait" exception.
            if f is not None:
                f.close()

        # It's not been locked too long, wait a while and retry
        time.sleep(1)

    # if we get here. we have the lockfile. Convert the os.open file
    # descriptor into a Python file object and record our PID in it
    with os.fdopen(fd, "w") as f:
        f.write("{0}\n".format(os.getpid()))

    return LockFile(lockfile)
+
+
class OrderedDefaultDict(OrderedDict):
    """An OrderedDict that lazily creates missing values via a factory."""

    def __init__(self, default_factory, *args, **kwargs):
        super(OrderedDefaultDict, self).__init__(*args, **kwargs)
        # Called with no arguments to produce a value for a missing key.
        self._default_factory = default_factory

    def __missing__(self, key):
        # Store the freshly built default so later lookups find it,
        # preserving insertion order.
        value = self[key] = self._default_factory()
        return value
+
+
class KeyedDefaultDict(dict):
    """Like a defaultdict, but the default factory receives the missing key
    as its argument."""

    def __init__(self, default_factory, *args, **kwargs):
        super(KeyedDefaultDict, self).__init__(*args, **kwargs)
        self._default_factory = default_factory

    def __missing__(self, key):
        # Build the default from the key itself, then memoize it.  The raw
        # dict.__setitem__ is used deliberately so subclasses that restrict
        # __setitem__ (e.g. read-only variants) still get lazy defaults.
        result = self._default_factory(key)
        dict.__setitem__(self, key, result)
        return result
+
+
class ReadOnlyKeyedDefaultDict(KeyedDefaultDict, ReadOnlyDict):
    """Like KeyedDefaultDict, but read-only.

    NOTE(review): lazy defaults still work because __missing__ stores via
    dict.__setitem__ directly, which should bypass ReadOnlyDict's write
    protection — confirm against ReadOnlyDict's implementation.
    """
+
+
class memoize(dict):
    """A decorator that caches a function's results keyed on its arguments.

    Works for plain functions and for instance methods; in the instance
    method case the results are cached on the instance itself (in a
    "_<funcname>" dict attribute).
    """

    def __init__(self, func):
        self.func = func
        functools.update_wrapper(self, func)

    def __call__(self, *args):
        # The dict itself is the cache for plain functions.
        try:
            return self[args]
        except KeyError:
            value = self.func(*args)
            self[args] = value
            return value

    def method_call(self, instance, *args):
        # Per-instance cache so entries don't outlive the instance.
        attr = "_%s" % self.func.__name__
        if not hasattr(instance, attr):
            setattr(instance, attr, {})
        cache = getattr(instance, attr)
        if args not in cache:
            cache[args] = self.func(instance, *args)
        return cache[args]

    def __get__(self, instance, cls):
        # Descriptor protocol: bind the method flavor to this instance.
        bound = functools.partial(self.method_call, instance)
        return functools.update_wrapper(bound, self.func)
+
+
class memoized_property(object):
    """A property whose getter runs at most once per instance.

    The computed value is stored on the instance under "_<funcname>" and
    returned directly on subsequent accesses.
    """

    def __init__(self, func):
        self.func = func

    def __get__(self, instance, cls):
        cached_name = "_%s" % self.func.__name__
        if not hasattr(instance, cached_name):
            setattr(instance, cached_name, self.func(instance))
        return getattr(instance, cached_name)
+
+
def TypedNamedTuple(name, fields):
    """Factory for named tuple types with strong typing.

    ``fields`` is an iterable of 2-tuples: (field name, allowed type).  The
    allowed type may itself be a tuple of types, as for ``isinstance()``.

    Construction of instances varies from ``collections.namedtuple`` in two
    ways.  First, a single tuple argument is unpacked as if its items had
    been passed positionally::

        t = (1, 2)
        TypedTuple(t) == TypedTuple(1, 2)

    This behavior is meant for moz.build files, so vanilla tuples are
    automatically cast to typed tuple instances.

    Second, every field value is validated with ``isinstance()`` against its
    declared type; a TypeError is raised on mismatch.
    """
    base = collections.namedtuple(name, [fname for fname, _ in fields])

    class TypedTuple(base):
        __slots__ = ()

        def __new__(klass, *args, **kwargs):
            # A lone tuple argument supplies all fields positionally.
            if len(args) == 1 and not kwargs and isinstance(args[0], tuple):
                args = args[0]

            return super(TypedTuple, klass).__new__(klass, *args, **kwargs)

        def __init__(self, *args, **kwargs):
            # self._fields is the (name, type) pair list assigned below,
            # not namedtuple's plain tuple of names.
            for index, (fname, ftype) in enumerate(self._fields):
                value = self[index]

                if not isinstance(value, ftype):
                    raise TypeError(
                        "field in tuple not of proper type: %s; "
                        "got %s, expected %s" % (fname, type(value), ftype)
                    )

    TypedTuple._fields = fields

    return TypedTuple
+
+
@memoize
def TypedList(type, base_class=List):
    """A list with type coercion.

    The given ``type`` is what list elements are being coerced to. It may do
    strict validation, throwing ValueError exceptions.

    A ``base_class`` type can be given for more specific uses than a List. For
    example, a Typed StrictOrderingOnAppendList can be created with:

       TypedList(unicode, StrictOrderingOnAppendList)
    """

    class _TypedList(base_class):
        @staticmethod
        def normalize(e):
            # Coerce a single element, leaving already-typed values alone.
            return e if isinstance(e, type) else type(e)

        def _ensure_type(self, l):
            # A value of our own class was already coerced element-wise.
            if isinstance(l, self.__class__):
                return l

            return [self.normalize(e) for e in l]

        def __init__(self, iterable=None, **kwargs):
            iterable = self._ensure_type(iterable if iterable is not None else [])

            super(_TypedList, self).__init__(iterable, **kwargs)

        def extend(self, l):
            return super(_TypedList, self).extend(self._ensure_type(l))

        def __setitem__(self, key, val):
            return super(_TypedList, self).__setitem__(key, self._ensure_type(val))

        def __add__(self, other):
            return super(_TypedList, self).__add__(self._ensure_type(other))

        def __iadd__(self, other):
            return super(_TypedList, self).__iadd__(self._ensure_type(other))

        def append(self, other):
            # Route through __iadd__ so coercion and any base-class
            # semantics (e.g. strict ordering) both apply.
            self += [other]

    return _TypedList
+
+
def group_unified_files(files, unified_prefix, unified_suffix, files_per_unified_file):
    """Return an iterator of (unified_filename, source_filenames) tuples.

    We compile most C and C++ files in "unified mode"; instead of compiling
    ``a.cpp``, ``b.cpp``, and ``c.cpp`` separately, we compile a single file
    that looks approximately like::

       #include "a.cpp"
       #include "b.cpp"
       #include "c.cpp"

    This function handles the details of generating names for the unified
    files, and determining which original source files go in which unified
    file."""

    # Our last returned list of source filenames may be short, and we
    # don't want the fill value inserted by zip_longest to be an
    # issue. So we do a little dance to filter it out ourselves.
    dummy_fill_value = ("dummy",)

    # From the itertools documentation, slightly modified.  six.moves was
    # dropped: this module is Python-3-only (it uses f-strings), so
    # itertools.zip_longest is always available.
    def grouper(n, iterable):
        "grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx"
        args = [iter(iterable)] * n
        return itertools.zip_longest(fillvalue=dummy_fill_value, *args)

    for i, unified_group in enumerate(grouper(files_per_unified_file, files)):
        # Drop the padding added to the (possibly short) final group.
        just_the_filenames = [f for f in unified_group if f != dummy_fill_value]
        yield "%s%d.%s" % (unified_prefix, i, unified_suffix), just_the_filenames
+
+
def pair(iterable):
    """Given an iterable, returns an iterable pairing its items.

    For example,
        list(pair([1,2,3,4,5,6]))
    returns
        [(1,2), (3,4), (5,6)]

    An odd trailing item is paired with None.
    """
    i = iter(iterable)
    # Zipping an iterator with itself walks it two items at a time;
    # zip_longest pads the final pair with None when the length is odd.
    # (six.moves was dropped: this module is Python-3-only.)
    return itertools.zip_longest(i, i)
+
+
def pairwise(iterable):
    """Given an iterable, returns an iterable of overlapped pairs of
    its items. Based on the Python itertools documentation.

    For example,
        list(pairwise([1,2,3,4,5,6]))
    returns
        [(1,2), (2,3), (3,4), (4,5), (5,6)]
    """
    left, right = itertools.tee(iterable)
    # Advance the second stream one step so the two overlap.
    next(right, None)
    return zip(left, right)
+
+
# Raw string: "\$" is an invalid escape sequence in an ordinary Python string
# (a SyntaxWarning on modern interpreters); the regex engine needs the
# backslashes literally.
VARIABLES_RE = re.compile(r"\$\((\w+)\)")


def expand_variables(s, variables):
    """Given a string with $(var) variable references, replace those references
    with the corresponding entries from the given `variables` dict.

    If a variable value is not a string, it is iterated and its items are
    joined with a whitespace."""
    result = ""
    # re.split with a capturing group alternates literal text and captured
    # variable names; pair() walks that flat list two items at a time.
    for literal, name in pair(VARIABLES_RE.split(s)):
        result += literal
        value = variables.get(name)
        if not value:
            continue
        if not isinstance(value, str):
            value = " ".join(value)
        result += value
    return result
+
+
class DefinesAction(argparse.Action):
    """An ArgumentParser action to handle -Dvar[=value] type of arguments."""

    def __call__(self, parser, namespace, values, option_string):
        defines = getattr(namespace, self.dest)
        if defines is None:
            defines = {}
        # Split only at the first '=': a bare -DNAME defines NAME as 1, and
        # an all-digit value is stored as an int.
        name, sep, value = values.partition("=")
        if not sep:
            value = 1
        elif value.isdigit():
            value = int(value)
        defines[name] = value
        setattr(namespace, self.dest, defines)
+
+
class EnumStringComparisonError(Exception):
    """Raised when an EnumString is compared against a value outside its
    POSSIBLE_VALUES set (which would otherwise silently compare unequal)."""

    pass
+
+
class EnumString(str):
    """A string type that only can have a limited set of values, similarly to
    an Enum, and can only be compared against that set of values.

    The class is meant to be subclassed, where the subclass defines
    POSSIBLE_VALUES. The `subclass` method is a helper to create such
    subclasses.

    The base class is plain ``str``: this module is Python-3-only (it uses
    f-strings), so the six.text_type indirection was dead weight.
    """

    # Subclasses override this with the tuple of allowed values.
    POSSIBLE_VALUES = ()

    def __init__(self, value):
        # The string value itself is set by str.__new__; we only validate.
        if value not in self.POSSIBLE_VALUES:
            raise ValueError(
                "'%s' is not a valid value for %s" % (value, self.__class__.__name__)
            )

    def __eq__(self, other):
        # Comparing against a value outside the allowed set is almost
        # certainly a typo at the call site, so fail loudly rather than
        # silently returning False.
        if other not in self.POSSIBLE_VALUES:
            raise EnumStringComparisonError(
                "Can only compare with %s"
                % ", ".join("'%s'" % v for v in self.POSSIBLE_VALUES)
            )
        return super(EnumString, self).__eq__(other)

    def __ne__(self, other):
        return not (self == other)

    def __hash__(self):
        # Defining __eq__ suppresses inherited hashing; restore str's.
        return super(EnumString, self).__hash__()

    @staticmethod
    def subclass(*possible_values):
        class EnumStringSubclass(EnumString):
            POSSIBLE_VALUES = possible_values

        return EnumStringSubclass
+
+
def _escape_char(c):
    """Return the single character *c* escaped for a single-quoted string.

    str.encode('unicode_escape') doesn't escape quotes, presumably because
    quoting could be done with either ' or ", so single quotes are handled
    explicitly.
    """
    if c == "'":
        return "\\'"
    # encode() returns bytes; decode back to str.  The previous
    # six.text_type(c.encode("unicode_escape")) produced the b'...' repr of
    # the bytes object on Python 3, corrupting the result.
    return c.encode("unicode_escape").decode("ascii")
+
+
def ensure_bytes(value, encoding="utf-8"):
    """Return *value* as bytes, encoding str input with *encoding*.

    Non-str values are returned unchanged.  (six.text_type was replaced by
    str: this module is Python-3-only.)
    """
    if isinstance(value, str):
        return value.encode(encoding)
    return value
+
+
def ensure_unicode(value, encoding="utf-8"):
    """Return *value* as str, decoding bytes input with *encoding*.

    Non-bytes values are returned unchanged.  (six.binary_type was replaced
    by bytes: this module is Python-3-only.)
    """
    if isinstance(value, bytes):
        return value.decode(encoding)
    return value
+
+
def process_time():
    """Return the current process's CPU time, in seconds.

    The former six.PY2 fallback to time.clock() was dead code: this module
    uses f-strings and therefore cannot even be imported on Python 2.
    """
    return time.process_time()
+
+
def hexdump(buf):
    """
    Returns a list of hexdump-like lines corresponding to the given input
    buffer (a bytes-like object).

    Each line holds a zero-padded offset, up to 16 hex byte values with an
    extra gap after the 8th, and a printable-ASCII rendering of the bytes
    between '|' markers.  (The former ``assert six.PY3`` guard was removed:
    it validated the environment with an assert — stripped under ``-O`` —
    and this module is Python-3-only anyway.)
    """
    # Pad offsets to the width of the largest offset.
    off_format = "%0{}x ".format(len(str(len(buf))))
    lines = []
    for off in range(0, len(buf), 16):
        line = off_format % off
        chunk = buf[off : min(off + 16, len(buf))]
        for n, byte in enumerate(chunk):
            line += " %02x" % byte
            if n == 7:
                line += " "
        # Pad a short final chunk so the ASCII column stays aligned.
        for n in range(len(chunk), 16):
            line += "   "
            if n == 7:
                line += " "
        line += "  |"
        for byte in chunk:
            if byte < 127 and byte >= 32:
                line += chr(byte)
            else:
                line += "."
        for n in range(len(chunk), 16):
            line += " "
        line += "|\n"
        lines.append(line)
    return lines
+
+
def mozilla_build_version():
    """Return the installed MozillaBuild version (as a Version object).

    The VERSION file is located via the MOZILLABUILD environment variable.
    """
    mozilla_build = os.environ.get("MOZILLABUILD")

    version_file = Path(mozilla_build) / "VERSION"

    # Message fix: the original concatenated "that the" directly against
    # "`MOZILLABUILD`" (missing space) and read "is to the correct path".
    assert version_file.exists(), (
        f'The MozillaBuild VERSION file was not found at "{version_file}".\n'
        "Please check if MozillaBuild is installed correctly and that the "
        "`MOZILLABUILD` environment variable is set to the correct path."
    )

    with version_file.open() as file:
        return Version(file.readline().rstrip("\n"))
diff --git a/python/mozbuild/mozbuild/vendor/__init__.py b/python/mozbuild/mozbuild/vendor/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozbuild/vendor/__init__.py
diff --git a/python/mozbuild/mozbuild/vendor/host_angle.py b/python/mozbuild/mozbuild/vendor/host_angle.py
new file mode 100644
index 0000000000..9716c76a24
--- /dev/null
+++ b/python/mozbuild/mozbuild/vendor/host_angle.py
@@ -0,0 +1,37 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import requests
+
+from mozbuild.vendor.host_base import BaseHost
+
+
class AngleHost(BaseHost):
    def upstream_commit(self, revision):
        # Angle is vendored by tag, never by raw commit.
        raise Exception("Should not be called")

    def upstream_tag(self, revision):
        """Map the current Chromium win64 beta channel to an Angle branch."""
        data = requests.get("https://omahaproxy.appspot.com/all.json").json()

        for row in data:
            if row["os"] != "win64":
                continue
            for version in row["versions"]:
                if version["channel"] != "beta":
                    continue
                branch = "chromium/" + version["true_branch"]

                # Only HEAD or the currently-true branch may be requested.
                if revision != "HEAD" and revision != branch:
                    raise Exception(
                        "Passing a --revision for Angle that is not HEAD "
                        + "or the true branch is not supported."
                    )

                return (
                    branch,
                    version["current_reldate"],
                )

        raise Exception("Could not find win64 beta version in the JSON response")

    def upstream_snapshot(self, revision):
        raise Exception("Not supported for Angle")
diff --git a/python/mozbuild/mozbuild/vendor/host_base.py b/python/mozbuild/mozbuild/vendor/host_base.py
new file mode 100644
index 0000000000..2484d82e09
--- /dev/null
+++ b/python/mozbuild/mozbuild/vendor/host_base.py
@@ -0,0 +1,77 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import subprocess
+import tempfile
+import urllib
+
+
class BaseHost:
    """Common logic for querying an upstream host about tags and snapshots."""

    def __init__(self, manifest):
        # Parsed moz.yaml manifest for the vendored library.
        self.manifest = manifest
        self.repo_url = urllib.parse.urlparse(self.manifest["vendoring"]["url"])

    def upstream_tag(self, revision):
        """Temporarily clone the repo to get the latest tag and timestamp.

        With revision == "HEAD" the newest tag (by creation date) is chosen;
        otherwise `revision` must name an existing tag.  Returns a
        (tag, iso8601-timestamp) tuple.
        """
        with tempfile.TemporaryDirectory() as temp_repo_clone:
            starting_directory = os.getcwd()
            os.chdir(temp_repo_clone)
            # Restore the working directory even when a git step fails: the
            # original left the process chdir'd into the (about to be
            # deleted) temporary directory on error, which also breaks
            # TemporaryDirectory cleanup on Windows.
            try:
                subprocess.run(
                    [
                        "git",
                        "clone",
                        "-c",
                        "core.autocrlf=input",
                        self.manifest["vendoring"]["url"],
                        self.manifest["origin"]["name"],
                    ],
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE,
                    universal_newlines=True,
                    check=True,
                )
                os.chdir("/".join([temp_repo_clone, self.manifest["origin"]["name"]]))
                if revision == "HEAD":
                    # Last line of creatordate-sorted output == newest tag.
                    tag = subprocess.run(
                        ["git", "--no-pager", "tag", "--sort=creatordate"],
                        stdout=subprocess.PIPE,
                        stderr=subprocess.PIPE,
                        universal_newlines=True,
                        check=True,
                    ).stdout.splitlines()[-1]
                else:
                    try:
                        tag = subprocess.run(
                            ["git", "--no-pager", "tag", "-l", revision],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            universal_newlines=True,
                            check=True,
                        ).stdout.splitlines()[-1]
                    except IndexError:  # 0 lines of output, the tag does not exist
                        raise Exception(
                            f"Requested tag {revision} not found in source."
                        )

                tag_timestamp = subprocess.run(
                    [
                        "git",
                        "log",
                        "-1",
                        "--date=iso8601-strict",
                        "--format=%ad",
                        tag,
                    ],
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE,
                    universal_newlines=True,
                    check=True,
                ).stdout.splitlines()[-1]
            finally:
                os.chdir(starting_directory)
            return tag, tag_timestamp

    def upstream_snapshot(self, revision):
        """Return a URL for a source archive at `revision` (host-specific)."""
        raise Exception("Unimplemented for this subclass...")

    def upstream_path_to_file(self, revision, filepath):
        """Return a URL for a single file at `revision` (host-specific)."""
        raise Exception("Unimplemented for this subclass...")
diff --git a/python/mozbuild/mozbuild/vendor/host_codeberg.py b/python/mozbuild/mozbuild/vendor/host_codeberg.py
new file mode 100644
index 0000000000..158dd0472d
--- /dev/null
+++ b/python/mozbuild/mozbuild/vendor/host_codeberg.py
@@ -0,0 +1,28 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, # You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import requests
+
+from mozbuild.vendor.host_base import BaseHost
+
+
class CodebergHost(BaseHost):
    def upstream_commit(self, revision):
        """Query the codeberg api for a git commit id and timestamp."""
        endpoint = "{}://{}/api/v1/repos/{}/git/commits/{}".format(
            self.repo_url.scheme,
            self.repo_url.netloc,
            self.repo_url.path[1:],
            revision,
        )
        response = requests.get(endpoint)
        response.raise_for_status()
        info = response.json()
        return (info["sha"], info["created"])

    def upstream_snapshot(self, revision):
        """Return the URL of a tarball of the repo at *revision*."""
        return "{}://{}/api/v1/repos/{}/archive/{}.tar.gz".format(
            self.repo_url.scheme,
            self.repo_url.netloc,
            self.repo_url.path[1:],
            revision,
        )
diff --git a/python/mozbuild/mozbuild/vendor/host_github.py b/python/mozbuild/mozbuild/vendor/host_github.py
new file mode 100644
index 0000000000..eeaa4b9eaf
--- /dev/null
+++ b/python/mozbuild/mozbuild/vendor/host_github.py
@@ -0,0 +1,27 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, # You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import requests
+
+from mozbuild.vendor.host_base import BaseHost
+
+
class GitHubHost(BaseHost):
    def upstream_commit(self, revision):
        """Query the github api for a git commit id and timestamp."""
        repo = self.repo_url.path[1:].strip("/")
        url = "/".join(["https://api.github.com", "repos", repo, "commits", revision])
        response = requests.get(url)
        response.raise_for_status()
        info = response.json()
        return (info["sha"], info["commit"]["committer"]["date"])

    def upstream_snapshot(self, revision):
        # GitHub serves tarballs at <repo url>/archive/<rev>.tar.gz.
        return "/".join(
            [self.manifest["vendoring"]["url"], "archive", revision + ".tar.gz"]
        )

    def upstream_path_to_file(self, revision, filepath):
        repo = self.repo_url.path[1:]
        return "/".join(["https://raw.githubusercontent.com", repo, revision, filepath])
diff --git a/python/mozbuild/mozbuild/vendor/host_gitlab.py b/python/mozbuild/mozbuild/vendor/host_gitlab.py
new file mode 100644
index 0000000000..8bfc3ddc79
--- /dev/null
+++ b/python/mozbuild/mozbuild/vendor/host_gitlab.py
@@ -0,0 +1,26 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, # You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import requests
+
+from mozbuild.vendor.host_base import BaseHost
+
+
class GitLabHost(BaseHost):
    def upstream_commit(self, revision):
        """Query the gitlab api for a git commit id and timestamp."""
        # GitLab's API addresses a project by its URL-encoded "org/repo" path.
        project = self.repo_url.path[1:].replace("/", "%2F")
        gitlab_api = "{}://{}/api/v4/projects/{}/repository/commits".format(
            self.repo_url.scheme, self.repo_url.netloc, project
        )
        response = requests.get("/".join([gitlab_api, revision]))
        response.raise_for_status()
        info = response.json()
        return (info["id"], info["committed_date"])

    def upstream_snapshot(self, revision):
        return "/".join(
            [self.manifest["vendoring"]["url"], "-", "archive", revision + ".tar.gz"]
        )
diff --git a/python/mozbuild/mozbuild/vendor/host_googlesource.py b/python/mozbuild/mozbuild/vendor/host_googlesource.py
new file mode 100644
index 0000000000..c903bd99b5
--- /dev/null
+++ b/python/mozbuild/mozbuild/vendor/host_googlesource.py
@@ -0,0 +1,32 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, # You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import requests
+
+from mozbuild.vendor.host_base import BaseHost
+
+
class GoogleSourceHost(BaseHost):
    def upstream_commit(self, revision):
        """Query for a git commit and timestamp."""
        url = "/".join(
            [self.manifest["vendoring"]["url"], "+", revision + "?format=JSON"]
        )
        response = requests.get(url)
        response.raise_for_status()
        try:
            info = response.json()
        except ValueError:
            # As of 2017 May, googlesource sends 4 garbage characters
            # at the beginning of the json response. Work around this.
            # https://bugs.chromium.org/p/chromium/issues/detail?id=718550
            import json

            info = json.loads(response.text[4:])
        return (info["commit"], info["committer"]["time"])

    def upstream_snapshot(self, revision):
        return "/".join(
            [self.manifest["vendoring"]["url"], "+archive", revision + ".tar.gz"]
        )
diff --git a/python/mozbuild/mozbuild/vendor/mach_commands.py b/python/mozbuild/mozbuild/vendor/mach_commands.py
new file mode 100644
index 0000000000..30fb0e16a5
--- /dev/null
+++ b/python/mozbuild/mozbuild/vendor/mach_commands.py
@@ -0,0 +1,232 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, # You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import logging
+import sys
+
+from mach.decorators import Command, CommandArgument, SubCommand
+
+from mozbuild.vendor.moz_yaml import MozYamlVerifyError, load_moz_yaml
+
+
+# Fun quirk of ./mach - you can specify a default argument as well as subcommands.
+# If the default argument matches a subcommand, the subcommand gets called. If it
+# doesn't, we wind up in the default command.
+@Command(
+ "vendor",
+ category="misc",
+ description="Vendor third-party dependencies into the source repository.",
+)
+@CommandArgument(
+ "--check-for-update",
+ action="store_true",
+ help="For scripted use, prints the new commit to update to, or nothing if up to date.",
+ default=False,
+)
+@CommandArgument(
+ "--add-to-exports",
+ action="store_true",
+ help="Will attempt to add new header files into any relevant EXPORTS block.",
+ default=False,
+)
+@CommandArgument(
+ "--ignore-modified",
+ action="store_true",
+ help="Ignore modified files in current checkout.",
+ default=False,
+)
+@CommandArgument("-r", "--revision", help="Repository tag or commit to update to.")
+@CommandArgument(
+ "-f",
+ "--force",
+ action="store_true",
+ help="Force a re-vendor even if we're up to date",
+)
+@CommandArgument(
+ "--verify", "-v", action="store_true", help="(Only) verify the manifest."
+)
+@CommandArgument(
+ "--patch-mode",
+ help="Select how vendored patches will be imported. 'none' skips patch import, and"
+ "'only' imports patches and skips library vendoring.",
+ default="",
+)
+@CommandArgument("library", nargs=1, help="The moz.yaml file of the library to vendor.")
def vendor(
    command_context,
    library,
    revision,
    ignore_modified=False,
    check_for_update=False,
    add_to_exports=False,
    force=False,
    verify=False,
    patch_mode="",
):
    """
    Vendor third-party dependencies into the source repository.

    Vendoring rust and python can be done with ./mach vendor [rust/python].
    Vendoring other libraries can be done with ./mach vendor [arguments] path/to/file.yaml
    """
    # `library` arrives as a one-element list (nargs=1); unwrap it.
    library = library[0]
    # "rust" and "python" match subcommands and are dispatched before this
    # default handler runs (see the quirk comment above the @Command).
    assert library not in ["rust", "python"]

    command_context.populate_logger()
    command_context.log_manager.enable_unstructured()
    if check_for_update:
        # Scripted mode: silence all logging so stdout carries only the
        # new-commit output (or nothing when up to date).
        logging.disable(level=logging.CRITICAL)

    try:
        manifest = load_moz_yaml(library)
        if verify:
            # --verify only validates the manifest; report and stop.
            print("%s: OK" % library)
            sys.exit(0)
    except MozYamlVerifyError as e:
        print(e)
        sys.exit(1)

    if "vendoring" not in manifest:
        raise Exception(
            "Cannot perform update actions if we don't have a 'vendoring' section in the moz.yaml"
        )

    # Validate --patch-mode against the manifest's patch configuration:
    # libraries with vendored patches require an explicit choice.
    if patch_mode and patch_mode not in ["none", "only"]:
        print(
            "Unknown patch mode given '%s'. Please use one of: 'none' or 'only'."
            % patch_mode
        )
        sys.exit(1)
    if (
        manifest["vendoring"].get("patches", [])
        and not patch_mode
        and not check_for_update
    ):
        print(
            "Patch mode was not given when required. Please use one of: 'none' or 'only'"
        )
        sys.exit(1)
    if patch_mode == "only" and not manifest["vendoring"].get("patches", []):
        print(
            "Patch import was specified for %s but there are no vendored patches defined."
            % library
        )
        sys.exit(1)

    if not ignore_modified and not check_for_update:
        check_modified_files(command_context)
    elif ignore_modified and not check_for_update:
        print(
            "Because you passed --ignore-modified we will not be "
            + "able to detect spurious upstream updates."
        )

    if not revision:
        revision = "HEAD"

    # Imported lazily so manifest/argument errors above stay fast.
    from mozbuild.vendor.vendor_manifest import VendorManifest

    vendor_command = command_context._spawn(VendorManifest)
    vendor_command.vendor(
        command_context,
        library,
        manifest,
        revision,
        ignore_modified,
        check_for_update,
        force,
        add_to_exports,
        patch_mode,
    )

    sys.exit(0)
+
+
def check_modified_files(command_context):
    """
    Ensure that there aren't any uncommitted changes to files
    in the working copy, since we're going to change some state
    on the user.
    """
    modified = command_context.repository.get_changed_files("M")
    if not modified:
        return

    command_context.log(
        logging.ERROR,
        "modified_files",
        {},
        """You have uncommitted changes to the following files:

{files}

Please commit or stash these changes before vendoring, or re-run with `--ignore-modified`.
""".format(
            files="\n".join(sorted(modified))
        ),
    )
    sys.exit(1)
+
+
+# =====================================================================
+
+
+@SubCommand(
+ "vendor",
+ "rust",
+ description="Vendor rust crates from crates.io into third_party/rust",
+)
+@CommandArgument(
+ "--ignore-modified",
+ action="store_true",
+ help="Ignore modified files in current checkout",
+ default=False,
+)
+@CommandArgument(
+ "--build-peers-said-large-imports-were-ok",
+ action="store_true",
+ help=(
+ "Permit overly-large files to be added to the repository. "
+ "To get permission to set this, raise a question in the #build "
+ "channel at https://chat.mozilla.org."
+ ),
+ default=False,
+)
+@CommandArgument(
+ "--issues-json",
+ help="Path to a code-review issues.json file to write out",
+)
def vendor_rust(command_context, **kwargs):
    # Imported lazily so merely registering the command stays cheap.
    from mozbuild.vendor.vendor_rust import VendorRust

    vendor_command = command_context._spawn(VendorRust)
    # issues_json is consumed here, not by VendorRust.vendor().
    issues_json = kwargs.pop("issues_json", None)
    ok = vendor_command.vendor(**kwargs)
    if issues_json:
        with open(issues_json, "w") as fh:
            fh.write(vendor_command.serialize_issues_json())
    # Exit status reflects whether vendoring succeeded.
    sys.exit(0 if ok else 1)
+
+
+# =====================================================================
+
+
+@SubCommand(
+ "vendor",
+ "python",
+ description="Vendor Python packages from pypi.org into third_party/python. "
+ "Some extra files like docs and tests will automatically be excluded."
+ "Installs the packages listed in third_party/python/requirements.in and "
+ "their dependencies.",
+ virtualenv_name="vendor",
+)
+@CommandArgument(
+ "--keep-extra-files",
+ action="store_true",
+ default=False,
+ help="Keep all files, including tests and documentation.",
+)
def vendor_python(command_context, keep_extra_files):
    # Imported lazily so merely registering the command stays cheap.
    from mozbuild.vendor.vendor_python import VendorPython

    vendor_command = command_context._spawn(VendorPython)
    vendor_command.vendor(keep_extra_files)
diff --git a/python/mozbuild/mozbuild/vendor/moz.build b/python/mozbuild/mozbuild/vendor/moz.build
new file mode 100644
index 0000000000..315dc32600
--- /dev/null
+++ b/python/mozbuild/mozbuild/vendor/moz.build
@@ -0,0 +1,8 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
# Route bugs for everything under this directory to the vendoring tooling's
# Bugzilla component.
with Files("**"):
    BUG_COMPONENT = ("Developer Infrastructure", "Mach Vendor & Updatebot")
diff --git a/python/mozbuild/mozbuild/vendor/moz_yaml.py b/python/mozbuild/mozbuild/vendor/moz_yaml.py
new file mode 100644
index 0000000000..51210e19b2
--- /dev/null
+++ b/python/mozbuild/mozbuild/vendor/moz_yaml.py
@@ -0,0 +1,770 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# Utility package for working with moz.yaml files.
+#
+# Requires `pyyaml` and `voluptuous`
+# (both are in-tree under third_party/python)
+
+import errno
+import os
+import re
+
+import voluptuous
+import yaml
+from voluptuous import (
+ All,
+ Boolean,
+ FqdnUrl,
+ In,
+ Invalid,
+ Length,
+ Match,
+ Msg,
+ Required,
+ Schema,
+ Unique,
+)
+from yaml.error import MarkedYAMLError
+
+# TODO ensure this matches the approved list of licenses
+# Licenses accepted by the License validator below; a manifest whose
+# origin.license is not in this list fails verification.
+VALID_LICENSES = [
+    # Standard Licenses (as per https://spdx.org/licenses/)
+    "Apache-2.0",
+    "BSD-2-Clause",
+    "BSD-3-Clause",
+    "BSD-3-Clause-Clear",
+    "BSL-1.0",
+    "CC0-1.0",
+    "ISC",
+    "ICU",
+    "LGPL-2.1",
+    "LGPL-3.0",
+    "MIT",
+    "MPL-1.1",
+    "MPL-2.0",
+    "Unlicense",
+    "WTFPL",
+    "Zlib",
+    # Unique Licenses
+    "ACE",  # http://www.cs.wustl.edu/~schmidt/ACE-copying.html
+    "Anti-Grain-Geometry",  # http://www.antigrain.com/license/index.html
+    "JPNIC",  # https://www.nic.ad.jp/ja/idn/idnkit/download/index.html
+    "Khronos",  # https://www.khronos.org/openmaxdl
+    "libpng",  # http://www.libpng.org/pub/png/src/libpng-LICENSE.txt
+    "Unicode",  # http://www.unicode.org/copyright.html
+]
+
+# Hosting services the vendoring system can fetch from; validated against
+# the vendoring.source-hosting manifest field in _schema_1().
+VALID_SOURCE_HOSTS = ["gitlab", "googlesource", "github", "angle", "codeberg"]
+
+"""
+---
+# Third-Party Library Template
+# All fields are mandatory unless otherwise noted
+
+# Version of this schema
+schema: 1
+
+bugzilla:
+ # Bugzilla product and component for this directory and subdirectories
+ product: product name
+ component: component name
+
+# Document the source of externally hosted code
+origin:
+
+ # Short name of the package/library
+ name: name of the package
+
+ description: short (one line) description
+
+ # Full URL for the package's homepage/etc
+ # Usually different from repository url
+ url: package's homepage url
+
+ # Human-readable identifier for this version/release
+ # Generally "version NNN", "tag SSS", "bookmark SSS"
+ release: identifier
+
+ # Revision to pull in
+ # Must be a long or short commit SHA (long preferred)
+ revision: sha
+
+ # The package's license, where possible using the mnemonic from
+ # https://spdx.org/licenses/
+ # Multiple licenses can be specified (as a YAML list)
+ # A "LICENSE" file must exist containing the full license text
+ license: MPL-2.0
+
+ # If the package's license is specified in a particular file,
+ # this is the name of the file.
+ # optional
+ license-file: COPYING
+
+ # If there are any mozilla-specific notes you want to put
+ # about a library, they can be put here.
+ notes: Notes about the library
+
+# Configuration for the automated vendoring system.
+# optional
+vendoring:
+
+ # Repository URL to vendor from
+ # eg. https://github.com/kinetiknz/nestegg
+ # Any repository host can be specified here, however initially we'll only
+ # support automated vendoring from selected sources.
+ url: source url (generally repository clone url)
+
+ # Type of hosting for the upstream repository
+  # Valid values are 'gitlab', 'googlesource', 'github', 'angle', 'codeberg'
+ source-hosting: gitlab
+
+ # Type of Vendoring
+ # This is either 'regular', 'individual-files', or 'rust'
+ # If omitted, will default to 'regular'
+ flavor: rust
+
+ # Type of git reference (commit, tag) to track updates from.
+ # You cannot use tag tracking with the individual-files flavor
+ # If omitted, will default to tracking commits.
+ tracking: commit
+
+ # Base directory of the location where the source files will live in-tree.
+ # If omitted, will default to the location the moz.yaml file is in.
+ vendor-directory: third_party/directory
+
+ # Allows skipping certain steps of the vendoring process.
+ # Most useful if e.g. vendoring upstream is complicated and should be done by a script
+ # The valid steps that can be skipped are listed below
+ skip-vendoring-steps:
+ - fetch
+ - keep
+ - include
+ - exclude
+ - move-contents
+ - hg-add
+ - spurious-check
+ - update-moz-yaml
+ - update-moz-build
+
+ # List of patch files to apply after vendoring. Applied in the order
+ # specified, and alphabetically if globbing is used. Patches must apply
+ # cleanly before changes are pushed.
+ # Patch files should be relative to the vendor-directory rather than the gecko
+ # root directory.
+ # All patch files are implicitly added to the keep file list.
+ # optional
+ patches:
+ - file
+ - path/to/file
+ - path/*.patch
+ - path/** # Captures all files and subdirectories below path
+ - path/* # Captures all files but _not_ subdirectories below path. Equivalent to `path/`
+
+ # List of files that are not removed from the destination directory while vendoring
+ # in a new version of the library. Intended for mozilla files not present in upstream.
+ # Implicitly contains "moz.yaml", "moz.build", and any files referenced in
+ # "patches"
+ # optional
+ keep:
+ - file
+ - path/to/file
+ - another/path
+ - *.mozilla
+
+ # Files/paths that will not be vendored from the upstream repository
+ # Implicitly contains ".git", and ".gitignore"
+ # optional
+ exclude:
+ - file
+ - path/to/file
+ - another/path
+ - docs
+ - src/*.test
+
+ # Files/paths that will always be vendored from source repository, even if
+ # they would otherwise be excluded by "exclude".
+ # optional
+ include:
+ - file
+ - path/to/file
+ - another/path
+ - docs/LICENSE.*
+
+ # Files that are modified as part of the update process.
+ # To avoid creating updates that don't update anything, ./mach vendor will detect
+ # if any in-tree files have changed. If there are files that are always changed
+ # during an update process (e.g. version numbers or source revisions), list them
+  # here to avoid having them counted as substantive changes.
+ # This field does NOT support directories or globbing
+ # optional
+ generated:
+ - '{yaml_dir}/vcs_version.h'
+
+ # If neither "exclude" or "include" are set, all files will be vendored
+ # Files/paths in "include" will always be vendored, even if excluded
+ # eg. excluding "docs/" then including "docs/LICENSE" will vendor just the
+ # LICENSE file from the docs directory
+
+ # All three file/path parameters ("keep", "exclude", and "include") support
+ # filenames, directory names, and globs/wildcards.
+
+ # Actions to take after updating. Applied in order.
+ # The action subfield is required. It must be one of:
+ # - copy-file
+ # - move-file
+ # - move-dir
+ # - replace-in-file
+ # - replace-in-file-regex
+ # - delete-path
+ # - run-script
+ # Unless otherwise noted, all subfields of action are required.
+ #
+ # If the action is copy-file, move-file, or move-dir:
+ # from is the source file
+ # to is the destination
+ #
+ # If the action is replace-in-file or replace-in-file-regex:
+  #     pattern is what in the file to search for. It is an exact string match.
+ # with is the string to replace it with. Accepts the special keyword
+ # '{revision}' for the commit we are updating to.
+ # File is the file to replace it in.
+ #
+ # If the action is delete-path
+ # path is the file or directory to recursively delete
+ #
+ # If the action is run-script:
+ # script is the script to run
+ # cwd is the directory the script should run with as its cwd
+ # args is a list of arguments to pass to the script
+ #
+ # If the action is run-command:
+ # command is the command to run
+ # Unlike run-script, `command` is _not_ processed to be relative
+ # to the vendor directory, and is passed directly to python's
+ # execution code without any path substitution or manipulation
+ # cwd is the directory the command should run with as its cwd
+ # args is a list of arguments to pass to the command
+ #
+ #
+ # Unless specified otherwise, all files/directories are relative to the
+ # vendor-directory. If the vendor-directory is different from the
+ # directory of the yaml file, the keyword '{yaml_dir}' may be used
+ # to make the path relative to that directory.
+  # 'run-script' supports the additional keyword {cwd} which, if used,
+ # must only be used at the beginning of the path.
+ #
+ # optional
+ update-actions:
+ - action: copy-file
+ from: include/vcs_version.h.in
+ to: '{yaml_dir}/vcs_version.h'
+
+ - action: replace-in-file
+ pattern: '@VCS_TAG@'
+ with: '{revision}'
+ file: '{yaml_dir}/vcs_version.h'
+
+ - action: delete-path
+ path: '{yaml_dir}/config'
+
+ - action: run-script
+ script: '{cwd}/generate_sources.sh'
+ cwd: '{yaml_dir}'
+
+
+# Configuration for automatic updating system.
+# optional
+updatebot:
+
+ # TODO: allow multiple users to be specified
+ # Phabricator username for a maintainer of the library, used for assigning
+ # reviewers. For a review group, preface with #, such as "#build""
+ maintainer-phab: tjr
+
+ # Bugzilla email address for a maintainer of the library, used for needinfos
+ maintainer-bz: tom@mozilla.com
+
+ # Optional: A preset for ./mach try to use. If present, fuzzy-query and fuzzy-paths will
+  # be ignored. If it, fuzzy-query, and fuzzy-paths are omitted, ./mach try auto will be used
+ try-preset: media
+
+ # Optional: A query string for ./mach try fuzzy. If try-preset, it and fuzzy-paths are omitted
+ # then ./mach try auto will be used
+ fuzzy-query: media
+
+ # Optional: An array of test paths for ./mach try fuzzy. If try-preset, it and fuzzy-query are
+ # omitted then ./mach try auto will be used
+ fuzzy-paths: ['media']
+
+ # The tasks that Updatebot can run. Only one of each task is currently permitted
+ # optional
+ tasks:
+ - type: commit-alert
+ branch: upstream-branch-name
+ cc: ["bugzilla@email.address", "another@example.com"]
+ needinfo: ["bugzilla@email.address", "another@example.com"]
+ enabled: True
+ filter: security
+ frequency: every
+ platform: windows
+ blocking: 1234
+ - type: vendoring
+ branch: master
+ enabled: False
+
+ # frequency can be 'every', 'release', 'N weeks', 'N commits'
+ # or 'N weeks, M commits' requiring satisfying both constraints.
+ frequency: 2 weeks
+"""
+
+# Matches a top-level section name such as "schema:" — used by the
+# simple-YAML layout check in _schema_1_additional().
+RE_SECTION = re.compile(r"^(\S[^:]*):").search
+# Matches a two-space-indented "key: value" field line.
+# NOTE(review): RE_FIELD is not referenced anywhere in this file's visible
+# code — confirm it is used elsewhere before assuming it is live.
+RE_FIELD = re.compile(r"^\s\s([^:]+):\s+(\S+)$").search
+
+
+class MozYamlVerifyError(Exception):
+    """Raised when a moz.yaml manifest is missing, unparsable, or fails
+    schema validation."""
+
+    def __init__(self, filename, error):
+        self.filename = filename  # path of the offending moz.yaml
+        self.error = error  # underlying cause (string, YAML, or voluptuous error)
+
+    def __str__(self):
+        return "%s: %s" % (self.filename, self.error)
+
+
+def load_moz_yaml(filename, verify=True, require_license_file=True):
+    """Loads and verifies the specified manifest.
+
+    Args:
+        filename: path to the moz.yaml manifest.
+        verify: when False, return the parsed YAML without any validation.
+        require_license_file: forwarded to the additional checks; when True
+            a LICENSE file must exist alongside the vendored code.
+
+    Returns:
+        The parsed (and, when verify is True, transformed) manifest dict.
+
+    Raises:
+        MozYamlVerifyError: if the file is missing, fails to parse, or
+            fails schema validation.
+    """
+
+    # Load and parse YAML.
+    try:
+        with open(filename, "r") as f:
+            # BaseLoader keeps every scalar as a string so revisions made of
+            # digits only (or with leading zeros) are not mangled; real type
+            # conversion happens later in _schema_1_transform().
+            manifest = yaml.load(f, Loader=yaml.BaseLoader)
+    except IOError as e:
+        if e.errno == errno.ENOENT:
+            raise MozYamlVerifyError(filename, "Failed to find manifest: %s" % filename)
+        raise  # any other I/O error propagates unchanged
+    except MarkedYAMLError as e:
+        raise MozYamlVerifyError(filename, e)
+
+    if not verify:
+        return manifest
+
+    # Verify schema.
+    if "schema" not in manifest:
+        raise MozYamlVerifyError(filename, 'Missing manifest "schema"')
+    if manifest["schema"] == "1":
+        schema = _schema_1()
+        schema_additional = _schema_1_additional
+        schema_transform = _schema_1_transform
+    else:
+        raise MozYamlVerifyError(filename, "Unsupported manifest schema")
+
+    try:
+        # Structural validation, then cross-field checks, then type conversion.
+        schema(manifest)
+        schema_additional(filename, manifest, require_license_file=require_license_file)
+        manifest = schema_transform(manifest)
+    except (voluptuous.Error, ValueError) as e:
+        raise MozYamlVerifyError(filename, e)
+
+    return manifest
+
+
+def _schema_1():
+    """Returns Voluptuous Schema object for schema version 1.
+
+    Mirrors the template documented in the module-level string above; keys
+    not wrapped in Required() are optional.
+    """
+    return Schema(
+        {
+            Required("schema"): "1",
+            Required("bugzilla"): {
+                Required("product"): All(str, Length(min=1)),
+                Required("component"): All(str, Length(min=1)),
+            },
+            # Describes the upstream source of the vendored code.
+            "origin": {
+                Required("name"): All(str, Length(min=1)),
+                Required("description"): All(str, Length(min=1)),
+                "notes": All(str, Length(min=1)),
+                Required("url"): FqdnUrl(),
+                Required("license"): Msg(License(), msg="Unsupported License"),
+                "license-file": All(str, Length(min=1)),
+                Required("release"): All(str, Length(min=1)),
+                # The following regex defines a valid git reference
+                # The first group [^ ~^:?*[\]] matches 0 or more times anything
+                # that isn't a Space, ~, ^, :, ?, *, or ]
+                # The second group [^ ~^:?*[\]\.]+ matches 1 or more times
+                # anything that isn't a Space, ~, ^, :, ?, *, [, ], or .
+                "revision": Match(r"^[^ ~^:?*[\]]*[^ ~^:?*[\]\.]+$"),
+            },
+            # Configuration for the automated update bot; cross-task rules are
+            # enforced by the UpdatebotTasks validator.
+            "updatebot": {
+                Required("maintainer-phab"): All(str, Length(min=1)),
+                Required("maintainer-bz"): All(str, Length(min=1)),
+                "try-preset": All(str, Length(min=1)),
+                "fuzzy-query": All(str, Length(min=1)),
+                "fuzzy-paths": All([str], Length(min=1)),
+                "tasks": All(
+                    UpdatebotTasks(),
+                    [
+                        {
+                            Required("type"): In(
+                                ["vendoring", "commit-alert"],
+                                msg="Invalid type specified in tasks",
+                            ),
+                            "branch": All(str, Length(min=1)),
+                            "enabled": Boolean(),
+                            "cc": Unique([str]),
+                            "needinfo": Unique([str]),
+                            "filter": In(
+                                ["none", "security", "source-extensions"],
+                                msg="Invalid filter value specified in tasks",
+                            ),
+                            "source-extensions": Unique([str]),
+                            "blocking": Match(r"^[0-9]+$"),
+                            "frequency": Match(
+                                r"^(every|release|[1-9][0-9]* weeks?|[1-9][0-9]* commits?|"
+                                + r"[1-9][0-9]* weeks?, ?[1-9][0-9]* commits?)$"
+                            ),
+                            "platform": Match(r"^(windows|linux)$"),
+                        }
+                    ],
+                ),
+            },
+            # Configuration for the automated vendoring system; per-action key
+            # rules are enforced by the UpdateActions validator.
+            "vendoring": {
+                Required("url"): FqdnUrl(),
+                Required("source-hosting"): All(
+                    str,
+                    Length(min=1),
+                    In(VALID_SOURCE_HOSTS, msg="Unsupported Source Hosting"),
+                ),
+                "tracking": Match(r"^(commit|tag)$"),
+                "flavor": Match(r"^(regular|rust|individual-files)$"),
+                "skip-vendoring-steps": Unique([str]),
+                "vendor-directory": All(str, Length(min=1)),
+                "patches": Unique([str]),
+                "keep": Unique([str]),
+                "exclude": Unique([str]),
+                "include": Unique([str]),
+                "generated": Unique([str]),
+                "individual-files": [
+                    {
+                        Required("upstream"): All(str, Length(min=1)),
+                        Required("destination"): All(str, Length(min=1)),
+                    }
+                ],
+                "individual-files-default-upstream": All(str, Length(min=1)),
+                "individual-files-default-destination": All(str, Length(min=1)),
+                "individual-files-list": Unique([str]),
+                "update-actions": All(
+                    UpdateActions(),
+                    [
+                        {
+                            Required("action"): In(
+                                [
+                                    "copy-file",
+                                    "move-file",
+                                    "move-dir",
+                                    "replace-in-file",
+                                    "replace-in-file-regex",
+                                    "run-script",
+                                    "run-command",
+                                    "delete-path",
+                                ],
+                                msg="Invalid action specified in update-actions",
+                            ),
+                            "from": All(str, Length(min=1)),
+                            "to": All(str, Length(min=1)),
+                            "pattern": All(str, Length(min=1)),
+                            "with": All(str, Length(min=1)),
+                            "file": All(str, Length(min=1)),
+                            "script": All(str, Length(min=1)),
+                            "command": All(str, Length(min=1)),
+                            "args": All([All(str, Length(min=1))]),
+                            "cwd": All(str, Length(min=1)),
+                            "path": All(str, Length(min=1)),
+                        }
+                    ],
+                ),
+            },
+        }
+    )
+
+
+def _schema_1_additional(filename, manifest, require_license_file=True):
+    """Additional schema/validity checks.
+
+    Enforces cross-field constraints the voluptuous schema cannot express:
+    LICENSE file presence, flavor-specific option restrictions, updatebot
+    prerequisites, and the simple-YAML layout check.
+
+    Raises ValueError on the first violation found.
+    """
+
+    vendor_directory = os.path.dirname(filename)
+    if "vendoring" in manifest and "vendor-directory" in manifest["vendoring"]:
+        vendor_directory = manifest["vendoring"]["vendor-directory"]
+
+    # LICENSE file must exist, except for Rust crates which are exempted
+    # because the license is required to be specified in the Cargo.toml file
+    if require_license_file and "origin" in manifest:
+        files = [f.lower() for f in os.listdir(vendor_directory)]
+        if (
+            not (
+                "license-file" in manifest["origin"]
+                and manifest["origin"]["license-file"].lower() in files
+            )
+            and not (
+                "license" in files
+                or "license.txt" in files
+                or "license.rst" in files
+                or "license.html" in files
+                or "license.md" in files
+            )
+            and not (
+                "vendoring" in manifest
+                and manifest["vendoring"].get("flavor", "regular") == "rust"
+            )
+        ):
+            license = manifest["origin"]["license"]
+            if isinstance(license, list):
+                license = "/".join(license)
+            raise ValueError("Failed to find %s LICENSE file" % license)
+
+    # Cannot vendor without an origin.
+    if "vendoring" in manifest and "origin" not in manifest:
+        raise ValueError('"vendoring" requires an "origin"')
+
+    # Cannot vendor without a computer-readable revision.
+    if "vendoring" in manifest and "revision" not in manifest["origin"]:
+        raise ValueError(
+            'If "vendoring" is present, "revision" must be present in "origin"'
+        )
+
+    # The Rust and Individual Flavor type precludes a lot of options
+    # individual-files could, in theory, use several of these, but until we have a use case let's
+    # disallow them so we're not worrying about whether they work. When we need them we can make
+    # sure they do.
+    if (
+        "vendoring" in manifest
+        and manifest["vendoring"].get("flavor", "regular") != "regular"
+    ):
+        for i in [
+            "skip-vendoring-steps",
+            "keep",
+            "exclude",
+            "include",
+            "generated",
+        ]:
+            if i in manifest["vendoring"]:
+                raise ValueError("A non-regular flavor of update cannot use '%s'" % i)
+
+        # Rust additionally forbids update-actions (cargo handles everything).
+        if manifest["vendoring"].get("flavor", "regular") == "rust":
+            for i in [
+                "update-actions",
+            ]:
+                if i in manifest["vendoring"]:
+                    raise ValueError("A rust flavor of update cannot use '%s'" % i)
+
+    # Ensure that only individual-files flavor uses those options
+    if (
+        "vendoring" in manifest
+        and manifest["vendoring"].get("flavor", "regular") != "individual-files"
+    ):
+        if (
+            "individual-files" in manifest["vendoring"]
+            or "individual-files-list" in manifest["vendoring"]
+        ):
+            raise ValueError(
+                "Only individual-files flavor of update can use 'individual-files'"
+            )
+
+    # Ensure that the individual-files flavor has all the correct options
+    if (
+        "vendoring" in manifest
+        and manifest["vendoring"].get("flavor", "regular") == "individual-files"
+    ):
+        # Because the only way we can determine the latest tag is by doing a local clone,
+        # we don't want to do that for individual-files flavors because those flavors are
+        # usually on gigantic repos we don't want to clone for such a simple thing.
+        if manifest["vendoring"].get("tracking", "commit") == "tag":
+            raise ValueError(
+                "You cannot use tag tracking with the individual-files flavor. (Sorry.)"
+            )
+
+        # We need either individual-files or individual-files-list
+        if (
+            "individual-files" not in manifest["vendoring"]
+            and "individual-files-list" not in manifest["vendoring"]
+        ):
+            raise ValueError(
+                "The individual-files flavor must include either "
+                + "'individual-files' or 'individual-files-list'"
+            )
+        # For whichever we have, make sure we don't have the other and we don't have
+        # options we shouldn't or lack ones we should.
+        if "individual-files" in manifest["vendoring"]:
+            if "individual-files-list" in manifest["vendoring"]:
+                raise ValueError(
+                    "individual-files-list is mutually exclusive with individual-files"
+                )
+            if "individual-files-default-upstream" in manifest["vendoring"]:
+                raise ValueError(
+                    "individual-files-default-upstream can only be used with individual-files-list"
+                )
+            if "individual-files-default-destination" in manifest["vendoring"]:
+                raise ValueError(
+                    "individual-files-default-destination can only be used "
+                    + "with individual-files-list"
+                )
+        if "individual-files-list" in manifest["vendoring"]:
+            if "individual-files" in manifest["vendoring"]:
+                raise ValueError(
+                    "individual-files is mutually exclusive with individual-files-list"
+                )
+            if "individual-files-default-upstream" not in manifest["vendoring"]:
+                raise ValueError(
+                    "individual-files-default-upstream must be used with individual-files-list"
+                )
+            if "individual-files-default-destination" not in manifest["vendoring"]:
+                raise ValueError(
+                    "individual-files-default-destination must be used with individual-files-list"
+                )
+
+    if "updatebot" in manifest:
+        # If there are Updatebot tasks, then certain fields must be present and
+        # defaults need to be set.
+        if "tasks" in manifest["updatebot"]:
+            if "vendoring" not in manifest or "url" not in manifest["vendoring"]:
+                raise ValueError(
+                    "If Updatebot tasks are specified, a vendoring url must be included."
+                )
+
+        # try-preset overrides fuzzy-query/fuzzy-paths, so specifying both
+        # would silently ignore the latter; reject the combination instead.
+        if "try-preset" in manifest["updatebot"]:
+            for f in ["fuzzy-query", "fuzzy-paths"]:
+                if f in manifest["updatebot"]:
+                    raise ValueError(
+                        "If 'try-preset' is specified, then %s cannot be" % f
+                    )
+
+    # Check for a simple YAML file
+    with open(filename, "r") as f:
+        has_schema = False
+        for line in f.readlines():
+            m = RE_SECTION(line)
+            if m:
+                if m.group(1) == "schema":
+                    has_schema = True
+                    break
+        if not has_schema:
+            raise ValueError("Not simple YAML")
+
+
+# Do type conversion for the few things that need it.
+# Everything is parsed as a string to (a) not cause problems with revisions that
+# are only numerals and (b) not strip leading zeros from the numbers if we just
+# converted them to string
+def _schema_1_transform(manifest):
+    """Convert string fields to their real types after BaseLoader parsing.
+
+    Currently only updatebot task 'enabled' flags are converted: the strings
+    "true"/"yes" (case-insensitive) become True, anything else False.
+    Returns the (mutated) manifest.
+    """
+    if "updatebot" in manifest:
+        if "tasks" in manifest["updatebot"]:
+            for i in range(len(manifest["updatebot"]["tasks"])):
+                if "enabled" in manifest["updatebot"]["tasks"][i]:
+                    val = manifest["updatebot"]["tasks"][i]["enabled"]
+                    manifest["updatebot"]["tasks"][i]["enabled"] = (
+                        val.lower() == "true" or val.lower() == "yes"
+                    )
+    return manifest
+
+
+class UpdateActions(object):
+    """Voluptuous validator which verifies the update actions(s) are valid.
+
+    Each action type allows a fixed key set; the len(v.keys()) comparisons
+    count the 'action' key itself, so e.g. copy-file permits exactly
+    {'action', 'from', 'to'} (3 keys).
+    """
+
+    def __call__(self, values):
+        for v in values:
+            if "action" not in v:
+                raise Invalid("All file-update entries must specify a valid action")
+            if v["action"] in ["copy-file", "move-file", "move-dir"]:
+                if "from" not in v or "to" not in v or len(v.keys()) != 3:
+                    raise Invalid(
+                        "%s action must (only) specify 'from' and 'to' keys"
+                        % v["action"]
+                    )
+            elif v["action"] in ["replace-in-file", "replace-in-file-regex"]:
+                if (
+                    "pattern" not in v
+                    or "with" not in v
+                    or "file" not in v
+                    or len(v.keys()) != 4
+                ):
+                    raise Invalid(
+                        "replace-in-file action must (only) specify "
+                        + "'pattern', 'with', and 'file' keys"
+                    )
+            elif v["action"] == "delete-path":
+                if "path" not in v or len(v.keys()) != 2:
+                    raise Invalid(
+                        "delete-path action must (only) specify the 'path' key"
+                    )
+            elif v["action"] == "run-script":
+                # 'args' is optional, so check key-set membership rather than count.
+                if "script" not in v or "cwd" not in v:
+                    raise Invalid(
+                        "run-script action must specify 'script' and 'cwd' keys"
+                    )
+                if set(v.keys()) - set(["args", "cwd", "script", "action"]) != set():
+                    raise Invalid(
+                        "run-script action may only specify 'script', 'cwd', and 'args' keys"
+                    )
+            elif v["action"] == "run-command":
+                if "command" not in v or "cwd" not in v:
+                    raise Invalid(
+                        "run-command action must specify 'command' and 'cwd' keys"
+                    )
+                if set(v.keys()) - set(["args", "cwd", "command", "action"]) != set():
+                    raise Invalid(
+                        "run-command action may only specify 'command', 'cwd', and 'args' keys"
+                    )
+            else:
+                # This check occurs before the validator above, so the above is
+                # redundant but we leave it to be verbose.
+                raise Invalid("Supplied action " + v["action"] + " is invalid.")
+        return values
+
+    def __repr__(self):
+        return "UpdateActions"
+
+
+class UpdatebotTasks(object):
+    """Voluptuous validator which verifies the updatebot task(s) are valid.
+
+    Enforces at most one task of each type, and restricts certain keys to
+    commit-alert tasks.
+    """
+
+    def __call__(self, values):
+        seenTaskTypes = set()
+        for v in values:
+            if "type" not in v:
+                raise Invalid("All updatebot tasks must specify a valid type")
+
+            if v["type"] in seenTaskTypes:
+                raise Invalid("Only one type of each task is currently supported")
+            seenTaskTypes.add(v["type"])
+
+            if v["type"] == "vendoring":
+                # NOTE(review): 'branch' is rejected here for vendoring tasks,
+                # yet the module-level template shows a vendoring task with
+                # "branch: master" — confirm which is intended.
+                for i in ["filter", "branch", "source-extensions"]:
+                    if i in v:
+                        raise Invalid(
+                            "'%s' is only valid for commit-alert task types" % i
+                        )
+            elif v["type"] == "commit-alert":
+                pass
+            else:
+                # This check occurs before the validator above, so the above is
+                # redundant but we leave it to be verbose.
+                raise Invalid("Supplied type " + v["type"] + " is invalid.")
+        return values
+
+    def __repr__(self):
+        return "UpdatebotTasks"
+
+
+class License(object):
+    """Voluptuous validator which verifies the license(s) are valid as per our
+    allow list (VALID_LICENSES).
+
+    Accepts either a single license string or a list of them; returns the
+    value(s) normalized to a list-compatible form unchanged.
+    """
+
+    def __call__(self, values):
+        # A single string is treated as a one-element list.
+        if isinstance(values, str):
+            values = [values]
+        elif not isinstance(values, list):
+            raise Invalid("Must be string or list")
+        for v in values:
+            if v not in VALID_LICENSES:
+                raise Invalid("Bad License")
+        return values
+
+    def __repr__(self):
+        return "License"
diff --git a/python/mozbuild/mozbuild/vendor/rewrite_mozbuild.py b/python/mozbuild/mozbuild/vendor/rewrite_mozbuild.py
new file mode 100644
index 0000000000..8163c05dc3
--- /dev/null
+++ b/python/mozbuild/mozbuild/vendor/rewrite_mozbuild.py
@@ -0,0 +1,1286 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# Utility package for working with moz.yaml files.
+#
+# Requires `pyyaml` and `voluptuous`
+# (both are in-tree under third_party/python)
+
+"""
+Problem:
+ ./mach vendor needs to be able to add or remove files from moz.build files automatically to
+ be able to effectively update a library automatically and send useful try runs in.
+
+ So far, it has been difficult to do that.
+
+ Why:
+ - Some files need to go into UNIFIED_SOURCES vs SOURCES
+ - Some files are os-specific, and need to go into per-OS conditionals
+ - Some files are both UNIFIED_SOURCES/SOURCES sensitive and OS-specific.
+
+Proposal:
+ Design an algorithm that maps a third party library file to a suspected moz.build location.
+ Run the algorithm on all files specified in all third party libraries' moz.build files.
+ See if the proposed place in the moz.build file matches the actual place.
+
+Initial Algorithm
+ Given a file, which includes the filename and the path from gecko root, we want to find the
+ correct moz.build file and location within that file.
+ Take the path of the file, and iterate up the directory tree, looking for moz.build files as
+ we go.
+ Consider each of these moz.build files, starting with the one closest to the file.
+ Within a moz.build file, identify the SOURCES or UNIFIED_SOURCES block(s) that contains a file
+ in the same directory path as the file to be added.
+ If there is only one such block, use that one.
+ If there are multiple blocks, look at the files within each block and note the longest length
+ of a common prefix (including partial filenames - if we just did full directories the
+ result would be the same as the prior step and we would not narrow the results down). Use
+ the block containing the longest prefix. (We call this 'guessing'.)
+
+Result of the proposal:
 The initial implementation works on 1675 of 1977 eligible files.
+ The files it does not work on include:
+ - general failures. Such as when we find that avutil.cpp wants to be next to adler32.cpp
+ but avutil.cpp is in SOURCES and adler32.cpp is in UNIFIED_SOURCES. (And many similar
+ cases.)
+ - per-cpu-feature files, where only a single file is added under a conditional
+ - When guessing, because of a len(...) > longest_so_far comparison, we would prefer the
+ first block we found.
+ - Changing this to prefer UNIFIED_SOURCES in the event of a tie
+ yielded 17 additional correct assignments (about a 1% improvement)
+ - As a result of the change immediately above, when guessing, because given equal
+ prefixes, we would prefer a UNIFIED_SOURCES block over other blocks, even if the other
+ blocks are longer
+ - Changing this (again) to prefer the block containing more files yielded 49 additional
+ correct assignments (about a 2.5% improvement)
+
+ The files that are ineligible for consideration are:
+ - Those in libwebrtc
+ - Those specified in source assignments composed of generators (e.g. [f for f in '%.c'])
+ - Those specified in source assignments to subscripted variables
+ (e.g. SOURCES += foo['x86_files'])
+
+ We needed to iterate up the directory and look at a different moz.build file _zero_ times.
+ This indicates this code is probably not needed, and therefore we will remove it from the
+ algorithm.
 We needed to guess based on the longest prefix 944 times, indicating that this code is
+ absolutely crucial and should be double-checked. (And indeed, upon double-checking it,
+ bugs were identified.)
+
+ After some initial testing, it was determined that this code completely fell down when the
+ vendoring directory differed from the moz.yaml directory (definitions below.) The code was
+ slightly refactored to handle this case, primarily by (a) re-inserting the logic to check
+ multiple moz.build files instead of the first and (b) handling some complicated normalization
+ notions (details in comments).
+
+Slightly Improved Algorithm Changes:
+ Don't bother iterating up the directory tree looking for moz.build files, just take the first.
+ When guessing, in the event of a common-prefix tie, prefer the block containing more files
+
+ With these changes, we now Successfully Matched 1724 of 1977 files
+
+CODE CONCEPTS
+
+source-assignment
+ An assignment of files to a SOURCES or UNIFIED_SOURCES variable, such as
+ SOURCES += ['ffpvx.cpp']
+
+ We specifically look only for these two variable names to avoid identifying things
+ such as CXX_FLAGS.
+
 Sometimes, however, there is an intermediary variable, such as `SOURCES += celt_filenames`
+ In this situation we find the celt_filenames assignment, and treat it as a 'source-assignment'
+
+source-assignment-location
+ source-assignment-location is a human readable string that identifies where in the moz.build
 file the source-assignment is. It can be used to visually match the location upon manual
+ inspection; and given a source-assignment-location, re-identify it when iterating over all
+ source-assignments in a file.
+
+ The actual string consists of the path from the root of the moz.build file to the
+ source-assignment, plus a suffix number.
+
+ We suffix the final value with an incrementing counter. This is to support moz.build files
+ that, for whatever reason, use multiple SOURCES += [] list in the same basic block. This index
+ is per-file, so no two assignments in the same file (even if they have separate locations)
+ should have the same suffix.
+
+ For example:
+
+ When `SOURCES += ['ffpvx.xpp']` appears as the first line of the file (or any other
+ unindented-location) its source-assignment-location will be `> SOURCES 1`.
+
+ When `SOURCES += ['ffpvx.xpp']` appears inside a conditional such as
+ `CONFIG['OS_TARGET'] == 'WINNT'` then its source-assignment-location will be
+ `> if CONFIG['OS_TARGET'] == 'WINNT' > SOURCES 1`
+
+ When SOURCES += ['ffpvx.xpp'] appears as the second line of the file, and a different
+ SOURCES += [] was the first line, then its source-assignment-location will be "> SOURCES 2".
+
+ No two source-assignments may have the same source-assignment-location. If they do, we raise
+ an assert.
+
+file vs filename
 a 'filename' is a string specifying the name and sometimes the path of a file.
+ a 'file' is an object you get from open()-ing a filename
+
+ A variable that is a string should always use 'filename'
+
+vendoring directory vs moz.yaml directory
+ In many cases, a library's moz.yaml file, moz.build file(s), and sources files will all live
+ under a single directory. e.g. libjpeg
+
+ In other cases, a library's source files are in one directory (we call this the 'vendoring
+ directory') and the moz.yaml file and moz.build file(s) are in another directory (we call this
+ the moz.yaml directory). e.g. libdav1d
+
+normalized-filename
+ A filename is 'normalized' if it has been expanded to the full path from the gecko root. This
+ requires a moz.build file.
+
+ For example a filename `lib/opus.c` may be specified inside the `media/libopus/moz.build`
+ file. The filename is normalized by os.path.join()-ing the dirname of the moz.build file
+ (i.e. `media/libopus`) to the filename, resulting in `media/libopus/lib/opus.c`
+
+ A filename that begins with '/' is presumed to already be specified relative to the gecko
+ root, and therefore is not modified.
+
+ Normalization gets more complicated when dealing with separate vendoring and moz.yaml
+ directories. This is because a file can be considered normalized when it looks like
+ third_party/libdav1d/src/a.cpp
+ _or_ when it looks like
+ media/libdav1d/../../third_party/libdav1d/src/a.cpp
+ This is because in the moz.build file, it will be specified as
+ `../../third_party/libdav1d/src/a.cpp` and we 'normalize' it by prepending the path to the
+ moz.build file.
+
+ Normalization is not just about having an 'absolute' path from gecko_root to file. In fact
+ it's not really about that at all - it's about matching filenames. Therefore when we are
+ dealing with separate vendoring and moz.yaml directories we will very quickly 're-normalize'
+ a normalized filename to get it into one of those foo/bar/../../third_party/... paths that
+ will make sense for the moz.build file we are interested in.
+
+ Whenever a filename is normalized, it should be specified as such in the variable name,
+ either as a prefix (normalized_filename) or a suffix (target_filename_normalized)
+
+statistic
+ Using some hacky stuff, we report statistics about how many times we hit certain branches of
+ the code.
+ e.g.
+ - "How many times did we refine a guess based on prefix length"
+ - "How many times did we refine a guess based on the number of files in the block"
+ - "What is the histogram of guess candidates"
+
+ We do this to identify how frequently certain code paths were taken, allowing us to identify
 strange behavior and investigate outliers. This process led to identifying bugs and small
+ improvements.
+"""
+
+import ast
+import copy
+import os
+import re
+import shutil
+import subprocess
+import sys
+from pprint import pprint
+
# Prefer mozbuild's case-insensitive sort (the order the moz.build validator
# expects); fall back to an equivalent local definition when this module is
# used outside a mozbuild environment.
try:
    from mozbuild.frontend.sandbox import alphabetical_sorted
except Exception:

    def alphabetical_sorted(iterable, key=lambda x: x.lower(), reverse=False):
        # Intended to mirror mozbuild.frontend.sandbox.alphabetical_sorted:
        # sorted() with a case-insensitive key by default.
        return sorted(iterable, key=key, reverse=reverse)
+
+
# This can be edited to enable better Python 3.8 behavior, but is set so that
# everything is consistent by default so errors can be detected more easily.
FORCE_DOWNGRADE_BEHAVIOR = True

# Module-wide counters recording how often various code paths were taken (see
# the "statistic" section of the module docstring). The dict-valued entries
# appear to be histograms populated elsewhere in this file -- confirm against
# the reporting code before relying on their exact shape.
statistics = {
    "guess_candidates": {},
    "number_refinements": {},
    "needed_to_guess": 0,
    "length_logic": {},
}
+
+
def log(*args, **kwargs):
    """Debug logging hook, disabled by default.

    It is helpful to keep some logging statements around; re-enable them by
    uncommenting the print below.
    """
    # print(*args, **kwargs)
    pass
+
+
+##############################################
+
+import inspect
+
+
def node_to_name(code, node):
    """Return a short, human-readable name for an AST node.

    On Python 3.8+ (when downgrade behavior is disabled) this is the exact
    source text of the node; otherwise it degrades to the node's class name.

    code: the text of the moz.build file the node came from
    node: the ast node to name
    """
    # Compare the version as a tuple: the original element-wise test
    # (major >= 3 and minor >= 8) would wrongly fail on a hypothetical
    # later major version with a small minor component (e.g. 4.0).
    if not FORCE_DOWNGRADE_BEHAVIOR and sys.version_info >= (3, 8):
        return ast.get_source_segment(code, node)

    return node.__class__.__name__
+
+
def get_attribute_label(node):
    """Return the dotted-name label for an ast.Attribute node.

    Walks down the attribute chain, e.g. the node for `EXPORTS.mozilla.dom`
    yields "EXPORTS.mozilla.dom". The chain may terminate in a plain name or
    in a subscripted name (e.g. `files['x'].y` -> "files.y"; the index
    expression is dropped).

    node: an ast.Attribute node

    Raises:
        Exception: if the chain does not terminate in a Name or a Subscript
            of a Name.
    """
    assert isinstance(node, ast.Attribute)

    label = ""
    subtarget = node
    # Walk from the outermost attribute inward, accumulating `attr` names.
    while isinstance(subtarget, ast.Attribute):
        label = subtarget.attr + ("." if label else "") + label
        subtarget = subtarget.value

    if isinstance(subtarget, ast.Name):
        label = subtarget.id + "." + label
    elif isinstance(subtarget, ast.Subscript) and isinstance(subtarget.value, ast.Name):
        label = subtarget.value.id + "." + label
    else:
        # Fixed typo in the original message ("Unxpected").
        raise Exception(
            "Unexpected subtarget of type %s found in get_attribute_label. label=%s"
            % (subtarget, label)
        )

    return label
+
+
def ast_get_source_segment(code, node):
    """Guarded replacement for ast.get_source_segment (see monkey-patch below).

    Delegates to the real implementation only for sphinx callers or when 3.8+
    behavior is explicitly enabled; returns "" for the one caller that can
    tolerate a degraded result; raises otherwise so accidental uses are caught.
    """
    # Inspect the call stack to special-case known callers.
    caller = inspect.stack()[1]

    # Allow doc generation (anything called from a sphinx file) to use the
    # real implementation, and likewise when downgrade behavior is disabled
    # on a sufficiently new interpreter.
    if "sphinx" in caller.filename or (
        not FORCE_DOWNGRADE_BEHAVIOR
        and sys.version_info[0] >= 3
        and sys.version_info[1] >= 8
    ):
        # original_get_source_segment is stashed by the monkey-patch at import
        # time, so it only exists on Python 3.8+.
        return ast.original_get_source_segment(code, node)

    # That caller only uses the segment for log() output, so an empty string
    # is an acceptable degraded result.
    if caller.function == "assignment_node_to_source_filename_list":
        return ""

    raise Exception(
        "ast_get_source_segment is not available with this Python version. (ver=%s.%s, caller=%s)"
        % (sys.version_info.major, sys.version_info.minor, caller.function)
    )
+
+
# Overwrite it so we don't accidentally use it.
# ast.get_source_segment only exists on Python 3.8+; stash the real
# implementation as `original_get_source_segment` (used by the guarded
# wrapper above) and route all direct calls through ast_get_source_segment.
if sys.version_info[0] >= 3 and sys.version_info[1] >= 8:
    ast.original_get_source_segment = ast.get_source_segment
    ast.get_source_segment = ast_get_source_segment
+
+
+##############################################
+
+
def node_to_readable_file_location(code, node, child_node=None):
    """Build the human-readable "source-assignment-location" path from the
    module root down to *node*, e.g. "> if CONFIG['OS_TARGET'] == 'WINNT' > SOURCES".

    code: the text of the moz.build file
    node: the ast node to describe
    child_node: the node we recursed up from (needed to distinguish the
        if-body from the else-body of an ast.If)
    """
    prefix = ""
    if not isinstance(node.parent, ast.Module):
        # Recurse upward first so ancestors appear leftmost in the string.
        # (node.parent is populated by mozbuild_file_to_source_assignments.)
        prefix = node_to_readable_file_location(code, node.parent, node)

    if isinstance(node, ast.Module):
        raise Exception("We shouldn't see a Module")
    elif isinstance(node, ast.If):
        assert child_node
        keyword = "if" if child_node in node.body else "else-of-if"
        segment = keyword + " " + node_to_name(code, node.test)
    elif isinstance(node, ast.For):
        segment = (
            "for "
            + node_to_name(code, node.target)
            + " in "
            + node_to_name(code, node.iter)
        )
    elif isinstance(node, ast.AugAssign):
        target = node.target
        segment = target.id if isinstance(target, ast.Name) else node_to_name(code, target)
    elif isinstance(node, ast.Assign):
        # This assert would fire if we did e.g. some_sources = all_sources = [ ... ]
        assert len(node.targets) == 1, "Assignment node contains more than one target"
        target = node.targets[0]
        segment = target.id if isinstance(target, ast.Name) else node_to_name(code, target)
    else:
        raise Exception("Got a node type I don't know how to handle: " + str(node))

    return prefix + " > " + segment
+
+
def assignment_node_to_source_filename_list(code, node):
    """
    Return the list of filenames assigned by an ast.Assign / ast.AugAssign node.

    code: the text of the moz.build file (used only for logging)
    node: the assignment node whose right-hand side is inspected

    If the list of filenames is not a list of constants (e.g. it's a generated list)
    it's (probably) infeasible to try and figure it out. At least we're not going to try
    right now. Maybe in the future?

    If this happens, we'll return an empty list. The consequence of this is that we
    won't be able to match a file against this list, so we may not be able to add it.

    (But if the file matches a generated list, perhaps it will be included in the
    Sources list automatically?)
    """
    if isinstance(node.value, ast.List) and "elts" in node.value._fields:
        # A literal list: usable only if every element is a string constant.
        for f in node.value.elts:
            # ast.Str is the pre-3.8 spelling of a string constant; both are
            # accepted for interpreter compatibility.
            if not isinstance(f, ast.Constant) and not isinstance(f, ast.Str):
                log(
                    "Found non-constant source file name in list: ",
                    ast_get_source_segment(code, f),
                )
                return []
        return [
            f.value if isinstance(f, ast.Constant) else f.s for f in node.value.elts
        ]
    elif isinstance(node.value, ast.ListComp):
        # SOURCES += [f for f in foo if blah]
        log("Could not find the files for " + ast_get_source_segment(code, node.value))
    elif isinstance(node.value, ast.Name) or isinstance(node.value, ast.Subscript):
        # SOURCES += other_var
        # SOURCES += files['X64_SOURCES']
        log("Could not find the files for " + ast_get_source_segment(code, node))
    elif isinstance(node.value, ast.Call):
        # SOURCES += sorted(...)
        log("Could not find the files for " + ast_get_source_segment(code, node))
    else:
        raise Exception(
            "Unexpected node received in assignment_node_to_source_filename_list: "
            + str(node)
        )
    # All non-literal shapes fall through to the "cannot determine" result.
    return []
+
+
def mozbuild_file_to_source_assignments(normalized_mozbuild_filename, assignment_type):
    """
    Returns a dictionary of 'source-assignment-location' -> 'normalized source filename list'
    contained in the moz.build file specified

    normalized_mozbuild_filename: the moz.build file to read
    assignment_type: "source-files" to collect SOURCES / UNIFIED_SOURCES
        assignments; any other value collects EXPORTS assignments

    Returns a tuple of (source_assignments dict, parsed ast root, file text).
    """
    source_assignments = {}

    if assignment_type == "source-files":
        targets = ["SOURCES", "UNIFIED_SOURCES"]
    else:
        targets = ["EXPORTS"]

    # Parse the AST of the moz.build file
    # NOTE(review): the file handle is never explicitly closed; relies on GC.
    code = open(normalized_mozbuild_filename).read()
    root = ast.parse(code)

    # Populate node parents. This allows us to walk up from a node to the root.
    # (Really I think python's ast class should do this, but it doesn't, so we monkey-patch it)
    for node in ast.walk(root):
        for child in ast.iter_child_nodes(node):
            child.parent = node

    # Find all the assignments of SOURCES or UNIFIED_SOURCES
    if assignment_type == "source-files":
        source_assignment_nodes = [
            node
            for node in ast.walk(root)
            if isinstance(node, ast.AugAssign)
            and isinstance(node.target, ast.Name)
            and node.target.id in targets
        ]
        # Only `+=` is supported; a plain `=` to SOURCES would be a bug here.
        assert (
            len([n for n in source_assignment_nodes if not isinstance(n.op, ast.Add)])
            == 0
        ), "We got a Source assignment that wasn't +="

        # Recurse and find nodes where we do SOURCES += other_var or SOURCES += FILES['foo']
        recursive_assignment_nodes = [
            node
            for node in source_assignment_nodes
            if isinstance(node.value, ast.Name) or isinstance(node.value, ast.Subscript)
        ]

        recursive_assignment_nodes_names = [
            node.value.id
            for node in recursive_assignment_nodes
            if isinstance(node.value, ast.Name)
        ]

        # TODO: We do not dig into subscript variables. These are currently only used by two
        # libraries that use external sources.mozbuild files.
        # recursive_assignment_nodes_names.extend([something<node> for node in
        # recursive_assignment_nodes if isinstance(node.value, ast.Subscript)]

        # Find the `other_var = [...]` assignments the recursive nodes refer to.
        additional_assignment_nodes = [
            node
            for node in ast.walk(root)
            if isinstance(node, ast.Assign)
            and isinstance(node.targets[0], ast.Name)
            and node.targets[0].id in recursive_assignment_nodes_names
        ]

        # Remove the original, useless assignment node (the SOURCES += other_var)
        for node in recursive_assignment_nodes:
            source_assignment_nodes.remove(node)
        # Add the other_var += [''] source-assignment
        source_assignment_nodes.extend(additional_assignment_nodes)
    else:
        # EXPORTS may be assigned directly (EXPORTS += [...]) or via an
        # attribute chain (EXPORTS.mozilla.dom += [...]), with either += or
        # plain =, so collect all of those shapes.
        source_assignment_nodes = [
            node
            for node in ast.walk(root)
            if isinstance(node, ast.AugAssign)
            and (
                (isinstance(node.target, ast.Name) and node.target.id == "EXPORTS")
                or (
                    isinstance(node.target, ast.Attribute)
                    and get_attribute_label(node.target).startswith("EXPORTS")
                )
            )
        ]
        source_assignment_nodes.extend(
            [
                node
                for node in ast.walk(root)
                if isinstance(node, ast.Assign)
                and (
                    (
                        isinstance(node.targets[0], ast.Name)
                        and node.targets[0].id == "EXPORTS"
                    )
                    or (
                        isinstance(node.targets[0], ast.Attribute)
                        and get_attribute_label(node.targets[0]).startswith("EXPORTS")
                    )
                )
            ]
        )

    # Get the source-assignment-location for the node:
    assignment_index = 1
    for a in source_assignment_nodes:
        source_assignment_location = (
            node_to_readable_file_location(code, a) + " " + str(assignment_index)
        )
        source_filename_list = assignment_node_to_source_filename_list(code, a)

        if not source_filename_list:
            # In some cases (like generated source file lists) we will have an empty list.
            # If that is the case, just omit the source assignment
            continue

        normalized_source_filename_list = [
            normalize_filename(normalized_mozbuild_filename, f)
            for f in source_filename_list
        ]

        # On a suffix collision, retry with the bare (un-suffixed) location
        # before asserting uniqueness below.
        if source_assignment_location in source_assignments:
            source_assignment_location = node_to_readable_file_location(code, a)

        assert (
            source_assignment_location not in source_assignments
        ), "In %s, two assignments have the same key ('%s')" % (
            normalized_mozbuild_filename,
            source_assignment_location,
        )
        source_assignments[source_assignment_location] = normalized_source_filename_list
        assignment_index += 1

    return (source_assignments, root, code)
+
+
def unnormalize_filename(normalized_mozbuild_filename, normalized_filename):
    """Convert a gecko-root-normalized filename back to a path relative to the
    moz.build file's directory.

    normalized_mozbuild_filename: the moz.build file the path should be
        expressed relative to
    normalized_filename: a filename normalized to the gecko root; a filename
        beginning with '/' is returned untouched (it is deliberately
        root-relative, mirroring normalize_filename)
    """
    if normalized_filename[0] == "/":
        return normalized_filename

    mozbuild_path = (
        os.path.dirname(normalized_mozbuild_filename).replace(os.path.sep, "/") + "/"
    )
    # Only strip the moz.build directory when it is a true prefix. (The
    # previous str.replace would also have removed the directory had it
    # appeared in the middle of the path.)
    if normalized_filename.startswith(mozbuild_path):
        return normalized_filename[len(mozbuild_path):]
    return normalized_filename
+
+
def normalize_filename(normalized_mozbuild_filename, filename):
    """Normalize *filename* to a path from the gecko root by prepending the
    directory of the moz.build file that referenced it.

    A filename beginning with '/' is treated as already gecko-root-relative
    and returned unchanged. Separators are always forward slashes.
    """
    if filename[0] == "/":
        return filename

    mozbuild_dir = os.path.dirname(normalized_mozbuild_filename).replace(
        os.path.sep, "/"
    )
    joined = os.path.join(mozbuild_dir, filename)
    return joined.replace(os.path.sep, "/")
+
+
def get_mozbuild_file_search_order(
    normalized_filename,
    moz_yaml_dir=None,
    vendoring_dir=None,
    all_mozbuild_filenames_normalized=None,
):
    """
    Returns an ordered list of normalized moz.build filenames to consider for a given filename

    normalized_filename: a source filename normalized to the gecko root

    moz_yaml_dir: the path from gecko_root to the moz.yaml file (which is the root of the
        moz.build files)

    vendoring_dir: the path to where the library's source files are

    all_mozbuild_filenames_normalized: (optional) the list of all third-party moz.build files
        If all_mozbuild_filenames_normalized is not specified, we look in the filesystem.

    The list is built out of two distinct steps.

    In Step 1 we will walk up a directory tree, looking for moz.build files. We append moz.build
    files in this order, preferring the lowest moz.build we find, then moving on to one in a
    higher directory.
    The directory we start in is a little complicated. We take the series of subdirectories
    between vendoring_dir and the file in question, and then append them to the moz.yaml
    directory.

    Example:

    .. code-block:: python

        When moz_yaml directory != vendoring_directory:
            moz_yaml_dir = foo/bar/
            vendoring_dir = third_party/baz/
            normalized_filename = third_party/baz/asm/arm/a.S
            starting_directory: foo/bar/asm/arm/
        When moz_yaml directory == vendoring_directory
            (In this case, these variables will actually be 'None' but the algorithm is the same)
            moz_yaml_dir = foo/bar/
            vendoring_dir = foo/bar/
            normalized_filename = foo/bar/asm/arm/a.S
            starting_directory: foo/bar/asm/arm/

    In Step 2 we get a bit desperate. When the vendoring directory and the moz_yaml directory are
    not the same, there is no guarantee that the moz_yaml directory will adhere to the same
    directory structure as the vendoring directory. And indeed it doesn't in some cases
    (e.g. libdav1d.)
    So in this situation we start at the root of the moz_yaml directory and walk downwards, adding
    _any_ moz.build file we encounter to the list. Later on (in all cases, not just
    moz_yaml_dir != vendoring_dir) we only consider a moz.build file if it has source files whose
    directory matches the normalized_filename, so this step, though desperate, is safe-ish and
    believe it or not has worked for some file additions.
    """
    ordered_list = []

    if all_mozbuild_filenames_normalized is None:
        # Without a precomputed list, Step 1 consults the filesystem, so we
        # must be running from the gecko root for relative paths to resolve.
        assert os.path.isfile(
            ".arcconfig"
        ), "We do not seem to be running from the gecko root"

    # The first time around, this variable name is incorrect.
    # It's actually the full path+filename, not a directory.
    test_directory = None
    if (moz_yaml_dir, vendoring_dir) == (None, None):
        # In this situation, the library is vendored into the same directory as
        # the moz.build files. We can start traversing directories up from the file to
        # add to find the correct moz.build file
        test_directory = normalized_filename
    elif moz_yaml_dir and vendoring_dir:
        # In this situation, the library is vendored in a different place (typically
        # third_party/foo) from the moz.build files.
        subdirectory_path = normalized_filename.replace(vendoring_dir, "")
        test_directory = os.path.join(moz_yaml_dir, subdirectory_path)
    else:
        raise Exception("If moz_yaml_dir or vendoring_dir are specified, both must be")

    # Step 1
    while (
        len(os.path.dirname(test_directory).replace(os.path.sep, "/")) > 1
    ):  # While we are not at '/'
        containing_directory = os.path.dirname(test_directory)

        possible_normalized_mozbuild_filename = os.path.join(
            containing_directory, "moz.build"
        )

        # NOTE(review): this is a truthiness test, so an *empty* candidate
        # list also falls back to the filesystem check -- confirm intended.
        if not all_mozbuild_filenames_normalized:
            if os.path.isfile(possible_normalized_mozbuild_filename):
                ordered_list.append(possible_normalized_mozbuild_filename)
        elif possible_normalized_mozbuild_filename in all_mozbuild_filenames_normalized:
            ordered_list.append(possible_normalized_mozbuild_filename)

        test_directory = containing_directory

    # Step 2
    if moz_yaml_dir:
        for root, dirs, files in os.walk(moz_yaml_dir):
            for f in files:
                if f == "moz.build":
                    ordered_list.append(os.path.join(root, f))

    return ordered_list
+
+
def get_closest_mozbuild_file(
    normalized_filename,
    moz_yaml_dir=None,
    vendoring_dir=None,
    all_mozbuild_filenames_normalized=None,
):
    """
    Returns the closest moz.build file in the directory tree to a normalized
    filename, or None when the search yields no candidates.
    """
    candidates = get_mozbuild_file_search_order(
        normalized_filename,
        moz_yaml_dir,
        vendoring_dir,
        all_mozbuild_filenames_normalized,
    )
    if not candidates:
        return None
    return candidates[0]
+
+
def filenames_directory_is_in_filename_list(
    filename_normalized, list_of_normalized_filenames
):
    """
    Given a normalized filename and a list of normalized filenames, first turn them into a
    containing directory, and a list of containing directories. Then test if the containing
    directory of the filename is in the list.

    ex:
        f = filenames_directory_is_in_filename_list
        f("foo/bar/a.c", ["foo/b.c"]) -> false
        f("foo/bar/a.c", ["foo/b.c", "foo/bar/c.c"]) -> true
        f("foo/bar/a.c", ["foo/b.c", "foo/bar/baz/d.c"]) -> false
    """
    directories = {
        os.path.dirname(f).replace(os.path.sep, "/")
        for f in list_of_normalized_filenames
    }
    target_directory = os.path.dirname(filename_normalized).replace(os.path.sep, "/")
    return target_directory in directories
+
+
def find_all_posible_assignments_from_filename(source_assignments, filename_normalized):
    """
    Given a list of source assignments and a normalized filename, narrow the list to assignments
    that contain a file whose directory matches the filename's directory.

    (The 'posible' spelling is kept: this is the public name callers use.)
    """
    return {
        location: filenames
        for location, filenames in source_assignments.items()
        if filenames
        and filenames_directory_is_in_filename_list(filename_normalized, filenames)
    }
+
+
def guess_best_assignment(source_assignments, filename_normalized):
    """
    Given several assignments, all of which contain the same directory as the
    filename, pick one we think is best and return its source-assignment-location.

    We do this by looking at the filename itself (not just its directory) and
    picking the assignment which contains a filename with the longest matching
    prefix. Ties are broken in favor of the assignment containing more files.

    e.g: "foo/asm_neon.c" compared to ["foo/main.c", "foo/all_utility.c"],
         ["foo/asm_arm.c"]
         -> ["foo/asm_arm.c"] (match of `foo/asm_`)

    Returns a tuple:
        (best source-assignment-location, or None if nothing matched at all,
         (number-of-refinements, number-of-length-tiebreaks) statistics)
    """
    length_of_longest_match = 0
    source_assignment_location_of_longest_match = None
    statistic_number_refinements = 0
    statistic_length_logic = 0

    for key, list_of_normalized_filenames in source_assignments.items():
        for f in list_of_normalized_filenames:
            if filename_normalized == f:
                # Do not cheat by matching the prefix of the exact file
                continue

            prefix = os.path.commonprefix([filename_normalized, f])
            if len(prefix) > length_of_longest_match:
                statistic_number_refinements += 1
                length_of_longest_match = len(prefix)
                source_assignment_location_of_longest_match = key
            elif (
                len(prefix) == length_of_longest_match
                # Guard: before any positive match, the best location is None;
                # a zero-length prefix ties the initial value of 0 and the
                # unguarded lookup raised KeyError(None).
                and source_assignment_location_of_longest_match is not None
                and len(source_assignments[key])
                > len(source_assignments[source_assignment_location_of_longest_match])
            ):
                statistic_number_refinements += 1
                statistic_length_logic += 1
                length_of_longest_match = len(prefix)
                source_assignment_location_of_longest_match = key
    return (
        source_assignment_location_of_longest_match,
        (statistic_number_refinements, statistic_length_logic),
    )
+
+
def edit_moz_build_file_to_add_file(
    normalized_mozbuild_filename,
    unnormalized_filename_to_add,
    unnormalized_list_of_files,
):
    """
    This function edits the moz.build file in-place, inserting
    unnormalized_filename_to_add into the source list that currently contains
    unnormalized_list_of_files.

    normalized_mozbuild_filename: the moz.build file to edit
    unnormalized_filename_to_add: the filename to add, relative to the
        moz.build file's directory
    unnormalized_list_of_files: the files already present in the target source
        list (relative to the moz.build file); mutated by appending the new one

    I had _really_ hoped to replace this whole damn thing with something that adds a
    node to the AST, dumps the AST out, and then runs black on the file but there are
    some issues:
      - third party moz.build files (or maybe all moz.build files) aren't always
        run through black
      - dumping the ast out loses comments
    """

    # Make sure that we only write in forward slashes
    if "\\" in unnormalized_filename_to_add:
        unnormalized_filename_to_add = unnormalized_filename_to_add.replace("\\", "/")

    # add the file into the list, and then sort it in the same way the moz.build
    # validator expects
    unnormalized_list_of_files.append(unnormalized_filename_to_add)
    unnormalized_list_of_files = alphabetical_sorted(unnormalized_list_of_files)

    # we're going to add our file by doing a find/replace of an adjacent file in
    # the list
    # (A stray no-op `indx_of_addition` expression statement was removed here.)
    indx_of_addition = unnormalized_list_of_files.index(unnormalized_filename_to_add)
    if indx_of_addition == 0:
        # Our file sorts first: splice it in before the (old) first entry.
        target_indx = 1
        replace_before = False
    else:
        # Splice it in after the entry that sorts immediately before it.
        target_indx = indx_of_addition - 1
        replace_before = True

    find_str = unnormalized_list_of_files[target_indx]

    # We will only perform the first replacement. This is because sometimes there's
    # moz.build code like:
    #   SOURCES += ['file.cpp']
    #   SOURCES['file.cpp'].flags += ['-Winline']
    # If we replaced every time we found the target, we would be inserting into
    # that second line.
    did_replace = False

    with open(normalized_mozbuild_filename, mode="r") as file:
        with open(normalized_mozbuild_filename + ".new", mode="wb") as output:
            for line in file:
                if not did_replace and find_str in line:
                    did_replace = True

                    # Okay, we found the line we need to edit, now we need to be ugly about it
                    # Grab the type of quote used in this moz.build file: single or double
                    quote_type = line[line.index(find_str) - 1]

                    if "[" not in line:
                        # We'll want to put our new file onto its own line
                        newline_to_add = "\n"
                        # And copy the indentation of the line we're adding adjacent to
                        indent_value = line[0 : line.index(quote_type)]
                    else:
                        # This is frustrating, we have the start of the array here. We aren't
                        # going to be able to indent things onto a newline properly. We're just
                        # going to have to stick it in on the same line.
                        newline_to_add = ""
                        indent_value = ""

                    find_str = "%s%s%s" % (quote_type, find_str, quote_type)
                    if replace_before:
                        replacement_tuple = (
                            find_str,
                            newline_to_add,
                            indent_value,
                            quote_type,
                            unnormalized_filename_to_add,
                            quote_type,
                        )
                        replace_str = "%s,%s%s%s%s%s" % replacement_tuple
                    else:
                        replacement_tuple = (
                            quote_type,
                            unnormalized_filename_to_add,
                            quote_type,
                            newline_to_add,
                            indent_value,
                            find_str,
                        )
                        replace_str = "%s%s%s,%s%s%s" % replacement_tuple

                    line = line.replace(find_str, replace_str)

                output.write((line.rstrip() + "\n").encode("utf-8"))

    shutil.move(normalized_mozbuild_filename + ".new", normalized_mozbuild_filename)
+
+
def edit_moz_build_file_to_remove_file(
    normalized_mozbuild_filename, unnormalized_filename_to_remove
):
    """
    This function edits the moz.build file in-place, removing the given file
    from the first source list that mentions it.

    normalized_mozbuild_filename: the moz.build file to edit
    unnormalized_filename_to_remove: the filename as it appears in the
        moz.build file (relative to the moz.build file's directory)
    """
    # Escape the filename before embedding it in a regex: it contains '.'
    # (and may contain '+' or other metacharacters) that must match literally.
    escaped_filename = re.escape(unnormalized_filename_to_remove)

    # Matches a line consisting solely of the quoted filename and an optional
    # trailing comma.
    simple_file_line = re.compile(
        r"^\s*['\"]" + escaped_filename + r"['\"],*$"
    )
    did_replace = False

    with open(normalized_mozbuild_filename, mode="r") as file:
        with open(normalized_mozbuild_filename + ".new", mode="wb") as output:
            for line in file:
                # Only process the first mention; later lines may merely
                # reference the file (e.g. SOURCES['file.cpp'].flags += ...).
                if not did_replace and unnormalized_filename_to_remove in line:
                    did_replace = True

                    # If the line consists of just a single source file on it, then we're in the
                    # clear - we can just skip this line.
                    if simple_file_line.match(line):
                        # Do not output anything, just keep going.
                        continue

                    # Okay, so the line is a little more complicated.
                    quote_type = line[line.index(unnormalized_filename_to_remove) - 1]

                    if "[" in line or "]" in line:
                        find_str = "%s%s%s,*" % (
                            quote_type,
                            escaped_filename,
                            quote_type,
                        )
                        line = re.sub(find_str, "", line)
                    else:
                        raise Exception(
                            "Got an unusual type of line we're trying to remove a file from:",
                            line,
                        )

                output.write((line.rstrip() + "\n").encode("utf-8"))

    shutil.move(normalized_mozbuild_filename + ".new", normalized_mozbuild_filename)
+
+
def validate_directory_parameters(moz_yaml_dir, vendoring_dir):
    """Validate and canonicalize the optional directory pair.

    Both directories must be supplied together or both omitted; otherwise an
    AssertionError is raised. When supplied, each is returned with a trailing
    slash guaranteed.
    """
    both_supplied = bool(moz_yaml_dir and vendoring_dir)
    assert (
        (moz_yaml_dir, vendoring_dir) == (None, None) or both_supplied
    ), "If either moz_yaml_dir or vendoring_dir are specified, they both must be"

    if both_supplied:
        if not moz_yaml_dir.endswith("/"):
            moz_yaml_dir += "/"
        if not vendoring_dir.endswith("/"):
            vendoring_dir += "/"

    return (moz_yaml_dir, vendoring_dir)
+
+
# File-reference modes reported by get_file_reference_modes:
HAS_ABSOLUTE = 1  # a file referenced by an absolute path from the gecko root
HAS_TRAVERSE_CHILD = 2  # a file referenced via a "../" traversal
HAS_RELATIVE_CHILD = 2  # behaves the same as above, so it shares the value


def get_file_reference_modes(source_assignments):
    """
    Given a set of source assignments, this function traverses through the
    files references in those assignments to see if the files are referenced
    using absolute paths (relative to gecko root) or relative paths.

    It will return all the modes that are seen.
    """
    modes = set()

    for list_of_normalized_filenames in source_assignments.values():
        if not list_of_normalized_filenames:
            continue
        for filename in list_of_normalized_filenames:
            if filename[0] == "/":
                modes.add(HAS_ABSOLUTE)
            # The slice must cover all three characters of "../": the original
            # [0:2] slice could never equal "../" (a latent bug, observable
            # only because HAS_TRAVERSE_CHILD == HAS_RELATIVE_CHILD).
            elif filename[0:3] == "../":
                modes.add(HAS_TRAVERSE_CHILD)
            else:
                modes.add(HAS_RELATIVE_CHILD)
    return modes
+
+
def renormalize_filename(
    mode,
    moz_yaml_dir,
    vendoring_dir,
    normalized_mozbuild_filename,
    normalized_filename_to_act_on,
):
    """
    Rewrite normalized_filename_to_act_on into the reference style `mode`:
    - HAS_ABSOLUTE: make it an absolute path from the gecko root
    - otherwise, when moz.yaml and the vendored code live in different
      directories, express the file relative to the moz.build file's
      directory and then re-root it at the gecko root
    """
    if mode == HAS_ABSOLUTE:
        # Absolute-from-gecko-root references only need a leading slash.
        return "/" + normalized_filename_to_act_on

    if moz_yaml_dir and vendoring_dir:
        # (a) directory of the moz.build file, relative to the gecko root
        mozbuild_dir = os.path.dirname(normalized_mozbuild_filename).replace(
            os.path.sep, "/"
        )
        # (b) path from that directory to the file we want
        rel_to_mozbuild = os.path.relpath(
            normalized_filename_to_act_on, start=mozbuild_dir
        ).replace(os.path.sep, "/")
        # (c) (b) started at the moz.build file's directory, so re-root it
        #     at (a) to normalize it to the gecko root again
        return os.path.join(mozbuild_dir, rel_to_mozbuild).replace(os.path.sep, "/")

    return normalized_filename_to_act_on
+
+
+#########################################################
+# PUBLIC API
+#########################################################
+
+
class MozBuildRewriteException(Exception):
    """Raised when a file could not be added to / removed from any moz.build file."""

    pass
+
+
def remove_file_from_moz_build_file(
    normalized_filename_to_remove, moz_yaml_dir=None, vendoring_dir=None
):
    """
    Given a filename, relative to the gecko root (aka normalized), we look for the nearest
    moz.build file, look in that file for the file, and then edit that moz.build file in-place.

    moz_yaml_dir / vendoring_dir are optional; if one is given both must be
    (see validate_directory_parameters).

    Raises MozBuildRewriteException if the file could not be found and removed
    from any candidate moz.build file.
    """
    moz_yaml_dir, vendoring_dir = validate_directory_parameters(
        moz_yaml_dir, vendoring_dir
    )

    all_possible_normalized_mozbuild_filenames = get_mozbuild_file_search_order(
        normalized_filename_to_remove, moz_yaml_dir, vendoring_dir, None
    )

    # normalized_filename_to_remove is the path from gecko_root to the file. However, if we vendor
    # separate from moz.yaml; then 'normalization' gets more complicated as explained above.
    # We will need to re-normalize the filename for each moz.build file we want to test, so we
    # save the original normalized filename for this purpose
    original_normalized_filename_to_remove = normalized_filename_to_remove

    # These are the two header file types specified in vendor_manifest.py > source_suffixes
    if normalized_filename_to_remove.endswith(
        ".h"
    ) or normalized_filename_to_remove.endswith(".hpp"):
        assignment_type = "header-files"
    else:
        assignment_type = "source-files"

    for normalized_mozbuild_filename in all_possible_normalized_mozbuild_filenames:
        source_assignments, root, code = mozbuild_file_to_source_assignments(
            normalized_mozbuild_filename, assignment_type
        )

        modes = get_file_reference_modes(source_assignments)

        # Try every reference style (absolute/relative) seen in this moz.build.
        for mode in modes:
            normalized_filename_to_remove = renormalize_filename(
                mode,
                moz_yaml_dir,
                vendoring_dir,
                normalized_mozbuild_filename,
                normalized_filename_to_remove,
            )

            for key in source_assignments:
                normalized_source_filename_list = source_assignments[key]
                if normalized_filename_to_remove in normalized_source_filename_list:
                    # Found it: convert back to the spelling used inside this
                    # moz.build file and edit that file in-place.
                    unnormalized_filename_to_remove = unnormalize_filename(
                        normalized_mozbuild_filename, normalized_filename_to_remove
                    )
                    edit_moz_build_file_to_remove_file(
                        normalized_mozbuild_filename, unnormalized_filename_to_remove
                    )
                    return

        # Reset the (re-normalized) filename before trying the next candidate.
        normalized_filename_to_remove = original_normalized_filename_to_remove
    raise MozBuildRewriteException("Could not remove " + normalized_filename_to_remove)
+
+
def add_file_to_moz_build_file(
    normalized_filename_to_add, moz_yaml_dir=None, vendoring_dir=None
):
    """
    This is the overall function. Given a filename, relative to the gecko root (aka normalized),
    we look for a moz.build file to add it to, look for the place in the moz.build file to add it,
    and then edit that moz.build file in-place.

    It accepted two optional parameters. If one is specified they both must be. If a library is
    vendored in a separate place from the moz.yaml file, these parameters specify those two
    directories.

    Raises MozBuildRewriteException if no moz.build file could be edited.
    """
    moz_yaml_dir, vendoring_dir = validate_directory_parameters(
        moz_yaml_dir, vendoring_dir
    )

    all_possible_normalized_mozbuild_filenames = get_mozbuild_file_search_order(
        normalized_filename_to_add, moz_yaml_dir, vendoring_dir, None
    )

    # normalized_filename_to_add is the path from gecko_root to the file. However, if we vendor
    # separate from moz.yaml; then 'normalization' gets more complicated as explained above.
    # We will need to re-normalize the filename for each moz.build file we want to test, so we
    # save the original normalized filename for this purpose
    original_normalized_filename_to_add = normalized_filename_to_add

    # Headers and sources live in different moz.build assignments.
    if normalized_filename_to_add.endswith(".h") or normalized_filename_to_add.endswith(
        ".hpp"
    ):
        assignment_type = "header-files"
    else:
        assignment_type = "source-files"

    for normalized_mozbuild_filename in all_possible_normalized_mozbuild_filenames:
        source_assignments, root, code = mozbuild_file_to_source_assignments(
            normalized_mozbuild_filename, assignment_type
        )

        modes = get_file_reference_modes(source_assignments)

        # Try every reference style (absolute/relative) seen in this moz.build.
        for mode in modes:
            normalized_filename_to_add = renormalize_filename(
                mode,
                moz_yaml_dir,
                vendoring_dir,
                normalized_mozbuild_filename,
                normalized_filename_to_add,
            )

            possible_assignments = find_all_posible_assignments_from_filename(
                source_assignments, normalized_filename_to_add
            )

            if len(possible_assignments) == 0:
                # No candidate assignment here; restore the filename and try
                # the next mode / moz.build file.
                normalized_filename_to_add = original_normalized_filename_to_add
                continue

            # NOTE(review): this assert is unreachable — the == 0 case was
            # handled by the `continue` directly above.
            assert (
                len(possible_assignments) > 0
            ), "Could not find a single possible source assignment"
            if len(possible_assignments) > 1:
                # Ambiguous: let the heuristic pick the best assignment.
                best_guess, _ = guess_best_assignment(
                    possible_assignments, normalized_filename_to_add
                )
                chosen_source_assignment_location = best_guess
            else:
                chosen_source_assignment_location = list(possible_assignments.keys())[0]

            guessed_list_containing_normalized_filenames = possible_assignments[
                chosen_source_assignment_location
            ]

            # unnormalize filenames so we can edit the moz.build file. They rarely use full paths.
            unnormalized_filename_to_add = unnormalize_filename(
                normalized_mozbuild_filename, normalized_filename_to_add
            )
            unnormalized_list_of_files = [
                unnormalize_filename(normalized_mozbuild_filename, f)
                for f in guessed_list_containing_normalized_filenames
            ]

            edit_moz_build_file_to_add_file(
                normalized_mozbuild_filename,
                unnormalized_filename_to_add,
                unnormalized_list_of_files,
            )
            return

    raise MozBuildRewriteException(
        "Could not find a single moz.build file to add " + normalized_filename_to_add
    )
+
+
+#########################################################
+# TESTING CODE
+#########################################################
+
+
def get_all_target_filenames_normalized(all_mozbuild_filenames_normalized):
    """
    Given a list of moz.build files, return every file referenced by any
    source assignment in any of those files.

    This function is only used for debug/testing purposes - there is no reason
    to call this as part of 'the algorithm'.
    """
    collected = []
    for mozbuild_filename in all_mozbuild_filenames_normalized:
        assignments, _root, _code = mozbuild_file_to_source_assignments(
            mozbuild_filename
        )
        # Flatten every assignment's file list into one result list.
        for filenames in assignments.values():
            collected.extend(filenames)

    return collected
+
+
def try_to_match_target_file(
    all_mozbuild_filenames_normalized, target_filename_normalized
):
    """
    Runs 'the algorithm' on a target file, and returns if the algorithm was successful

    all_mozbuild_filenames_normalized: the list of all third-party moz.build files
    target_filename_normalized - the target filename, normalized to the gecko root

    Returns (True, None) on success, or (False, <reason or wrong guess>) on failure.
    """

    # We do not update the statistics for failed matches, so save a copy
    global statistics
    backup_statistics = copy.deepcopy(statistics)

    if "" == target_filename_normalized:
        raise Exception("Received an empty target_filename_normalized")

    normalized_mozbuild_filename = get_closest_mozbuild_file(
        target_filename_normalized, None, None, all_mozbuild_filenames_normalized
    )
    if not normalized_mozbuild_filename:
        return (False, "No moz.build file found")

    source_assignments, root, code = mozbuild_file_to_source_assignments(
        normalized_mozbuild_filename
    )
    possible_assignments = find_all_posible_assignments_from_filename(
        source_assignments, target_filename_normalized
    )

    if len(possible_assignments) == 0:
        raise Exception("No possible assignments were found")
    elif len(possible_assignments) > 1:
        # Ambiguous: guess, and record bookkeeping about how hard the guess was.
        (
            best_guess,
            (statistic_number_refinements, statistic_length_logic),
        ) = guess_best_assignment(possible_assignments, target_filename_normalized)
        chosen_source_assignment_location = best_guess

        statistics["needed_to_guess"] += 1

        # Histogram over the number of candidate assignments.
        if len(possible_assignments) not in statistics["guess_candidates"]:
            statistics["guess_candidates"][len(possible_assignments)] = 0
        statistics["guess_candidates"][len(possible_assignments)] += 1

        # Histogram over the refinement count reported by guess_best_assignment.
        if statistic_number_refinements not in statistics["number_refinements"]:
            statistics["number_refinements"][statistic_number_refinements] = 0
        statistics["number_refinements"][statistic_number_refinements] += 1

        # Histogram over the 'length logic' statistic reported by guess_best_assignment.
        if statistic_length_logic not in statistics["length_logic"]:
            statistics["length_logic"][statistic_length_logic] = 0
        statistics["length_logic"][statistic_length_logic] += 1

    else:
        chosen_source_assignment_location = list(possible_assignments.keys())[0]

    guessed_list_containing_normalized_filenames = possible_assignments[
        chosen_source_assignment_location
    ]

    # Success: the chosen assignment really does contain the target file.
    if target_filename_normalized in guessed_list_containing_normalized_filenames:
        return (True, None)

    # Restore the copy of the statistics so we don't alter it for failed matches
    statistics = backup_statistics
    return (False, chosen_source_assignment_location)
+
+
def get_gecko_root():
    """
    Using __file__ as a base, walk upward to find the gecko root, identified
    by the presence of a .arcconfig file.

    Exits the process (status 1) if no root is found.
    """
    directory_to_check = os.path.dirname(os.path.abspath(__file__))
    while not os.path.isfile(os.path.join(directory_to_check, ".arcconfig")):
        parent = os.path.dirname(directory_to_check)
        if parent == directory_to_check:
            # We reached a filesystem root without finding .arcconfig.
            # Comparing against the parent (rather than the literal "/")
            # also terminates on Windows drive roots, where the original
            # check could never fire and the loop would spin forever.
            print("Could not find gecko root")
            sys.exit(1)
        directory_to_check = parent

    return directory_to_check
+
+
def get_all_mozbuild_filenames(gecko_root):
    """
    Find all the third party moz.build files in the gecko repo
    """
    # Use a context manager so the handle is closed (the original leaked it).
    with open(
        os.path.join(gecko_root, "tools", "rewriting", "ThirdPartyPaths.txt")
    ) as paths_file:
        third_party_paths = paths_file.readlines()

    all_mozbuild_filenames_normalized = []
    for path in third_party_paths:
        # We need shell=True because some paths are specified as globs
        # We need an exception handler because sometimes the directory doesn't exist and find barfs
        try:
            output = subprocess.check_output(
                "find %s -name moz.build" % os.path.join(gecko_root, path.strip()),
                shell=True,
            ).decode("utf-8")
            for f in output.split("\n"):
                # Strip the gecko root (and its trailing slash) to normalize.
                f = f.replace("//", "/").strip().replace(gecko_root, "")[1:]
                if f:
                    all_mozbuild_filenames_normalized.append(f)
        except Exception:
            # Deliberate best-effort: skip paths where `find` fails.
            pass

    return all_mozbuild_filenames_normalized
+
+
def test_all_third_party_files(gecko_root, all_mozbuild_filenames_normalized):
    """
    Run the algorithm on every source file in a third party moz.build file and output the results
    """
    # webrtc's moz.build files are excluded from the sweep.
    all_mozbuild_filenames_normalized = [
        f for f in all_mozbuild_filenames_normalized if "webrtc" not in f
    ]
    all_target_filenames_normalized = get_all_target_filenames_normalized(
        all_mozbuild_filenames_normalized
    )

    total_attempted = 0
    failed_matched = []  # list of (target filename, wrong guess / reason)
    successfully_matched = 0

    print("Going to try to match %i files..." % len(all_target_filenames_normalized))
    for target_filename_normalized in all_target_filenames_normalized:
        result, wrong_guess = try_to_match_target_file(
            all_mozbuild_filenames_normalized, target_filename_normalized
        )

        total_attempted += 1
        if result:
            successfully_matched += 1
        else:
            failed_matched.append((target_filename_normalized, wrong_guess))
        # Progress heartbeat every 100 files.
        if total_attempted % 100 == 0:
            print("Progress:", total_attempted)

    print(
        "Successfully Matched %i of %i files" % (successfully_matched, total_attempted)
    )
    if failed_matched:
        print("Failed files:")
        for f in failed_matched:
            print("\t", f[0], f[1])
    print("Statistics:")
    pprint(statistics)
+
+
if __name__ == "__main__":
    # Ad-hoc manual test driver: exercise the rewriting machinery against one
    # known vendored header. The commented-out lines below run the full sweep
    # over every third-party moz.build file instead.
    gecko_root = get_gecko_root()
    os.chdir(gecko_root)

    add_file_to_moz_build_file(
        "third_party/jpeg-xl/lib/include/jxl/resizable_parallel_runner.h",
        "media/libjxl",
        "third_party/jpeg-xl",
    )

    # all_mozbuild_filenames_normalized = get_all_mozbuild_filenames(gecko_root)
    # test_all_third_party_files(gecko_root, all_mozbuild_filenames_normalized)
diff --git a/python/mozbuild/mozbuild/vendor/test_vendor_changes.sh b/python/mozbuild/mozbuild/vendor/test_vendor_changes.sh
new file mode 100755
index 0000000000..3d0e390f7f
--- /dev/null
+++ b/python/mozbuild/mozbuild/vendor/test_vendor_changes.sh
@@ -0,0 +1,65 @@
#!/bin/bash
# Smoke-test `mach vendor`: re-vendor every Updatebot-enabled (Linux,
# vendoring-type) library at its currently pinned revision, then revert,
# so any vendoring failure surfaces. Requires hg and yq.

# CLOBBER only exists at the repo root; refuse to run anywhere else.
if [[ ! -f "CLOBBER" ]]; then
  echo "Script should be run from mozilla-central root"
  exit 1
fi

# Destructive: `hg revert` + `hg purge` run after every library below.
echo "THIS SCRIPT WILL REVERT AND PURGE UNCOMMIT LOCAL CHANGES"
echo "TYPE ok TO CONTINUE"
read CONFIRMATION
if [[ $CONFIRMATION != "ok" ]]; then
  echo "Did not get 'ok', exiting"
  exit 0
fi

ALL_MOZ_YAML_FILES=$(find . -name moz.yaml)

# Filter to moz.yaml files with an enabled Linux 'vendoring' Updatebot task;
# the embedded python prints the filename only when it qualifies.
for f in $ALL_MOZ_YAML_FILES; do
  IFS='' read -r -d '' INPUT <<"EOF"
import sys
import yaml
enabled = False
with open(sys.argv[1]) as yaml_in:
    o = yaml.safe_load(yaml_in)
    if "updatebot" in o:
        if 'tasks' in o["updatebot"]:
            for t in o["updatebot"]["tasks"]:
                if t["type"] == "vendoring":
                    if t.get("enabled", True) and t.get("platform", "Linux").lower() == "linux":
                        enabled = True
if enabled:
    print(sys.argv[1])
EOF

  FILE=$(python3 -c "$INPUT" $f)

  if [[ ! -z $FILE ]]; then
    UPDATEBOT_YAML_FILES+=("$FILE")
  fi
done


# Re-vendor each qualifying library at its already-pinned revision. When the
# manifest carries patches, vendoring and patching must run as two passes.
for FILE in "${UPDATEBOT_YAML_FILES[@]}"; do
  REVISION=$(yq eval ".origin.revision" $FILE)
  HAS_PATCHES=$(yq eval ".vendoring.patches | (. != null)" $FILE)

  echo "$FILE - $REVISION"
  if [[ $HAS_PATCHES == "false" ]]; then
    ./mach vendor $FILE --force --revision $REVISION
    if [[ $? == 1 ]]; then
      exit 1
    fi
  else
    ./mach vendor $FILE --force --revision $REVISION --patch-mode=none
    if [[ $? == 1 ]]; then
      exit 1
    fi
    ./mach vendor $FILE --force --revision $REVISION --patch-mode=only --ignore-modified
    if [[ $? == 1 ]]; then
      exit 1
    fi
  fi
  # Throw away the vendored changes before moving to the next library.
  hg revert .
  hg purge
done
diff --git a/python/mozbuild/mozbuild/vendor/vendor_manifest.py b/python/mozbuild/mozbuild/vendor/vendor_manifest.py
new file mode 100644
index 0000000000..9de2c23e95
--- /dev/null
+++ b/python/mozbuild/mozbuild/vendor/vendor_manifest.py
@@ -0,0 +1,789 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, # You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import functools
+import glob
+import logging
+import os
+import re
+import shutil
+import stat
+import sys
+import tarfile
+import tempfile
+from collections import defaultdict
+
+import mozfile
+import mozpack.path as mozpath
+import requests
+
+from mozbuild.base import MozbuildObject
+from mozbuild.vendor.rewrite_mozbuild import (
+ MozBuildRewriteException,
+ add_file_to_moz_build_file,
+ remove_file_from_moz_build_file,
+)
+
# Patterns always excluded when importing an upstream snapshot (VCS metadata).
DEFAULT_EXCLUDE_FILES = [".git*", ".git*/**"]
# In-tree files always retained across a re-vendor.
DEFAULT_KEEP_FILES = ["**/moz.build", "**/moz.yaml"]
# Upstream files additionally retained by default (currently none).
DEFAULT_INCLUDE_FILES = []
+
+
def throwe():
    """Unconditionally raise a bare Exception.

    Used as a poison-pill callable (e.g. for _extract_directory before a
    fetch has provided a real value).
    """
    raise Exception()
+
+
+def _replace_in_file(file, pattern, replacement, regex=False):
+ with open(file) as f:
+ contents = f.read()
+
+ if regex:
+ newcontents = re.sub(pattern, replacement, contents)
+ else:
+ newcontents = contents.replace(pattern, replacement)
+
+ if newcontents == contents:
+ raise Exception(
+ "Could not find '%s' in %s to %sreplace with '%s'"
+ % (pattern, file, "regex-" if regex else "", replacement)
+ )
+
+ with open(file, "w") as f:
+ f.write(newcontents)
+
+
def list_of_paths_to_readable_string(paths):
    """
    Summarize a list of paths as a compact, human-readable "[...]" string,
    grouping files by their parent directory and eliding long groups.
    """
    # Group files under their parent directory; directories themselves become
    # entries with an empty file list. From https://stackoverflow.com/a/41578071
    grouped = defaultdict(list)
    for entry in paths:
        if os.path.isdir(entry):  # To check path is a directory
            _ = grouped[entry]  # will set default value as empty list
        else:
            parent, leaf = os.path.split(entry)
            grouped[parent].append(leaf)

    pieces = []
    for directory, files in grouped.items():
        count = len(files)
        if count == 0:
            # A directory entry with no files: print it bare.
            pieces.append(directory)
        elif count < 3:
            # Few files: list each full path.
            pieces.extend(os.path.join(directory, name) for name in files)
        elif count < 10:
            # Moderate group: count plus an "a, b and c" style listing.
            pieces.append(
                "%s items in %s: %s and %s"
                % (count, directory, ", ".join(files[0:-1]), files[-1])
            )
        else:
            # Large group: just the count.
            pieces.append("%s (omitted) items in %s" % (count, directory))

    return "[" + ", ".join(pieces) + "]"
+
+
+class VendorManifest(MozbuildObject):
+ def should_perform_step(self, step):
+ return step not in self.manifest["vendoring"].get("skip-vendoring-steps", [])
+
    def vendor(
        self,
        command_context,
        yaml_file,
        manifest,
        revision,
        ignore_modified,
        check_for_update,
        force,
        add_to_exports,
        patch_mode,
    ):
        """
        Entry point: vendor (or check for an update to) the library described
        by `manifest`, the parsed moz.yaml found at `yaml_file`.

        revision: upstream ref to vendor ("tip" forces the latest HEAD commit)
        ignore_modified: skip the local-modification safety handling
        check_for_update: only print the newest revision, do not vendor
        force: vendor even when the in-tree revision already matches upstream
        add_to_exports: passed through to moz.build updating (regular flavor)
        patch_mode: "only" applies in-tree patches and returns immediately
        """
        self.manifest = manifest
        self.yaml_file = yaml_file
        # Poison-pill: expanding {tmpextractdir} before fetch_and_unpack has
        # replaced this raises via throwe().
        self._extract_directory = throwe
        self.logInfo = functools.partial(self.log, logging.INFO, "vendor")
        if "vendor-directory" not in self.manifest["vendoring"]:
            # Default the vendor directory to the moz.yaml file's directory.
            self.manifest["vendoring"]["vendor-directory"] = os.path.dirname(
                self.yaml_file
            )

        # ==========================================================
        # If we're only patching; do that
        if "patches" in self.manifest["vendoring"] and patch_mode == "only":
            self.import_local_patches(
                self.manifest["vendoring"]["patches"],
                os.path.dirname(self.yaml_file),
                self.manifest["vendoring"]["vendor-directory"],
            )
            return

        # ==========================================================
        self.source_host = self.get_source_host()

        ref_type = self.manifest["vendoring"].get("tracking", "commit")
        flavor = self.manifest["vendoring"].get("flavor", "regular")
        # Individual files are special

        if revision == "tip":
            # This case allows us to force-update a tag-tracking library to master
            new_revision, timestamp = self.source_host.upstream_commit("HEAD")
        elif ref_type == "tag":
            new_revision, timestamp = self.source_host.upstream_tag(revision)
        else:
            new_revision, timestamp = self.source_host.upstream_commit(revision)

        self.logInfo(
            {"ref_type": ref_type, "ref": new_revision, "timestamp": timestamp},
            "Latest {ref_type} is {ref} from {timestamp}",
        )

        # ==========================================================
        if not force and self.manifest["origin"]["revision"] == new_revision:
            # We're up to date, don't do anything
            self.logInfo({}, "Latest upstream matches in-tree.")
            return
        elif flavor != "individual-file" and check_for_update:
            # Only print the new revision to stdout
            # NOTE(review): the flavor value used by the dispatch below is
            # "individual-files" (plural); this singular comparison is
            # therefore always true — confirm whether that is intended.
            print("%s %s" % (new_revision, timestamp))
            return

        # ==========================================================
        # Dispatch on the vendoring flavor.
        if flavor == "regular":
            self.process_regular(
                new_revision, timestamp, ignore_modified, add_to_exports
            )
        elif flavor == "individual-files":
            self.process_individual(new_revision, timestamp, ignore_modified)
        elif flavor == "rust":
            self.process_rust(
                command_context,
                self.manifest["origin"]["revision"],
                new_revision,
                timestamp,
                ignore_modified,
            )
        else:
            raise Exception("Unknown flavor")
+
    def process_rust(
        self, command_context, old_revision, new_revision, timestamp, ignore_modified
    ):
        """
        Vendor the 'rust' flavor: bump the pinned revision in the adjacent
        Cargo.toml, re-run `mach vendor rust`, then update moz.yaml.

        NOTE(review): ignore_modified is accepted but unused here; the nested
        VendorRust call is always made with ignore_modified=True.
        """
        # First update the Cargo.toml
        cargo_file = os.path.join(os.path.dirname(self.yaml_file), "Cargo.toml")
        try:
            _replace_in_file(cargo_file, old_revision, new_revision)
        except Exception:
            # If we can't find it the first time, try again with a short hash
            _replace_in_file(cargo_file, old_revision[:8], new_revision)

        # Then call ./mach vendor rust
        from mozbuild.vendor.vendor_rust import VendorRust

        vendor_command = command_context._spawn(VendorRust)
        vendor_command.vendor(
            ignore_modified=True, build_peers_said_large_imports_were_ok=False
        )

        self.update_yaml(new_revision, timestamp)
+
+ def process_individual(self, new_revision, timestamp, ignore_modified):
+ # This design is used because there is no github API to query
+ # for the last commit that modified a file; nor a way to get file
+ # blame. So really all we can do is just download and replace the
+ # files and see if they changed...
+
+ def download_and_write_file(url, destination):
+ self.logInfo(
+ {"local_file": destination, "url": url},
+ "Downloading {local_file} from {url}...",
+ )
+
+ with mozfile.NamedTemporaryFile() as tmpfile:
+ try:
+ req = requests.get(url, stream=True)
+ for data in req.iter_content(4096):
+ tmpfile.write(data)
+ tmpfile.seek(0)
+
+ shutil.copy2(tmpfile.name, destination)
+ except Exception as e:
+ raise (e)
+
+ # Only one of these loops will have content, so just do them both
+ for f in self.manifest["vendoring"].get("individual-files", []):
+ url = self.source_host.upstream_path_to_file(new_revision, f["upstream"])
+ destination = self.get_full_path(f["destination"])
+ download_and_write_file(url, destination)
+
+ for f in self.manifest["vendoring"].get("individual-files-list", []):
+ url = self.source_host.upstream_path_to_file(
+ new_revision,
+ self.manifest["vendoring"]["individual-files-default-upstream"] + f,
+ )
+ destination = self.get_full_path(
+ self.manifest["vendoring"]["individual-files-default-destination"] + f
+ )
+ download_and_write_file(url, destination)
+
+ self.spurious_check(new_revision, ignore_modified)
+
+ self.logInfo({}, "Checking for update actions")
+ self.update_files(new_revision)
+
+ self.update_yaml(new_revision, timestamp)
+
+ self.logInfo({"rev": new_revision}, "Updated to '{rev}'.")
+
+ if "patches" in self.manifest["vendoring"]:
+ # Remind the user
+ self.log(
+ logging.CRITICAL,
+ "vendor",
+ {},
+ "Patches present in manifest!!! Please run "
+ "'./mach vendor --patch-mode only' after commiting changes.",
+ )
+
    def process_regular(self, new_revision, timestamp, ignore_modified, add_to_exports):
        """
        Vendor the default ('regular') flavor: fetch and unpack the upstream
        snapshot, apply update actions, register changes with version control,
        check for a spurious update, and refresh moz.yaml / moz.build files.

        Each step can be disabled via vendoring.skip-vendoring-steps in the
        manifest (see should_perform_step).
        """

        if self.should_perform_step("fetch"):
            self.fetch_and_unpack(new_revision)
        else:
            self.logInfo({}, "Skipping fetching upstream source.")

        self.logInfo({}, "Checking for update actions")
        self.update_files(new_revision)

        if self.should_perform_step("hg-add"):
            self.logInfo({}, "Registering changes with version control.")
            self.repository.add_remove_files(
                self.manifest["vendoring"]["vendor-directory"],
                os.path.dirname(self.yaml_file),
            )
        else:
            self.logInfo({}, "Skipping registering changes.")

        if self.should_perform_step("spurious-check"):
            self.logInfo({}, "Checking for a spurious update.")
            self.spurious_check(new_revision, ignore_modified)
        else:
            self.logInfo({}, "Skipping the spurious update check.")

        if self.should_perform_step("update-moz-yaml"):
            self.logInfo({}, "Updating moz.yaml.")
            self.update_yaml(new_revision, timestamp)
        else:
            self.logInfo({}, "Skipping updating the moz.yaml file.")

        if self.should_perform_step("update-moz-build"):
            self.logInfo({}, "Updating moz.build files")
            self.update_moz_build(
                self.manifest["vendoring"]["vendor-directory"],
                os.path.dirname(self.yaml_file),
                add_to_exports,
            )
        else:
            self.logInfo({}, "Skipping update of moz.build files")

        self.logInfo({"rev": new_revision}, "Updated to '{rev}'.")

        if "patches" in self.manifest["vendoring"]:
            # Remind the user
            self.log(
                logging.CRITICAL,
                "vendor",
                {},
                "Patches present in manifest!!! Please run "
                "'./mach vendor --patch-mode only' after commiting changes.",
            )
+
    def get_source_host(self):
        """
        Instantiate the host wrapper matching vendoring.source-hosting.

        Imports are deliberately lazy so only the selected host module loads.
        Raises Exception for an unrecognized source-hosting value.
        """
        if self.manifest["vendoring"]["source-hosting"] == "gitlab":
            from mozbuild.vendor.host_gitlab import GitLabHost

            return GitLabHost(self.manifest)
        elif self.manifest["vendoring"]["source-hosting"] == "github":
            from mozbuild.vendor.host_github import GitHubHost

            return GitHubHost(self.manifest)
        elif self.manifest["vendoring"]["source-hosting"] == "googlesource":
            from mozbuild.vendor.host_googlesource import GoogleSourceHost

            return GoogleSourceHost(self.manifest)
        elif self.manifest["vendoring"]["source-hosting"] == "angle":
            from mozbuild.vendor.host_angle import AngleHost

            return AngleHost(self.manifest)
        elif self.manifest["vendoring"]["source-hosting"] == "codeberg":
            from mozbuild.vendor.host_codeberg import CodebergHost

            return CodebergHost(self.manifest)
        else:
            raise Exception(
                "Unknown source host: " + self.manifest["vendoring"]["source-hosting"]
            )
+
+ def get_full_path(self, path, support_cwd=False):
+ if support_cwd and path[0:5] == "{cwd}":
+ path = path.replace("{cwd}", ".")
+ elif "{tmpextractdir}" in path:
+ # _extract_directory() will throw an exception if it is invalid to use it
+ path = path.replace("{tmpextractdir}", self._extract_directory())
+ elif "{yaml_dir}" in path:
+ path = path.replace("{yaml_dir}", os.path.dirname(self.yaml_file))
+ elif "{vendor_dir}" in path:
+ path = path.replace(
+ "{vendor_dir}", self.manifest["vendoring"]["vendor-directory"]
+ )
+ else:
+ path = mozpath.join(self.manifest["vendoring"]["vendor-directory"], path)
+ return os.path.abspath(path)
+
+ def convert_patterns_to_paths(self, directory, patterns):
+ # glob.iglob uses shell-style wildcards for path name completion.
+ # "recursive=True" enables the double asterisk "**" wildcard which matches
+ # for nested directories as well as the directory we're searching in.
+ paths = []
+ for pattern in patterns:
+ pattern_full_path = mozpath.join(directory, pattern)
+ # If pattern is a directory recursively add contents of directory
+ if os.path.isdir(pattern_full_path):
+ # Append double asterisk to the end to make glob.iglob recursively match
+ # contents of directory
+ paths.extend(
+ glob.iglob(mozpath.join(pattern_full_path, "**"), recursive=True)
+ )
+ # Otherwise pattern is a file or wildcard expression so add it without altering it
+ else:
+ paths.extend(glob.iglob(pattern_full_path, recursive=True))
+ # Remove folder names from list of paths in order to avoid prematurely
+ # truncating directories elsewhere
+ # Sort the final list to ensure we preserve 01_, 02_ ordering for e.g. *.patch globs
+ final_paths = sorted(
+ [mozpath.normsep(path) for path in paths if not os.path.isdir(path)]
+ )
+ return final_paths
+
    def fetch_and_unpack(self, revision):
        """Fetch and unpack upstream source.

        Downloads the snapshot archive for `revision`, cleans the vendor
        directory (honoring 'keep'), safely extracts the tar, applies the
        'include'/'exclude' filters, prunes empty directories, and copies the
        result into the tree. On failure the temporary extraction directory
        is cleaned up and the exception re-raised.
        """

        def validate_tar_member(member, path):
            # Harden extraction: reject members that would escape `path` or
            # that carry setuid/setgid permission bits.
            def is_within_directory(directory, target):
                real_directory = os.path.realpath(directory)
                real_target = os.path.realpath(target)
                prefix = os.path.commonprefix([real_directory, real_target])
                return prefix == real_directory

            member_path = os.path.join(path, member.name)
            if not is_within_directory(path, member_path):
                raise Exception("Attempted path traversal in tar file: " + member.name)
            if member.issym():
                link_path = os.path.join(os.path.dirname(member_path), member.linkname)
                if not is_within_directory(path, link_path):
                    raise Exception(
                        "Attempted link path traversal in tar file: " + member.name
                    )
            if member.mode & (stat.S_ISUID | stat.S_ISGID):
                raise Exception(
                    "Attempted setuid or setgid in tar file: " + member.name
                )

        def safe_extract(tar, path=".", *, numeric_owner=False):
            # extractall() with every member validated before extraction.
            def _files(tar, path):
                for member in tar:
                    validate_tar_member(member, path)
                    yield member

            tar.extractall(path, members=_files(tar, path), numeric_owner=numeric_owner)

        url = self.source_host.upstream_snapshot(revision)
        self.logInfo({"url": url}, "Fetching code archive from {url}")

        with mozfile.NamedTemporaryFile() as tmptarfile:
            tmpextractdir = tempfile.TemporaryDirectory()
            try:
                # Download the snapshot archive in 4k chunks.
                req = requests.get(url, stream=True)
                for data in req.iter_content(4096):
                    tmptarfile.write(data)
                tmptarfile.seek(0)

                vendor_dir = mozpath.normsep(
                    self.manifest["vendoring"]["vendor-directory"]
                )
                if self.should_perform_step("keep"):
                    self.logInfo({}, "Retaining wanted in-tree files.")
                    # moz.build/moz.yaml and in-tree patches always survive.
                    to_keep = self.convert_patterns_to_paths(
                        vendor_dir,
                        self.manifest["vendoring"].get("keep", [])
                        + DEFAULT_KEEP_FILES
                        + self.manifest["vendoring"].get("patches", []),
                    )
                else:
                    self.logInfo({}, "Skipping retention of in-tree files.")
                    to_keep = []

                self.logInfo({"vd": vendor_dir}, "Cleaning {vd} to import changes.")
                # We use double asterisk wildcard here to get complete list of recursive contents
                for file in self.convert_patterns_to_paths(vendor_dir, ["**"]):
                    file = mozpath.normsep(file)
                    if file not in to_keep:
                        mozfile.remove(file)

                self.logInfo({"vd": vendor_dir}, "Unpacking upstream files for {vd}.")
                with tarfile.open(tmptarfile.name) as tar:

                    safe_extract(tar, tmpextractdir.name)

                    def get_first_dir(p):
                        # Topmost path component of p.
                        halves = os.path.split(p)
                        return get_first_dir(halves[0]) if halves[0] else halves[1]

                    one_prefix = get_first_dir(tar.getnames()[0])
                    has_prefix = all(
                        map(lambda name: name.startswith(one_prefix), tar.getnames())
                    )

                    # GitLab puts everything down a directory; move it up.
                    if has_prefix:
                        tardir = mozpath.join(tmpextractdir.name, one_prefix)
                        mozfile.copy_contents(tardir, tmpextractdir.name)
                        mozfile.remove(tardir)

                if self.should_perform_step("include"):
                    self.logInfo({}, "Retaining wanted files from upstream changes.")
                    to_include = self.convert_patterns_to_paths(
                        tmpextractdir.name,
                        self.manifest["vendoring"].get("include", [])
                        + DEFAULT_INCLUDE_FILES,
                    )
                else:
                    self.logInfo({}, "Skipping retention of included files.")
                    to_include = []

                if self.should_perform_step("exclude"):
                    self.logInfo({}, "Removing excluded files from upstream changes.")
                    to_exclude = self.convert_patterns_to_paths(
                        tmpextractdir.name,
                        self.manifest["vendoring"].get("exclude", [])
                        + DEFAULT_EXCLUDE_FILES,
                    )
                else:
                    self.logInfo({}, "Skipping removing excluded files.")
                    to_exclude = []

                # 'include' wins over 'exclude'.
                to_exclude = list(set(to_exclude) - set(to_include))
                if to_exclude:
                    self.logInfo(
                        {"files": list_of_paths_to_readable_string(to_exclude)},
                        "Removing: {files}",
                    )
                    for exclusion in to_exclude:
                        mozfile.remove(exclusion)

                # Clear out empty directories
                # removeEmpty() won't remove directories containing only empty directories
                # so just keep calling it as long as it's doing something
                def removeEmpty(tmpextractdir):
                    removed = False
                    folders = list(os.walk(tmpextractdir))[1:]
                    for folder in folders:
                        if not folder[2]:  # no files in this directory
                            try:
                                os.rmdir(folder[0])
                                removed = True
                            except Exception:
                                pass
                    return removed

                while removeEmpty(tmpextractdir.name):
                    pass

                # Then copy over the directories
                if self.should_perform_step("move-contents"):
                    self.logInfo({"d": vendor_dir}, "Copying to {d}.")
                    mozfile.copy_contents(tmpextractdir.name, vendor_dir)
                else:
                    self.logInfo({}, "Skipping copying contents into tree.")
                # From here on, {tmpextractdir} may be expanded by get_full_path.
                self._extract_directory = lambda: tmpextractdir.name
            except Exception as e:
                tmpextractdir.cleanup()
                raise e
+
+ def update_yaml(self, revision, timestamp):
+ with open(self.yaml_file) as f:
+ yaml = f.readlines()
+
+ replaced = 0
+ replacements = [
+ [" release:", " %s (%s)." % (revision, timestamp)],
+ [" revision:", " %s" % (revision)],
+ ]
+
+ for i in range(0, len(yaml)):
+ l = yaml[i]
+
+ for r in replacements:
+ if r[0] in l:
+ print("Found " + l)
+ replaced += 1
+ yaml[i] = re.sub(r[0] + " [v\.a-f0-9]+.*$", r[0] + r[1], yaml[i])
+
+ assert len(replacements) == replaced
+
+ with open(self.yaml_file, "wb") as f:
+ f.write(("".join(yaml)).encode("utf-8"))
+
+ def spurious_check(self, revision, ignore_modified):
+ changed_files = set(
+ [
+ os.path.abspath(f)
+ for f in self.repository.get_changed_files(mode="staged")
+ ]
+ )
+ generated_files = set(
+ [
+ self.get_full_path(f)
+ for f in self.manifest["vendoring"].get("generated", [])
+ ]
+ )
+ changed_files = set(changed_files) - generated_files
+ if not changed_files:
+ self.logInfo({"r": revision}, "Upstream {r} hasn't modified files locally.")
+ # We almost certainly won't be here if ignore_modified was passed, because a modified
+ # local file will show up as a changed_file, but we'll be safe anyway.
+ if not ignore_modified and generated_files:
+ for g in generated_files:
+ self.repository.clean_directory(g)
+ elif generated_files:
+ self.log(
+ logging.CRITICAL,
+ "vendor",
+ {"files": generated_files},
+ "Because you passed --ignore-modified we are not cleaning your"
+ + " working directory, but the following files were probably"
+ + " spuriously edited and can be reverted: {files}",
+ )
+ sys.exit(-2)
+
+ self.logInfo(
+ {"rev": revision, "num": len(changed_files)},
+ "Version '{rev}' has changed {num} files.",
+ )
+
    def update_files(self, revision):
        """Run the manifest's "update-actions" after new sources are in place.

        Each action is a dict carrying an "action" key (validated earlier);
        "{revision}" is substituted into replace-in-file replacements and
        script arguments.  Unknown actions trip the final assert.
        """
        if "update-actions" not in self.manifest["vendoring"]:
            return

        for update in self.manifest["vendoring"]["update-actions"]:
            if update["action"] == "copy-file":
                src = self.get_full_path(update["from"])
                dst = self.get_full_path(update["to"])

                self.logInfo(
                    {"s": src, "d": dst}, "action: copy-file src: {s} dst: {d}"
                )

                # Read then rewrite rather than shutil.copy, so the
                # destination keeps its own permissions/metadata.
                with open(src) as f:
                    contents = f.read()
                with open(dst, "w") as f:
                    f.write(contents)
            elif update["action"] == "move-file":
                src = self.get_full_path(update["from"])
                dst = self.get_full_path(update["to"])

                self.logInfo(
                    {"s": src, "d": dst}, "action: move-file src: {s} dst: {d}"
                )

                shutil.move(src, dst)
            elif update["action"] == "move-dir":
                src = self.get_full_path(update["from"])
                dst = self.get_full_path(update["to"])

                self.logInfo(
                    {"src": src, "dst": dst}, "action: move-dir src: {src} dst: {dst}"
                )

                if not os.path.isdir(src):
                    raise Exception(
                        "Cannot move from a source directory %s that is not a directory"
                        % src
                    )
                os.makedirs(dst, exist_ok=True)

                # Recursive merge-copy: unlike shutil.move, this tolerates a
                # destination directory that already exists and merges into it.
                def copy_tree(src, dst):
                    names = os.listdir(src)
                    os.makedirs(dst, exist_ok=True)

                    for name in names:
                        srcname = os.path.join(src, name)
                        dstname = os.path.join(dst, name)

                        if os.path.isdir(srcname):
                            copy_tree(srcname, dstname)
                        else:
                            shutil.copy2(srcname, dstname)

                copy_tree(src, dst)
                shutil.rmtree(src)

            elif update["action"] in ["replace-in-file", "replace-in-file-regex"]:
                file = self.get_full_path(update["file"])

                self.logInfo({"file": file}, "action: replace-in-file file: {file}")

                replacement = update["with"].replace("{revision}", revision)
                _replace_in_file(
                    file,
                    update["pattern"],
                    replacement,
                    regex=update["action"] == "replace-in-file-regex",
                )
            elif update["action"] == "delete-path":
                path = self.get_full_path(update["path"])
                self.logInfo({"path": path}, "action: delete-path path: {path}")
                mozfile.remove(path)
            elif update["action"] in ["run-script", "run-command"]:
                if update["action"] == "run-script":
                    command = self.get_full_path(update["script"], support_cwd=True)
                else:
                    command = update["command"]

                run_dir = self.get_full_path(update["cwd"], support_cwd=True)

                args = []
                for a in update.get("args", []):
                    if a == "{revision}":
                        args.append(revision)
                    elif any(
                        s in a
                        for s in [
                            "{cwd}",
                            "{vendor_dir}",
                            "{yaml_dir}",
                            "{tmpextractdir}",
                        ]
                    ):
                        # Path-template arguments are expanded to full paths.
                        args.append(self.get_full_path(a, support_cwd=True))
                    else:
                        args.append(a)

                self.logInfo(
                    {
                        "command": command,
                        "run_dir": run_dir,
                        "args": args,
                        "type": update["action"],
                    },
                    "action: {type} command: {command} working dir: {run_dir} args: {args}",
                )
                extra_env = (
                    {"GECKO_PATH": os.getcwd()}
                    if "GECKO_PATH" not in os.environ
                    else {}
                )
                # We also add a signal to scripts that they are running under mach vendor
                extra_env["MACH_VENDOR"] = "1"
                self.run_process(
                    args=[command] + args,
                    cwd=run_dir,
                    log_name=command,
                    require_unix_environment=True,
                    append_env=extra_env,
                )
            else:
                assert False, "Unknown action supplied (how did this pass validation?)"
+
+ def update_moz_build(self, vendoring_dir, moz_yaml_dir, add_to_exports):
+ if vendoring_dir == moz_yaml_dir:
+ vendoring_dir = moz_yaml_dir = None
+
+ # If you edit this (especially for header files) you should double check
+ # rewrite_mozbuild.py around 'assignment_type'
+ source_suffixes = [".cc", ".c", ".cpp", ".S", ".asm"]
+ header_suffixes = [".h", ".hpp"]
+
+ files_removed = self.repository.get_changed_files(diff_filter="D")
+ files_added = self.repository.get_changed_files(diff_filter="A")
+
+ # Filter the files added to just source files we track in moz.build files.
+ files_added = [
+ f for f in files_added if any([f.endswith(s) for s in source_suffixes])
+ ]
+ header_files_to_add = [
+ f for f in files_added if any([f.endswith(s) for s in header_suffixes])
+ ]
+ if add_to_exports:
+ files_added += header_files_to_add
+ elif header_files_to_add:
+ self.log(
+ logging.WARNING,
+ "header_files_warning",
+ {},
+ (
+ "We found %s header files in the update, pass --add-to-exports if you want"
+ + " to attempt to include them in EXPORTS blocks: %s"
+ )
+ % (len(header_files_to_add), header_files_to_add),
+ )
+
+ self.logInfo(
+ {"added": len(files_added), "removed": len(files_removed)},
+ "Found {added} files added and {removed} files removed.",
+ )
+
+ should_abort = False
+ for f in files_added:
+ try:
+ add_file_to_moz_build_file(f, moz_yaml_dir, vendoring_dir)
+ except MozBuildRewriteException:
+ self.log(
+ logging.ERROR,
+ "vendor",
+ {},
+ "Could not add %s to the appropriate moz.build file" % f,
+ )
+ should_abort = True
+
+ for f in files_removed:
+ try:
+ remove_file_from_moz_build_file(f, moz_yaml_dir, vendoring_dir)
+ except MozBuildRewriteException:
+ self.log(
+ logging.ERROR,
+ "vendor",
+ {},
+ "Could not remove %s from the appropriate moz.build file" % f,
+ )
+ should_abort = True
+
+ if should_abort:
+ self.log(
+ logging.ERROR,
+ "vendor",
+ {},
+ "This is a deficiency in ./mach vendor . "
+ + "Please review the affected files before committing.",
+ )
+ # Exit with -1 to distinguish this from the Exception case of exiting with 1
+ sys.exit(-1)
+
+ def import_local_patches(self, patches, yaml_dir, vendor_dir):
+ self.logInfo({}, "Importing local patches...")
+ for patch in self.convert_patterns_to_paths(yaml_dir, patches):
+ script = [
+ "patch",
+ "-p1",
+ "--directory",
+ vendor_dir,
+ "--input",
+ os.path.abspath(patch),
+ "--no-backup-if-mismatch",
+ ]
+ self.run_process(
+ args=script,
+ log_name=script,
+ )
diff --git a/python/mozbuild/mozbuild/vendor/vendor_python.py b/python/mozbuild/mozbuild/vendor/vendor_python.py
new file mode 100644
index 0000000000..db554e20d4
--- /dev/null
+++ b/python/mozbuild/mozbuild/vendor/vendor_python.py
@@ -0,0 +1,228 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import shutil
+import subprocess
+import sys
+from pathlib import Path
+
+import mozfile
+from mozfile import TemporaryDirectory
+from mozpack.files import FileFinder
+
+from mozbuild.base import MozbuildObject
+
# Names under third_party/python that the vendoring process must leave alone:
# packages that cannot be (re)fetched from PyPI, plus support files that live
# alongside the vendored packages.
EXCLUDED_PACKAGES = {
    # dlmanager's package on PyPI only has metadata, but is missing the code.
    # https://github.com/parkouss/dlmanager/issues/1
    "dlmanager",
    # gyp's package on PyPI doesn't have any downloadable files.
    "gyp",
    # We keep some wheels vendored in "_venv" for use in Mozharness
    "_venv",
    # We manage vendoring "vsdownload" with a moz.yaml file (there is no module
    # on PyPI).
    "vsdownload",
    # The moz.build file isn't a vendored module, so don't delete it.
    "moz.build",
    # The input manifest that drives vendoring; not a package either.
    "requirements.in",
    # The ansicon package contains DLLs and we don't want to arbitrarily vendor
    # them since they could be unsafe. This module should rarely be used in practice
    # (it's a fallback for old versions of windows). We've intentionally vendored a
    # modified 'dummy' version of it so that the dependency checks still succeed, but
    # if it ever is attempted to be used, it will fail gracefully.
    "ansicon",
}
+
+
class VendorPython(MozbuildObject):
    """Vendors the pip packages listed in third_party/python/requirements.in."""

    def __init__(self, *args, **kwargs):
        # Use the dedicated "vendor" virtualenv for all pip/poetry work.
        MozbuildObject.__init__(self, *args, virtualenv_name="vendor", **kwargs)

    def vendor(self, keep_extra_files=False):
        """Re-vendor third_party/python from requirements.in.

        keep_extra_files: when True, keep tests/docs shipped inside the
        downloaded distributions instead of stripping them.
        """
        from mach.python_lockfile import PoetryHandle

        self.populate_logger()
        self.log_manager.enable_unstructured()

        vendor_dir = Path(self.topsrcdir) / "third_party" / "python"
        requirements_in = vendor_dir / "requirements.in"
        poetry_lockfile = vendor_dir / "poetry.lock"
        # Keep the manifest deterministically ordered before locking.
        _sort_requirements_in(requirements_in)

        with TemporaryDirectory() as work_dir:
            work_dir = Path(work_dir)
            poetry = PoetryHandle(work_dir)
            poetry.add_requirements_in_file(requirements_in)
            poetry.reuse_existing_lockfile(poetry_lockfile)
            lockfiles = poetry.generate_lockfiles(do_update=False)

            # Vendoring packages is only viable if it's possible to have a single
            # set of packages that work regardless of which environment they're used in.
            # So, we scrub environment markers, so that we essentially ask pip to
            # download "all dependencies for all environments". Pip will then either
            # fetch them as requested, or intelligently raise an error if that's not
            # possible (e.g.: if different versions of Python would result in different
            # packages/package versions).
            pip_lockfile_without_markers = work_dir / "requirements.no-markers.txt"
            shutil.copy(str(lockfiles.pip_lockfile), str(pip_lockfile_without_markers))
            remove_environment_markers_from_requirements_txt(
                pip_lockfile_without_markers
            )

            with TemporaryDirectory() as tmp:
                # use requirements.txt to download archived source distributions of all
                # packages
                subprocess.check_call(
                    [
                        sys.executable,
                        "-m",
                        "pip",
                        "download",
                        "-r",
                        str(pip_lockfile_without_markers),
                        "--no-deps",
                        "--dest",
                        tmp,
                        "--abi",
                        "none",
                        "--platform",
                        "any",
                    ]
                )
                # Wipe the previous vendored state, then unpack the archives.
                _purge_vendor_dir(vendor_dir)
                self._extract(tmp, vendor_dir, keep_extra_files)

            requirements_out = vendor_dir / "requirements.txt"

            # since requirements.out and poetry.lockfile are both outputs from
            # third party code, they may contain carriage returns on Windows. We
            # should strip the carriage returns to maintain consistency in our output
            # regardless of which platform is doing the vendoring. We can do this and
            # the copying at the same time to minimize reads and writes.
            _copy_file_strip_carriage_return(lockfiles.pip_lockfile, requirements_out)
            _copy_file_strip_carriage_return(lockfiles.poetry_lockfile, poetry_lockfile)
            self.repository.add_remove_files(vendor_dir)

    def _extract(self, src, dest, keep_extra_files=False):
        """Extract each downloaded wheel/sdist in *src* into *dest*."""

        ignore = ()
        if not keep_extra_files:
            # Tests and docs are dead weight in-tree; strip them by default.
            ignore = ("*/doc", "*/docs", "*/test", "*/tests", "**/.git")
        finder = FileFinder(src)
        for archive, _ in finder.find("*"):
            _, ext = os.path.splitext(archive)
            archive_path = os.path.join(finder.base, archive)
            if ext == ".whl":
                # Archive is named like "$package-name-1.0-py2.py3-none-any.whl", and should
                # have four dashes that aren't part of the package name.
                package_name, version, spec, abi, platform_and_suffix = archive.rsplit(
                    "-", 4
                )

                if package_name in EXCLUDED_PACKAGES:
                    print(
                        f"'{package_name}' is on the exclusion list and will not be vendored."
                    )
                    continue

                target_package_dir = os.path.join(dest, package_name)
                os.mkdir(target_package_dir)

                # Extract all the contents of the wheel into the package subdirectory.
                # We're expecting at least a code directory and a ".dist-info" directory,
                # though there may be a ".data" directory as well.
                mozfile.extract(archive_path, target_package_dir, ignore=ignore)
                _denormalize_symlinks(target_package_dir)
            else:
                # Archive is named like "$package-name-1.0.tar.gz", and the rightmost
                # dash should separate the package name from the rest of the archive
                # specifier.
                package_name, archive_postfix = archive.rsplit("-", 1)
                package_dir = os.path.join(dest, package_name)

                if package_name in EXCLUDED_PACKAGES:
                    print(
                        f"'{package_name}' is on the exclusion list and will not be vendored."
                    )
                    continue

                # The archive should only contain one top-level directory, which has
                # the source files. We extract this directory directly to
                # the vendor directory.
                extracted_files = mozfile.extract(archive_path, dest, ignore=ignore)
                assert len(extracted_files) == 1
                extracted_package_dir = extracted_files[0]

                # The extracted package dir includes the version in the name,
                # which we don't want.
                mozfile.move(extracted_package_dir, package_dir)
                _denormalize_symlinks(package_dir)
+
+
+def _sort_requirements_in(requirements_in: Path):
+ requirements = {}
+ with requirements_in.open(mode="r", newline="\n") as f:
+ comments = []
+ for line in f.readlines():
+ line = line.strip()
+ if not line or line.startswith("#"):
+ comments.append(line)
+ continue
+ name, version = line.split("==")
+ requirements[name] = version, comments
+ comments = []
+
+ with requirements_in.open(mode="w", newline="\n") as f:
+ for name, (version, comments) in sorted(requirements.items()):
+ if comments:
+ f.write("{}\n".format("\n".join(comments)))
+ f.write("{}=={}\n".format(name, version))
+
+
def remove_environment_markers_from_requirements_txt(requirements_txt: Path):
    """Strip PEP 508 environment markers (";" clauses) from a pip lockfile.

    Only top-level requirement lines are touched; indented continuation
    lines (hashes) and comment lines pass through unchanged.  The file is
    rewritten without a trailing newline, matching the previous behavior.
    """
    continuation_token = " \\"
    with requirements_txt.open(mode="r", newline="\n") as fh:
        stripped_lines = [line.rstrip() for line in fh]

    cleaned = []
    for line in stripped_lines:
        is_requirement_line = not line.startswith((" ", "#"))
        if is_requirement_line and ";" in line:
            # The first line of each requirement looks something like:
            #   package-name==X.Y; python_version>=3.7
            # Everything after the semicolon is the environment marker;
            # drop it but keep the line-continuation backslash if present.
            ends_with_continuation = line.endswith(continuation_token)
            line = line.split(";")[0]
            if ends_with_continuation:
                line += continuation_token
        cleaned.append(line)

    with requirements_txt.open(mode="w", newline="\n") as fh:
        fh.write("\n".join(cleaned))
+
+
def _purge_vendor_dir(vendor_dir):
    """Delete every vendored entry, keeping the excluded/support entries."""
    for entry in Path(vendor_dir).iterdir():
        # Entries on the exclusion list (support files and hand-managed
        # packages) survive the purge.
        if entry.name in EXCLUDED_PACKAGES:
            continue
        mozfile.remove(str(entry))
+
+
def _denormalize_symlinks(target):
    """Replace symlinks under *target* with regular-file copies.

    hg.mozilla.org forbids symlinks in the repository, so any links shipped
    inside a vendored package must be materialized as normal files.
    """
    for _, entry in FileFinder(target).find("**"):
        path = entry.path
        if not os.path.islink(path):
            continue
        link_target = os.path.realpath(path)
        os.unlink(path)
        shutil.copyfile(link_target, path)
+
+
+def _copy_file_strip_carriage_return(file_src: Path, file_dst):
+ shutil.copyfileobj(file_src.open(mode="r"), file_dst.open(mode="w", newline="\n"))
diff --git a/python/mozbuild/mozbuild/vendor/vendor_rust.py b/python/mozbuild/mozbuild/vendor/vendor_rust.py
new file mode 100644
index 0000000000..f87d2efde8
--- /dev/null
+++ b/python/mozbuild/mozbuild/vendor/vendor_rust.py
@@ -0,0 +1,961 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, # You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import errno
+import hashlib
+import json
+import logging
+import os
+import re
+import subprocess
+import typing
+from collections import defaultdict
+from itertools import dropwhile
+from pathlib import Path
+
+import mozpack.path as mozpath
+import toml
+from looseversion import LooseVersion
+from mozboot.util import MINIMUM_RUST_VERSION
+
+from mozbuild.base import BuildEnvironmentNotFoundException, MozbuildObject
+
# Only needed for annotations; avoids importing datetime at runtime.
if typing.TYPE_CHECKING:
    import datetime

# Type of a TOML value.  Forward references (strings) are used both for the
# recursive members and for the datetime types guarded above.
TomlItem = typing.Union[
    str,
    typing.List["TomlItem"],
    typing.Dict[str, "TomlItem"],
    bool,
    int,
    float,
    "datetime.datetime",
    "datetime.date",
    "datetime.time",
]
+
+
# Rendered into .cargo/config.in; doubles as a preprocessable file (see the
# comments embedded in the template itself).
CARGO_CONFIG_TEMPLATE = """\
# This file contains vendoring instructions for cargo.
# It was generated by `mach vendor rust`.
# Please do not edit.

{config}

# Take advantage of the fact that cargo will treat lines starting with #
# as comments to add preprocessing directives. This file can thus by copied
# as-is to $topsrcdir/.cargo/config with no preprocessing to be used there
# (for e.g. independent tasks building rust code), or be preprocessed by
# the build system to produce a .cargo/config with the right content.
#define REPLACE_NAME {replace_name}
#define VENDORED_DIRECTORY {directory}
# We explicitly exclude the following section when preprocessing because
# it would overlap with the preprocessed [source."@REPLACE_NAME@"], and
# cargo would fail.
#ifndef REPLACE_NAME
[source.{replace_name}]
directory = "{directory}"
#endif

# Thankfully, @REPLACE_NAME@ is unlikely to be a legitimate source, so
# cargo will ignore it when it's here verbatim.
#filter substitution
[source."@REPLACE_NAME@"]
directory = "@top_srcdir@/@VENDORED_DIRECTORY@"
"""


# Shown to the user after vendoring, since `cargo vendor` may touch Cargo.lock.
CARGO_LOCK_NOTICE = """
NOTE: `cargo vendor` may have made changes to your Cargo.lock. To restore your
Cargo.lock to the HEAD version, run `git checkout -- Cargo.lock` or
`hg revert Cargo.lock`.
"""


# Reason string attached to every windows-* crate below.
WINDOWS_UNDESIRABLE_REASON = """\
The windows and windows-sys crates and their dependencies are too big to \
vendor, and is a risk of version duplication due to its current update \
cadence. Until this is worked out with upstream, we prefer to avoid them.\
"""

# Crates that must not be vendored, mapped to the reason why.
PACKAGES_WE_DONT_WANT = {
    "windows-sys": WINDOWS_UNDESIRABLE_REASON,
    "windows": WINDOWS_UNDESIRABLE_REASON,
    "windows_aarch64_msvc": WINDOWS_UNDESIRABLE_REASON,
    "windows_i686_gnu": WINDOWS_UNDESIRABLE_REASON,
    "windows_i686_msvc": WINDOWS_UNDESIRABLE_REASON,
    "windows_x86_64_gnu": WINDOWS_UNDESIRABLE_REASON,
    "windows_x86_64_msvc": WINDOWS_UNDESIRABLE_REASON,
}

# Crates that must always be overridden by a build/rust replacement.
PACKAGES_WE_ALWAYS_WANT_AN_OVERRIDE_OF = [
    "autocfg",
    "cmake",
    "vcpkg",
]


# Historically duplicated crates. Eventually we want this list to be empty.
# If you do need to make changes increasing the number of duplicates, please
# add a comment as to why.
TOLERATED_DUPES = {
    "mio": 2,
    # Transition from time 0.1 to 0.3 underway, but chrono is stuck on 0.1
    # and hasn't been updated in 1.5 years (a hypothetical update is
    # expected to remove the dependency on time altogether).
    "time": 2,
}
+
+
+class VendorRust(MozbuildObject):
    def __init__(self, *args, **kwargs):
        """Initialize; start with an empty list of recorded lint issues."""
        super().__init__(*args, **kwargs)
        # (level, formatted message) tuples collected by log() and emitted
        # by serialize_issues_json().
        self._issues = []
+
+ def serialize_issues_json(self):
+ return json.dumps(
+ {
+ "Cargo.lock": [
+ {
+ "path": "Cargo.lock",
+ "column": None,
+ "line": None,
+ "level": "error" if level == logging.ERROR else "warning",
+ "message": msg,
+ }
+ for (level, msg) in self._issues
+ ]
+ }
+ )
+
    def log(self, level, action, params, format_str):
        """Log through the normal machinery, also recording WARNING+ issues.

        The formatted message is captured eagerly so serialize_issues_json()
        can report it later.
        """
        if level >= logging.WARNING:
            self._issues.append((level, format_str.format(**params)))
        super().log(level, action, params, format_str)
+
+ def get_cargo_path(self):
+ try:
+ return self.substs["CARGO"]
+ except (BuildEnvironmentNotFoundException, KeyError):
+ if "MOZ_AUTOMATION" in os.environ:
+ cargo = os.path.join(
+ os.environ["MOZ_FETCHES_DIR"], "rustc", "bin", "cargo"
+ )
+ assert os.path.exists(cargo)
+ return cargo
+ # Default if this tree isn't configured.
+ from mozfile import which
+
+ cargo = which("cargo")
+ if not cargo:
+ raise OSError(
+ errno.ENOENT,
+ (
+ "Could not find 'cargo' on your $PATH. "
+ "Hint: have you run `mach build` or `mach configure`?"
+ ),
+ )
+ return cargo
+
    def check_cargo_version(self, cargo):
        """
        Ensure that Cargo is new enough.

        Returns True when `cargo --version` reports a version at or above
        the required minimum, False (after logging) otherwise.
        """
        # `cargo --version` prints e.g. "cargo 1.68.2 (...)"; use line one.
        out = (
            subprocess.check_output([cargo, "--version"])
            .splitlines()[0]
            .decode("UTF-8")
        )
        if not out.startswith("cargo"):
            return False
        version = LooseVersion(out.split()[1])
        # Cargo 1.68.0 changed vendoring in a way that creates a lot of noise
        # if we go back and forth between vendoring with an older version and
        # a newer version. Only allow the newer versions.
        minimum_rust_version = MINIMUM_RUST_VERSION
        # NOTE(review): LooseVersion instances are compared against plain
        # strings here; looseversion supports that, but confirm if
        # MINIMUM_RUST_VERSION ever changes type.
        if LooseVersion("1.68.0") >= MINIMUM_RUST_VERSION:
            minimum_rust_version = "1.68.0"
        if version < minimum_rust_version:
            self.log(
                logging.ERROR,
                "cargo_version",
                {},
                "Cargo >= {0} required (install Rust {0} or newer)".format(
                    minimum_rust_version
                ),
            )
            return False
        self.log(logging.DEBUG, "cargo_version", {}, "cargo is new enough")
        return True
+
+ def has_modified_files(self):
+ """
+ Ensure that there aren't any uncommitted changes to files
+ in the working copy, since we're going to change some state
+ on the user. Allow changes to Cargo.{toml,lock} since that's
+ likely to be a common use case.
+ """
+ modified = [
+ f
+ for f in self.repository.get_changed_files("M")
+ if os.path.basename(f) not in ("Cargo.toml", "Cargo.lock")
+ and not f.startswith("supply-chain/")
+ ]
+ if modified:
+ self.log(
+ logging.ERROR,
+ "modified_files",
+ {},
+ """You have uncommitted changes to the following files:
+
+{files}
+
+Please commit or stash these changes before vendoring, or re-run with `--ignore-modified`.
+""".format(
+ files="\n".join(sorted(modified))
+ ),
+ )
+ return modified
+
+ def check_openssl(self):
+ """
+ Set environment flags for building with openssl.
+
+ MacOS doesn't include openssl, but the openssl-sys crate used by
+ mach-vendor expects one of the system. It's common to have one
+ installed in /usr/local/opt/openssl by homebrew, but custom link
+ flags are necessary to build against it.
+ """
+
+ test_paths = ["/usr/include", "/usr/local/include"]
+ if any(
+ [os.path.exists(os.path.join(path, "openssl/ssl.h")) for path in test_paths]
+ ):
+ # Assume we can use one of these system headers.
+ return None
+
+ if os.path.exists("/usr/local/opt/openssl/include/openssl/ssl.h"):
+ # Found a likely homebrew install.
+ self.log(
+ logging.INFO, "openssl", {}, "Using OpenSSL in /usr/local/opt/openssl"
+ )
+ return {
+ "OPENSSL_INCLUDE_DIR": "/usr/local/opt/openssl/include",
+ "OPENSSL_LIB_DIR": "/usr/local/opt/openssl/lib",
+ }
+
+ self.log(logging.ERROR, "openssl", {}, "OpenSSL not found!")
+ return None
+
+ def _ensure_cargo(self):
+ """
+ Ensures all the necessary cargo bits are installed.
+
+ Returns the path to cargo if successful, None otherwise.
+ """
+ cargo = self.get_cargo_path()
+ if not self.check_cargo_version(cargo):
+ return None
+ return cargo
+
    # A whitelist of acceptable license identifiers for the
    # packages.license field from https://spdx.org/licenses/. Cargo
    # documentation claims that values are checked against the above
    # list and that multiple entries can be separated by '/'. We
    # choose to list all combinations instead for the sake of
    # completeness and because some entries below obviously do not
    # conform to the format prescribed in the documentation.
    #
    # It is insufficient to have additions to this whitelist reviewed
    # solely by a build peer; any additions must be checked by somebody
    # competent to review licensing minutiae.

    # Licenses for code used at runtime. Please see the above comment before
    # adding anything to this list.  Entries are SPDX identifiers.
    RUNTIME_LICENSE_WHITELIST = [
        "Apache-2.0",
        "Apache-2.0 WITH LLVM-exception",
        # BSD-2-Clause and BSD-3-Clause are ok, but packages using them
        # must be added to the appropriate section of about:licenses.
        # To encourage people to remember to do that, we do not whitelist
        # the licenses themselves, and we require the packages to be added
        # to RUNTIME_LICENSE_PACKAGE_WHITELIST below.
        "CC0-1.0",
        "ISC",
        "MIT",
        "MPL-2.0",
        "Unicode-DFS-2016",
        "Unlicense",
        "Zlib",
    ]

    # Licenses for code used at build time (e.g. code generators). Please see the above
    # comments before adding anything to this list.  Maps a license to the
    # packages allowed to use it at build time only.
    BUILDTIME_LICENSE_WHITELIST = {
        "BSD-3-Clause": [
            "bindgen",
            "fuchsia-zircon",
            "fuchsia-zircon-sys",
            "fuchsia-cprng",
            "glsl",
            "instant",
        ]
    }

    # This whitelist should only be used for packages that use an acceptable
    # license, but that also need to explicitly mentioned in about:license.
    RUNTIME_LICENSE_PACKAGE_WHITELIST = {
        "BSD-2-Clause": [
            "arrayref",
            "cloudabi",
            "Inflector",
            "mach",
            "qlog",
        ],
        "BSD-3-Clause": [],
    }

    # ICU4X is distributed as individual crates that all share the same LICENSE
    # that will need to be individually added to the allow list below. We'll
    # define the SHA256 once here, to make the review process easier as new
    # ICU4X crates are vendored into the tree.
    ICU4X_LICENSE_SHA256 = (
        "02420cc1b4c26d9a3318d60fd57048d015831249a5b776a1ada75cd227e78630"
    )

    # This whitelist should only be used for packages that use a
    # license-file and for which the license-file entry has been
    # reviewed. The table is keyed by package names and maps to the
    # sha256 hash of the license file that we reviewed.
    #
    # As above, it is insufficient to have additions to this whitelist
    # reviewed solely by a build peer; any additions must be checked by
    # somebody competent to review licensing minutiae.
    RUNTIME_LICENSE_FILE_PACKAGE_WHITELIST = {
        # MIT
        "deque": "6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb",
        # we're whitelisting this fuchsia crate because it doesn't get built in the final
        # product but has a license-file that needs ignoring
        "fuchsia-cprng": "03b114f53e6587a398931762ee11e2395bfdba252a329940e2c8c9e81813845b",
        # Old ICU4X crates for ICU4X 1.0, see comment above.
        "yoke-derive": ICU4X_LICENSE_SHA256,
        "zerofrom-derive": ICU4X_LICENSE_SHA256,
    }
+
+ @staticmethod
+ def runtime_license(package, license_string):
+ """Cargo docs say:
+ ---
+ https://doc.rust-lang.org/cargo/reference/manifest.html
+
+ This is an SPDX 2.1 license expression for this package. Currently
+ crates.io will validate the license provided against a whitelist of
+ known license and exception identifiers from the SPDX license list
+ 2.4. Parentheses are not currently supported.
+
+ Multiple licenses can be separated with a `/`, although that usage
+ is deprecated. Instead, use a license expression with AND and OR
+ operators to get more explicit semantics.
+ ---
+ But I have no idea how you can meaningfully AND licenses, so
+ we will abort if that is detected. We'll handle `/` and OR as
+ equivalent and approve is any is in our approved list."""
+
+ # This specific AND combination has been reviewed for encoding_rs.
+ if (
+ license_string == "(Apache-2.0 OR MIT) AND BSD-3-Clause"
+ and package == "encoding_rs"
+ ):
+ return True
+
+ # This specific AND combination has been reviewed for unicode-ident.
+ if (
+ license_string == "(MIT OR Apache-2.0) AND Unicode-DFS-2016"
+ and package == "unicode-ident"
+ ):
+ return True
+
+ if re.search(r"\s+AND", license_string):
+ return False
+
+ license_list = re.split(r"\s*/\s*|\s+OR\s+", license_string)
+ for license in license_list:
+ if license in VendorRust.RUNTIME_LICENSE_WHITELIST:
+ return True
+ if package in VendorRust.RUNTIME_LICENSE_PACKAGE_WHITELIST.get(license, []):
+ return True
+ return False
+
    def _check_licenses(self, vendor_dir: str) -> bool:
        """Check the license of every vendored crate under *vendor_dir*.

        Every package is checked (so one run surfaces every problem);
        returns True only if all of them passed.
        """

        # Validate an SPDX `license` expression against the runtime and
        # build-time whitelists, logging a detailed error on failure.
        def verify_acceptable_license(package: str, license: str) -> bool:
            self.log(
                logging.DEBUG, "package_license", {}, "has license {}".format(license)
            )

            if not self.runtime_license(package, license):
                if license not in self.BUILDTIME_LICENSE_WHITELIST:
                    self.log(
                        logging.ERROR,
                        "package_license_error",
                        {},
                        """Package {} has a non-approved license: {}.

    Please request license review on the package's license. If the package's license
    is approved, please add it to the whitelist of suitable licenses.
    """.format(
                            package, license
                        ),
                    )
                    return False
                elif package not in self.BUILDTIME_LICENSE_WHITELIST[license]:
                    self.log(
                        logging.ERROR,
                        "package_license_error",
                        {},
                        """Package {} has a license that is approved for build-time dependencies:
    {}
    but the package itself is not whitelisted as being a build-time only package.

    If your package is build-time only, please add it to the whitelist of build-time
    only packages. Otherwise, you need to request license review on the package's license.
    If the package's license is approved, please add it to the whitelist of suitable licenses.
    """.format(
                            package, license
                        ),
                    )
                    return False
            return True

        # Check a single vendored package: read its Cargo.toml, validate the
        # license/license-file fields, and verify whichever is present.
        def check_package(package_name: str) -> bool:
            self.log(
                logging.DEBUG,
                "package_check",
                {},
                "Checking license for {}".format(package_name),
            )

            toml_file = os.path.join(vendor_dir, package_name, "Cargo.toml")
            with open(toml_file, encoding="utf-8") as fh:
                toml_data = toml.load(fh)

            package_entry: typing.Dict[str, TomlItem] = toml_data["package"]
            license = package_entry.get("license", None)
            license_file = package_entry.get("license-file", None)

            # Both fields must be strings when present.
            if license is not None and type(license) is not str:
                self.log(
                    logging.ERROR,
                    "package_invalid_license_format",
                    {},
                    "package {} has an invalid `license` field (expected a string)".format(
                        package_name
                    ),
                )
                return False

            if license_file is not None and type(license_file) is not str:
                self.log(
                    logging.ERROR,
                    "package_invalid_license_format",
                    {},
                    "package {} has an invalid `license-file` field (expected a string)".format(
                        package_name
                    ),
                )
                return False

            # License information is optional for crates to provide, but
            # we require it.
            if not license and not license_file:
                self.log(
                    logging.ERROR,
                    "package_no_license",
                    {},
                    "package {} does not provide a license".format(package_name),
                )
                return False

            # The Cargo.toml spec suggests that crates should either have
            # `license` or `license-file`, but not both. We might as well
            # be defensive about that, though.
            if license and license_file:
                self.log(
                    logging.ERROR,
                    "package_many_licenses",
                    {},
                    "package {} provides too many licenses".format(package_name),
                )
                return False

            if license:
                return verify_acceptable_license(package_name, license)

            # otherwise, it's a custom license in a separate file
            assert license_file is not None
            self.log(
                logging.DEBUG,
                "package_license_file",
                {},
                "package has license-file {}".format(license_file),
            )

            if package_name not in self.RUNTIME_LICENSE_FILE_PACKAGE_WHITELIST:
                self.log(
                    logging.ERROR,
                    "package_license_file_unknown",
                    {},
                    """Package {} has an unreviewed license file: {}.

Please request review on the provided license; if approved, the package can be added
to the whitelist of packages whose licenses are suitable.
""".format(
                        package_name, license_file
                    ),
                )
                return False

            approved_hash = self.RUNTIME_LICENSE_FILE_PACKAGE_WHITELIST[package_name]

            # Hash the current license file and compare with the reviewed hash.
            with open(
                os.path.join(vendor_dir, package_name, license_file), "rb"
            ) as license_buf:
                current_hash = hashlib.sha256(license_buf.read()).hexdigest()

            if current_hash != approved_hash:
                self.log(
                    logging.ERROR,
                    "package_license_file_mismatch",
                    {},
                    """Package {} has changed its license file: {} (hash {}).

Please request review on the provided license; if approved, please update the
license file's hash.
""".format(
                        package_name, license_file, current_hash
                    ),
                )
                return False
            return True

        # Force all of the packages to be checked for license information
        # before reducing via `all`, so all license issues are found in a
        # single `mach vendor rust` invocation.
        results = [
            check_package(p)
            for p in os.listdir(vendor_dir)
            if os.path.isdir(os.path.join(vendor_dir, p))
        ]
        return all(results)
+
+ def _check_build_rust(self, cargo_lock):
+ ret = True
+ crates = {}
+ for path in Path(self.topsrcdir).glob("build/rust/**/Cargo.toml"):
+ with open(path) as fh:
+ cargo_toml = toml.load(fh)
+ path = path.relative_to(self.topsrcdir)
+ package = cargo_toml["package"]
+ key = (package["name"], package["version"])
+ if key in crates:
+ self.log(
+ logging.ERROR,
+ "build_rust",
+ {
+ "path": crates[key],
+ "path2": path,
+ "crate": key[0],
+ "version": key[1],
+ },
+ "{path} and {path2} both contain {crate} {version}",
+ )
+ ret = False
+ crates[key] = path
+
+ for package in cargo_lock["package"]:
+ key = (package["name"], package["version"])
+ if key in crates and "source" not in package:
+ crates.pop(key)
+
+ for ((name, version), path) in crates.items():
+ self.log(
+ logging.ERROR,
+ "build_rust",
+ {"path": path, "crate": name, "version": version},
+ "{crate} {version} has an override in {path} that is not used",
+ )
+ ret = False
+ return ret
+
    def vendor(
        self, ignore_modified=False, build_peers_said_large_imports_were_ok=False
    ):
        """Re-vendor all third-party Rust dependencies into third_party/rust.

        High-level flow, where each stage can veto the vendoring:
          1. refresh Cargo.lock via `cargo update -p gkrust`;
          2. sanity-check the lockfile (unused patches, build/rust overrides,
             mandatory overrides, unwanted crates, duplicate-version policy);
          3. run `cargo vet` and report audit failures;
          4. run `cargo vendor` and regenerate .cargo/config.in from its
             output;
          5. verify licenses and the size of the added files; on failure the
             vendored directory is cleaned up again.

        Args:
            ignore_modified: skip the refusal to run with locally modified
                files.
            build_peers_said_large_imports_were_ok: bypass the per-file size
                limit check.

        Returns True when vendoring completed and the changes were kept.
        """
        from mozbuild.mach_commands import cargo_vet

        self.populate_logger()
        self.log_manager.enable_unstructured()
        if not ignore_modified and self.has_modified_files():
            return False

        cargo = self._ensure_cargo()
        if not cargo:
            self.log(logging.ERROR, "cargo_not_found", {}, "Cargo was not found.")
            return False

        relative_vendor_dir = "third_party/rust"
        vendor_dir = mozpath.join(self.topsrcdir, relative_vendor_dir)

        # We use check_call instead of mozprocess to ensure errors are displayed.
        # We do an |update -p| here to regenerate the Cargo.lock file with minimal
        # changes. See bug 1324462
        res = subprocess.run([cargo, "update", "-p", "gkrust"], cwd=self.topsrcdir)
        if res.returncode:
            self.log(logging.ERROR, "cargo_update_failed", {}, "Cargo update failed.")
            return False

        with open(os.path.join(self.topsrcdir, "Cargo.lock")) as fh:
            cargo_lock = toml.load(fh)
        # Accumulate problems instead of bailing out at the first one, so a
        # single run reports everything that needs fixing.
        failed = False
        for package in cargo_lock.get("patch", {}).get("unused", []):
            self.log(
                logging.ERROR,
                "unused_patch",
                {"crate": package["name"]},
                """Unused patch in top-level Cargo.toml for {crate}.""",
            )
            failed = True

        if not self._check_build_rust(cargo_lock):
            failed = True

        # Group lockfile entries by crate name to enforce per-crate policy.
        grouped = defaultdict(list)
        for package in cargo_lock["package"]:
            if package["name"] in PACKAGES_WE_ALWAYS_WANT_AN_OVERRIDE_OF:
                # When the in-tree override is in effect, Cargo.lock has no
                # `source` for the crate; a `source` entry means the
                # mandatory override was not applied.
                if package.get("source"):
                    self.log(
                        logging.ERROR,
                        "non_overridden",
                        {
                            "crate": package["name"],
                            "version": package["version"],
                            "source": package["source"],
                        },
                        "Crate {crate} v{version} must be overridden but isn't "
                        "and comes from {source}.",
                    )
                    failed = True
            elif package["name"] in PACKAGES_WE_DONT_WANT:
                self.log(
                    logging.ERROR,
                    "undesirable",
                    {
                        "crate": package["name"],
                        "version": package["version"],
                        "reason": PACKAGES_WE_DONT_WANT[package["name"]],
                    },
                    "Crate {crate} is not desirable: {reason}",
                )
                failed = True
            grouped[package["name"]].append(package)

        # Enforce the duplicate-version policy: TOLERATED_DUPES must match
        # reality exactly, in both directions.
        for name, packages in grouped.items():
            # Allow to have crates of the same name when one depends on the other.
            num = len(
                [
                    p
                    for p in packages
                    if all(d.split()[0] != name for d in p.get("dependencies", []))
                ]
            )
            expected = TOLERATED_DUPES.get(name, 1)
            if num > expected:
                self.log(
                    logging.ERROR,
                    "duplicate_crate",
                    {
                        "crate": name,
                        "num": num,
                        "expected": expected,
                        "file": Path(__file__).relative_to(self.topsrcdir),
                    },
                    "There are {num} different versions of crate {crate} "
                    "(expected {expected}). Please avoid the extra duplication "
                    "or adjust TOLERATED_DUPES in {file} if not possible "
                    "(but we'd prefer the former).",
                )
                failed = True
            elif num < expected and num > 1:
                self.log(
                    logging.ERROR,
                    "less_duplicate_crate",
                    {
                        "crate": name,
                        "num": num,
                        "expected": expected,
                        "file": Path(__file__).relative_to(self.topsrcdir),
                    },
                    "There are {num} different versions of crate {crate} "
                    "(expected {expected}). Please adjust TOLERATED_DUPES in "
                    "{file} to reflect this improvement.",
                )
                failed = True
            elif num < expected and num > 0:
                # Exactly one version left: the TOLERATED_DUPES entry is stale.
                self.log(
                    logging.ERROR,
                    "less_duplicate_crate",
                    {
                        "crate": name,
                        "file": Path(__file__).relative_to(self.topsrcdir),
                    },
                    "Crate {crate} is not duplicated anymore. "
                    "Please adjust TOLERATED_DUPES in {file} to reflect this improvement.",
                )
                failed = True
            elif name in TOLERATED_DUPES and expected <= 1:
                self.log(
                    logging.ERROR,
                    "broken_allowed_dupes",
                    {
                        "crate": name,
                        "file": Path(__file__).relative_to(self.topsrcdir),
                    },
                    "Crate {crate} is not duplicated. Remove it from "
                    "TOLERATED_DUPES in {file}.",
                )
                failed = True

        # Entries for crates that left the dependency graph entirely.
        for name in TOLERATED_DUPES:
            if name not in grouped:
                self.log(
                    logging.ERROR,
                    "outdated_allowed_dupes",
                    {
                        "crate": name,
                        "file": Path(__file__).relative_to(self.topsrcdir),
                    },
                    "Crate {crate} is not in Cargo.lock anymore. Remove it from "
                    "TOLERATED_DUPES in {file}.",
                )
                failed = True

        # Only emit warnings for cargo-vet for now.
        # Make sure the cargo binary's directory is on PATH for cargo-vet.
        env = os.environ.copy()
        env["PATH"] = os.pathsep.join(
            (
                str(Path(cargo).parent),
                os.environ["PATH"],
            )
        )
        flags = ["--output-format=json"]
        if "MOZ_AUTOMATION" in os.environ:
            flags.append("--locked")
            flags.append("--frozen")
        res = cargo_vet(
            self,
            flags,
            stdout=subprocess.PIPE,
            env=env,
        )
        if res.returncode:
            # Translate cargo-vet's JSON report into structured log entries.
            vet = json.loads(res.stdout)
            logged_error = False
            for failure in vet.get("failures", []):
                failure["crate"] = failure.pop("name")
                self.log(
                    logging.ERROR,
                    "cargo_vet_failed",
                    failure,
                    "Missing audit for {crate}:{version} (requires {missing_criteria})."
                    " Run `./mach cargo vet` for more information.",
                )
                logged_error = True
            # NOTE: This could log more information, but the violation JSON
            # output isn't super stable yet, so it's probably simpler to tell
            # the caller to run `./mach cargo vet` directly.
            for key in vet.get("violations", {}).keys():
                self.log(
                    logging.ERROR,
                    "cargo_vet_failed",
                    {"key": key},
                    "Violation conflict for {key}. Run `./mach cargo vet` for more information.",
                )
                logged_error = True
            if "error" in vet:
                # NOTE: The error format produced by cargo-vet is from the
                # `miette` crate, and can include a lot of metadata and context.
                # If we want to show more details in the future, we can expand
                # this rendering to also include things like source labels and
                # related error metadata.
                error = vet["error"]
                self.log(
                    logging.ERROR,
                    "cargo_vet_failed",
                    error,
                    "Vet {severity}: {message}",
                )
                if "help" in error:
                    self.log(logging.INFO, "cargo_vet_failed", error, " help: {help}")
                for cause in error.get("causes", []):
                    self.log(
                        logging.INFO,
                        "cargo_vet_failed",
                        {"cause": cause},
                        " cause: {cause}",
                    )
                for related in error.get("related", []):
                    self.log(
                        logging.INFO,
                        "cargo_vet_failed",
                        related,
                        " related {severity}: {message}",
                    )
                self.log(
                    logging.INFO,
                    "cargo_vet_failed",
                    {},
                    "Run `./mach cargo vet` for more information.",
                )
                logged_error = True
            if not logged_error:
                self.log(
                    logging.ERROR,
                    "cargo_vet_failed",
                    {},
                    "Unknown vet error. Run `./mach cargo vet` for more information.",
                )
            failed = True

        # If we failed when checking the crates list and/or running `cargo vet`,
        # stop before invoking `cargo vendor`.
        if failed:
            return False

        res = subprocess.run(
            [cargo, "vendor", vendor_dir], cwd=self.topsrcdir, stdout=subprocess.PIPE
        )
        if res.returncode:
            self.log(logging.ERROR, "cargo_vendor_failed", {}, "Cargo vendor failed.")
            return False
        output = res.stdout.decode("UTF-8")

        # Get the snippet of configuration that cargo vendor outputs, and
        # update .cargo/config with it.
        # XXX(bug 1576765): Hopefully do something better after
        # https://github.com/rust-lang/cargo/issues/7280 is addressed.
        config = "\n".join(
            dropwhile(lambda l: not l.startswith("["), output.splitlines())
        )

        # The config is toml; parse it as such.
        config = toml.loads(config)

        # For each replace-with, extract their configuration and update the
        # corresponding directory to be relative to topsrcdir.
        replaces = {
            v["replace-with"] for v in config["source"].values() if "replace-with" in v
        }

        # We only really expect one replace-with
        if len(replaces) != 1:
            self.log(
                logging.ERROR,
                "vendor_failed",
                {},
                """cargo vendor didn't output a unique replace-with. Found: %s."""
                % replaces,
            )
            return False

        replace_name = replaces.pop()
        replace = config["source"].pop(replace_name)
        # Normalize to a separator/case-normalized path relative to topsrcdir
        # so the generated config does not depend on the local checkout path.
        replace["directory"] = mozpath.relpath(
            mozpath.normsep(os.path.normcase(replace["directory"])),
            mozpath.normsep(os.path.normcase(self.topsrcdir)),
        )

        cargo_config = os.path.join(self.topsrcdir, ".cargo", "config.in")
        with open(cargo_config, "w", encoding="utf-8", newline="\n") as fh:
            fh.write(
                CARGO_CONFIG_TEMPLATE.format(
                    config=toml.dumps(config),
                    replace_name=replace_name,
                    directory=replace["directory"],
                )
            )

        # From here on, failures roll back the vendored directory.
        if not self._check_licenses(vendor_dir):
            self.log(
                logging.ERROR,
                "license_check_failed",
                {},
                """The changes from `mach vendor rust` will NOT be added to version control.

{notice}""".format(
                    notice=CARGO_LOCK_NOTICE
                ),
            )
            self.repository.clean_directory(vendor_dir)
            return False

        self.repository.add_remove_files(vendor_dir)

        # 100k is a reasonable upper bound on source file size.
        FILESIZE_LIMIT = 100 * 1024
        large_files = set()
        cumulative_added_size = 0
        for f in self.repository.get_changed_files("A"):
            path = mozpath.join(self.topsrcdir, f)
            size = os.stat(path).st_size
            cumulative_added_size += size
            if size > FILESIZE_LIMIT:
                large_files.add(f)

        # Forcefully complain about large files being added, as history has
        # shown that large-ish files typically are not needed.
        if large_files and not build_peers_said_large_imports_were_ok:
            self.log(
                logging.ERROR,
                "filesize_check",
                {},
                """The following files exceed the filesize limit of {size}:

{files}

If you can't reduce the size of these files, talk to a build peer (on the #build
channel at https://chat.mozilla.org) about the particular large files you are
adding.

The changes from `mach vendor rust` will NOT be added to version control.

{notice}""".format(
                    files="\n".join(sorted(large_files)),
                    size=FILESIZE_LIMIT,
                    notice=CARGO_LOCK_NOTICE,
                ),
            )
            self.repository.forget_add_remove_files(vendor_dir)
            self.repository.clean_directory(vendor_dir)
            return False

        # Only warn for large imports, since we may just have large code
        # drops from time to time (e.g. importing features into m-c).
        SIZE_WARN_THRESHOLD = 5 * 1024 * 1024
        if cumulative_added_size >= SIZE_WARN_THRESHOLD:
            self.log(
                logging.WARN,
                "filesize_check",
                {},
                """Your changes add {size} bytes of added files.

Please consider finding ways to reduce the size of the vendored packages.
For instance, check the vendored packages for unusually large test or
benchmark files that don't need to be published to crates.io and submit
a pull request upstream to ignore those files when publishing.""".format(
                    size=cumulative_added_size
                ),
            )
        return True
diff --git a/python/mozbuild/mozpack/__init__.py b/python/mozbuild/mozpack/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozpack/__init__.py
diff --git a/python/mozbuild/mozpack/apple_pkg/Distribution.template b/python/mozbuild/mozpack/apple_pkg/Distribution.template
new file mode 100644
index 0000000000..2f4b9484d9
--- /dev/null
+++ b/python/mozbuild/mozpack/apple_pkg/Distribution.template
@@ -0,0 +1,19 @@
+<?xml version="1.0" encoding="utf-8"?>
+<installer-gui-script minSpecVersion="1">
+ <pkg-ref id="${CFBundleIdentifier}">
+ <bundle-version>
+ <bundle CFBundleShortVersionString="${CFBundleShortVersionString}" CFBundleVersion="${CFBundleVersion}" id="${CFBundleIdentifier}" path="${app_name}.app"/>
+ </bundle-version>
+ </pkg-ref>
+ <options customize="never" require-scripts="false" hostArchitectures="x86_64,arm64"/>
+ <choices-outline>
+ <line choice="default">
+ <line choice="${CFBundleIdentifier}"/>
+ </line>
+ </choices-outline>
+ <choice id="default"/>
+ <choice id="${CFBundleIdentifier}" visible="false">
+ <pkg-ref id="${CFBundleIdentifier}"/>
+ </choice>
+ <pkg-ref id="${CFBundleIdentifier}" version="${simple_version}" installKBytes="${installKBytes}">#${app_name_url_encoded}.pkg</pkg-ref>
+</installer-gui-script> \ No newline at end of file
diff --git a/python/mozbuild/mozpack/apple_pkg/PackageInfo.template b/python/mozbuild/mozpack/apple_pkg/PackageInfo.template
new file mode 100644
index 0000000000..74d47e396c
--- /dev/null
+++ b/python/mozbuild/mozpack/apple_pkg/PackageInfo.template
@@ -0,0 +1,19 @@
+<?xml version="1.0" encoding="utf-8"?>
+<pkg-info overwrite-permissions="true" relocatable="false" identifier="${CFBundleIdentifier}" postinstall-action="none" version="${simple_version}" format-version="2" generator-version="InstallCmds-681 (18F132)" install-location="/Applications" auth="root">
+ <payload numberOfFiles="${numberOfFiles}" installKBytes="${installKBytes}"/>
+ <bundle path="./${app_name}.app" id="${CFBundleIdentifier}" CFBundleShortVersionString="${CFBundleShortVersionString}" CFBundleVersion="${CFBundleVersion}"/>
+ <bundle-version>
+ <bundle id="${CFBundleIdentifier}"/>
+ </bundle-version>
+ <upgrade-bundle>
+ <bundle id="${CFBundleIdentifier}"/>
+ </upgrade-bundle>
+ <update-bundle/>
+ <atomic-update-bundle/>
+ <strict-identifier>
+ <bundle id="${CFBundleIdentifier}"/>
+ </strict-identifier>
+ <relocate>
+ <bundle id="${CFBundleIdentifier}"/>
+ </relocate>
+</pkg-info> \ No newline at end of file
diff --git a/python/mozbuild/mozpack/archive.py b/python/mozbuild/mozpack/archive.py
new file mode 100644
index 0000000000..89bf14b179
--- /dev/null
+++ b/python/mozbuild/mozpack/archive.py
@@ -0,0 +1,153 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import bz2
+import gzip
+import stat
+import tarfile
+
+from .files import BaseFile, File
+
+# 2016-01-01T00:00:00+0000
+DEFAULT_MTIME = 1451606400
+
+
+# Python 3.9 contains this change:
+# https://github.com/python/cpython/commit/674935b8caf33e47c78f1b8e197b1b77a04992d2
+# which changes the output of tar creation compared to earlier versions.
+# As this code is used to generate tar files that are meant to be deterministic
+# across versions of python (specifically, it's used as part of computing the hash
+# of docker images, which needs to be identical between CI (which uses python 3.8),
+# and developer environments (using arbitrary versions of python, at this point,
+# most probably more recent than 3.9)).
+# What we do is subclass TarInfo so that if used on python >= 3.9, it reproduces the
+# behavior from python < 3.9.
+# Here's how it goes:
+# - the behavior in python >= 3.9 is the same as python < 3.9 when the type encoded
+# in the tarinfo is CHRTYPE or BLKTYPE.
+# - the value of the type is only compared in the context of choosing which behavior
+# to take
+# - we replace the type with the same value (so that using the value has no changes)
+# but that pretends to be the same as CHRTYPE so that the condition that enables the
+# old behavior is taken.
class HackedType(bytes):
    """bytes subclass that claims equality with tarfile.CHRTYPE.

    Wrapping the type byte with this class tricks the header-creation code
    in Python >= 3.9 into taking the pre-3.9 branch (which only checks the
    entry type against CHRTYPE/BLKTYPE), restoring deterministic output,
    while the actual byte value is preserved for everything else.
    """

    def __eq__(self, other):
        # Pretend to be CHRTYPE so tarfile's version check passes.
        if other == tarfile.CHRTYPE:
            return True
        # Fall back to plain bytes comparison. The original implementation
        # returned `self == other`, which re-entered this method and
        # recursed forever for any non-CHRTYPE comparison.
        return bytes.__eq__(self, other)

    # Defining __eq__ would implicitly set __hash__ to None; keep the bytes
    # hash so instances stay hashable like the value they wrap.
    __hash__ = bytes.__hash__
+
+
class TarInfo(tarfile.TarInfo):
    # Determinism shim: see the HackedType comment above. NOTE(review):
    # this overrides the *private* tarfile.TarInfo._create_header API, so
    # it may need updating if a future Python release changes that
    # internal signature.
    @staticmethod
    def _create_header(info, format, encoding, errors):
        # Wrap the type byte so the header code takes the pre-3.9 branch.
        info["type"] = HackedType(info["type"])
        return tarfile.TarInfo._create_header(info, format, encoding, errors)
+
+
def create_tar_from_files(fp, files):
    """Write a deterministic tar archive to *fp*.

    *files* maps archive member names to local filesystem paths or to
    ``mozpack.files.BaseFile`` instances. Members are written in sorted
    name order with fixed root:root ownership, a constant mtime and the
    GNU tar format, so identical inputs always yield identical bytes.

    Only regular files can be written.

    FUTURE accept a filename argument (or create APIs to write files)
    """
    # Pin the format: tarfile's default changed in Python 3.8, and the
    # output must not depend on the interpreter version.
    with tarfile.open(
        name="", mode="w", fileobj=fp, dereference=True, format=tarfile.GNU_FORMAT
    ) as tar:
        for name, entry in sorted(files.items()):
            if not isinstance(entry, BaseFile):
                entry = File(entry)

            info = TarInfo(name)
            info.mode = entry.mode or 0o0644
            info.type = tarfile.REGTYPE

            if not info.isreg():
                raise ValueError("not a regular file: %s" % entry)

            # Disallow setuid and setgid bits. This is an arbitrary
            # restriction, but since ownership is forced to root:root
            # below, a setuid/setgid member would be a glaring security
            # hole if the archive were unpacked as root.
            if info.mode & (stat.S_ISUID | stat.S_ISGID):
                raise ValueError("cannot add file with setuid or setgid set: %s" % entry)

            # Deterministic ownership ...
            info.uid = 0
            info.gid = 0
            info.uname = ""
            info.gname = ""
            # ... and timestamp.
            info.mtime = DEFAULT_MTIME

            info.size = entry.size()
            # tarfile wants to pass a size argument to read(), so hand it
            # a real file object rather than raw bytes.
            tar.addfile(info, entry.open())
+
+
def create_tar_gz_from_files(fp, files, filename=None, compresslevel=9):
    """Write a deterministic .tar.gz of *files* to *fp*.

    Thin wrapper around ``create_tar_from_files`` that layers gzip
    compression on top. *fp* must be opened for writing in binary mode;
    all data has been written to it when this returns.
    """
    # The gzip header embeds an mtime at offsets 3-7; pin it to a known
    # value so the compressed stream is deterministic as well.
    compressor = gzip.GzipFile(
        filename=filename or "",
        mode="wb",
        fileobj=fp,
        compresslevel=compresslevel,
        mtime=DEFAULT_MTIME,
    )
    with compressor:
        create_tar_from_files(compressor, files)
+
+
class _BZ2Proxy(object):
    """File-like object that funnels writes through a bz2 compressor.

    Implements only the write-side API that ``create_tar_from_files``
    needs: write(), tell() and close(). ``pos`` counts the compressed
    bytes emitted to the underlying file object so far.
    """

    def __init__(self, fp, compresslevel=9):
        self.fp = fp
        self.compressor = bz2.BZ2Compressor(compresslevel)
        self.pos = 0

    def tell(self):
        return self.pos

    def _emit(self, compressed):
        # Account for and forward one chunk of compressed output.
        self.pos += len(compressed)
        self.fp.write(compressed)

    def write(self, data):
        self._emit(self.compressor.compress(data))

    def close(self):
        # Flush whatever the compressor still buffers; does not close fp.
        self._emit(self.compressor.flush())
+
+
def create_tar_bz2_from_files(fp, files, compresslevel=9):
    """Write a deterministic .tar.bz2 of *files* to *fp*.

    Counterpart of ``create_tar_gzip_from_files()``, using bzip2
    compression via ``_BZ2Proxy``.
    """
    compressor = _BZ2Proxy(fp, compresslevel=compresslevel)
    create_tar_from_files(compressor, files)
    # Flush the trailing compressed data; *fp* itself stays open.
    compressor.close()
diff --git a/python/mozbuild/mozpack/chrome/__init__.py b/python/mozbuild/mozpack/chrome/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozpack/chrome/__init__.py
diff --git a/python/mozbuild/mozpack/chrome/flags.py b/python/mozbuild/mozpack/chrome/flags.py
new file mode 100644
index 0000000000..6b096c862a
--- /dev/null
+++ b/python/mozbuild/mozpack/chrome/flags.py
@@ -0,0 +1,278 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import re
+from collections import OrderedDict
+
+import six
+from packaging.version import Version
+
+from mozpack.errors import errors
+
+
class Flag(object):
    """
    Boolean flag from a manifest entry. Accepted forms:
        "flag"            (shorthand for "flag=true")
        "flag=yes|true|1"
        "flag=no|false|0"
    """

    def __init__(self, name):
        """
        Create an unset flag called *name*.
        """
        self.name = name
        self.value = None

    def add_definition(self, definition):
        """
        Record the value carried by *definition*, replacing any value set
        earlier.
        """
        if definition == self.name:
            # A bare name means "true".
            self.value = True
            return
        assert definition.startswith(self.name)
        rest = definition[len(self.name) :]
        if not rest.startswith("="):
            return errors.fatal("Malformed flag: %s" % definition)
        candidate = rest[1:]
        if candidate not in ("yes", "true", "1", "no", "false", "0"):
            return errors.fatal("Unknown value in: %s" % definition)
        self.value = candidate

    def matches(self, value):
        """
        Return whether the stored value is equivalent to *value* after
        canonicalization. An unset flag matches false values.
        """
        truthy = ("yes", "true", "1", True)
        if value in truthy:
            return self.value in truthy
        falsy = ("no", "false", "0", False)
        if value in falsy:
            return self.value in falsy or self.value is None
        raise RuntimeError("Invalid value: %s" % value)

    def __str__(self):
        """
        Serialize back to the same form given to the last add_definition()
        call.
        """
        if self.value is None:
            return ""
        if self.value is True:
            return self.name
        return "%s=%s" % (self.name, self.value)

    def __eq__(self, other):
        return str(self) == other
+
+
class StringFlag(object):
    """
    Class for string flags in manifest entries in the form:
        "flag=string"
        "flag!=string"
    """

    def __init__(self, name):
        """
        Initialize a StringFlag with the given name.
        """
        self.name = name
        # List of (comparison, value) pairs; comparison is "==" or "!=".
        self.values = []

    def add_definition(self, definition):
        """
        Add a string flag definition.
        """
        assert definition.startswith(self.name)
        value = definition[len(self.name) :]
        if value.startswith("="):
            self.values.append(("==", value[1:]))
        elif value.startswith("!="):
            self.values.append(("!=", value[2:]))
        else:
            return errors.fatal("Malformed flag: %s" % definition)

    def matches(self, value):
        """
        Return whether one of the string flag definitions matches the given
        value.
        For example,

            flag = StringFlag('foo')
            flag.add_definition('foo!=bar')
            flag.matches('bar') returns False
            flag.matches('qux') returns True
            flag = StringFlag('foo')
            flag.add_definition('foo=bar')
            flag.add_definition('foo=baz')
            flag.matches('bar') returns True
            flag.matches('baz') returns True
            flag.matches('qux') returns False
        """
        # An unconstrained flag matches everything.
        if not self.values:
            return True
        for comparison, val in self.values:
            # Evaluate the stored comparison directly; the previous
            # implementation built and eval()'d a Python expression, which
            # was both slower and needlessly risky.
            matched = (value == val) if comparison == "==" else (value != val)
            if matched:
                return True
        return False

    def __str__(self):
        """
        Serialize the flag definitions in the same form given to each
        add_definition() call.
        """
        res = []
        for comparison, val in self.values:
            if comparison == "==":
                res.append("%s=%s" % (self.name, val))
            else:
                res.append("%s!=%s" % (self.name, val))
        return " ".join(res)

    def __eq__(self, other):
        return str(self) == other
+
+
class VersionFlag(object):
    """
    Class for version flags in manifest entries in the form:
        "flag=version"
        "flag<=version"
        "flag<version"
        "flag>=version"
        "flag>version"
    """

    # Map each stored comparison token to the corresponding comparison.
    # This replaces the previous eval() of a generated expression.
    _COMPARATORS = {
        "==": lambda a, b: a == b,
        "<": lambda a, b: a < b,
        "<=": lambda a, b: a <= b,
        ">": lambda a, b: a > b,
        ">=": lambda a, b: a >= b,
    }

    def __init__(self, name):
        """
        Initialize a VersionFlag with the given name.
        """
        self.name = name
        # List of (comparison, Version) pairs.
        self.values = []

    def add_definition(self, definition):
        """
        Add a version flag definition.
        """
        assert definition.startswith(self.name)
        value = definition[len(self.name) :]
        if value.startswith("="):
            self.values.append(("==", Version(value[1:])))
        elif len(value) > 1 and value[0] in ["<", ">"]:
            if value[1] == "=":
                # "<=" / ">=" need at least one character of version.
                if len(value) < 3:
                    return errors.fatal("Malformed flag: %s" % definition)
                self.values.append((value[0:2], Version(value[2:])))
            else:
                self.values.append((value[0], Version(value[1:])))
        else:
            return errors.fatal("Malformed flag: %s" % definition)

    def matches(self, value):
        """
        Return whether one of the version flag definitions matches the given
        value.
        For example,

            flag = VersionFlag('foo')
            flag.add_definition('foo>=1.0')
            flag.matches('1.0') returns True
            flag.matches('1.1') returns True
            flag.matches('0.9') returns False
            flag = VersionFlag('foo')
            flag.add_definition('foo>=1.0')
            flag.add_definition('foo<0.5')
            flag.matches('0.4') returns True
            flag.matches('1.0') returns True
            flag.matches('0.6') returns False
        """
        value = Version(value)
        # An unconstrained flag matches everything.
        if not self.values:
            return True
        for comparison, val in self.values:
            if self._COMPARATORS[comparison](value, val):
                return True
        return False

    def __str__(self):
        """
        Serialize the flag definitions in the same form given to each
        add_definition() call.
        """
        res = []
        for comparison, val in self.values:
            if comparison == "==":
                res.append("%s=%s" % (self.name, val))
            else:
                res.append("%s%s%s" % (self.name, comparison, val))
        return " ".join(res)

    def __eq__(self, other):
        return str(self) == other
+
+
class Flags(OrderedDict):
    """
    Class to handle a set of flags definitions given on a single manifest
    entry.

    """

    # Known flag names, mapped to the flag class handling each of them.
    FLAGS = {
        "application": StringFlag,
        "appversion": VersionFlag,
        "platformversion": VersionFlag,
        "contentaccessible": Flag,
        "os": StringFlag,
        "osversion": VersionFlag,
        "abi": StringFlag,
        "platform": Flag,
        "xpcnativewrappers": Flag,
        "tablet": Flag,
        "process": StringFlag,
        "backgroundtask": StringFlag,
    }
    # Splits a definition at its first comparison operator.
    RE = re.compile(r"([!<>=]+)")

    def __init__(self, *flags):
        """
        Initialize a set of flags given in string form.
        flags = Flags('contentaccessible=yes', 'appversion>=3.5')
        """
        OrderedDict.__init__(self)
        for f in flags:
            # The flag name is everything before the first operator.
            name = self.RE.split(f)[0]
            if name not in self.FLAGS:
                errors.fatal("Unknown flag: %s" % name)
                # errors.fatal may return (when errors are being
                # accumulated), so skip the unknown flag explicitly.
                continue
            if name not in self:
                self[name] = self.FLAGS[name](name)
            self[name].add_definition(f)

    def __str__(self):
        """
        Serialize the set of flags.
        """
        return " ".join(str(self[k]) for k in self)

    def match(self, **filters):
        """
        Return whether the set of flags match the set of given filters.
        flags = Flags('contentaccessible=yes', 'appversion>=3.5',
                      'application=foo')

        flags.match(application='foo') returns True
        flags.match(application='foo', appversion='3.5') returns True
        flags.match(application='foo', appversion='3.0') returns False

        """
        # Native dict iteration; this previously went through
        # six.iteritems, which is unnecessary on Python 3. The kwargs dict
        # was also renamed so it no longer shadows the builtin `filter`.
        for name, value in filters.items():
            if name not in self:
                # A flag the entry doesn't constrain matches anything.
                continue
            if not self[name].matches(value):
                return False
        return True
diff --git a/python/mozbuild/mozpack/chrome/manifest.py b/python/mozbuild/mozpack/chrome/manifest.py
new file mode 100644
index 0000000000..14c11d4c1d
--- /dev/null
+++ b/python/mozbuild/mozpack/chrome/manifest.py
@@ -0,0 +1,400 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import re
+
+import six
+from six.moves.urllib.parse import urlparse
+
+import mozpack.path as mozpath
+from mozpack.chrome.flags import Flags
+from mozpack.errors import errors
+
+
class ManifestEntry(object):
    """
    Base class for all manifest entry types.
    Subclasses may define the following class or member variables:

    - localized: indicates whether the manifest entry is used for localized
      data.
    - type: the manifest entry type (e.g. 'content' in
      'content global content/global/')
    - allowed_flags: a set of flags allowed to be defined for the given
      manifest entry type.

    Every entry is attached to a base path, which anchors any relative
    paths the entry defines.
    """

    localized = False
    type = None
    allowed_flags = [
        "application",
        "platformversion",
        "os",
        "osversion",
        "abi",
        "xpcnativewrappers",
        "tablet",
        "process",
        "contentaccessible",
        "backgroundtask",
    ]

    def __init__(self, base, *flags):
        """
        Attach the entry to *base* and parse its *flags*, rejecting any
        flag this entry type does not allow.
        """
        self.base = base
        self.flags = Flags(*flags)
        if any(f not in self.allowed_flags for f in self.flags):
            errors.fatal(
                "%s unsupported for %s manifest entries"
                % (
                    ",".join(f for f in self.flags if f not in self.allowed_flags),
                    self.type,
                )
            )

    def serialize(self, *args):
        """
        Serialize the entry: its type, the given arguments, then any flags.
        """
        parts = [self.type]
        parts.extend(args)
        flags_str = str(self.flags)
        if flags_str:
            parts.append(flags_str)
        return " ".join(parts)

    def __eq__(self, other):
        return self.base == other.base and str(self) == str(other)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __repr__(self):
        return "<%s@%s>" % (str(self), self.base)

    def move(self, base):
        """
        Return an equivalent manifest entry bound to a different base path.
        """
        return parse_manifest_line(base, str(self))

    def rebase(self, base):
        """
        Return a new manifest entry with all relative paths re-expressed
        against a new base directory.
        The base class defines no relative paths, so this is equivalent to
        move().
        """
        return self.move(base)
+
+
class ManifestEntryWithRelPath(ManifestEntry):
    """
    Abstract manifest entry type carrying a path relative to its base.
    """

    def __init__(self, base, relpath, *flags):
        ManifestEntry.__init__(self, base, *flags)
        self.relpath = relpath

    def __str__(self):
        return self.serialize(self.relpath)

    def rebase(self, base):
        """
        Return a new manifest entry whose relative path is re-expressed
        against the new base directory.
        """
        moved = ManifestEntry.rebase(self, base)
        moved.relpath = mozpath.rebase(self.base, base, self.relpath)
        return moved

    @property
    def path(self):
        # Normalized path: the base joined with the relative path.
        return mozpath.normpath(mozpath.join(self.base, self.relpath))
+
+
class Manifest(ManifestEntryWithRelPath):
    """
    Class for 'manifest' entries.
        manifest some/path/to/another.manifest

    Declares another manifest file to be read, at a path relative to this
    entry's base.
    """

    type = "manifest"
+
+
class ManifestChrome(ManifestEntryWithRelPath):
    """
    Abstract class for chrome entries, which all carry a chrome package
    name in addition to their relative path.
    """

    def __init__(self, base, name, relpath, *flags):
        ManifestEntryWithRelPath.__init__(self, base, relpath, *flags)
        # Chrome package name (e.g. 'global' in 'content global ...').
        self.name = name

    @property
    def location(self):
        # The chrome content location: base joined with the relative path.
        return mozpath.join(self.base, self.relpath)
+
+
class ManifestContent(ManifestChrome):
    """
    Class for 'content' entries.
        content global content/global/
    """

    type = "content"
    # 'content' accepts these flags on top of the common allowed set.
    allowed_flags = ManifestChrome.allowed_flags + [
        "contentaccessible",
        "platform",
    ]

    def __str__(self):
        return self.serialize(self.name, self.relpath)
+
+
class ManifestMultiContent(ManifestChrome):
    """
    Abstract class for chrome entries with multiple definitions.
    Used for locale and skin entries, which carry an extra identifier
    between the package name and the relative path.
    """

    type = None

    def __init__(self, base, name, id, relpath, *flags):
        ManifestChrome.__init__(self, base, name, relpath, *flags)
        # Secondary identifier (e.g. the locale code or skin name).
        self.id = id

    def __str__(self):
        return self.serialize(self.name, self.id, self.relpath)
+
+
class ManifestLocale(ManifestMultiContent):
    """
    Class for 'locale' entries.
        locale global en-US content/en-US/
        locale global fr content/fr/

    The id is the locale code; these entries carry localized data.
    """

    localized = True
    type = "locale"
+
+
class ManifestSkin(ManifestMultiContent):
    """
    Class for 'skin' entries.
        skin global classic/1.0 content/skin/classic/

    The id is the skin identifier (e.g. 'classic/1.0').
    """

    type = "skin"
+
+
class ManifestOverload(ManifestEntry):
    """
    Abstract class for chrome entries defining some kind of overloading.
    Used for overlay, override or style entries, which all pair the url
    being overloaded with the url overloading it.
    """

    type = None

    def __init__(self, base, overloaded, overload, *flags):
        ManifestEntry.__init__(self, base, *flags)
        # The url being overloaded (first token) ...
        self.overloaded = overloaded
        # ... and the url that overloads it (second token).
        self.overload = overload

    def __str__(self):
        return self.serialize(self.overloaded, self.overload)
+
+
class ManifestOverlay(ManifestOverload):
    """
    Class for 'overlay' entries::

        overlay chrome://global/content/viewSource.xul \
            chrome://browser/content/viewSourceOverlay.xul
    """

    type = "overlay"
+
+
class ManifestStyle(ManifestOverload):
    """
    Class for 'style' entries::

        style chrome://global/content/viewSource.xul \
            chrome://browser/skin/
    """

    type = "style"
+
+
class ManifestOverride(ManifestOverload):
    """
    Class for 'override' entries::

        override chrome://global/locale/netError.dtd \
            chrome://browser/locale/netError.dtd
    """

    type = "override"
+
+
class ManifestResource(ManifestEntry):
    """
    Class for 'resource' entries::

        resource gre-resources toolkit/res/
        resource services-sync resource://gre/modules/services-sync/

    The target may be a relative path or a resource or chrome url.
    """

    type = "resource"

    def __init__(self, base, name, target, *flags):
        ManifestEntry.__init__(self, base, *flags)
        self.name = name
        self.target = target

    def __str__(self):
        return self.serialize(self.name, self.target)

    def rebase(self, base):
        """Return a copy of this entry rebased onto *base*. Targets that
        are absolute urls (other than jar: urls) are left untouched;
        anything else is treated as a path and rebased as well."""
        clone = ManifestEntry.rebase(self, base)
        scheme = urlparse(self.target).scheme
        if not scheme or scheme == "jar":
            clone.target = mozpath.rebase(self.base, base, self.target)
        return clone
+
+
class ManifestBinaryComponent(ManifestEntryWithRelPath):
    """
    Class for 'binary-component' entries::

        binary-component some/path/to/a/component.dll
    """

    type = "binary-component"
+
+
class ManifestComponent(ManifestEntryWithRelPath):
    """
    Class for 'component' entries, mapping a class id to a script::

        component {b2bba4df-057d-41ea-b6b1-94a10a8ede68} foo.js
    """

    type = "component"

    def __init__(self, base, cid, file, *flags):
        # `cid` is the XPCOM class id (a brace-wrapped GUID string).
        ManifestEntryWithRelPath.__init__(self, base, file, *flags)
        self.cid = cid

    def __str__(self):
        return self.serialize(self.cid, self.relpath)
+
+
class ManifestInterfaces(ManifestEntryWithRelPath):
    """
    Class for 'interfaces' entries, registering an XPT typelib::

        interfaces foo.xpt
    """

    type = "interfaces"
+
+
class ManifestCategory(ManifestEntry):
    """
    Class for 'category' entries, adding a (name, value) pair to a
    category::

        category command-line-handler m-browser @mozilla.org/browser/clh;
    """

    type = "category"

    def __init__(self, base, category, name, value, *flags):
        ManifestEntry.__init__(self, base, *flags)
        self.category = category
        self.name = name
        self.value = value

    def __str__(self):
        return self.serialize(self.category, self.name, self.value)
+
+
class ManifestContract(ManifestEntry):
    """
    Class for 'contract' entries, mapping a contract id to a class id::

        contract @mozilla.org/foo;1 {b2bba4df-057d-41ea-b6b1-94a10a8ede68}
    """

    type = "contract"

    def __init__(self, base, contractID, cid, *flags):
        ManifestEntry.__init__(self, base, *flags)
        self.contractID = contractID
        self.cid = cid

    def __str__(self):
        return self.serialize(self.contractID, self.cid)
+
+
# All concrete manifest entry classes defined above, indexed by their
# directive name (e.g. "content" -> ManifestContent). Abstract classes
# have a falsy `type` and are therefore excluded.
MANIFESTS_TYPES = {
    cls.type: cls
    for cls in globals().values()
    if type(cls) == type
    and issubclass(cls, ManifestEntry)
    and getattr(cls, "type", None)
}

# Matches a whole-line comment in a manifest file.
MANIFEST_RE = re.compile(r"^#.*$")
+
+
def parse_manifest_line(base, line):
    """
    Parse a line from a manifest file with the given base directory and
    return the corresponding ManifestEntry instance, or None for blank or
    comment-only lines. Unknown directives are reported as fatal errors.
    """
    # Strip comments, then tokenize.
    tokens = MANIFEST_RE.sub("", line).strip().split()
    if not tokens:
        return None
    directive, args = tokens[0], tokens[1:]
    if directive not in MANIFESTS_TYPES:
        return errors.fatal("Unknown manifest directive: %s" % directive)
    return MANIFESTS_TYPES[directive](base, *args)
+
+
def parse_manifest(root, path, fileobj=None):
    """
    Parse a manifest file and generate the ManifestEntry instances it
    contains.

    :param root: optional directory `path` is taken to be relative to.
    :param path: path of the manifest file; its dirname is used as the base
        directory for the entries.
    :param fileobj: optional already-open file object to read from instead
        of opening `path`. Callers remain responsible for closing it.
    """
    base = mozpath.dirname(path)
    if root:
        path = os.path.normpath(os.path.abspath(os.path.join(root, path)))
    # Only close the file if we opened it ourselves; the original code
    # leaked the handle when `fileobj` wasn't provided.
    opened_here = fileobj is None
    if opened_here:
        fileobj = open(path)
    try:
        linenum = 0
        for line in fileobj:
            line = six.ensure_text(line)
            linenum += 1
            # Attach (path, line) context so errors point at the manifest.
            with errors.context(path, linenum):
                e = parse_manifest_line(base, line)
                if e:
                    yield e
    finally:
        if opened_here:
            fileobj.close()
+
+
def is_manifest(path):
    """
    Return whether the given path is that of a chrome manifest file.
    Windows assembly manifests (.CRT.manifest, .exe.manifest) and MSIX
    cose.manifest files are excluded.
    """
    if not path.endswith(".manifest"):
        return False
    if path.endswith((".CRT.manifest", ".exe.manifest")):
        return False
    return os.path.basename(path) != "cose.manifest"
diff --git a/python/mozbuild/mozpack/copier.py b/python/mozbuild/mozpack/copier.py
new file mode 100644
index 0000000000..c042e5432f
--- /dev/null
+++ b/python/mozbuild/mozpack/copier.py
@@ -0,0 +1,605 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import concurrent.futures as futures
+import errno
+import os
+import stat
+import sys
+from collections import Counter, OrderedDict, defaultdict
+
+import six
+
+import mozpack.path as mozpath
+from mozpack.errors import errors
+from mozpack.files import BaseFile, Dest
+
+
class FileRegistry(object):
    """
    Generic container to keep track of a set of BaseFile instances. It
    preserves the order under which the files are added, but doesn't keep
    track of empty directories (directories are not stored at all).
    The paths associated with the BaseFile instances are relative to an
    unspecified (virtual) root directory.

    registry = FileRegistry()
    registry.add('foo/bar', file_instance)
    """

    def __init__(self):
        # Ordered mapping of path -> BaseFile; insertion order is preserved.
        self._files = OrderedDict()
        # Reference count of registered files under each directory; consumed
        # by required_directories().
        self._required_directories = Counter()
        # Memoization cache for _partial_paths(), keyed by directory name.
        self._partial_paths_cache = {}

    def _partial_paths(self, path):
        """
        Turn "foo/bar/baz/zot" into ["foo/bar/baz", "foo/bar", "foo"].
        """
        dir_name = path.rpartition("/")[0]
        if not dir_name:
            return []

        partial_paths = self._partial_paths_cache.get(dir_name)
        if partial_paths:
            return partial_paths

        # Recurse on the parent directory, then memoize.
        partial_paths = [dir_name] + self._partial_paths(dir_name)

        self._partial_paths_cache[dir_name] = partial_paths
        return partial_paths

    def add(self, path, content):
        """
        Add a BaseFile instance to the container, under the given path.

        Errors out if the path is already registered, if it is a required
        directory of another registered file, or if one of its ancestor
        directories is itself registered as a file.
        """
        assert isinstance(content, BaseFile)
        if path in self._files:
            return errors.error("%s already added" % path)
        if self._required_directories[path] > 0:
            return errors.error("Can't add %s: it is a required directory" % path)
        # Check whether any parent of the given path is already stored
        partial_paths = self._partial_paths(path)
        for partial_path in partial_paths:
            if partial_path in self._files:
                return errors.error("Can't add %s: %s is a file" % (path, partial_path))
        self._files[path] = content
        self._required_directories.update(partial_paths)

    def match(self, pattern):
        """
        Return the list of paths, stored in the container, matching the
        given pattern. See the mozpack.path.match documentation for a
        description of the handled patterns.
        """
        if "*" in pattern:
            return [p for p in self.paths() if mozpath.match(p, pattern)]
        if pattern == "":
            return self.paths()
        if pattern in self._files:
            return [pattern]
        # A non-glob pattern that isn't a stored file matches everything
        # under it, treated as a directory.
        return [p for p in self.paths() if mozpath.basedir(p, [pattern]) == pattern]

    def remove(self, pattern):
        """
        Remove paths matching the given pattern from the container. See the
        mozpack.path.match documentation for a description of the handled
        patterns.
        """
        items = self.match(pattern)
        if not items:
            return errors.error(
                "Can't remove %s: %s"
                % (pattern, "not matching anything previously added")
            )
        for i in items:
            del self._files[i]
            # Decrement the directory refcounts the removed file contributed.
            self._required_directories.subtract(self._partial_paths(i))

    def paths(self):
        """
        Return all paths stored in the container, in the order they were added.
        """
        return list(self._files)

    def __len__(self):
        """
        Return number of paths stored in the container.
        """
        return len(self._files)

    def __contains__(self, pattern):
        # `in` is ambiguous between exact-path and pattern matching; force
        # callers to be explicit via contains().
        raise RuntimeError(
            "'in' operator forbidden for %s. Use contains()." % self.__class__.__name__
        )

    def contains(self, pattern):
        """
        Return whether the container contains paths matching the given
        pattern. See the mozpack.path.match documentation for a description of
        the handled patterns.
        """
        return len(self.match(pattern)) > 0

    def __getitem__(self, path):
        """
        Return the BaseFile instance stored in the container for the given
        path.
        """
        return self._files[path]

    def __iter__(self):
        """
        Iterate over all (path, BaseFile instance) pairs from the container.
        for path, file in registry:
            (...)
        """
        return six.iteritems(self._files)

    def required_directories(self):
        """
        Return the set of directories required by the paths in the container,
        in no particular order. The returned directories are relative to an
        unspecified (virtual) root directory (and do not include said root
        directory).
        """
        return set(k for k, v in self._required_directories.items() if v > 0)

    def output_to_inputs_tree(self):
        """
        Return a dictionary mapping each output path to the set of its
        required input paths.

        All paths are normalized.
        """
        tree = {}
        for output, file in self:
            output = mozpath.normpath(output)
            tree[output] = set(mozpath.normpath(f) for f in file.inputs())
        return tree

    def input_to_outputs_tree(self):
        """
        Return a dictionary mapping each input path to the set of
        impacted output paths.

        All paths are normalized.
        """
        tree = defaultdict(set)
        for output, file in self:
            output = mozpath.normpath(output)
            for input in file.inputs():
                input = mozpath.normpath(input)
                tree[input].add(output)
        return dict(tree)
+
+
class FileRegistrySubtree(object):
    """A proxy class to give access to a subtree of an existing FileRegistry.

    Note this doesn't implement the whole FileRegistry interface."""

    def __new__(cls, base, registry):
        # With an empty base, the subtree is the whole registry: return the
        # registry itself instead of a proxy. Since the returned object is
        # not an instance of cls, __init__ is then skipped.
        if not base:
            return registry
        return object.__new__(cls)

    def __init__(self, base, registry):
        self._base = base
        self._registry = registry

    def _get_path(self, path):
        # mozpath.join will return a trailing slash if path is empty, and we
        # don't want that.
        return mozpath.join(self._base, path) if path else self._base

    def add(self, path, content):
        """Add `content` at `path` relative to the subtree base."""
        return self._registry.add(self._get_path(path), content)

    def match(self, pattern):
        """Match `pattern` within the subtree; results are subtree-relative."""
        return [
            mozpath.relpath(p, self._base)
            for p in self._registry.match(self._get_path(pattern))
        ]

    def remove(self, pattern):
        """Remove paths matching `pattern` within the subtree."""
        return self._registry.remove(self._get_path(pattern))

    def paths(self):
        """Return subtree-relative paths, in registry order."""
        return [p for p, f in self]

    def __len__(self):
        return len(self.paths())

    def contains(self, pattern):
        """Return whether the subtree has paths matching `pattern`."""
        return self._registry.contains(self._get_path(pattern))

    def __getitem__(self, path):
        return self._registry[self._get_path(path)]

    def __iter__(self):
        # Yield only entries that live under the subtree base, with the
        # base prefix stripped.
        for p, f in self._registry:
            if mozpath.basedir(p, [self._base]):
                yield mozpath.relpath(p, self._base), f
+
+
class FileCopyResult(object):
    """Record of what a FileCopier.copy operation changed.

    Exposes four sets of destination paths (files that were written,
    files left untouched, files removed, directories removed) along with
    convenience count properties for each.
    """

    def __init__(self):
        # Destination paths touched by the copy, grouped by outcome.
        self.updated_files = set()
        self.existing_files = set()
        self.removed_files = set()
        self.removed_directories = set()

    @property
    def updated_files_count(self):
        """Number of files that were (re)written."""
        return len(self.updated_files)

    @property
    def existing_files_count(self):
        """Number of files that were already up to date."""
        return len(self.existing_files)

    @property
    def removed_files_count(self):
        """Number of files that were deleted."""
        return len(self.removed_files)

    @property
    def removed_directories_count(self):
        """Number of directories that were deleted."""
        return len(self.removed_directories)
+
+
class FileCopier(FileRegistry):
    """
    FileRegistry with the ability to copy the registered files to a separate
    directory.
    """

    def copy(
        self,
        destination,
        skip_if_older=True,
        remove_unaccounted=True,
        remove_all_directory_symlinks=True,
        remove_empty_directories=True,
    ):
        """
        Copy all registered files to the given destination path. The given
        destination can be an existing directory, or not exist at all. It
        can't be e.g. a file.
        The copy process acts a bit like rsync: files are not copied when they
        don't need to (see mozpack.files for details on file.copy).

        By default, files in the destination directory that aren't
        registered are removed and empty directories are deleted. In
        addition, all directory symlinks in the destination directory
        are deleted: this is a conservative approach to ensure that we
        never accidently write files into a directory that is not the
        destination directory. In the worst case, we might have a
        directory symlink in the object directory to the source
        directory.

        To disable removing of unregistered files, pass
        remove_unaccounted=False. To disable removing empty
        directories, pass remove_empty_directories=False. In rare
        cases, you might want to maintain directory symlinks in the
        destination directory (at least those that are not required to
        be regular directories): pass
        remove_all_directory_symlinks=False. Exercise caution with
        this flag: you almost certainly do not want to preserve
        directory symlinks.

        Returns a FileCopyResult that details what changed.
        """
        assert isinstance(destination, six.string_types)
        assert not os.path.exists(destination) or os.path.isdir(destination)

        result = FileCopyResult()
        have_symlinks = hasattr(os, "symlink")
        destination = os.path.normpath(destination)

        # We create the destination directory specially. We can't do this as
        # part of the loop doing mkdir() below because that loop munges
        # symlinks and permissions and parent directories of the destination
        # directory may have their own weird schema. The contract is we only
        # manage children of destination, not its parents.
        try:
            os.makedirs(destination)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise

        # Because we could be handling thousands of files, code in this
        # function is optimized to minimize system calls. We prefer CPU time
        # in Python over possibly I/O bound filesystem calls to stat() and
        # friends.

        required_dirs = set([destination])
        required_dirs |= set(
            os.path.normpath(os.path.join(destination, d))
            for d in self.required_directories()
        )

        # Ensure destination directories are in place and proper.
        #
        # The "proper" bit is important. We need to ensure that directories
        # have appropriate permissions or we will be unable to discover
        # and write files. Furthermore, we need to verify directories aren't
        # symlinks.
        #
        # Symlinked directories (a symlink whose target is a directory) are
        # incompatible with us because our manifest talks in terms of files,
        # not directories. If we leave symlinked directories unchecked, we
        # would blindly follow symlinks and this might confuse file
        # installation. For example, if an existing directory is a symlink
        # to directory X and we attempt to install a symlink in this directory
        # to a file in directory X, we may create a recursive symlink!
        for d in sorted(required_dirs, key=len):
            try:
                os.mkdir(d)
            except OSError as error:
                if error.errno != errno.EEXIST:
                    raise

            # We allow the destination to be a symlink because the caller
            # is responsible for managing the destination and we assume
            # they know what they are doing.
            if have_symlinks and d != destination:
                st = os.lstat(d)
                if stat.S_ISLNK(st.st_mode):
                    # While we have remove_unaccounted, it doesn't apply
                    # to directory symlinks because if it did, our behavior
                    # could be very wrong.
                    os.remove(d)
                    os.mkdir(d)

            if not os.access(d, os.W_OK):
                umask = os.umask(0o077)
                os.umask(umask)
                os.chmod(d, 0o777 & ~umask)

        if isinstance(remove_unaccounted, FileRegistry):
            # The caller supplied a registry of files/dirs to treat as the
            # pre-existing state instead of walking the destination.
            existing_files = set(
                os.path.normpath(os.path.join(destination, p))
                for p in remove_unaccounted.paths()
            )
            existing_dirs = set(
                os.path.normpath(os.path.join(destination, p))
                for p in remove_unaccounted.required_directories()
            )
            existing_dirs |= {os.path.normpath(destination)}
        else:
            # While we have remove_unaccounted, it doesn't apply to empty
            # directories because it wouldn't make sense: an empty directory
            # is empty, so removing it should have no effect.
            existing_dirs = set()
            existing_files = set()
            for root, dirs, files in os.walk(destination):
                # We need to perform the same symlink detection as above.
                # os.walk() doesn't follow symlinks into directories by
                # default, so we need to check dirs (we can't wait for root).
                if have_symlinks:
                    filtered = []
                    for d in dirs:
                        full = os.path.join(root, d)
                        st = os.lstat(full)
                        if stat.S_ISLNK(st.st_mode):
                            # This directory symlink is not a required
                            # directory: any such symlink would have been
                            # removed and a directory created above.
                            if remove_all_directory_symlinks:
                                os.remove(full)
                                result.removed_files.add(os.path.normpath(full))
                            else:
                                existing_files.add(os.path.normpath(full))
                        else:
                            filtered.append(d)

                    dirs[:] = filtered

                existing_dirs.add(os.path.normpath(root))

                for d in dirs:
                    existing_dirs.add(os.path.normpath(os.path.join(root, d)))

                for f in files:
                    existing_files.add(os.path.normpath(os.path.join(root, f)))

        # Now we reconcile the state of the world against what we want.
        dest_files = set()

        # Install files.
        #
        # Creating/appending new files on Windows/NTFS is slow. So we use a
        # thread pool to speed it up significantly. The performance of this
        # loop is so critical to common build operations on Linux that the
        # overhead of the thread pool is worth avoiding, so we have 2 code
        # paths. We also employ a low water mark to prevent thread pool
        # creation if number of files is too small to benefit.
        copy_results = []
        if sys.platform == "win32" and len(self) > 100:
            with futures.ThreadPoolExecutor(4) as e:
                fs = []
                for p, f in self:
                    destfile = os.path.normpath(os.path.join(destination, p))
                    fs.append((destfile, e.submit(f.copy, destfile, skip_if_older)))

            # Fix: call result() to retrieve the actual copy outcome. The
            # previous code stored the bound `result` method itself, which
            # is always truthy (misreporting every file as updated) and
            # swallowed any exception raised in the worker threads.
            copy_results = [(path, f.result()) for path, f in fs]
        else:
            for p, f in self:
                destfile = os.path.normpath(os.path.join(destination, p))
                copy_results.append((destfile, f.copy(destfile, skip_if_older)))

        for destfile, copy_result in copy_results:
            dest_files.add(destfile)
            if copy_result:
                result.updated_files.add(destfile)
            else:
                result.existing_files.add(destfile)

        # Remove files no longer accounted for.
        if remove_unaccounted:
            for f in existing_files - dest_files:
                # Windows requires write access to remove files.
                if os.name == "nt" and not os.access(f, os.W_OK):
                    # It doesn't matter what we set permissions to since we
                    # will remove this file shortly.
                    os.chmod(f, 0o600)

                os.remove(f)
                result.removed_files.add(f)

        if not remove_empty_directories:
            return result

        # Figure out which directories can be removed. This is complicated
        # by the fact we optionally remove existing files. This would be easy
        # if we walked the directory tree after installing files. But, we're
        # trying to minimize system calls.

        # Start with the ideal set.
        remove_dirs = existing_dirs - required_dirs

        # Then don't remove directories if we didn't remove unaccounted files
        # and one of those files exists.
        if not remove_unaccounted:
            parents = set()
            pathsep = os.path.sep
            for f in existing_files:
                path = f
                while True:
                    # All the paths are normalized and relative by this point,
                    # so os.path.dirname would only do extra work.
                    dirname = path.rpartition(pathsep)[0]
                    if dirname in parents:
                        break
                    parents.add(dirname)
                    path = dirname
            remove_dirs -= parents

        # Remove empty directories that aren't required.
        for d in sorted(remove_dirs, key=len, reverse=True):
            try:
                try:
                    os.rmdir(d)
                except OSError as e:
                    if e.errno in (errno.EPERM, errno.EACCES):
                        # Permissions may not allow deletion. So ensure write
                        # access is in place before attempting to rmdir again.
                        os.chmod(d, 0o700)
                        os.rmdir(d)
                    else:
                        raise
            except OSError as e:
                # If remove_unaccounted is a FileRegistry, then we have a
                # list of directories that may not be empty, so ignore rmdir
                # ENOTEMPTY errors for them.
                if (
                    isinstance(remove_unaccounted, FileRegistry)
                    and e.errno == errno.ENOTEMPTY
                ):
                    continue
                raise
            result.removed_directories.add(d)

        return result
+
+
class Jarrer(FileRegistry, BaseFile):
    """
    FileRegistry with the ability to copy and pack the registered files as a
    jar file. Also acts as a BaseFile instance, to be copied with a FileCopier.
    """

    def __init__(self, compress=True):
        """
        Create a Jarrer instance. See mozpack.mozjar.JarWriter documentation
        for details on the compress argument.
        """
        self.compress = compress
        # Paths to pass to JarWriter.preload() when packing.
        self._preload = []
        self._compress_options = {}  # Map path to compress boolean option.
        FileRegistry.__init__(self)

    def add(self, path, content, compress=None):
        """
        Add `content` under `path`, optionally overriding the jar-wide
        compression setting for this one entry.
        """
        FileRegistry.add(self, path, content)
        if compress is not None:
            self._compress_options[path] = compress

    def copy(self, dest, skip_if_older=True):
        """
        Pack all registered files in the given destination jar. The given
        destination jar may be a path to jar file, or a Dest instance for
        a jar file.
        If the destination jar file exists, its (compressed) contents are used
        instead of the registered BaseFile instances when appropriate.
        """

        class DeflaterDest(Dest):
            """
            Dest-like class, reading from a file-like object initially, but
            switching to a Deflater object if written to.

            dest = DeflaterDest(original_file)
            dest.read() # Reads original_file
            dest.write(data) # Creates a Deflater and write data there
            dest.read() # Re-opens the Deflater and reads from it
            """

            def __init__(self, orig=None, compress=True):
                # mode tracks whether the last operation was a read ("r"),
                # a write ("w"), or nothing yet (None).
                self.mode = None
                self.deflater = orig
                self.compress = compress

            def read(self, length=-1):
                if self.mode != "r":
                    # Reads may only start fresh, not follow writes.
                    assert self.mode is None
                    self.mode = "r"
                return self.deflater.read(length)

            def write(self, data):
                if self.mode != "w":
                    from mozpack.mozjar import Deflater

                    # First write: switch to a fresh Deflater, discarding
                    # any original content.
                    self.deflater = Deflater(self.compress)
                    self.mode = "w"
                self.deflater.write(data)

            def exists(self):
                return self.deflater is not None

        if isinstance(dest, six.string_types):
            dest = Dest(dest)
        assert isinstance(dest, Dest)

        from mozpack.mozjar import JarReader, JarWriter

        # Reuse the existing jar's (already compressed) entries when the
        # destination exists and is a valid jar.
        try:
            old_jar = JarReader(fileobj=dest)
        except Exception:
            old_jar = []

        old_contents = dict([(f.filename, f) for f in old_jar])

        with JarWriter(fileobj=dest, compress=self.compress) as jar:
            for path, file in self:
                compress = self._compress_options.get(path, self.compress)
                if path in old_contents:
                    deflater = DeflaterDest(old_contents[path], compress)
                else:
                    deflater = DeflaterDest(compress=compress)
                file.copy(deflater, skip_if_older)
                jar.add(path, deflater.deflater, mode=file.mode, compress=compress)
            if self._preload:
                jar.preload(self._preload)

    def open(self):
        # A Jarrer has no single readable stream; it only supports copy().
        raise RuntimeError("unsupported")

    def preload(self, paths):
        """
        Add the given set of paths to the list of preloaded files. See
        mozpack.mozjar.JarWriter documentation for details on jar preloading.
        """
        self._preload.extend(paths)
diff --git a/python/mozbuild/mozpack/dmg.py b/python/mozbuild/mozpack/dmg.py
new file mode 100644
index 0000000000..334f3a69cc
--- /dev/null
+++ b/python/mozbuild/mozpack/dmg.py
@@ -0,0 +1,230 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import platform
+import shutil
+import subprocess
+from pathlib import Path
+from typing import List
+
+import mozfile
+
+from mozbuild.util import ensureParentDir
+
+is_linux = platform.system() == "Linux"
+is_osx = platform.system() == "Darwin"
+
+
def chmod(dir):
    """Set permissions of DMG contents correctly: recursively make
    everything world-readable (and executable where applicable), strip
    setuid/setgid/sticky bits, and make it writable only by the owner."""
    subprocess.check_call(["chmod", "-R", "a+rX,a-st,u+w,go-w", dir])
+
+
def rsync(source: Path, dest: Path):
    """rsync the contents of directory *source* into directory *dest*."""
    # rsync copies a directory's *contents* (rather than the directory
    # itself) only when the source path ends with a slash.
    raw_source = f"{source}/" if source.is_dir() else str(source)
    subprocess.check_call(["rsync", "-a", "--copy-unsafe-links", raw_source, dest])
+
+
def set_folder_icon(dir: Path, tmpdir: Path, hfs_tool: Path = None):
    """Set HFS attributes of dir to use a custom icon."""
    if is_linux:
        # Cross-compiling: set the attribute inside the staged HFS image.
        # NOTE(review): hfs_tool defaults to None but is required on this
        # path — callers must pass it on Linux.
        hfs = tmpdir / "staged.hfs"
        subprocess.check_call([hfs_tool, hfs, "attr", "/", "C"])
    elif is_osx:
        subprocess.check_call(["SetFile", "-a", "C", dir])
+
+
def generate_hfs_file(
    stagedir: Path, tmpdir: Path, volume_name: str, mkfshfs_tool: Path
):
    """
    When cross compiling, we zero fill an hfs file, that we will turn into
    a DMG. To do so we test the size of the staged dir, and add some slight
    padding to that.

    :param stagedir: directory whose contents will go into the image.
    :param tmpdir: directory in which "staged.hfs" is created.
    :param volume_name: HFS volume name for the new filesystem.
    :param mkfshfs_tool: path to the mkfs.hfs(plus) executable.
    """
    hfs = tmpdir / "staged.hfs"
    output = subprocess.check_output(["du", "-s", stagedir])
    size = int(output.split()[0]) / 1000  # Get in MB
    # Bump the used size slightly larger. Clamp to at least 1MB: for a
    # near-empty stage dir the computed count would be 0, making dd
    # produce an empty file and mkfs fail.
    size = max(int(size * 1.02), 1)
    # Setup a proper file sized out with zero's
    subprocess.check_call(
        [
            "dd",
            "if=/dev/zero",
            "of={}".format(hfs),
            "bs=1M",
            "count={}".format(size),
        ]
    )
    subprocess.check_call([mkfshfs_tool, "-v", volume_name, hfs])
+
+
def create_app_symlink(stagedir: Path, tmpdir: Path, hfs_tool: Path = None):
    """
    Make a symlink to /Applications. The symlink name is a space
    so we don't have to localize it. The Applications folder icon
    will be shown in Finder, which should be clear enough for users.
    """
    if is_linux:
        # Cross-compiling: create the symlink inside the staged HFS image.
        # NOTE(review): hfs_tool is required on this path despite the
        # None default.
        hfs = os.path.join(tmpdir, "staged.hfs")
        subprocess.check_call([hfs_tool, hfs, "symlink", "/ ", "/Applications"])
    elif is_osx:
        os.symlink("/Applications", stagedir / " ")
+
+
def create_dmg_from_staged(
    stagedir: Path,
    output_dmg: Path,
    tmpdir: Path,
    volume_name: str,
    hfs_tool: Path = None,
    dmg_tool: Path = None,
):
    """Given a prepared directory stagedir, produce a DMG at output_dmg.

    On Linux, the staged files are added to the pre-built tmpdir/staged.hfs
    image (see generate_hfs_file) and converted with the external dmg tool;
    on macOS, hdiutil builds a hybrid image and compresses it to UDBZ.
    """
    if is_linux:
        # The dmg tool doesn't create the destination directories, and silently
        # returns success if the parent directory doesn't exist.
        ensureParentDir(output_dmg)

        hfs = os.path.join(tmpdir, "staged.hfs")
        subprocess.check_call([hfs_tool, hfs, "addall", stagedir])
        subprocess.check_call(
            [dmg_tool, "build", hfs, output_dmg],
            # dmg is seriously chatty
            stdout=subprocess.DEVNULL,
        )
    elif is_osx:
        hybrid = tmpdir / "hybrid.dmg"
        subprocess.check_call(
            [
                "hdiutil",
                "makehybrid",
                "-hfs",
                "-hfs-volume-name",
                volume_name,
                "-hfs-openfolder",
                stagedir,
                "-ov",
                stagedir,
                "-o",
                hybrid,
            ]
        )
        # Convert the hybrid image to a compressed (bzip2, UDBZ) final DMG.
        subprocess.check_call(
            [
                "hdiutil",
                "convert",
                "-format",
                "UDBZ",
                "-imagekey",
                "bzip2-level=9",
                "-ov",
                hybrid,
                "-o",
                output_dmg,
            ]
        )
+
+
def create_dmg(
    source_directory: Path,
    output_dmg: Path,
    volume_name: str,
    extra_files: List[tuple],
    dmg_tool: Path,
    hfs_tool: Path,
    mkfshfs_tool: Path,
):
    """
    Create a DMG disk image at the path output_dmg from source_directory.

    Use volume_name as the disk image volume name, and
    use extra_files as a list of tuples of (filename, relative path) to copy
    into the disk image.

    Raises an Exception on platforms other than Darwin and Linux.
    """
    if platform.system() not in ("Darwin", "Linux"):
        raise Exception("Don't know how to build a DMG on '%s'" % platform.system())

    with mozfile.TemporaryDirectory() as tmp:
        tmpdir = Path(tmp)
        stagedir = tmpdir / "stage"
        stagedir.mkdir()

        # Copy the app bundle over using rsync
        rsync(source_directory, stagedir)
        # Copy extra files
        for source, target in extra_files:
            full_target = stagedir / target
            full_target.parent.mkdir(parents=True, exist_ok=True)
            shutil.copyfile(source, full_target)
        if is_linux:
            # Not needed in osx: on Linux we pre-build the HFS image the
            # later steps operate on.
            generate_hfs_file(stagedir, tmpdir, volume_name, mkfshfs_tool)
        create_app_symlink(stagedir, tmpdir, hfs_tool)
        # Set the folder attributes to use a custom icon
        set_folder_icon(stagedir, tmpdir, hfs_tool)
        chmod(stagedir)
        create_dmg_from_staged(
            stagedir, output_dmg, tmpdir, volume_name, hfs_tool, dmg_tool
        )
+
+
def extract_dmg_contents(
    dmgfile: Path,
    destdir: Path,
    dmg_tool: Path = None,
    hfs_tool: Path = None,
):
    """Extract the contents of the DMG at dmgfile into destdir.

    On Linux the external dmg/hfs tools are used (both are required on
    that path despite the None defaults); on other platforms the in-tree
    unpack-diskimage script mounts the image at a fixed mount point.
    """
    if is_linux:
        with mozfile.TemporaryDirectory() as tmpdir:
            hfs_file = os.path.join(tmpdir, "firefox.hfs")
            subprocess.check_call(
                [dmg_tool, "extract", dmgfile, hfs_file],
                # dmg is seriously chatty
                stdout=subprocess.DEVNULL,
            )
            subprocess.check_call([hfs_tool, hfs_file, "extractall", "/", destdir])
    else:
        # TODO: find better way to resolve topsrcdir (checkout directory)
        topsrcdir = Path(__file__).parent.parent.parent.parent.resolve()
        unpack_diskimage = topsrcdir / "build/package/mac_osx/unpack-diskimage"
        # NOTE(review): fixed mount point — concurrent extractions would
        # collide here.
        unpack_mountpoint = Path("/tmp/app-unpack")
        subprocess.check_call([unpack_diskimage, dmgfile, unpack_mountpoint, destdir])
+
+
def extract_dmg(
    dmgfile: Path,
    output: Path,
    dmg_tool: Path = None,
    hfs_tool: Path = None,
    dsstore: Path = None,
    icon: Path = None,
    background: Path = None,
):
    """Extract dmgfile into the output directory.

    Optionally also copy out the image's .DS_Store, volume icon and
    background image to the given destination paths. Raises an Exception
    on platforms other than Darwin and Linux.
    """
    if platform.system() not in ("Darwin", "Linux"):
        raise Exception("Don't know how to extract a DMG on '%s'" % platform.system())

    with mozfile.TemporaryDirectory() as tmp:
        tmpdir = Path(tmp)
        extract_dmg_contents(dmgfile, tmpdir, dmg_tool, hfs_tool)
        applications_symlink = tmpdir / " "
        if applications_symlink.is_symlink():
            # Rsync will fail on the presence of this symlink
            applications_symlink.unlink()
        rsync(tmpdir, output)

        if dsstore:
            dsstore.parent.mkdir(parents=True, exist_ok=True)
            rsync(tmpdir / ".DS_Store", dsstore)
        if background:
            background.parent.mkdir(parents=True, exist_ok=True)
            rsync(tmpdir / ".background" / background.name, background)
        if icon:
            icon.parent.mkdir(parents=True, exist_ok=True)
            rsync(tmpdir / ".VolumeIcon.icns", icon)
diff --git a/python/mozbuild/mozpack/errors.py b/python/mozbuild/mozpack/errors.py
new file mode 100644
index 0000000000..25c0e8549c
--- /dev/null
+++ b/python/mozbuild/mozpack/errors.py
@@ -0,0 +1,151 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import sys
+from contextlib import contextmanager
+
+
class ErrorMessage(Exception):
    """Exception type raised from errors.error() and errors.fatal()"""


class AccumulatedErrors(Exception):
    """Exception type raised from errors.accumulate()"""


class ErrorCollector(object):
    """
    Error handling/logging class. A global instance, errors, is provided for
    convenience.

    Warnings, errors and fatal errors may be logged by calls to the following
    functions:
    - errors.warn(message)
    - errors.error(message)
    - errors.fatal(message)

    Warnings only send the message on the logging output, while errors and
    fatal errors send the message and throw an ErrorMessage exception. The
    exception, however, may be deferred. See further below.

    Errors may be ignored by calling:
    - errors.ignore_errors()

    After calling that function, only fatal errors throw an exception.

    The warnings, errors or fatal errors messages may be augmented with context
    information when a context is provided. Context is defined by a pair
    (filename, linenumber), and may be set with errors.context() used as a
    context manager:

    .. code-block:: python

        with errors.context(filename, linenumber):
            errors.warn(message)

    Arbitrary nesting is supported, both for errors.context calls:

    .. code-block:: python

        with errors.context(filename1, linenumber1):
            errors.warn(message)
            with errors.context(filename2, linenumber2):
                errors.warn(message)

    as well as for function calls:

    .. code-block:: python

        def func():
            errors.warn(message)

        with errors.context(filename, linenumber):
            func()

    Errors and fatal errors can have their exception thrown at a later time,
    allowing for several different errors to be reported at once before
    throwing. This is achieved with errors.accumulate() as a context manager:

    .. code-block:: python

        with errors.accumulate():
            if test1:
                errors.error(message1)
            if test2:
                errors.error(message2)

    In such cases, a single AccumulatedErrors exception is thrown, but doesn't
    contain information about the exceptions. The logged messages do.
    """

    out = sys.stderr
    WARN = 1
    ERROR = 2
    FATAL = 3
    # Minimum severity at which _handle() raises; see ignore_errors().
    _level = ERROR
    # Stack of (file, line) pairs pushed by context(). NOTE: class-level
    # mutable state, shared by all instances; the class is meant to be used
    # through the module-level `errors` singleton below.
    _context = []
    # None outside accumulate(); a running count of errors inside it.
    _count = None

    def ignore_errors(self, ignore=True):
        """Make error() only log instead of raising when *ignore* is True."""
        if ignore:
            self._level = self.FATAL
        else:
            self._level = self.ERROR

    def _full_message(self, level, msg):
        """Return *msg* prefixed with its severity label and, when a context
        is active, the innermost (file, line) pair."""
        if level >= self._level:
            level = "error"
        else:
            level = "warning"
        if self._context:
            file, line = self._context[-1]
            return "%s: %s:%d: %s" % (level, file, line, msg)
        return "%s: %s" % (level, msg)

    def _handle(self, level, msg):
        """Log *msg*; raise ErrorMessage when *level* reaches the current
        threshold, unless we are accumulating (then just count it)."""
        msg = self._full_message(level, msg)
        if level >= self._level:
            if self._count is None:
                raise ErrorMessage(msg)
            self._count += 1
        print(msg, file=self.out)

    def fatal(self, msg):
        self._handle(self.FATAL, msg)

    def error(self, msg):
        self._handle(self.ERROR, msg)

    def warn(self, msg):
        self._handle(self.WARN, msg)

    def get_context(self):
        """Return the innermost (file, line) context, or None if unset."""
        if self._context:
            return self._context[-1]

    @contextmanager
    def context(self, file, line):
        if file and line:
            self._context.append((file, line))
        try:
            yield
        finally:
            # Pop even when the body raised, so the context stack doesn't
            # leak stale entries into subsequent messages.
            if file and line:
                self._context.pop()

    @contextmanager
    def accumulate(self):
        assert self._count is None
        self._count = 0
        try:
            yield
            count = self._count
        finally:
            # Always leave accumulation mode, even if the body raised; in
            # that case the original exception propagates unchanged.
            self._count = None
        if count:
            raise AccumulatedErrors()

    @property
    def count(self):
        # _count can be None.
        return self._count if self._count else 0


errors = ErrorCollector()
diff --git a/python/mozbuild/mozpack/executables.py b/python/mozbuild/mozpack/executables.py
new file mode 100644
index 0000000000..dd6849cabe
--- /dev/null
+++ b/python/mozbuild/mozpack/executables.py
@@ -0,0 +1,140 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import struct
+import subprocess
+from io import BytesIO
+
+from mozpack.errors import errors
+
MACHO_SIGNATURES = [
    0xFEEDFACE,  # mach-o 32-bits big endian
    0xCEFAEDFE,  # mach-o 32-bits little endian
    0xFEEDFACF,  # mach-o 64-bits big endian
    0xCFFAEDFE,  # mach-o 64-bits little endian
]

FAT_SIGNATURE = 0xCAFEBABE  # mach-o FAT binary

ELF_SIGNATURE = 0x7F454C46  # Elf binary

# Executable kinds returned by get_type().
UNKNOWN = 0
MACHO = 1
ELF = 2


def get_type(path_or_fileobj):
    """
    Check the signature of the given file and return what kind of executable
    it is: one of MACHO, ELF or UNKNOWN.

    *path_or_fileobj* may be a file path, or a file-like object open for
    reading in binary mode. An object supporting peek() is inspected without
    consuming any data; a plain readable object has up to 8 bytes consumed.
    """
    close_file = False
    if hasattr(path_or_fileobj, "peek"):
        # Peek so the caller's stream position is left untouched.
        f = BytesIO(path_or_fileobj.peek(8))
    elif hasattr(path_or_fileobj, "read"):
        f = path_or_fileobj
    else:
        f = open(path_or_fileobj, "rb")
        # We opened this file ourselves, so we must close it (the original
        # version leaked the handle).
        close_file = True
    try:
        signature = f.read(4)
        if len(signature) < 4:
            return UNKNOWN
        signature = struct.unpack(">L", signature)[0]
        if signature == ELF_SIGNATURE:
            return ELF
        if signature in MACHO_SIGNATURES:
            return MACHO
        if signature != FAT_SIGNATURE:
            return UNKNOWN
        # We have to sanity check the second four bytes, because Java class
        # files use the same magic number as Mach-O fat binaries.
        # This logic is adapted from file(1), which says that Mach-O uses
        # these bytes to count the number of architectures within, while
        # Java uses it for a version number. Conveniently, there are only
        # 18 labelled Mach-O architectures, and Java's first released
        # class format used the version 43.0.
        num = f.read(4)
        if len(num) < 4:
            return UNKNOWN
        num = struct.unpack(">L", num)[0]
        if num < 20:
            return MACHO
        return UNKNOWN
    finally:
        if close_file:
            f.close()
+
+
def is_executable(path):
    """
    Return whether a given file path points to an executable or a library,
    where an executable or library is identified by:
    - the file extension on OS/2 and WINNT
    - the file signature on OS/X and ELF systems (GNU/Linux, Android, BSD, Solaris)

    As this function is intended for use to choose between the ExecutableFile
    and File classes in FileFinder, and choosing ExecutableFile only matters
    on OS/2, OS/X, ELF and WINNT (in GCC build) systems, we don't bother
    detecting other kind of executables.
    """
    from buildconfig import substs

    if not os.path.exists(path):
        # A missing file can't be an executable.
        return False

    if substs["OS_ARCH"] == "WINNT":
        # On Windows, go by file extension alone.
        binary_suffixes = (substs["DLL_SUFFIX"], substs["BIN_SUFFIX"])
        return path.lower().endswith(binary_suffixes)

    # Elsewhere, inspect the file signature.
    return get_type(path) != UNKNOWN
+
+
def may_strip(path):
    """Return whether strip() should be run on *path*."""
    from buildconfig import substs

    # Bug 1658632: clang-11-based strip complains about d3dcompiler_47.dll.
    # It's not clear why this happens, but as a quick fix just avoid stripping
    # this DLL. It's not from our build anyway.
    is_d3dcompiler = "d3dcompiler" in path
    return False if is_d3dcompiler else bool(substs.get("PKG_STRIP"))
+
+
def strip(path):
    """Run the configured STRIP command with STRIP_FLAGS on *path*.

    A non-zero exit status is reported as a fatal packaging error.
    """
    from buildconfig import substs

    # Avoid shadowing this function's own name with a local variable.
    cmd = [substs["STRIP"]] + substs.get("STRIP_FLAGS", []) + [path]
    if subprocess.call(cmd) != 0:
        errors.fatal("Error executing " + " ".join(cmd))
+
+
def may_elfhack(path):
    """Return whether elfhack() should be called for *path*."""
    # elfhack only supports libraries. We should check the ELF header for
    # the right flag, but checking the file extension works too.
    from buildconfig import substs

    # Guard clauses that return exactly what the equivalent `and` chain
    # would: the first falsy operand, or the last operand.
    if "USE_ELF_HACK" not in substs:
        return False
    if not substs["USE_ELF_HACK"]:
        return substs["USE_ELF_HACK"]
    if not path.endswith(substs["DLL_SUFFIX"]):
        return False
    if "COMPILE_ENVIRONMENT" not in substs:
        return False
    return substs["COMPILE_ENVIRONMENT"]
+
+
def elfhack(path):
    """Run the elfhack tool from the object directory on *path*.

    A non-zero exit status is reported as a fatal packaging error.
    """
    from buildconfig import topobjdir

    elfhack_bin = os.path.join(topobjdir, "build/unix/elfhack/elfhack")
    cmd = [elfhack_bin, path]
    if subprocess.call(cmd) != 0:
        errors.fatal("Error executing " + " ".join(cmd))
diff --git a/python/mozbuild/mozpack/files.py b/python/mozbuild/mozpack/files.py
new file mode 100644
index 0000000000..691c248b02
--- /dev/null
+++ b/python/mozbuild/mozpack/files.py
@@ -0,0 +1,1271 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import bisect
+import codecs
+import errno
+import inspect
+import os
+import platform
+import shutil
+import stat
+import subprocess
+import uuid
+from collections import OrderedDict
+from io import BytesIO
+from itertools import chain, takewhile
+from tarfile import TarFile, TarInfo
+from tempfile import NamedTemporaryFile, mkstemp
+
+import six
+from jsmin import JavascriptMinify
+
+import mozbuild.makeutil as makeutil
+import mozpack.path as mozpath
+from mozbuild.preprocessor import Preprocessor
+from mozbuild.util import FileAvoidWrite, ensure_unicode, memoize
+from mozpack.chrome.manifest import ManifestEntry, ManifestInterfaces
+from mozpack.errors import ErrorMessage, errors
+from mozpack.executables import elfhack, is_executable, may_elfhack, may_strip, strip
+from mozpack.mozjar import JarReader
+
+try:
+ import hglib
+except ImportError:
+ hglib = None
+
+
# For clean builds, copying files on win32 using CopyFile through ctypes is
# ~2x as fast as using shutil.copyfile.
if platform.system() != "Windows":
    _copyfile = shutil.copyfile
else:
    import ctypes

    # Resolve both the ANSI (A) and wide-character (W) variants of CopyFile
    # so _copyfile can dispatch on the type of the paths it receives.
    _kernel32 = ctypes.windll.kernel32
    _CopyFileA = _kernel32.CopyFileA
    _CopyFileW = _kernel32.CopyFileW

    def _copyfile(src, dest):
        # Copy src to dest using the Win32 CopyFile API; both paths must be
        # of the same string type (both text or both bytes).
        # False indicates `dest` should be overwritten if it exists already.
        if isinstance(src, six.text_type) and isinstance(dest, six.text_type):
            _CopyFileW(src, dest, False)
        elif isinstance(src, str) and isinstance(dest, str):
            _CopyFileA(src, dest, False)
        else:
            raise TypeError("mismatched path types!")
+
+
def _open(path, mode="r"):
    """Open *path*, forcing UTF-8 encoding for text-mode opens on Python 3.

    Binary modes (and Python 2) fall through to a plain open().
    """
    text_mode = "b" not in mode
    if text_mode and six.PY3:
        return open(path, mode, encoding="utf-8")
    return open(path, mode)
+
+
class Dest(object):
    """
    Helper interface for BaseFile.copy.

    read() and write() sequentially read from / write to the underlying
    file. A read() following a write() (or vice versa) transparently
    re-opens the file in the appropriate mode; switching to write mode
    empties the file.
    """

    def __init__(self, path):
        self.file = None
        self.mode = None
        self.path = ensure_unicode(path)

    @property
    def name(self):
        return self.path

    def _reopen(self, mode):
        # Re-open the underlying file in binary read ("r") or write ("w")
        # mode, and remember which mode we are in.
        self.file = _open(self.path, mode=mode + "b")
        self.mode = mode

    def read(self, length=-1):
        if self.mode != "r":
            self._reopen("r")
        return self.file.read(length)

    def write(self, data):
        if self.mode != "w":
            self._reopen("w")
        return self.file.write(six.ensure_binary(data))

    def exists(self):
        return os.path.exists(self.path)

    def close(self):
        if self.mode:
            self.mode = None
            self.file.close()
            self.file = None
+
+
class BaseFile(object):
    """
    Base interface and helper for file copying. Derived class may implement
    their own copy function, or rely on BaseFile.copy using the open() member
    function and/or the path property.
    """

    @staticmethod
    def is_older(first, second):
        """
        Compares the modification time of two files, and returns whether the
        ``first`` file is older than the ``second`` file.

        Note: the comparison uses ``<=`` on millisecond-truncated mtimes, so
        equal timestamps count as "older" (copies get skipped).
        """
        # os.path.getmtime returns a result in seconds with precision up to
        # the microsecond. But microsecond is too precise because
        # shutil.copystat only copies milliseconds, and seconds is not
        # enough precision.
        return int(os.path.getmtime(first) * 1000) <= int(
            os.path.getmtime(second) * 1000
        )

    @staticmethod
    def any_newer(dest, inputs):
        """
        Compares the modification time of ``dest`` to multiple input files, and
        returns whether any of the ``inputs`` is newer (has a later mtime) than
        ``dest``.
        """
        # os.path.getmtime returns a result in seconds with precision up to
        # the microsecond. But microsecond is too precise because
        # shutil.copystat only copies milliseconds, and seconds is not
        # enough precision.
        dest_mtime = int(os.path.getmtime(dest) * 1000)
        for input in inputs:
            try:
                src_mtime = int(os.path.getmtime(input) * 1000)
            except OSError as e:
                if e.errno == errno.ENOENT:
                    # If an input file was removed, we should update.
                    return True
                raise
            if dest_mtime < src_mtime:
                return True
        return False

    @staticmethod
    def normalize_mode(mode):
        # Normalize file mode:
        # - keep file type (e.g. S_IFREG)
        ret = stat.S_IFMT(mode)
        # - expand user read and execute permissions to everyone
        if mode & 0o0400:
            ret |= 0o0444
        if mode & 0o0100:
            ret |= 0o0111
        # - keep user write permissions
        if mode & 0o0200:
            ret |= 0o0200
        # - leave away sticky bit, setuid, setgid
        return ret

    def copy(self, dest, skip_if_older=True):
        """
        Copy the BaseFile content to the destination given as a string or a
        Dest instance. Avoids replacing existing files if the BaseFile content
        matches that of the destination, or in case of plain files, if the
        destination is newer than the original file. This latter behaviour is
        disabled when skip_if_older is False.
        Returns whether a copy was actually performed (True) or not (False).
        """
        if isinstance(dest, six.string_types):
            dest = Dest(dest)
        else:
            assert isinstance(dest, Dest)

        # Decide whether we can copy without first comparing content:
        # a missing destination, or (for on-disk files) differing sizes,
        # make a content comparison pointless.
        can_skip_content_check = False
        if not dest.exists():
            can_skip_content_check = True
        elif getattr(self, "path", None) and getattr(dest, "path", None):
            if skip_if_older and BaseFile.is_older(self.path, dest.path):
                return False
            elif os.path.getsize(self.path) != os.path.getsize(dest.path):
                can_skip_content_check = True

        if can_skip_content_check:
            if getattr(self, "path", None) and getattr(dest, "path", None):
                # The destination directory must exist, or CopyFile will fail.
                destdir = os.path.dirname(dest.path)
                try:
                    os.makedirs(destdir)
                except OSError as e:
                    if e.errno != errno.EEXIST:
                        raise
                _copyfile(self.path, dest.path)
                shutil.copystat(self.path, dest.path)
            else:
                # Ensure the file is always created
                if not dest.exists():
                    dest.write(b"")
                shutil.copyfileobj(self.open(), dest)
            return True

        # Compare source and destination 32K chunks at a time; only start
        # writing once a difference is found, so identical destinations are
        # left untouched.
        src = self.open()
        accumulated_src_content = []
        while True:
            dest_content = dest.read(32768)
            src_content = src.read(32768)
            accumulated_src_content.append(src_content)
            if len(dest_content) == len(src_content) == 0:
                break
            # If the read content differs between origin and destination,
            # write what was read up to now, and copy the remainder.
            if six.ensure_binary(dest_content) != six.ensure_binary(src_content):
                dest.write(b"".join(accumulated_src_content))
                shutil.copyfileobj(src, dest)
                break
        if hasattr(self, "path") and hasattr(dest, "path"):
            shutil.copystat(self.path, dest.path)
        return True

    def open(self):
        """
        Return a file-like object allowing to read() the content of the
        associated file. This is meant to be overloaded in subclasses to return
        a custom file-like object.
        """
        assert self.path is not None
        return open(self.path, "rb")

    def read(self):
        raise NotImplementedError("BaseFile.read() not implemented. Bug 1170329.")

    def size(self):
        """Returns size of the entry.

        Derived classes are highly encouraged to override this with a more
        optimal implementation.
        """
        # Default implementation reads the whole content just to measure it.
        return len(self.read())

    @property
    def mode(self):
        """
        Return the file's unix mode, or None if it has no meaning.
        """
        return None

    def inputs(self):
        """
        Return an iterable of the input file paths that impact this output file.
        """
        raise NotImplementedError("BaseFile.inputs() not implemented.")
+
+
class File(BaseFile):
    """A BaseFile backed by a plain file on disk."""

    def __init__(self, path):
        self.path = ensure_unicode(path)

    @property
    def mode(self):
        """Return the normalized unix mode from os.stat().st_mode, or None
        on Windows where unix modes have no meaning."""
        if platform.system() == "Windows":
            return None
        assert self.path is not None
        return self.normalize_mode(os.stat(self.path).st_mode)

    def read(self):
        """Return the raw bytes of the file."""
        with open(self.path, "rb") as fh:
            return fh.read()

    def size(self):
        """Return the on-disk size in bytes without reading the content."""
        return os.stat(self.path).st_size

    def inputs(self):
        return (self.path,)
+
+
class ExecutableFile(File):
    """
    File class for executable and library files on OS/2, OS/X and ELF systems.
    (see mozpack.executables.is_executable documentation).

    On copy, the destination may additionally be post-processed with strip
    and/or elfhack, depending on the build configuration.
    """

    def __init__(self, path):
        File.__init__(self, path)

    def copy(self, dest, skip_if_older=True):
        # strip/elfhack need a file on disk, so when dest is not a path,
        # copy to a temporary file first and forward the result at the end.
        real_dest = dest
        if not isinstance(dest, six.string_types):
            fd, dest = mkstemp()
            os.close(fd)
            os.remove(dest)
        assert isinstance(dest, six.string_types)
        # If File.copy didn't actually copy because dest is newer, check the
        # file sizes. If dest is smaller, it means it is already stripped and
        # elfhacked, so we can skip.
        if not File.copy(self, dest, skip_if_older) and os.path.getsize(
            self.path
        ) > os.path.getsize(dest):
            return False
        try:
            if may_strip(dest):
                strip(dest)
            if may_elfhack(dest):
                elfhack(dest)
        except ErrorMessage:
            # Don't leave a half-processed destination behind.
            os.remove(dest)
            raise

        if real_dest != dest:
            # Forward the post-processed temporary file to the real
            # destination, then clean up the temporary.
            f = File(dest)
            ret = f.copy(real_dest, skip_if_older)
            os.remove(dest)
            return ret
        return True
+
+
class AbsoluteSymlinkFile(File):
    """File class that is copied by symlinking (if available).

    This class only works if the target path is absolute.
    """

    def __init__(self, path):
        if not os.path.isabs(path):
            raise ValueError("Symlink target not absolute: %s" % path)

        File.__init__(self, path)

    def copy(self, dest, skip_if_older=True):
        """Make *dest* a symlink to self.path, falling back to a plain file
        copy whenever symlinks are unavailable or creation fails. Returns
        whether anything was done (False when dest already links to us)."""
        assert isinstance(dest, six.string_types)

        # The logic in this function is complicated by the fact that symlinks
        # aren't universally supported. So, where symlinks aren't supported, we
        # fall back to file copying. Keep in mind that symlink support is
        # per-filesystem, not per-OS.

        # Handle the simple case where symlinks are definitely not supported by
        # falling back to file copy.
        if not hasattr(os, "symlink"):
            return File.copy(self, dest, skip_if_older=skip_if_older)

        # Always verify the symlink target path exists.
        if not os.path.exists(self.path):
            errors.fatal("Symlink target path does not exist: %s" % self.path)

        st = None

        try:
            st = os.lstat(dest)
        except OSError as ose:
            if ose.errno != errno.ENOENT:
                raise

        # If the dest is a symlink pointing to us, we have nothing to do.
        # If it's the wrong symlink, the filesystem must support symlinks,
        # so we replace with a proper symlink.
        if st and stat.S_ISLNK(st.st_mode):
            link = os.readlink(dest)
            if link == self.path:
                return False

            os.remove(dest)
            os.symlink(self.path, dest)
            return True

        # If the destination doesn't exist, we try to create a symlink. If that
        # fails, we fall back to copy code.
        if not st:
            try:
                os.symlink(self.path, dest)
                return True
            except OSError:
                return File.copy(self, dest, skip_if_older=skip_if_older)

        # Now the complicated part. If the destination exists, we could be
        # replacing a file with a symlink. Or, the filesystem may not support
        # symlinks. We want to minimize I/O overhead for performance reasons,
        # so we keep the existing destination file around as long as possible.
        # A lot of the system calls would be eliminated if we cached whether
        # symlinks are supported. However, even if we performed a single
        # up-front test of whether the root of the destination directory
        # supports symlinks, there's no guarantee that all operations for that
        # dest (or source) would be on the same filesystem and would support
        # symlinks.
        #
        # Our strategy is to attempt to create a new symlink with a random
        # name. If that fails, we fall back to copy mode. If that works, we
        # remove the old destination and move the newly-created symlink into
        # its place.

        temp_dest = os.path.join(os.path.dirname(dest), str(uuid.uuid4()))
        try:
            os.symlink(self.path, temp_dest)
        # TODO Figure out exactly how symlink creation fails and only trap
        # that.
        except EnvironmentError:
            return File.copy(self, dest, skip_if_older=skip_if_older)

        # If removing the original file fails, don't forget to clean up the
        # temporary symlink.
        try:
            os.remove(dest)
        except EnvironmentError:
            os.remove(temp_dest)
            raise

        os.rename(temp_dest, dest)
        return True
+
+
class HardlinkFile(File):
    """File class that is copied by hard linking (if available)

    This is similar to the AbsoluteSymlinkFile, but with hard links. The symlink
    implementation requires paths to be absolute, because they are resolved at
    read time, which makes relative paths messy. Hard links resolve paths at
    link-creation time, so relative paths are fine.
    """

    def copy(self, dest, skip_if_older=True):
        """Make *dest* a hard link to self.path, falling back to a plain
        file copy when hard links are unavailable or fail. Returns whether
        anything was done (False when dest is already the same inode)."""
        assert isinstance(dest, six.string_types)

        if not hasattr(os, "link"):
            return super(HardlinkFile, self).copy(dest, skip_if_older=skip_if_older)

        try:
            path_st = os.stat(self.path)
        except OSError as e:
            if e.errno == errno.ENOENT:
                errors.fatal("Hard link target path does not exist: %s" % self.path)
            else:
                raise

        st = None
        try:
            st = os.lstat(dest)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise

        if st:
            # The dest already points to the right place.
            if st.st_dev == path_st.st_dev and st.st_ino == path_st.st_ino:
                return False
            # The dest exists and it points to the wrong place
            os.remove(dest)

        # At this point, either the dest used to exist and we just deleted it,
        # or it never existed. We can now safely create the hard link.
        try:
            os.link(self.path, dest)
        except OSError:
            # If we can't hard link, fall back to copying
            return super(HardlinkFile, self).copy(dest, skip_if_older=skip_if_older)
        return True
+
+
class ExistingFile(BaseFile):
    """
    Placeholder for a file whose content comes from elsewhere.

    This accounts for files installed via external means; they appear in
    manifests or registries, but copy() does nothing because nothing is
    known about the source data.

    Instances come in two flavors: required and optional. A required
    existing file must be present at copy() time, otherwise a (fatal)
    error is raised.
    """

    def __init__(self, required):
        self.required = required

    def copy(self, dest, skip_if_older=True):
        if isinstance(dest, six.string_types):
            dest = Dest(dest)
        else:
            assert isinstance(dest, Dest)

        # Only a required file needs to be checked for presence.
        if self.required and not dest.exists():
            errors.fatal("Required existing file doesn't exist: %s" % dest.path)

    def inputs(self):
        return ()
+
+
class PreprocessedFile(BaseFile):
    """
    File class for a file that is preprocessed. PreprocessedFile.copy() runs
    the preprocessor on the file to create the output.
    """

    def __init__(
        self,
        path,
        depfile_path,
        marker,
        defines,
        extra_depends=None,
        silence_missing_directive_warnings=False,
    ):
        """
        :param path: source file to preprocess
        :param depfile_path: path of the make-style dependency file written
            by the preprocessor (used on later copies to decide staleness)
        :param marker: directive marker, forwarded to Preprocessor
        :param defines: preprocessor defines, forwarded to Preprocessor
        :param extra_depends: extra paths that also count as dependencies
        :param silence_missing_directive_warnings: forwarded to
            Preprocessor.setSilenceDirectiveWarnings()
        """
        self.path = ensure_unicode(path)
        self.depfile = ensure_unicode(depfile_path)
        self.marker = marker
        self.defines = defines
        self.extra_depends = list(extra_depends or [])
        self.silence_missing_directive_warnings = silence_missing_directive_warnings

    def inputs(self):
        """Return all files the preprocessed output depends on, by running
        the preprocessor with its output thrown away."""
        pp = Preprocessor(defines=self.defines, marker=self.marker)
        pp.setSilenceDirectiveWarnings(self.silence_missing_directive_warnings)

        with _open(self.path, "r") as input:
            with _open(os.devnull, "w") as output:
                pp.processFile(input=input, output=output)

        # This always yields at least self.path.
        return pp.includes

    def copy(self, dest, skip_if_older=True):
        """
        Invokes the preprocessor to create the destination file.
        """
        if isinstance(dest, six.string_types):
            dest = Dest(dest)
        else:
            assert isinstance(dest, Dest)

        # We have to account for the case where the destination exists and is a
        # symlink to something. Since we know the preprocessor is certainly not
        # going to create a symlink, we can just remove the existing one. If the
        # destination is not a symlink, we leave it alone, since we're going to
        # overwrite its contents anyway.
        # If symlinks aren't supported at all, we can skip this step.
        # See comment in AbsoluteSymlinkFile about Windows.
        if hasattr(os, "symlink") and platform.system() != "Windows":
            if os.path.islink(dest.path):
                os.remove(dest.path)

        pp_deps = set(self.extra_depends)

        # If a dependency file was specified, and it exists, add any
        # dependencies from that file to our list.
        if self.depfile and os.path.exists(self.depfile):
            target = mozpath.normpath(dest.name)
            with _open(self.depfile, "rt") as fileobj:
                for rule in makeutil.read_dep_makefile(fileobj):
                    if target in rule.targets():
                        pp_deps.update(rule.dependencies())

        skip = False
        if dest.exists() and skip_if_older:
            # If a dependency file was specified, and it doesn't exist,
            # assume that the preprocessor needs to be rerun. That will
            # regenerate the dependency file.
            if self.depfile and not os.path.exists(self.depfile):
                skip = False
            else:
                skip = not BaseFile.any_newer(dest.path, pp_deps)

        if skip:
            return False

        # FileAvoidWrite leaves the depfile untouched (and its mtime intact)
        # when the newly-computed dependencies are identical.
        deps_out = None
        if self.depfile:
            deps_out = FileAvoidWrite(self.depfile)
        pp = Preprocessor(defines=self.defines, marker=self.marker)
        pp.setSilenceDirectiveWarnings(self.silence_missing_directive_warnings)

        with _open(self.path, "r") as input:
            pp.processFile(input=input, output=dest, depfile=deps_out)

        dest.close()
        if self.depfile:
            deps_out.close()

        return True
+
+
class GeneratedFile(BaseFile):
    """
    In-memory file content with no previous existence on the filesystem.

    The content may be provided directly, or as a zero-argument function
    that is invoked lazily (once) to produce it.
    """

    def __init__(self, content):
        self._content = content

    @property
    def content(self):
        # Materialize lazy content on first access and cache the result.
        if inspect.isfunction(self._content):
            self._content = self._content()
        return six.ensure_binary(self._content)

    @content.setter
    def content(self, content):
        self._content = content

    def read(self):
        return self.content

    def size(self):
        return len(self.content)

    def open(self):
        return BytesIO(self.content)

    def inputs(self):
        return ()
+
+
class DeflatedFile(BaseFile):
    """
    File class wrapping a member of a jar archive; copying such a file
    effectively extracts the member from the archive.
    """

    def __init__(self, file):
        from mozpack.mozjar import JarFileReader

        assert isinstance(file, JarFileReader)
        self.file = file

    def open(self):
        # Rewind so every open() reads the member from its beginning.
        self.file.seek(0)
        return self.file
+
+
class ExtractedTarFile(GeneratedFile):
    """
    File class for members of a tar archive. The member's content is read
    out of the archive immediately and kept in memory.
    """

    def __init__(self, tar, info):
        assert isinstance(info, TarInfo)
        assert isinstance(tar, TarFile)
        super(ExtractedTarFile, self).__init__(tar.extractfile(info).read())
        # Keep the member's (normalized) unix permissions around.
        self._unix_mode = self.normalize_mode(info.mode)

    @property
    def mode(self):
        return self._unix_mode

    def read(self):
        return self.content
+
+
class ManifestFile(BaseFile):
    """
    File class for a manifest file. It takes individual manifest entries (using
    the add() and remove() member functions), and adjusts them to be relative
    to the base path for the manifest, given at creation.
    Example:
        There is a manifest entry "content foobar foobar/content/" relative
        to "foobar/chrome". When packaging, the entry will be stored in
        jar:foobar/omni.ja!/chrome/chrome.manifest, which means the entry
        will have to be relative to "chrome" instead of "foobar/chrome". This
        doesn't really matter when serializing the entry, since this base path
        is not written out, but it matters when moving the entry at the same
        time, e.g. to jar:foobar/omni.ja!/chrome.manifest, which we don't do
        currently but could in the future.
    """

    def __init__(self, base, entries=None):
        self._base = base
        self._entries = []
        self._interfaces = []
        for entry in entries or []:
            self.add(entry)

    def _bucket(self, entry):
        # Interfaces entries are stored apart from the other entries; they
        # serialize after them (see open()).
        assert isinstance(entry, ManifestEntry)
        if isinstance(entry, ManifestInterfaces):
            return self._interfaces
        return self._entries

    def add(self, entry):
        """
        Add the given entry to the manifest. Entries are rebased at open() time
        instead of add() time so that they can be more easily remove()d.
        """
        self._bucket(entry).append(entry)

    def remove(self, entry):
        """
        Remove the given entry from the manifest.
        """
        self._bucket(entry).remove(entry)

    def open(self):
        """
        Return a file-like object allowing to read() the serialized content of
        the manifest.
        """
        lines = [
            "%s\n" % entry.rebase(self._base)
            for entry in chain(self._entries, self._interfaces)
        ]
        return BytesIO(six.ensure_binary("".join(lines)))

    def __iter__(self):
        """
        Iterate over entries in the manifest file.
        """
        return chain(self._entries, self._interfaces)

    def isempty(self):
        """
        Return whether there are manifest entries to write
        """
        return not self._entries and not self._interfaces
+
+
class MinifiedCommentStripped(BaseFile):
    """
    File class for content minified by stripping comments. This wraps another
    BaseFile instance and drops every line starting with '#' from its content.
    """

    def __init__(self, file):
        assert isinstance(file, BaseFile)
        self._file = file

    def open(self):
        """
        Return a file-like object allowing to read() the minified content of
        the underlying file.
        """
        kept = []
        for raw in self._file.open().readlines():
            line = six.ensure_text(raw)
            if not line.startswith("#"):
                kept.append(line)
        return BytesIO(six.ensure_binary("".join(kept)))
+
+
class MinifiedJavaScript(BaseFile):
    """
    File class for minifying JavaScript files.

    Content is run through jsmin on open(). When *verify_command* is given,
    it is invoked with the paths of the original and minified sources; a
    non-zero exit rejects the minification and the original content is
    returned instead.
    """

    def __init__(self, file, verify_command=None):
        assert isinstance(file, BaseFile)
        self._file = file
        self._verify_command = verify_command

    def open(self):
        output = six.StringIO()
        minify = JavascriptMinify(
            codecs.getreader("utf-8")(self._file.open()), output, quote_chars="'\"`"
        )
        minify.minify()
        output.seek(0)
        output_source = six.ensure_binary(output.getvalue())
        output = BytesIO(output_source)

        if not self._verify_command:
            return output

        input_source = self._file.open().read()

        # Write both versions to temporary files for the external verifier.
        with NamedTemporaryFile("wb+") as fh1, NamedTemporaryFile("wb+") as fh2:
            fh1.write(input_source)
            fh2.write(output_source)
            fh1.flush()
            fh2.flush()

            try:
                args = list(self._verify_command)
                args.extend([fh1.name, fh2.name])
                subprocess.check_output(
                    args, stderr=subprocess.STDOUT, universal_newlines=True
                )
            except subprocess.CalledProcessError as e:
                errors.warn(
                    "JS minification verification failed for %s:"
                    % (getattr(self._file, "path", "<unknown>"))
                )
                # Prefix each line with "Warning:" so mozharness doesn't
                # think these error messages are real errors.
                for line in e.output.splitlines():
                    errors.warn(line)

                # Verification rejected the minified output; fall back to
                # the original, unminified content.
                return self._file.open()

        return output
+
+
+class BaseFinder(object):
+    """Base class for finders yielding (path, BaseFile) pairs.
+
+    Subclasses implement ``_find(pattern)``; this class provides the common
+    ``find()``/``get()``/``contains()`` interface plus optional minification
+    of the files it yields.
+    """
+
+    def __init__(
+        self, base, minify=False, minify_js=False, minify_js_verify_command=None
+    ):
+        """
+        Initializes the instance with a reference base directory.
+
+        The optional minify argument specifies whether minification of code
+        should occur. minify_js is an additional option to control minification
+        of JavaScript. It requires minify to be True.
+
+        minify_js_verify_command can be used to optionally verify the results
+        of JavaScript minification. If defined, it is expected to be an iterable
+        that will constitute the first arguments to a called process which will
+        receive the filenames of the original and minified JavaScript files.
+        The invoked process can then verify the results. If minification is
+        rejected, the process exits with a non-0 exit code and the original
+        JavaScript source is used. An example value for this argument is
+        ('/path/to/js', '/path/to/verify/script.js').
+        """
+        if minify_js and not minify:
+            raise ValueError("minify_js requires minify.")
+
+        self.base = base
+        self._minify = minify
+        self._minify_js = minify_js
+        self._minify_js_verify_command = minify_js_verify_command
+
+    def find(self, pattern):
+        """
+        Yield path, BaseFile_instance pairs for all files under the base
+        directory and its subdirectories that match the given pattern. See the
+        mozpack.path.match documentation for a description of the handled
+        patterns.
+        """
+        # Patterns are relative to the base; strip any leading slashes.
+        while pattern.startswith("/"):
+            pattern = pattern[1:]
+        for p, f in self._find(pattern):
+            yield p, self._minify_file(p, f)
+
+    def get(self, path):
+        """Obtain a single file.
+
+        Where ``find`` is tailored towards matching multiple files, this method
+        is used for retrieving a single file. Use this method when performance
+        is critical.
+
+        Returns a ``BaseFile`` if at most one file exists or ``None`` otherwise.
+        """
+        files = list(self.find(path))
+        if len(files) != 1:
+            return None
+        return files[0][1]
+
+    def __iter__(self):
+        """
+        Iterates over all files under the base directory (excluding files
+        starting with a '.' and files at any level under a directory starting
+        with a '.').
+        for path, file in finder:
+            ...
+        """
+        return self.find("")
+
+    def __contains__(self, pattern):
+        # Deliberately disabled: ``pattern in finder`` would be ambiguous
+        # (and silently expensive); callers must use contains() instead.
+        raise RuntimeError(
+            "'in' operator forbidden for %s. Use contains()." % self.__class__.__name__
+        )
+
+    def contains(self, pattern):
+        """
+        Return whether some files under the base directory match the given
+        pattern. See the mozpack.path.match documentation for a description of
+        the handled patterns.
+        """
+        return any(self.find(pattern))
+
+    def _minify_file(self, path, file):
+        """
+        Return an appropriate MinifiedSomething wrapper for the given BaseFile
+        instance (file), according to the file type (determined by the given
+        path), if the FileFinder was created with minification enabled.
+        Otherwise, just return the given BaseFile instance.
+        """
+        if not self._minify or isinstance(file, ExecutableFile):
+            return file
+
+        if path.endswith((".ftl", ".properties")):
+            return MinifiedCommentStripped(file)
+
+        if self._minify_js and path.endswith((".js", ".jsm")):
+            return MinifiedJavaScript(file, self._minify_js_verify_command)
+
+        return file
+
+    def _find_helper(self, pattern, files, file_getter):
+        """Generic implementation of _find.
+
+        A few *Finder implementations share logic for returning results.
+        This function implements the custom logic.
+
+        The ``file_getter`` argument is a callable that receives a path
+        that is known to exist. The callable should return a ``BaseFile``
+        instance.
+        """
+        if "*" in pattern:
+            # Glob pattern: test every known path against it.
+            for p in files:
+                if mozpath.match(p, pattern):
+                    yield p, file_getter(p)
+        elif pattern == "":
+            # Empty pattern matches everything.
+            for p in files:
+                yield p, file_getter(p)
+        elif pattern in files:
+            # Exact file match.
+            yield pattern, file_getter(pattern)
+        else:
+            # Treat the pattern as a directory prefix.
+            for p in files:
+                if mozpath.basedir(p, [pattern]) == pattern:
+                    yield p, file_getter(p)
+
+
+class FileFinder(BaseFinder):
+    """
+    Helper to get appropriate BaseFile instances from the file system.
+    """
+
+    def __init__(
+        self,
+        base,
+        find_executables=False,
+        ignore=(),
+        ignore_broken_symlinks=False,
+        find_dotfiles=False,
+        **kargs
+    ):
+        """
+        Create a FileFinder for files under the given base directory.
+
+        The find_executables argument determines whether the finder needs to
+        try to guess whether files are executables. Disabling this guessing
+        when not necessary can speed up the finder significantly.
+
+        ``ignore`` accepts an iterable of patterns to ignore. Entries are
+        strings that match paths relative to ``base`` using
+        ``mozpath.match()``. This means if an entry corresponds
+        to a directory, all files under that directory will be ignored. If
+        an entry corresponds to a file, that particular file will be ignored.
+        ``ignore_broken_symlinks`` is passed by the packager to work around an
+        issue with the build system not cleaning up stale files in some common
+        cases. See bug 1297381.
+        """
+        BaseFinder.__init__(self, base, **kargs)
+        self.find_dotfiles = find_dotfiles
+        self.find_executables = find_executables
+        self.ignore = ignore
+        self.ignore_broken_symlinks = ignore_broken_symlinks
+
+    def _find(self, pattern):
+        """
+        Actual implementation of FileFinder.find(), dispatching to specialized
+        member functions depending on what kind of pattern was given.
+        Note all files with a name starting with a '.' are ignored when
+        scanning directories, but are not ignored when explicitely requested.
+        """
+        if "*" in pattern:
+            return self._find_glob("", mozpath.split(pattern))
+        elif os.path.isdir(os.path.join(self.base, pattern)):
+            return self._find_dir(pattern)
+        else:
+            # Single-file lookup; returns a one-element tuple or nothing.
+            f = self.get(pattern)
+            return ((pattern, f),) if f else ()
+
+    def _find_dir(self, path):
+        """
+        Actual implementation of FileFinder.find() when the given pattern
+        corresponds to an existing directory under the base directory.
+        Ignores file names starting with a '.' under the given path. If the
+        path itself has leafs starting with a '.', they are not ignored.
+        """
+        for p in self.ignore:
+            if mozpath.match(path, p):
+                return
+
+        # The sorted makes the output idempotent. Otherwise, we are
+        # likely dependent on filesystem implementation details, such as
+        # inode ordering.
+        for p in sorted(os.listdir(os.path.join(self.base, path))):
+            if p.startswith("."):
+                if p in (".", ".."):
+                    continue
+                if not self.find_dotfiles:
+                    continue
+            # Recurse; _find handles both files and subdirectories.
+            for p_, f in self._find(mozpath.join(path, p)):
+                yield p_, f
+
+    def get(self, path):
+        # Use lexists() so broken symlinks are still considered to exist
+        # (unless ignore_broken_symlinks filters them out below).
+        srcpath = os.path.join(self.base, path)
+        if not os.path.lexists(srcpath):
+            return None
+
+        if self.ignore_broken_symlinks and not os.path.exists(srcpath):
+            return None
+
+        for p in self.ignore:
+            if mozpath.match(path, p):
+                return None
+
+        if self.find_executables and is_executable(srcpath):
+            return ExecutableFile(srcpath)
+        else:
+            return File(srcpath)
+
+    def _find_glob(self, base, pattern):
+        """
+        Actual implementation of FileFinder.find() when the given pattern
+        contains globbing patterns ('*' or '**'). This is meant to be an
+        equivalent of:
+            for p, f in self:
+                if mozpath.match(p, pattern):
+                    yield p, f
+        but avoids scanning the entire tree.
+        """
+        # ``pattern`` is the list of remaining path components to match.
+        if not pattern:
+            for p, f in self._find(base):
+                yield p, f
+        elif pattern[0] == "**":
+            # '**' can span directories: scan the whole subtree and filter.
+            for p, f in self._find(base):
+                if mozpath.match(p, mozpath.join(*pattern)):
+                    yield p, f
+        elif "*" in pattern[0]:
+            if not os.path.exists(os.path.join(self.base, base)):
+                return
+
+            for p in self.ignore:
+                if mozpath.match(base, p):
+                    return
+
+            # See above comment w.r.t. sorted() and idempotent behavior.
+            for p in sorted(os.listdir(os.path.join(self.base, base))):
+                if p.startswith(".") and not pattern[0].startswith("."):
+                    continue
+                if mozpath.match(p, pattern[0]):
+                    for p_, f in self._find_glob(mozpath.join(base, p), pattern[1:]):
+                        yield p_, f
+        else:
+            # Literal component: descend directly without listing.
+            for p, f in self._find_glob(mozpath.join(base, pattern[0]), pattern[1:]):
+                yield p, f
+
+
+class JarFinder(BaseFinder):
+    """
+    Helper to get appropriate DeflatedFile instances from a JarReader.
+    """
+
+    def __init__(self, base, reader, **kargs):
+        """
+        Create a JarFinder for files in the given JarReader. The base argument
+        is used as an indication of the Jar file location.
+        """
+        assert isinstance(reader, JarReader)
+        BaseFinder.__init__(self, base, **kargs)
+        # Snapshot the archive entries up front, keyed by file name.
+        self._files = OrderedDict((f.filename, f) for f in reader)
+
+    def _find(self, pattern):
+        """
+        Actual implementation of JarFinder.find(), dispatching to specialized
+        member functions depending on what kind of pattern was given.
+        """
+        return self._find_helper(
+            pattern, self._files, lambda x: DeflatedFile(self._files[x])
+        )
+
+
+class TarFinder(BaseFinder):
+    """
+    Helper to get files from a TarFile.
+    """
+
+    def __init__(self, base, tar, **kargs):
+        """
+        Create a TarFinder for files in the given TarFile. The base argument
+        is used as an indication of the Tar file location.
+        """
+        assert isinstance(tar, TarFile)
+        self._tar = tar
+        BaseFinder.__init__(self, base, **kargs)
+        # Only regular files are exposed; directories and links are skipped.
+        self._files = OrderedDict((f.name, f) for f in tar if f.isfile())
+
+    def _find(self, pattern):
+        """
+        Actual implementation of TarFinder.find(), dispatching to specialized
+        member functions depending on what kind of pattern was given.
+        """
+        return self._find_helper(
+            pattern, self._files, lambda x: ExtractedTarFile(self._tar, self._files[x])
+        )
+
+
+class ComposedFinder(BaseFinder):
+    """
+    Composes multiple File Finders in some sort of virtual file system.
+
+    A ComposedFinder is initialized from a dictionary associating paths
+    to `*Finder instances.`
+
+    Note this could be optimized to be smarter than getting all the files
+    in advance.
+    """
+
+    def __init__(self, finders):
+        # Can't import globally, because of the dependency of mozpack.copier
+        # on this module.
+        from mozpack.copier import FileRegistry
+
+        self.files = FileRegistry()
+
+        # Later finders (sorted by base path) win over earlier ones when
+        # their bases overlap: any existing entry under the base is removed.
+        for base, finder in sorted(six.iteritems(finders)):
+            if self.files.contains(base):
+                self.files.remove(base)
+            for p, f in finder.find(""):
+                self.files.add(mozpath.join(base, p), f)
+
+    def find(self, pattern):
+        # Overrides BaseFinder.find() directly; minification is not applied.
+        for p in self.files.match(pattern):
+            yield p, self.files[p]
+
+
+class MercurialFile(BaseFile):
+    """File class for holding data from Mercurial."""
+
+    def __init__(self, client, rev, path):
+        # Fetch the file content eagerly via `hg cat` at the given revision.
+        self._content = client.cat(
+            [six.ensure_binary(path)], rev=six.ensure_binary(rev)
+        )
+
+    def open(self):
+        return BytesIO(six.ensure_binary(self._content))
+
+    def read(self):
+        return self._content
+
+
+class MercurialRevisionFinder(BaseFinder):
+    """A finder that operates on a specific Mercurial revision."""
+
+    def __init__(self, repo, rev=".", recognize_repo_paths=False, **kwargs):
+        """Create a finder attached to a specific revision in a repository.
+
+        If no revision is given, open the parent of the working directory.
+
+        ``recognize_repo_paths`` will enable a mode where ``.get()`` will
+        recognize full paths that include the repo's path. Typically Finder
+        instances are "bound" to a base directory and paths are relative to
+        that directory. This mode changes that. When this mode is activated,
+        ``.find()`` will not work! This mode exists to support the moz.build
+        reader, which uses absolute paths instead of relative paths. The reader
+        should eventually be rewritten to use relative paths and this hack
+        should be removed (TODO bug 1171069).
+        """
+        if not hglib:
+            raise Exception("hglib package not found")
+
+        super(MercurialRevisionFinder, self).__init__(base=repo, **kwargs)
+
+        self._root = mozpath.normpath(repo).rstrip("/")
+        self._recognize_repo_paths = recognize_repo_paths
+
+        # We change directories here otherwise we have to deal with relative
+        # paths.
+        oldcwd = os.getcwd()
+        os.chdir(self._root)
+        try:
+            self._client = hglib.open(path=repo, encoding=b"utf-8")
+        finally:
+            os.chdir(oldcwd)
+        self._rev = rev if rev is not None else "."
+        self._files = OrderedDict()
+
+        # Immediately populate the list of files in the repo since nearly every
+        # operation requires this list.
+        out = self._client.rawcommand(
+            [
+                b"files",
+                b"--rev",
+                six.ensure_binary(self._rev),
+            ]
+        )
+        for relpath in out.splitlines():
+            # Mercurial may use \ as path separator on Windows. So use
+            # normpath().
+            self._files[six.ensure_text(mozpath.normpath(relpath))] = None
+
+    def _find(self, pattern):
+        if self._recognize_repo_paths:
+            raise NotImplementedError("cannot use find with recognize_repo_path")
+
+        return self._find_helper(pattern, self._files, self._get)
+
+    def get(self, path):
+        path = mozpath.normpath(path)
+        if self._recognize_repo_paths:
+            if not path.startswith(self._root):
+                raise ValueError(
+                    "lookups in recognize_repo_paths mode must be "
+                    "prefixed with repo path: %s" % path
+                )
+            # Strip "<root>/" to recover the repo-relative path.
+            path = path[len(self._root) + 1 :]
+
+        try:
+            return self._get(path)
+        except KeyError:
+            # Path not tracked at this revision.
+            return None
+
+    def _get(self, path):
+        # We lazy populate self._files because potentially creating tens of
+        # thousands of MercurialFile instances for every file in the repo is
+        # inefficient.
+        f = self._files[path]
+        if not f:
+            f = MercurialFile(self._client, self._rev, path)
+            self._files[path] = f
+
+        return f
+
+
+class FileListFinder(BaseFinder):
+    """Finder for a literal list of file names."""
+
+    def __init__(self, files):
+        """files must be a sorted list."""
+        # Note: BaseFinder.__init__ is deliberately not called; this finder
+        # has no base directory and performs no minification.
+        self._files = files
+
+    # NOTE(review): memoize on an instance method keys on self and keeps the
+    # instance alive for the cache's lifetime — acceptable here if instances
+    # are short-lived; confirm against mozbuild.util.memoize semantics.
+    @memoize
+    def _match(self, pattern):
+        """Return a sorted list of all files matching the given pattern."""
+        # We don't use the utility _find_helper method because it's not tuned
+        # for performance in the way that we would like this class to be. That's
+        # a possible avenue for refactoring here.
+        ret = []
+        # We do this as an optimization to figure out where in the sorted list
+        # to search and where to stop searching.
+        components = pattern.split("/")
+        prefix = "/".join(takewhile(lambda s: "*" not in s, components))
+        start = bisect.bisect_left(self._files, prefix)
+        for i in six.moves.range(start, len(self._files)):
+            f = self._files[i]
+            if not f.startswith(prefix):
+                break
+            # Skip hidden files while scanning.
+            if "/." in f[len(prefix) :]:
+                continue
+            if mozpath.match(f, pattern):
+                ret.append(f)
+        return ret
+
+    def find(self, pattern):
+        pattern = pattern.strip("/")
+        for path in self._match(pattern):
+            yield path, File(path)
diff --git a/python/mozbuild/mozpack/macpkg.py b/python/mozbuild/mozpack/macpkg.py
new file mode 100644
index 0000000000..cbeacbb388
--- /dev/null
+++ b/python/mozbuild/mozpack/macpkg.py
@@ -0,0 +1,217 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# TODO: Eventually consolidate with mozpack.pkg module. This is kept separate
+# for now because of the vast difference in API, and to avoid churn for the
+# users of this module (docker images, macos SDK artifacts) when changes are
+# necessary in mozpack.pkg
+import bz2
+import concurrent.futures
+import io
+import lzma
+import os
+import struct
+import zlib
+from xml.etree.ElementTree import XML
+
+from mozbuild.util import ReadOnlyNamespace
+
+
+class ZlibFile(object):
+    """Minimal read-only file wrapper decompressing a raw zlib stream.
+
+    Wraps ``fileobj`` and exposes only ``read(length)``, decompressing
+    incrementally and buffering any excess decompressed data.
+    """
+
+    def __init__(self, fileobj):
+        self.fileobj = fileobj
+        self.decompressor = zlib.decompressobj()
+        # Decompressed bytes not yet handed out to the caller.
+        self.buf = b""
+
+    def read(self, length):
+        # Serve from the leftover buffer first.
+        cutoff = min(length, len(self.buf))
+        result = self.buf[:cutoff]
+        self.buf = self.buf[cutoff:]
+        while len(result) < length:
+            buf = self.fileobj.read(io.DEFAULT_BUFFER_SIZE)
+            if not buf:
+                # Underlying stream exhausted; may return fewer bytes than
+                # requested.
+                break
+            buf = self.decompressor.decompress(buf)
+            cutoff = min(length - len(result), len(buf))
+            result += buf[:cutoff]
+            # Keep any surplus for the next read() call.
+            self.buf += buf[cutoff:]
+        return result
+
+
+def unxar(fileobj):
+    """Iterate over (filename, content) pairs of the files in a XAR archive.
+
+    ``fileobj`` must be a seekable binary file positioned at the start of the
+    archive. ``content`` is a file-like object limited to the entry's data
+    (decompressed on the fly where needed). Only TOC entries of type "file"
+    are yielded. Raises Exception on malformed or unsupported archives.
+    """
+    magic = fileobj.read(4)
+    if magic != b"xar!":
+        raise Exception("Not a XAR?")
+
+    # Header: 16-bit size, then version, compressed/uncompressed TOC lengths
+    # and checksum type (big-endian).
+    header_size = fileobj.read(2)
+    header_size = struct.unpack(">H", header_size)[0]
+    if header_size > 64:
+        raise Exception(
+            f"Don't know how to handle a {header_size} bytes XAR header size"
+        )
+    header_size -= 6  # what we've read so far.
+    header = fileobj.read(header_size)
+    if len(header) != header_size:
+        raise Exception("Failed to read XAR header")
+    (
+        version,
+        compressed_toc_len,
+        uncompressed_toc_len,
+        checksum_type,
+    ) = struct.unpack(">HQQL", header[:22])
+    if version != 1:
+        raise Exception(f"XAR version {version} not supported")
+    # The TOC is a zlib-compressed XML document; file data follows it and
+    # offsets in the TOC are relative to the position after the TOC.
+    toc = fileobj.read(compressed_toc_len)
+    base = fileobj.tell()
+    if len(toc) != compressed_toc_len:
+        raise Exception("Failed to read XAR TOC")
+    toc = zlib.decompress(toc)
+    if len(toc) != uncompressed_toc_len:
+        raise Exception("Corrupted XAR?")
+    toc = XML(toc).find("toc")
+    for f in toc.findall("file"):
+        if f.find("type").text != "file":
+            continue
+        filename = f.find("name").text
+        data = f.find("data")
+        length = int(data.find("length").text)
+        size = int(data.find("size").text)
+        offset = int(data.find("offset").text)
+        encoding = data.find("encoding").get("style")
+        fileobj.seek(base + offset, os.SEEK_SET)
+        content = Take(fileobj, length)
+        if encoding == "application/octet-stream":
+            # Stored uncompressed; on-disk and logical sizes must agree.
+            if length != size:
+                raise Exception(f"{length} != {size}")
+        elif encoding == "application/x-bzip2":
+            content = bz2.BZ2File(content)
+        elif encoding == "application/x-gzip":
+            # Despite the encoding saying gzip, it is in fact, a raw zlib stream.
+            content = ZlibFile(content)
+        else:
+            raise Exception(f"XAR encoding {encoding} not supported")
+
+        yield filename, content
+
+
+class Pbzx(object):
+    """Read-only, stream-like reader for a PBZX (chunked xz) payload.
+
+    Chunks are decompressed in parallel via a thread pool; ``read`` serves
+    bytes sequentially from the decompressed chunks.
+    """
+
+    def __init__(self, fileobj):
+        magic = fileobj.read(4)
+        if magic != b"pbzx":
+            raise Exception("Not a PBZX payload?")
+        # The first thing in the file looks like the size of each
+        # decompressed chunk except the last one. It should match
+        # decompressed_size in all cases except last, but we don't
+        # check.
+        chunk_size = fileobj.read(8)
+        chunk_size = struct.unpack(">Q", chunk_size)[0]
+        executor = concurrent.futures.ThreadPoolExecutor(max_workers=os.cpu_count())
+        # executor.map keeps chunk order while decompressing concurrently.
+        self.chunk_getter = executor.map(self._uncompress_chunk, self._chunker(fileobj))
+        self._init_one_chunk()
+
+    @staticmethod
+    def _chunker(fileobj):
+        # Yield (decompressed_size, compressed_size, raw_chunk) triples until
+        # EOF; each chunk is preceded by a 16-byte big-endian size header.
+        while True:
+            header = fileobj.read(16)
+            if header == b"":
+                break
+            if len(header) != 16:
+                raise Exception("Corrupted PBZX payload?")
+            decompressed_size, compressed_size = struct.unpack(">QQ", header)
+            chunk = fileobj.read(compressed_size)
+            yield decompressed_size, compressed_size, chunk
+
+    @staticmethod
+    def _uncompress_chunk(data):
+        decompressed_size, compressed_size, chunk = data
+        # Chunks whose sizes match are stored uncompressed.
+        if compressed_size != decompressed_size:
+            chunk = lzma.decompress(chunk)
+            if len(chunk) != decompressed_size:
+                raise Exception("Corrupted PBZX payload?")
+        return chunk
+
+    def _init_one_chunk(self):
+        self.offset = 0
+        # NOTE(review): the exhausted-iterator default is "" (str), so read()
+        # at EOF returns "" rather than b"" — confirm callers tolerate this.
+        self.chunk = next(self.chunk_getter, "")
+
+    def read(self, length=None):
+        """Read up to ``length`` bytes; ``None`` reads to the end."""
+        if length == 0:
+            return b""
+        if length and len(self.chunk) >= self.offset + length:
+            # Fast path: the request is satisfied by the current chunk.
+            start = self.offset
+            self.offset += length
+            return self.chunk[start : self.offset]
+        else:
+            # Drain the current chunk, advance, and recurse for the rest.
+            result = self.chunk[self.offset :]
+            self._init_one_chunk()
+            if self.chunk:
+                # XXX: suboptimal if length is larger than the chunk size
+                result += self.read(None if length is None else length - len(result))
+            return result
+
+
+class Take(object):
+    """
+    File object wrapper that allows to read at most a certain length.
+    """
+
+    def __init__(self, fileobj, limit):
+        self.fileobj = fileobj
+        # Remaining number of bytes this wrapper is allowed to read.
+        self.limit = limit
+
+    def read(self, length=None):
+        """Read up to ``length`` bytes, capped by the remaining limit.
+
+        ``None`` reads everything remaining up to the limit.
+        """
+        if length is None:
+            length = self.limit
+        else:
+            length = min(length, self.limit)
+        result = self.fileobj.read(length)
+        self.limit -= len(result)
+        return result
+
+
+def uncpio(fileobj):
+    """Iterate over (name, metadata, content) triples of a CPIO archive.
+
+    Only the portable ASCII ("070707") CPIO format is supported. ``name`` is
+    bytes with any leading "." and "/" stripped; ``metadata`` is a
+    ReadOnlyNamespace with mode/nlink/dev/ino; ``content`` is a Take-limited
+    file object. Iteration stops at the "TRAILER!!!" entry. Entries whose
+    names escape the extraction root via ".." are rejected.
+    """
+    while True:
+        magic = fileobj.read(6)
+        # CPIO payloads in mac pkg files are using the portable ASCII format.
+        if magic != b"070707":
+            if magic.startswith(b"0707"):
+                raise Exception("Unsupported CPIO format")
+            raise Exception("Not a CPIO header")
+        # All header fields are octal ASCII; 6 chars each except the two
+        # 11-char fields (mtime and filesize).
+        header = fileobj.read(70)
+        (
+            dev,
+            ino,
+            mode,
+            uid,
+            gid,
+            nlink,
+            rdev,
+            mtime,
+            namesize,
+            filesize,
+        ) = struct.unpack(">6s6s6s6s6s6s6s11s6s11s", header)
+        dev = int(dev, 8)
+        ino = int(ino, 8)
+        mode = int(mode, 8)
+        nlink = int(nlink, 8)
+        namesize = int(namesize, 8)
+        filesize = int(filesize, 8)
+        name = fileobj.read(namesize)
+        # Indexing bytes yields an int in Python 3, hence the comparison to 0.
+        if name[-1] != 0:
+            raise Exception("File name is not NUL terminated")
+        name = name[:-1]
+        if name == b"TRAILER!!!":
+            break
+
+        if b"/../" in name or name.startswith(b"../") or name == b"..":
+            raise Exception(".. is forbidden in file name")
+        if name.startswith(b"."):
+            name = name[1:]
+        if name.startswith(b"/"):
+            name = name[1:]
+        content = Take(fileobj, filesize)
+        yield name, ReadOnlyNamespace(mode=mode, nlink=nlink, dev=dev, ino=ino), content
+        # Ensure the content is totally consumed
+        while content.read(4096):
+            pass
diff --git a/python/mozbuild/mozpack/manifests.py b/python/mozbuild/mozpack/manifests.py
new file mode 100644
index 0000000000..2df6c729ea
--- /dev/null
+++ b/python/mozbuild/mozpack/manifests.py
@@ -0,0 +1,483 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+from contextlib import contextmanager
+
+import six
+
+import mozpack.path as mozpath
+
+from .files import (
+ AbsoluteSymlinkFile,
+ ExistingFile,
+ File,
+ FileFinder,
+ GeneratedFile,
+ HardlinkFile,
+ PreprocessedFile,
+)
+
+
+# This probably belongs in a more generic module. Where?
+@contextmanager
+def _auto_fileobj(path, fileobj, mode="r"):
+    """Context manager yielding a file object from either a path or a fileobj.
+
+    Exactly one of ``path`` and ``fileobj`` must be provided. A file opened
+    from ``path`` is closed on exit; a caller-supplied ``fileobj`` is left
+    open (the caller owns it).
+    """
+    if path and fileobj:
+        raise AssertionError("Only 1 of path or fileobj may be defined.")
+
+    if not path and not fileobj:
+        # NOTE(review): message has a grammar typo ("Must specified"); left
+        # unchanged here since it is a runtime string.
+        raise AssertionError("Must specified 1 of path or fileobj.")
+
+    if path:
+        fileobj = open(path, mode)
+
+    try:
+        yield fileobj
+    finally:
+        # Only close what we opened ourselves.
+        if path:
+            fileobj.close()
+
+
+class UnreadableInstallManifest(Exception):
+    """Raised when an invalid install manifest is parsed."""
+
+
+class InstallManifest(object):
+    """Describes actions to be used with a copier.FileCopier instance.
+
+    This class facilitates serialization and deserialization of data used to
+    construct a copier.FileCopier and to perform copy operations.
+
+    The manifest defines source paths, destination paths, and a mechanism by
+    which the destination file should come into existence.
+
+    Entries in the manifest correspond to the following types:
+
+      copy -- The file specified as the source path will be copied to the
+          destination path.
+
+      link -- The destination path will be a symlink or hardlink to the source
+          path. If symlinks are not supported, a copy will be performed.
+
+      exists -- The destination path is accounted for and won't be deleted by
+          the FileCopier. If the destination path doesn't exist, an error is
+          raised.
+
+      optional -- The destination path is accounted for and won't be deleted by
+          the FileCopier. No error is raised if the destination path does not
+          exist.
+
+      patternlink -- Paths matched by the expression in the source path
+          will be symlinked or hardlinked to the destination directory.
+
+      patterncopy -- Similar to patternlink except files are copied, not
+          symlinked/hardlinked.
+
+      preprocess -- The file specified at the source path will be run through
+          the preprocessor, and the output will be written to the destination
+          path.
+
+      content -- The destination file will be created with the given content.
+
+    Version 1 of the manifest was the initial version.
+    Version 2 added optional path support
+    Version 3 added support for pattern entries.
+    Version 4 added preprocessed file support.
+    Version 5 added content support.
+    """
+
+    CURRENT_VERSION = 5
+
+    # ASCII unit separator (0x1f): used to delimit fields on each record line.
+    FIELD_SEPARATOR = "\x1f"
+
+    # Negative values are reserved for non-actionable items, that is, metadata
+    # that doesn't describe files in the destination.
+    LINK = 1
+    COPY = 2
+    REQUIRED_EXISTS = 3
+    OPTIONAL_EXISTS = 4
+    PATTERN_LINK = 5
+    PATTERN_COPY = 6
+    PREPROCESS = 7
+    CONTENT = 8
+
+    def __init__(self, path=None, fileobj=None):
+        """Create a new InstallManifest entry.
+
+        If path is defined, the manifest will be populated with data from the
+        file path.
+
+        If fileobj is defined, the manifest will be populated with data read
+        from the specified file object.
+
+        Both path and fileobj cannot be defined.
+        """
+        # Maps destination path -> entry tuple (type, *fields).
+        self._dests = {}
+        # Paths of manifest files this instance was loaded from; used as
+        # extra dependencies for preprocessed entries.
+        self._source_files = set()
+
+        if path or fileobj:
+            with _auto_fileobj(path, fileobj, "r") as fh:
+                self._source_files.add(fh.name)
+                self._load_from_fileobj(fh)
+
+    def _load_from_fileobj(self, fileobj):
+        """Populate this manifest from a serialized manifest file object.
+
+        Raises UnreadableInstallManifest on unknown versions or record types.
+        """
+        version = fileobj.readline().rstrip()
+        if version not in ("1", "2", "3", "4", "5"):
+            raise UnreadableInstallManifest("Unknown manifest version: %s" % version)
+
+        for line in fileobj:
+            # Explicitly strip on \n so we don't strip out the FIELD_SEPARATOR
+            # as well.
+            line = line.rstrip("\n")
+
+            fields = line.split(self.FIELD_SEPARATOR)
+
+            record_type = int(fields[0])
+
+            if record_type == self.LINK:
+                dest, source = fields[1:]
+                self.add_link(source, dest)
+                continue
+
+            if record_type == self.COPY:
+                dest, source = fields[1:]
+                self.add_copy(source, dest)
+                continue
+
+            if record_type == self.REQUIRED_EXISTS:
+                _, path = fields
+                self.add_required_exists(path)
+                continue
+
+            if record_type == self.OPTIONAL_EXISTS:
+                _, path = fields
+                self.add_optional_exists(path)
+                continue
+
+            if record_type == self.PATTERN_LINK:
+                # First field after the type is the dest-key (dest joined with
+                # the pattern), which add_pattern_link recomputes.
+                _, base, pattern, dest = fields[1:]
+                self.add_pattern_link(base, pattern, dest)
+                continue
+
+            if record_type == self.PATTERN_COPY:
+                _, base, pattern, dest = fields[1:]
+                self.add_pattern_copy(base, pattern, dest)
+                continue
+
+            if record_type == self.PREPROCESS:
+                dest, source, deps, marker, defines, warnings = fields[1:]
+
+                self.add_preprocess(
+                    source,
+                    dest,
+                    deps,
+                    marker,
+                    self._decode_field_entry(defines),
+                    silence_missing_directive_warnings=bool(int(warnings)),
+                )
+                continue
+
+            if record_type == self.CONTENT:
+                dest, content = fields[1:]
+
+                self.add_content(
+                    six.ensure_text(self._decode_field_entry(content)), dest
+                )
+                continue
+
+            # Don't fail for non-actionable items, allowing
+            # forward-compatibility with those we will add in the future.
+            if record_type >= 0:
+                raise UnreadableInstallManifest("Unknown record type: %d" % record_type)
+
+    def __len__(self):
+        return len(self._dests)
+
+    def __contains__(self, item):
+        return item in self._dests
+
+    def __eq__(self, other):
+        return isinstance(other, InstallManifest) and self._dests == other._dests
+
+    # NOTE(review): Python's rich comparison protocol uses __ne__, not
+    # __neq__; as written this method is never invoked by the != operator.
+    # Left unchanged to match upstream — confirm intent.
+    def __neq__(self, other):
+        return not self.__eq__(other)
+
+    def __ior__(self, other):
+        if not isinstance(other, InstallManifest):
+            raise ValueError("Can only | with another instance of InstallManifest.")
+
+        self.add_entries_from(other)
+
+        return self
+
+    def _encode_field_entry(self, data):
+        """Converts an object into a format that can be stored in the manifest file.
+
+        Complex data types, such as ``dict``, need to be converted into a text
+        representation before they can be written to a file.
+        """
+        # sort_keys makes the serialized form deterministic.
+        return json.dumps(data, sort_keys=True)
+
+    def _decode_field_entry(self, data):
+        """Restores an object from a format that can be stored in the manifest file.
+
+        Complex data types, such as ``dict``, need to be converted into a text
+        representation before they can be written to a file.
+        """
+        return json.loads(data)
+
+    def write(self, path=None, fileobj=None, expand_pattern=False):
+        """Serialize this manifest to a file or file object.
+
+        If path is specified, that file will be written to. If fileobj is specified,
+        the serialized content will be written to that file object.
+
+        It is an error if both are specified.
+        """
+        with _auto_fileobj(path, fileobj, "wt") as fh:
+            fh.write("%d\n" % self.CURRENT_VERSION)
+
+            for dest in sorted(self._dests):
+                entry = self._dests[dest]
+
+                if expand_pattern and entry[0] in (
+                    self.PATTERN_LINK,
+                    self.PATTERN_COPY,
+                ):
+                    # Expand the pattern now and write one LINK/COPY record
+                    # per matched file instead of a single pattern record.
+                    type, base, pattern, dest = entry
+                    type = self.LINK if type == self.PATTERN_LINK else self.COPY
+                    finder = FileFinder(base)
+                    paths = [f[0] for f in finder.find(pattern)]
+                    for path in paths:
+                        source = mozpath.join(base, path)
+                        parts = ["%d" % type, mozpath.join(dest, path), source]
+                        fh.write(
+                            "%s\n"
+                            % self.FIELD_SEPARATOR.join(
+                                six.ensure_text(p) for p in parts
+                            )
+                        )
+                else:
+                    parts = ["%d" % entry[0], dest]
+                    parts.extend(entry[1:])
+                    fh.write(
+                        "%s\n"
+                        % self.FIELD_SEPARATOR.join(six.ensure_text(p) for p in parts)
+                    )
+
+    def add_link(self, source, dest):
+        """Add a link to this manifest.
+
+        dest will be either a symlink or hardlink to source.
+        """
+        self._add_entry(dest, (self.LINK, source))
+
+    def add_copy(self, source, dest):
+        """Add a copy to this manifest.
+
+        source will be copied to dest.
+        """
+        self._add_entry(dest, (self.COPY, source))
+
+    def add_required_exists(self, dest):
+        """Record that a destination file must exist.
+
+        This effectively prevents the listed file from being deleted.
+        """
+        self._add_entry(dest, (self.REQUIRED_EXISTS,))
+
+    def add_optional_exists(self, dest):
+        """Record that a destination file may exist.
+
+        This effectively prevents the listed file from being deleted. Unlike a
+        "required exists" file, files of this type do not raise errors if the
+        destination file does not exist.
+        """
+        self._add_entry(dest, (self.OPTIONAL_EXISTS,))
+
+    def add_pattern_link(self, base, pattern, dest):
+        """Add a pattern match that results in links being created.
+
+        A ``FileFinder`` will be created with its base set to ``base``
+        and ``FileFinder.find()`` will be called with ``pattern`` to discover
+        source files. Each source file will be either symlinked or hardlinked
+        under ``dest``.
+
+        Filenames under ``dest`` are constructed by taking the path fragment
+        after ``base`` and concatenating it with ``dest``. e.g.
+
+            <base>/foo/bar.h -> <dest>/foo/bar.h
+        """
+        self._add_entry(
+            mozpath.join(dest, pattern), (self.PATTERN_LINK, base, pattern, dest)
+        )
+
+    def add_pattern_copy(self, base, pattern, dest):
+        """Add a pattern match that results in copies.
+
+        See ``add_pattern_link()`` for usage.
+        """
+        self._add_entry(
+            mozpath.join(dest, pattern), (self.PATTERN_COPY, base, pattern, dest)
+        )
+
+    # NOTE(review): defines={} is a mutable default argument; benign here
+    # because it is only read and re-encoded, never mutated.
+    def add_preprocess(
+        self,
+        source,
+        dest,
+        deps,
+        marker="#",
+        defines={},
+        silence_missing_directive_warnings=False,
+    ):
+        """Add a preprocessed file to this manifest.
+
+        ``source`` will be passed through preprocessor.py, and the output will be
+        written to ``dest``.
+        """
+        self._add_entry(
+            dest,
+            (
+                self.PREPROCESS,
+                source,
+                deps,
+                marker,
+                self._encode_field_entry(defines),
+                "1" if silence_missing_directive_warnings else "0",
+            ),
+        )
+
+    def add_content(self, content, dest):
+        """Add a file with the given content."""
+        self._add_entry(
+            dest,
+            (
+                self.CONTENT,
+                self._encode_field_entry(content),
+            ),
+        )
+
+    def _add_entry(self, dest, entry):
+        # Destinations must be unique across the whole manifest.
+        if dest in self._dests:
+            raise ValueError("Item already in manifest: %s" % dest)
+
+        self._dests[dest] = entry
+
+    def add_entries_from(self, other, base=""):
+        """
+        Copy data from another mozpack.copier.InstallManifest
+        instance, adding an optional base prefix to the destination.
+
+        This allows to merge two manifests into a single manifest, or
+        two take the tagged union of two manifests.
+        """
+        # We must copy source files to ourselves so extra dependencies from
+        # the preprocessor are taken into account. Ideally, we would track
+        # which source file each entry came from. However, this is more
+        # complicated and not yet implemented. The current implementation
+        # will result in over invalidation, possibly leading to performance
+        # loss.
+        self._source_files |= other._source_files
+
+        for dest in sorted(other._dests):
+            new_dest = mozpath.join(base, dest) if base else dest
+            entry = other._dests[dest]
+            if entry[0] in (self.PATTERN_LINK, self.PATTERN_COPY):
+                # Pattern entries carry the destination inside the entry
+                # tuple too, so it must be re-prefixed as well.
+                entry_type, entry_base, entry_pattern, entry_dest = entry
+                new_entry_dest = mozpath.join(base, entry_dest) if base else entry_dest
+                new_entry = (entry_type, entry_base, entry_pattern, new_entry_dest)
+            else:
+                new_entry = tuple(entry)
+
+            self._add_entry(new_dest, new_entry)
+
+    # NOTE(review): defines_override={} is a mutable default argument; benign
+    # here because it is only read via dict.update, never mutated.
+    def populate_registry(self, registry, defines_override={}, link_policy="symlink"):
+        """Populate a mozpack.copier.FileRegistry instance with data from us.
+
+        The caller supplied a FileRegistry instance (or at least something that
+        conforms to its interface) and that instance is populated with data
+        from this manifest.
+
+        Defines can be given to override the ones in the manifest for
+        preprocessing.
+
+        The caller can set a link policy. This determines whether symlinks,
+        hardlinks, or copies are used for LINK and PATTERN_LINK.
+        """
+        assert link_policy in ("symlink", "hardlink", "copy")
+        for dest in sorted(self._dests):
+            entry = self._dests[dest]
+            install_type = entry[0]
+
+            if install_type == self.LINK:
+                if link_policy == "symlink":
+                    cls = AbsoluteSymlinkFile
+                elif link_policy == "hardlink":
+                    cls = HardlinkFile
+                else:
+                    cls = File
+                registry.add(dest, cls(entry[1]))
+                continue
+
+            if install_type == self.COPY:
+                registry.add(dest, File(entry[1]))
+                continue
+
+            if install_type == self.REQUIRED_EXISTS:
+                registry.add(dest, ExistingFile(required=True))
+                continue
+
+            if install_type == self.OPTIONAL_EXISTS:
+                registry.add(dest, ExistingFile(required=False))
+                continue
+
+            if install_type in (self.PATTERN_LINK, self.PATTERN_COPY):
+                # Expand the pattern against the file system at registry
+                # population time.
+                _, base, pattern, dest = entry
+                finder = FileFinder(base)
+                paths = [f[0] for f in finder.find(pattern)]
+
+                if install_type == self.PATTERN_LINK:
+                    if link_policy == "symlink":
+                        cls = AbsoluteSymlinkFile
+                    elif link_policy == "hardlink":
+                        cls = HardlinkFile
+                    else:
+                        cls = File
+                else:
+                    cls = File
+
+                for path in paths:
+                    source = mozpath.join(base, path)
+                    registry.add(mozpath.join(dest, path), cls(source))
+
+                continue
+
+            if install_type == self.PREPROCESS:
+                defines = self._decode_field_entry(entry[4])
+                if defines_override:
+                    defines.update(defines_override)
+                registry.add(
+                    dest,
+                    PreprocessedFile(
+                        entry[1],
+                        depfile_path=entry[2],
+                        marker=entry[3],
+                        defines=defines,
+                        extra_depends=self._source_files,
+                        silence_missing_directive_warnings=bool(int(entry[5])),
+                    ),
+                )
+
+                continue
+
+            if install_type == self.CONTENT:
+                # GeneratedFile expect the buffer interface, which the unicode
+                # type doesn't have, so encode to a str.
+                content = self._decode_field_entry(entry[1]).encode("utf-8")
+                registry.add(dest, GeneratedFile(content))
+                continue
+
+            raise Exception(
+                "Unknown install type defined in manifest: %d" % install_type
+            )
diff --git a/python/mozbuild/mozpack/mozjar.py b/python/mozbuild/mozpack/mozjar.py
new file mode 100644
index 0000000000..6500ebfcec
--- /dev/null
+++ b/python/mozbuild/mozpack/mozjar.py
@@ -0,0 +1,842 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import struct
+import zlib
+from collections import OrderedDict
+from io import BytesIO, UnsupportedOperation
+from zipfile import ZIP_DEFLATED, ZIP_STORED
+
+import six
+
+import mozpack.path as mozpath
+from mozbuild.util import ensure_bytes
+
+JAR_STORED = ZIP_STORED
+JAR_DEFLATED = ZIP_DEFLATED
+MAX_WBITS = 15
+
+
class JarReaderError(Exception):
    """Raised by JarReader/JarFileReader when an archive is malformed."""
+
+
class JarWriterError(Exception):
    """Raised by JarWriter/Deflater on invalid write operations."""
+
+
class JarStruct(object):
    """
    Helper used to define ZIP archive raw data structures. Data structures
    handled by this helper all start with a magic number, defined in
    subclasses MAGIC field as a 32-bits unsigned integer, followed by data
    structured as described in subclasses STRUCT field.

    The STRUCT field contains a list of (name, type) pairs where name is a
    field name, and the type can be one of 'uint32', 'uint16' or one of the
    field names. In the latter case, the field is considered to be a string
    buffer with a length given in that field.
    For example,

    .. code-block:: python

        STRUCT = [
            ('version', 'uint32'),
            ('filename_size', 'uint16'),
            ('filename', 'filename_size')
        ]

    describes a structure with a 'version' 32-bits unsigned integer field,
    followed by a 'filename_size' 16-bits unsigned integer field, followed by a
    filename_size-long string buffer 'filename'.

    Fields that are used as other fields size are not stored in objects. In the
    above example, an instance of such subclass would only have two attributes:
        - obj['version']
        - obj['filename']

    filename_size would be obtained with len(obj['filename']).

    JarStruct subclasses instances can be either initialized from existing data
    (deserialized), or with empty fields.
    """

    # struct module format character and byte size for each primitive field
    # type. All integers are little-endian ('<' is prepended when packing).
    TYPE_MAPPING = {"uint32": (b"I", 4), "uint16": (b"H", 2)}

    def __init__(self, data=None):
        """
        Create an instance from the given data. Data may be omitted to create
        an instance with empty fields.
        """
        assert self.MAGIC and isinstance(self.STRUCT, OrderedDict)
        # Fields whose type is another field's name are variable-length string
        # buffers; the referenced field only exists to carry their length and
        # is never stored in self._values.
        self.size_fields = set(
            t for t in six.itervalues(self.STRUCT) if t not in JarStruct.TYPE_MAPPING
        )
        self._values = {}
        if data:
            self._init_data(data)
        else:
            self._init_empty()

    def _init_data(self, data):
        """
        Initialize an instance from data, following the data structure
        described in self.STRUCT. The self.MAGIC signature is expected at
        data[:4].
        """
        assert data is not None
        self.signature, size = JarStruct.get_data("uint32", data)
        if self.signature != self.MAGIC:
            raise JarReaderError("Bad magic")
        offset = size
        # For all fields used as other fields sizes, keep track of their value
        # separately.
        sizes = dict((t, 0) for t in self.size_fields)
        for name, t in six.iteritems(self.STRUCT):
            if t in JarStruct.TYPE_MAPPING:
                value, size = JarStruct.get_data(t, data[offset:])
            else:
                # Variable-length field: its size was read earlier into the
                # sizes dict, keyed by the size field's name.
                size = sizes[t]
                value = data[offset : offset + size]
                if isinstance(value, memoryview):
                    value = value.tobytes()
            if name not in sizes:
                self._values[name] = value
            else:
                # This field is only a length carrier; stash it for the string
                # field that references it instead of storing it.
                sizes[name] = value
            offset += size

    def _init_empty(self):
        """
        Initialize an instance with empty fields.
        """
        self.signature = self.MAGIC
        for name, t in six.iteritems(self.STRUCT):
            if name in self.size_fields:
                continue
            # Integers default to 0, string buffers to the empty string.
            self._values[name] = 0 if t in JarStruct.TYPE_MAPPING else ""

    @staticmethod
    def get_data(type, data):
        """
        Deserialize a single field of given type (must be one of
        JarStruct.TYPE_MAPPING) at the start of the given data.
        Return a (value, size) tuple, where size is the number of bytes read.
        """
        assert type in JarStruct.TYPE_MAPPING
        assert data is not None
        format, size = JarStruct.TYPE_MAPPING[type]
        data = data[:size]
        if isinstance(data, memoryview):
            data = data.tobytes()
        # '<' forces little-endian byte order, as mandated for ZIP structures.
        return struct.unpack(b"<" + format, data)[0], size

    def serialize(self):
        """
        Serialize the data structure according to the data structure definition
        from self.STRUCT.
        """
        serialized = struct.pack(b"<I", self.signature)
        # Map each size field's name back to the string field whose length it
        # must carry, so the length is derived rather than stored.
        sizes = dict(
            (t, name)
            for name, t in six.iteritems(self.STRUCT)
            if t not in JarStruct.TYPE_MAPPING
        )
        for name, t in six.iteritems(self.STRUCT):
            if t in JarStruct.TYPE_MAPPING:
                format, size = JarStruct.TYPE_MAPPING[t]
                if name in sizes:
                    # Size field: emit the current length of the referenced
                    # string field.
                    value = len(self[sizes[name]])
                else:
                    value = self[name]
                serialized += struct.pack(b"<" + format, value)
            else:
                serialized += ensure_bytes(self[name])
        return serialized

    @property
    def size(self):
        """
        Return the size of the data structure, given the current values of all
        variable length fields.
        """
        # Start with the 4 bytes of the magic number.
        size = JarStruct.TYPE_MAPPING["uint32"][1]
        for name, type in six.iteritems(self.STRUCT):
            if type in JarStruct.TYPE_MAPPING:
                size += JarStruct.TYPE_MAPPING[type][1]
            else:
                size += len(self[name])
        return size

    def __getitem__(self, key):
        return self._values[key]

    def __setitem__(self, key, value):
        if key not in self.STRUCT:
            raise KeyError(key)
        if key in self.size_fields:
            # Size fields are derived from their string field; they can't be
            # set directly.
            raise AttributeError("can't set attribute")
        self._values[key] = value

    def __contains__(self, key):
        return key in self._values

    def __iter__(self):
        return six.iteritems(self._values)

    def __repr__(self):
        return "<%s %s>" % (
            self.__class__.__name__,
            " ".join("%s=%s" % (n, v) for n, v in self),
        )
+
+
class JarCdirEnd(JarStruct):
    """
    End of central directory record.

    Field layout follows the ZIP format specification; all the STRUCT field
    names map one-to-one onto the record's members (number of this disk,
    disk where the central directory starts, entry counts, central directory
    size/offset, and trailing comment).
    """

    MAGIC = 0x06054B50
    STRUCT = OrderedDict(
        [
            ("disk_num", "uint16"),
            ("cdir_disk", "uint16"),
            ("disk_entries", "uint16"),
            ("cdir_entries", "uint16"),
            ("cdir_size", "uint32"),
            ("cdir_offset", "uint32"),
            ("comment_size", "uint16"),
            ("comment", "comment_size"),
        ]
    )
+
+
# Size in bytes of an empty End of central directory record (i.e. with no
# comment); JarReader scans backwards from here for the record's magic.
CDIR_END_SIZE = JarCdirEnd().size
+
+
class JarCdirEntry(JarStruct):
    """
    Central directory file header

    One such entry exists per archive member in the central directory. The
    'offset' field points at the member's local file header, and
    'external_attr' carries host-dependent file attributes (see
    JarReader.entries for how creator_version/external_attr are interpreted).
    """

    MAGIC = 0x02014B50
    STRUCT = OrderedDict(
        [
            ("creator_version", "uint16"),
            ("min_version", "uint16"),
            ("general_flag", "uint16"),
            ("compression", "uint16"),
            ("lastmod_time", "uint16"),
            ("lastmod_date", "uint16"),
            ("crc32", "uint32"),
            ("compressed_size", "uint32"),
            ("uncompressed_size", "uint32"),
            ("filename_size", "uint16"),
            ("extrafield_size", "uint16"),
            ("filecomment_size", "uint16"),
            ("disknum", "uint16"),
            ("internal_attr", "uint16"),
            ("external_attr", "uint32"),
            ("offset", "uint32"),
            ("filename", "filename_size"),
            ("extrafield", "extrafield_size"),
            ("filecomment", "filecomment_size"),
        ]
    )
+
+
class JarLocalFileHeader(JarStruct):
    """
    Local file header

    Precedes each member's data in the archive. Its fields duplicate a subset
    of the corresponding central directory entry; JarReader._getreader checks
    the two against each other.
    """

    MAGIC = 0x04034B50
    STRUCT = OrderedDict(
        [
            ("min_version", "uint16"),
            ("general_flag", "uint16"),
            ("compression", "uint16"),
            ("lastmod_time", "uint16"),
            ("lastmod_date", "uint16"),
            ("crc32", "uint32"),
            ("compressed_size", "uint32"),
            ("uncompressed_size", "uint32"),
            ("filename_size", "uint16"),
            ("extra_field_size", "uint16"),
            ("filename", "filename_size"),
            ("extra_field", "extra_field_size"),
        ]
    )
+
+
class JarFileReader(object):
    """
    File-like class for use by JarReader to give access to individual files
    within a Jar archive.

    Decompression is lazy: nothing is inflated until the first read/seek, and
    the result is cached in a BytesIO for subsequent accesses.
    """

    def __init__(self, header, data):
        """
        Initialize a JarFileReader. header is the local file header
        corresponding to the file in the jar archive, data a buffer containing
        the file data.

        data may extend past the member's end; only the first compressed_size
        bytes are used (see compressed_data).
        """
        assert header["compression"] in [JAR_DEFLATED, JAR_STORED]
        self._data = data
        # Copy some local file header fields.
        for name in ["compressed_size", "uncompressed_size", "crc32"]:
            setattr(self, name, header[name])
        self.filename = six.ensure_text(header["filename"])
        self.compressed = header["compression"] != JAR_STORED
        self.compress = header["compression"]

    def readable(self):
        return True

    def read(self, length=-1):
        """
        Read some amount of uncompressed data.
        """
        return self.uncompressed_data.read(length)

    def readinto(self, b):
        """
        Read bytes into a pre-allocated, writable bytes-like object `b` and return
        the number of bytes read.
        """
        return self.uncompressed_data.readinto(b)

    def readlines(self):
        """
        Return a list containing all the lines of data in the uncompressed
        data.
        """
        return self.read().splitlines(True)

    def __iter__(self):
        """
        Iterator, to support the "for line in fileobj" constructs.
        """
        return iter(self.readlines())

    def seek(self, pos, whence=os.SEEK_SET):
        """
        Change the current position in the uncompressed data. Subsequent reads
        will start from there.
        """
        return self.uncompressed_data.seek(pos, whence)

    def close(self):
        """
        Free the uncompressed data buffer.
        """
        self.uncompressed_data.close()

    @property
    def closed(self):
        return self.uncompressed_data.closed

    @property
    def compressed_data(self):
        """
        Return the raw compressed data.
        """
        return self._data[: self.compressed_size]

    @property
    def uncompressed_data(self):
        """
        Return the uncompressed data.

        The first access inflates (or copies) the member and caches the result;
        later accesses reuse the cached BytesIO.
        """
        if hasattr(self, "_uncompressed_data"):
            return self._uncompressed_data
        data = self.compressed_data
        if self.compress == JAR_STORED:
            # Stored members: the raw bytes are the content (note the data is
            # a memoryview here, coming from JarReader).
            data = data.tobytes()
        elif self.compress == JAR_DEFLATED:
            # Negative wbits: raw deflate stream, no zlib header/trailer.
            data = zlib.decompress(data.tobytes(), -MAX_WBITS)
        else:
            assert False  # Can't be another value per __init__
        if len(data) != self.uncompressed_size:
            raise JarReaderError("Corrupted file? %s" % self.filename)
        self._uncompressed_data = BytesIO(data)
        return self._uncompressed_data
+
+
class JarReader(object):
    """
    Class with methods to read Jar files. Can open standard jar files as well
    as Mozilla jar files (see further details in the JarWriter documentation).
    """

    def __init__(self, file=None, fileobj=None, data=None):
        """
        Opens the given file as a Jar archive. Use the given file-like object
        if one is given instead of opening the given file name; use the given
        bytes buffer if neither is given.
        """
        if fileobj:
            data = fileobj.read()
        elif file:
            # Read the whole archive up front and close the handle right away
            # instead of leaking an open file for the reader's lifetime.
            with open(file, "rb") as fh:
                data = fh.read()
        self._data = memoryview(data)
        # The End of Central Directory Record has a variable size because of
        # comments it may contain, so scan for it from the end of the file.
        offset = -CDIR_END_SIZE
        while True:
            signature = JarStruct.get_data("uint32", self._data[offset:])[0]
            if signature == JarCdirEnd.MAGIC:
                break
            if offset == -len(self._data):
                raise JarReaderError("Not a jar?")
            offset -= 1
        self._cdir_end = JarCdirEnd(self._data[offset:])

    def close(self):
        """
        Free some resources associated with the Jar.
        """
        del self._data

    @property
    def compression(self):
        """
        Return the strongest compression method used by any member
        (JAR_STORED if the archive is empty).
        """
        entries = self.entries
        if not entries:
            return JAR_STORED
        return max(f["compression"] for f in six.itervalues(entries))

    @property
    def entries(self):
        """
        Return an ordered dict of central directory entries, indexed by
        filename, in the order they appear in the Jar archive central
        directory. Directory entries are skipped.
        """
        if hasattr(self, "_entries"):
            return self._entries
        preload = 0
        if self.is_optimized:
            # Optimized archives start with the preloaded data size.
            preload = JarStruct.get_data("uint32", self._data)[0]
        entries = OrderedDict()
        offset = self._cdir_end["cdir_offset"]
        for e in six.moves.xrange(self._cdir_end["cdir_entries"]):
            entry = JarCdirEntry(self._data[offset:])
            offset += entry.size
            # Creator host system. 0 is MSDOS, 3 is Unix
            host = entry["creator_version"] >> 8
            # External attributes values depend on host above. On Unix the
            # higher bits are the stat.st_mode value. On MSDOS, the lower bits
            # are the FAT attributes.
            xattr = entry["external_attr"]
            # Skip directories
            if (host == 0 and xattr & 0x10) or (host == 3 and xattr & (0o040000 << 16)):
                continue
            entries[six.ensure_text(entry["filename"])] = entry
            if entry["offset"] < preload:
                # Track the last member whose data falls inside the preloaded
                # region.
                self._last_preloaded = six.ensure_text(entry["filename"])
        self._entries = entries
        return entries

    @property
    def is_optimized(self):
        """
        Return whether the jar archive is optimized.
        """
        # In optimized jars, the central directory is at the beginning of the
        # file, after a single 32-bits value, which is the length of data
        # preloaded.
        return self._cdir_end["cdir_offset"] == JarStruct.TYPE_MAPPING["uint32"][1]

    @property
    def last_preloaded(self):
        """
        Return the name of the last file that is set to be preloaded.
        See JarWriter documentation for more details on preloading.
        """
        if hasattr(self, "_last_preloaded"):
            return self._last_preloaded
        self._last_preloaded = None
        # Computing entries has the side effect of setting _last_preloaded.
        self.entries
        return self._last_preloaded

    def _getreader(self, entry):
        """
        Helper to create a JarFileReader corresponding to the given central
        directory entry.

        Raises JarReaderError if the local file header disagrees with the
        central directory entry.
        """
        header = JarLocalFileHeader(self._data[entry["offset"] :])
        for key, value in entry:
            if key in header and header[key] != value:
                raise JarReaderError(
                    "Central directory and file header "
                    + "mismatch. Corrupted archive?"
                )
        return JarFileReader(header, self._data[entry["offset"] + header.size :])

    def __iter__(self):
        """
        Iterate over all files in the Jar archive, in the form of
        JarFileReaders.
        for file in jarReader:
            ...
        """
        for entry in six.itervalues(self.entries):
            yield self._getreader(entry)

    def __getitem__(self, name):
        """
        Get a JarFileReader for the given file name.
        """
        return self._getreader(self.entries[name])

    def __contains__(self, name):
        """
        Return whether the given file name appears in the Jar archive.
        """
        return name in self.entries
+
+
class JarWriter(object):
    """
    Class with methods to write Jar files. Can write more-or-less standard jar
    archives as well as jar archives optimized for Gecko. See the documentation
    for the close() member function for a description of both layouts.
    """

    def __init__(self, file=None, fileobj=None, compress=True, compress_level=9):
        """
        Initialize a Jar archive in the given file. Use the given file-like
        object if one is given instead of opening the given file name.
        The compress option determines the default behavior for storing data
        in the jar archive. Whether the archive is optimized for Gecko is
        determined by whether preload() was called before finish().
        ``compress_level`` defines the zlib compression level. It must be a
        value between 0 and 9 and defaults to 9, the highest and slowest level
        of compression.
        """
        if fileobj:
            self._data = fileobj
        else:
            self._data = open(file, "wb")
        # compress=True is backwards-compatible shorthand for deflate.
        if compress is True:
            compress = JAR_DEFLATED
        self._compress = compress
        self._compress_level = compress_level
        # Maps member name -> (JarCdirEntry, compressed data buffer).
        self._contents = OrderedDict()
        self._last_preloaded = None

    def __enter__(self):
        """
        Context manager __enter__ method for JarWriter.
        """
        return self

    def __exit__(self, type, value, tb):
        """
        Context manager __exit__ method for JarWriter.
        """
        self.finish()

    def finish(self):
        """
        Flush and close the Jar archive.

        Standard jar archives are laid out like the following:
            - Local file header 1
            - File data 1
            - Local file header 2
            - File data 2
            - (...)
            - Central directory entry pointing at Local file header 1
            - Central directory entry pointing at Local file header 2
            - (...)
            - End of central directory, pointing at first central directory
              entry.

        Jar archives optimized for Gecko are laid out like the following:
            - 32-bits unsigned integer giving the amount of data to preload.
            - Central directory entry pointing at Local file header 1
            - Central directory entry pointing at Local file header 2
            - (...)
            - End of central directory, pointing at first central directory
              entry.
            - Local file header 1
            - File data 1
            - Local file header 2
            - File data 2
            - (...)
            - End of central directory, pointing at first central directory
              entry.

        The duplication of the End of central directory is to accomodate some
        Zip reading tools that want an end of central directory structure to
        follow the central directory entries.
        """
        offset = 0
        headers = {}
        preload_size = 0
        # Prepare central directory entries: compute each member's offset
        # relative to the start of the data section.
        for entry, content in six.itervalues(self._contents):
            header = JarLocalFileHeader()
            # Copy the fields shared between central directory entry and
            # local file header.
            for name in entry.STRUCT:
                if name in header:
                    header[name] = entry[name]
            entry["offset"] = offset
            offset += len(content) + header.size
            if six.ensure_text(entry["filename"]) == self._last_preloaded:
                # Preload everything up to and including this member.
                preload_size = offset
            headers[entry] = header
        # Prepare end of central directory
        end = JarCdirEnd()
        end["disk_entries"] = len(self._contents)
        end["cdir_entries"] = end["disk_entries"]
        end["cdir_size"] = six.moves.reduce(
            lambda x, y: x + y[0].size, self._contents.values(), 0
        )
        # On optimized archives, store the preloaded size and the central
        # directory entries, followed by the first end of central directory.
        if preload_size:
            # cdir_offset == 4 marks the archive as optimized (the central
            # directory immediately follows the 32-bit preload size).
            end["cdir_offset"] = 4
            offset = end["cdir_size"] + end["cdir_offset"] + end.size
            # Member offsets (and thus the preload boundary) shift by the size
            # of everything written before the data section.
            preload_size += offset
            self._data.write(struct.pack("<I", preload_size))
            for entry, _ in six.itervalues(self._contents):
                entry["offset"] += offset
                self._data.write(entry.serialize())
            self._data.write(end.serialize())
        # Store local file entries followed by compressed data
        for entry, content in six.itervalues(self._contents):
            self._data.write(headers[entry].serialize())
            if isinstance(content, memoryview):
                self._data.write(content.tobytes())
            else:
                self._data.write(content)
        # On non optimized archives, store the central directory entries.
        if not preload_size:
            end["cdir_offset"] = offset
            for entry, _ in six.itervalues(self._contents):
                self._data.write(entry.serialize())
        # Store the end of central directory.
        self._data.write(end.serialize())
        self._data.close()

    def add(self, name, data, compress=None, mode=None, skip_duplicates=False):
        """
        Add a new member to the jar archive, with the given name and the given
        data.
        The compress option indicates how the given data should be compressed
        (one of JAR_STORED or JAR_DEFLATE), or compressed according
        to the default defined when creating the JarWriter (None). True and
        False are allowed values for backwards compatibility, mapping,
        respectively, to JAR_DEFLATE and JAR_STORED.
        When the data should be compressed, it is only really compressed if
        the compressed size is smaller than the uncompressed size.
        The mode option gives the unix permissions that should be stored for the
        jar entry, which defaults to 0o100644 (regular file, u+rw, g+r, o+r) if
        not specified.
        If a duplicated member is found skip_duplicates will prevent raising
        an exception if set to True.
        The given data may be a buffer, a file-like instance, a Deflater or a
        JarFileReader instance. The latter two allow to avoid uncompressing
        data to recompress it.
        """
        name = mozpath.normsep(six.ensure_text(name))

        if name in self._contents and not skip_duplicates:
            raise JarWriterError("File %s already in JarWriter" % name)
        if compress is None:
            compress = self._compress
        if compress is True:
            compress = JAR_DEFLATED
        if compress is False:
            compress = JAR_STORED
        # Reuse already-compressed data when the source compression matches.
        if isinstance(data, (JarFileReader, Deflater)) and data.compress == compress:
            deflater = data
        else:
            deflater = Deflater(compress, compress_level=self._compress_level)
            if isinstance(data, (six.binary_type, six.string_types)):
                deflater.write(data)
            elif hasattr(data, "read"):
                # Rewind file-like objects when possible so the whole content
                # is stored.
                try:
                    data.seek(0)
                except (UnsupportedOperation, AttributeError):
                    pass
                deflater.write(data.read())
            else:
                raise JarWriterError("Don't know how to handle %s" % type(data))
        # Fill a central directory entry for this new member.
        entry = JarCdirEntry()
        entry["creator_version"] = 20
        if mode is None:
            # If no mode is given, default to u+rw, g+r, o+r.
            mode = 0o000644
        if not mode & 0o777000:
            # If no file type is given, default to regular file.
            mode |= 0o100000
        # Set creator host system (upper byte of creator_version) to 3 (Unix) so
        # mode is honored when there is one.
        entry["creator_version"] |= 3 << 8
        entry["external_attr"] = (mode & 0xFFFF) << 16
        if deflater.compressed:
            entry["min_version"] = 20  # Version 2.0 supports deflated streams
            entry["general_flag"] = 2  # Max compression
            entry["compression"] = deflater.compress
        else:
            entry["min_version"] = 10  # Version 1.0 for stored streams
            entry["general_flag"] = 0
            entry["compression"] = JAR_STORED
        # January 1st, 2010. See bug 592369.
        entry["lastmod_date"] = ((2010 - 1980) << 9) | (1 << 5) | 1
        entry["lastmod_time"] = 0
        entry["crc32"] = deflater.crc32
        entry["compressed_size"] = deflater.compressed_size
        entry["uncompressed_size"] = deflater.uncompressed_size
        entry["filename"] = six.ensure_binary(name)
        self._contents[name] = entry, deflater.compressed_data
    def preload(self, files):
        """
        Set which members of the jar archive should be preloaded when opening
        the archive in Gecko. This reorders the members according to the order
        of given list.
        """
        new_contents = OrderedDict()
        # Preloaded members first, in the requested order; _last_preloaded
        # ends up as the last name from `files` that exists in the archive.
        for f in files:
            if f not in self._contents:
                continue
            new_contents[f] = self._contents[f]
            self._last_preloaded = f
        # Then everything else, in original order.
        for f in self._contents:
            if f not in new_contents:
                new_contents[f] = self._contents[f]
        self._contents = new_contents
+
+
class Deflater(object):
    """
    File-like interface to zlib compression. The data is actually not
    compressed unless the compressed form is smaller than the uncompressed
    data.
    """

    def __init__(self, compress=True, compress_level=9):
        """
        Initialize a Deflater. The compress argument determines how to
        compress (True/False map to JAR_DEFLATED/JAR_STORED for backwards
        compatibility).
        """
        # Always keep the raw data; _deflated holds the compressed form when
        # deflating.
        self._data = BytesIO()
        if compress is True:
            compress = JAR_DEFLATED
        elif compress is False:
            compress = JAR_STORED
        self.compress = compress
        if compress == JAR_DEFLATED:
            # Negative wbits: produce a raw deflate stream without the zlib
            # header/trailer, as stored in ZIP archives.
            self._deflater = zlib.compressobj(compress_level, zlib.DEFLATED, -MAX_WBITS)
            self._deflated = BytesIO()
        else:
            assert compress == JAR_STORED
            self._deflater = None
        self.crc32 = 0

    def write(self, data):
        """
        Append a buffer to the Deflater.
        """
        if isinstance(data, memoryview):
            data = data.tobytes()
        data = six.ensure_binary(data)
        self._data.write(data)

        # Note: self.compress is truthy only for JAR_DEFLATED (JAR_STORED,
        # i.e. ZIP_STORED, is 0).
        if self.compress:
            if self._deflater:
                self._deflated.write(self._deflater.compress(data))
            else:
                # _deflater is cleared by _flush(); writes are no longer
                # possible once sizes have been queried.
                raise JarWriterError("Can't write after flush")

        self.crc32 = zlib.crc32(data, self.crc32) & 0xFFFFFFFF

    def close(self):
        """
        Close the Deflater.
        """
        self._data.close()
        if self.compress:
            self._deflated.close()

    def _flush(self):
        """
        Flush the underlying zlib compression object.
        """
        if self.compress and self._deflater:
            self._deflated.write(self._deflater.flush())
            # Mark the stream as finalized; further write() calls will raise.
            self._deflater = None

    @property
    def compressed(self):
        """
        Return whether the data should be compressed.
        """
        return self._compressed_size < self.uncompressed_size

    @property
    def _compressed_size(self):
        """
        Return the real compressed size of the data written to the Deflater. If
        the Deflater is set not to compress, the uncompressed size is returned.
        Otherwise, the actual compressed size is returned, whether or not it is
        a win over the uncompressed size.
        """
        if self.compress:
            # Accessing the size finalizes the stream (see _flush).
            self._flush()
            return self._deflated.tell()
        return self.uncompressed_size

    @property
    def compressed_size(self):
        """
        Return the compressed size of the data written to the Deflater. If the
        Deflater is set not to compress, the uncompressed size is returned.
        Otherwise, if the data should not be compressed (the real compressed
        size is bigger than the uncompressed size), return the uncompressed
        size.
        """
        if self.compressed:
            return self._compressed_size
        return self.uncompressed_size

    @property
    def uncompressed_size(self):
        """
        Return the size of the data written to the Deflater.
        """
        return self._data.tell()

    @property
    def compressed_data(self):
        """
        Return the compressed data, if the data should be compressed (real
        compressed size smaller than the uncompressed size), or the
        uncompressed data otherwise.
        """
        if self.compressed:
            return self._deflated.getvalue()
        return self._data.getvalue()
+
+
class JarLog(dict):
    """
    Helper to read the file Gecko generates when setting MOZ_JAR_LOG_FILE.
    The jar log is then available as a dict with the jar path as key, and
    the corresponding access log as a list value. Only the first access to
    a given member of a jar is stored.
    """

    def __init__(self, file=None, fileobj=None):
        """
        Parse the log from the given file name, or from the given file-like
        object if one is provided.
        """
        if fileobj:
            self._parse(fileobj)
        else:
            # Close the file once parsed instead of leaking the handle.
            with open(file, "r") as fh:
                self._parse(fh)

    def _parse(self, fileobj):
        # Each line is "<jar path> <member path>"; record only the first
        # access to each member.
        for line in fileobj:
            split = line.strip().split(None, 1)
            if len(split) != 2:
                # Skip blank or malformed lines instead of failing to unpack
                # them.
                continue
            jar, path = split
            entry = self.setdefault(jar, [])
            if path not in entry:
                entry.append(path)
diff --git a/python/mozbuild/mozpack/packager/__init__.py b/python/mozbuild/mozpack/packager/__init__.py
new file mode 100644
index 0000000000..83b12e4696
--- /dev/null
+++ b/python/mozbuild/mozpack/packager/__init__.py
@@ -0,0 +1,445 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import codecs
+import json
+import os
+import re
+from collections import deque
+
+import six
+
+import mozpack.path as mozpath
+from mozbuild.preprocessor import Preprocessor
+from mozpack.chrome.manifest import (
+ Manifest,
+ ManifestBinaryComponent,
+ ManifestChrome,
+ ManifestInterfaces,
+ is_manifest,
+ parse_manifest,
+)
+from mozpack.errors import errors
+
+
class Component(object):
    """
    Class that represents a component in a package manifest.
    """

    def __init__(self, name, destdir=""):
        # Component names may not contain spaces.
        if name.find(" ") > 0:
            errors.fatal('Malformed manifest: space in component name "%s"' % name)
        self._name = name
        self._destdir = destdir

    def __repr__(self):
        s = self.name
        if self.destdir:
            s += ' destdir="%s"' % self.destdir
        return s

    @property
    def name(self):
        return self._name

    @property
    def destdir(self):
        return self._destdir

    @staticmethod
    def _triples(lst):
        """
        Split [1, 2, 3, 4, 5, 6, 7] into [(1, 2, 3), (4, 5, 6)].
        """
        return zip(*[iter(lst)] * 3)

    KEY_VALUE_RE = re.compile(
        r"""
        \s*              # optional whitespace.
        ([a-zA-Z0-9_]+)  # key.
        \s*=\s*          # optional space around =.
        "([^"]*)"        # value without surrounding quotes.
        (?:\s+|$)
        """,
        re.VERBOSE,
    )

    @staticmethod
    def _split_options(string):
        """
        Split 'key1="value1" key2="value2"' into
        {'key1':'value1', 'key2':'value2'}.

        Returned keys and values are all strings.

        Throws ValueError if the input is malformed.
        """
        options = {}
        splits = Component.KEY_VALUE_RE.split(string)
        if len(splits) % 3 != 1:
            # This should never happen -- we expect to always split
            # into ['', ('key', 'val', '')*].
            raise ValueError("Bad input")
        if splits[0]:
            raise ValueError("Unrecognized input " + splits[0])
        for key, val, no_match in Component._triples(splits[1:]):
            if no_match:
                # Leftover text between matches means malformed input.
                raise ValueError("Unrecognized input " + no_match)
            options[key] = val
        return options

    @staticmethod
    def _split_component_and_options(string):
        """
        Split 'name key1="value1" key2="value2"' into
        ('name', {'key1':'value1', 'key2':'value2'}).

        Returned name, keys and values are all strings.

        Raises ValueError if the input is malformed.
        """
        splits = string.strip().split(None, 1)
        if not splits:
            raise ValueError("No component found")
        component = splits[0].strip()
        if not component:
            raise ValueError("No component found")
        # Raw string: `\-` inside a plain string literal is an invalid escape
        # sequence (a warning on modern Python).
        if not re.match(r"[a-zA-Z0-9_\-]+$", component):
            raise ValueError("Bad component name " + component)
        options = Component._split_options(splits[1]) if len(splits) > 1 else {}
        return component, options

    @staticmethod
    def from_string(string):
        """
        Create a component from a string.
        """
        try:
            name, options = Component._split_component_and_options(string)
        except ValueError as e:
            errors.fatal("Malformed manifest: %s" % e)
            return
        destdir = options.pop("destdir", "")
        if options:
            errors.fatal(
                "Malformed manifest: options %s not recognized" % options.keys()
            )
        return Component(name, destdir=destdir)
+
+
class PackageManifestParser(object):
    """
    Parser for preprocessed package manifests.

    A package manifest is a list of file paths, with some syntactic sugar:
    [] designates a toplevel component. Example: [xpcom]
    - in front of a file specifies it to be removed
    * wildcard support
    ** expands to all files and zero or more directories
    ; file comment

    Lines are fed in one at a time through handle_line, and the parsed
    information is pushed to a sink object. The sink's add and remove methods
    are called with the current Component instance and a path.
    """

    def __init__(self, sink):
        """
        Initialize the package manifest parser with the given sink.
        """
        # Entries seen before any [component] line belong to an anonymous
        # toplevel component.
        self._component = Component("")
        self._sink = sink

    def handle_line(self, str):
        """
        Handle a line of input and push the parsed information to the sink
        object.
        """
        line = str.strip()
        # Blank line or comment: nothing to do.
        if not line or line.startswith(";"):
            return
        if line.startswith("[") and line.endswith("]"):
            # Component header: switch the current component.
            self._component = Component.from_string(line[1:-1])
            return
        if line.startswith("-"):
            # Removal entry.
            self._sink.remove(self._component, line[1:])
            return
        if "," in line:
            errors.fatal("Incompatible syntax")
            return
        self._sink.add(self._component, line)
+
+
class PreprocessorOutputWrapper(object):
    """
    File-like adapter that forwards preprocessor output to a parser.
    Each written line is handed to the parser's handle_line method within an
    errors.context reflecting the preprocessor's current file and line.
    """

    def __init__(self, preprocessor, parser):
        self._pp = preprocessor
        self._parser = parser

    def write(self, str):
        current_file = self._pp.context["FILE"]
        current_line = self._pp.context["LINE"]
        with errors.context(current_file, current_line):
            self._parser.handle_line(str)
+
+
def preprocess(input, parser, defines=None):
    """
    Preprocess the file-like input with the given defines, and send the
    preprocessed output line by line to the given parser.

    :param input: input accepted by Preprocessor.do_include.
    :param parser: object with a handle_line(str) method, e.g. a
        PackageManifestParser.
    :param defines: optional dict of preprocessor variable definitions.
    """
    # None (the new default) replaces the previous mutable default argument.
    pp = Preprocessor()
    pp.context.update(defines or {})
    pp.do_filter("substitution")
    pp.out = PreprocessorOutputWrapper(pp, parser)
    pp.do_include(input)
+
+
def preprocess_manifest(sink, manifest, defines=None):
    """
    Preprocess the given file-like manifest with the given defines, and push
    the parsed information to a sink. See PackageManifestParser documentation
    for more details on the sink.

    :param defines: optional dict of preprocessor variable definitions.
    """
    # Pass a concrete dict so this works regardless of how preprocess()
    # handles its default; None replaces the mutable default argument.
    preprocess(manifest, PackageManifestParser(sink), defines or {})
+
+
class CallDeque(deque):
    """
    FIFO queue of deferred function calls.

    Each queued call records the errors context active when it was appended,
    so that error reporting during execute() points at the originating
    manifest location.
    """

    def append(self, function, *args):
        # Capture the current error-reporting context alongside the call.
        deque.append(self, (errors.get_context(), function, args))

    def execute(self):
        # Drain the queue in order, replaying each call under its recorded
        # context when one was captured.
        while True:
            try:
                context, function, args = self.popleft()
            except IndexError:
                return
            if not context:
                function(*args)
            else:
                file, line = context[0], context[1]
                with errors.context(file, line):
                    function(*args)
+
+
class SimplePackager(object):
    """
    Helper used to translate and buffer instructions from the
    SimpleManifestSink to a formatter. Formatters expect some information to be
    given first that the simple manifest contents can't guarantee before the
    end of the input.
    """

    def __init__(self, formatter):
        self.formatter = formatter
        # Queue for formatter.add_interfaces()/add_manifest() calls.
        self._queue = CallDeque()
        # Queue for formatter.add_manifest() calls for ManifestChrome.
        self._chrome_queue = CallDeque()
        # Queue for formatter.add() calls.
        self._file_queue = CallDeque()
        # All paths containing addons. (key is path, value is whether it
        # should be packed or unpacked)
        self._addons = {}
        # All manifest paths imported.
        self._manifests = set()
        # All manifest paths included from some other manifest.
        self._included_manifests = {}
        self._closed = False

    # Parsing RDF is complex, and would require an external library to do
    # properly. Just go with some hackish but probably sufficient regexp
    UNPACK_ADDON_RE = re.compile(
        r"""(?:
        <em:unpack>true</em:unpack>
        |em:unpack=(?P<quote>["']?)true(?P=quote)
        )""",
        re.VERBOSE,
    )

    def add(self, path, file):
        """
        Add the given BaseFile instance with the given path.

        Manifest files are parsed and queued as manifest entries; .xpt files
        are queued as interfaces; anything else is queued as a plain file,
        with install.rdf/manifest.json additionally detected as addons.
        """
        assert not self._closed
        if is_manifest(path):
            self._add_manifest_file(path, file)
        elif path.endswith(".xpt"):
            self._queue.append(self.formatter.add_interfaces, path, file)
        else:
            self._file_queue.append(self.formatter.add, path, file)
            if mozpath.basename(path) == "install.rdf":
                addon = True
                install_rdf = six.ensure_text(file.open().read())
                if self.UNPACK_ADDON_RE.search(install_rdf):
                    addon = "unpacked"
                self._add_addon(mozpath.dirname(path), addon)
            elif mozpath.basename(path) == "manifest.json":
                manifest = six.ensure_text(file.open().read())
                try:
                    parsed = json.loads(manifest)
                except ValueError:
                    # Invalid JSON is not a webextension manifest. Bind
                    # `parsed` so the check below doesn't raise NameError
                    # (the original `pass` left it undefined).
                    parsed = None
                if isinstance(parsed, dict) and "manifest_version" in parsed:
                    self._add_addon(mozpath.dirname(path), True)

    def _add_addon(self, path, addon_type):
        """
        Add the given path to the collection of addons if a parent
        directory is not already in the collection.
        """
        if mozpath.basedir(path, self._addons) is not None:
            return

        # A new addon path may supersede a previously recorded child path;
        # drop the child so only the outermost addon directory is kept.
        for d in self._addons:
            if mozpath.basedir(d, [path]) is not None:
                del self._addons[d]
                break

        self._addons[path] = addon_type

    def _add_manifest_file(self, path, file):
        """
        Add the given BaseFile with manifest file contents with the given path.
        """
        self._manifests.add(path)
        base = ""
        if hasattr(file, "path"):
            # Find the directory the given path is relative to.
            b = mozpath.normsep(file.path)
            if b.endswith("/" + path) or b == path:
                base = os.path.normpath(b[: -len(path)])
        for e in parse_manifest(base, path, codecs.getreader("utf-8")(file.open())):
            # ManifestResources need to be given after ManifestChrome, so just
            # put all ManifestChrome in a separate queue to make them first.
            if isinstance(e, ManifestChrome):
                # e.move(e.base) just returns a clone of the entry.
                self._chrome_queue.append(self.formatter.add_manifest, e.move(e.base))
            elif not isinstance(e, (Manifest, ManifestInterfaces)):
                self._queue.append(self.formatter.add_manifest, e.move(e.base))
            # If a binary component is added to an addon, prevent the addon
            # from being packed.
            if isinstance(e, ManifestBinaryComponent):
                addon = mozpath.basedir(e.base, self._addons)
                if addon:
                    self._addons[addon] = "unpacked"
            if isinstance(e, Manifest):
                if e.flags:
                    errors.fatal("Flags are not supported on " + '"manifest" entries')
                self._included_manifests[e.path] = path

    def get_bases(self, addons=True):
        """
        Return all paths under which root manifests have been found. Root
        manifests are manifests that are included in no other manifest.
        `addons` indicates whether to include addon bases as well.
        """
        all_bases = set(
            mozpath.dirname(m) for m in self._manifests - set(self._included_manifests)
        )
        if not addons:
            all_bases -= set(self._addons)
        else:
            # If for some reason some detected addon doesn't have a
            # non-included manifest.
            all_bases |= set(self._addons)
        return all_bases

    def close(self):
        """
        Push all instructions to the formatter.

        Bases are registered first (sorted, parents before children), then
        chrome manifest entries, then other manifest entries, then files.
        """
        self._closed = True

        bases = self.get_bases()
        # A manifest must live under the same base as the manifest that
        # includes it; report any that don't.
        broken_bases = sorted(
            m
            for m, includer in six.iteritems(self._included_manifests)
            if mozpath.basedir(m, bases) != mozpath.basedir(includer, bases)
        )
        for m in broken_bases:
            errors.fatal(
                '"%s" is included from "%s", which is outside "%s"'
                % (m, self._included_manifests[m], mozpath.basedir(m, bases))
            )
        for base in sorted(bases):
            self.formatter.add_base(base, self._addons.get(base, False))
        self._chrome_queue.execute()
        self._queue.execute()
        self._file_queue.execute()
+
+
class SimpleManifestSink(object):
    """
    Parser sink for "simple" package manifests. Simple package manifests use
    the format described in the PackageManifestParser documentation, but don't
    support file removals, and require manifests, interfaces and chrome data
    to be explicitly listed.
    Entries starting with bin/ are searched under bin/ in the FileFinder, but
    are packaged without the bin/ prefix.
    """

    def __init__(self, finder, formatter):
        """
        Initialize the SimpleManifestSink. The given FileFinder is used to
        get files matching the patterns given in the manifest. The given
        formatter does the packaging job.
        """
        self._finder = finder
        self.packager = SimplePackager(formatter)
        self._closed = False
        self._manifests = set()

    @staticmethod
    def normalize_path(path):
        """
        Return the given path with any leading bin/ prefix stripped.
        """
        return (
            mozpath.relpath(path, "bin")
            if mozpath.basedir(path, ["bin"]) == "bin"
            else path
        )

    def add(self, component, pattern):
        """
        Add files with the given pattern in the given component.
        """
        assert not self._closed
        matched = False
        for found_path, found_file in self._finder.find(pattern):
            matched = True
            if is_manifest(found_path):
                self._manifests.add(found_path)
            dest = mozpath.join(
                component.destdir, SimpleManifestSink.normalize_path(found_path)
            )
            self.packager.add(dest, found_file)
        if not matched:
            errors.error("Missing file(s): %s" % pattern)

    def remove(self, component, pattern):
        """
        Remove files with the given pattern in the given component.
        Simple manifests don't support removals, so this always fails.
        """
        assert not self._closed
        errors.fatal("Removal is unsupported")

    def close(self, auto_root_manifest=True):
        """
        Add possibly missing bits and push all instructions to the formatter.
        """
        if auto_root_manifest:
            # Simple package manifests don't contain the root manifests, so
            # find and add them under the common ancestor of all manifests.
            manifest_dirs = [mozpath.dirname(m) for m in self._manifests]
            root = mozpath.dirname(mozpath.commonprefix(manifest_dirs))
            for p, f in self._finder.find(mozpath.join(root, "chrome.manifest")):
                if p not in self._manifests:
                    self.packager.add(SimpleManifestSink.normalize_path(p), f)
        self.packager.close()
diff --git a/python/mozbuild/mozpack/packager/formats.py b/python/mozbuild/mozpack/packager/formats.py
new file mode 100644
index 0000000000..95a6dee2f6
--- /dev/null
+++ b/python/mozbuild/mozpack/packager/formats.py
@@ -0,0 +1,354 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from six.moves.urllib.parse import urlparse
+
+import mozpack.path as mozpath
+from mozpack.chrome.manifest import (
+ Manifest,
+ ManifestBinaryComponent,
+ ManifestChrome,
+ ManifestInterfaces,
+ ManifestMultiContent,
+ ManifestResource,
+)
+from mozpack.copier import FileRegistry, FileRegistrySubtree, Jarrer
+from mozpack.errors import errors
+from mozpack.files import ManifestFile
+
+"""
+Formatters are classes receiving packaging instructions and creating the
+appropriate package layout.
+
+There are three distinct formatters, each handling one of the different chrome
+formats:
+ - flat: essentially, copies files from the source with the same file system
+ layout. Manifests entries are grouped in a single manifest per directory,
+ as well as XPT interfaces.
+ - jar: chrome content is packaged in jar files.
+ - omni: chrome content, modules, non-binary components, and many other
+ elements are packaged in an omnijar file for each base directory.
+
+The base interface provides the following methods:
+ - add_base(path [, addon])
+ Register a base directory for an application or GRE, or an addon.
+ Base directories usually contain a root manifest (manifests not
+ included in any other manifest) named chrome.manifest.
+ The optional addon argument tells whether the base directory
+ is that of a packed addon (True), unpacked addon ('unpacked') or
+ otherwise (False).
+ The method may only be called in sorted order of `path` (alphanumeric
+ order, parents before children).
+ - add(path, content)
+ Add the given content (BaseFile instance) at the given virtual path
+ - add_interfaces(path, content)
+ Add the given content (BaseFile instance) as an interface. Equivalent
+ to add(path, content) with the right add_manifest().
+ - add_manifest(entry)
+ Add a ManifestEntry.
+ - contains(path)
+ Returns whether the given virtual path is known of the formatter.
+
+The virtual paths mentioned above are paths as they would be with a flat
+chrome.
+
+Formatters all take a FileCopier instance they will fill with the packaged
+data.
+"""
+
+
class PiecemealFormatter(object):
    """
    Generic formatter that dispatches across different sub-formatters
    according to paths.
    """

    def __init__(self, copier):
        assert isinstance(copier, (FileRegistry, FileRegistrySubtree))
        self.copier = copier
        self._sub_formatter = {}
        self._frozen_bases = False

    def add_base(self, base, addon=False):
        # Bases may only be registered before the first path lookup
        # (_get_base freezes the set), must be unique, and must arrive in
        # sorted order (parents before children).
        assert not self._frozen_bases
        assert base not in self._sub_formatter
        assert all(base > b for b in self._sub_formatter)
        self._add_base(base, addon)

    def _get_base(self, path):
        """
        Return the deepest base directory containing the given path, along
        with the path relative to that base (or the path unchanged when no
        base matches).
        """
        self._frozen_bases = True
        base = mozpath.basedir(path, self._sub_formatter.keys())
        if base:
            return base, mozpath.relpath(path, base)
        return base, path

    def add(self, path, content):
        base, relpath = self._get_base(path)
        # Fall back to the top-level copier when no base claims the path.
        target = self.copier if base is None else self._sub_formatter[base]
        return target.add(relpath, content)

    def add_manifest(self, entry):
        base, relpath = self._get_base(entry.base)
        assert base is not None
        return self._sub_formatter[base].add_manifest(entry.move(relpath))

    def add_interfaces(self, path, content):
        base, relpath = self._get_base(path)
        assert base is not None
        return self._sub_formatter[base].add_interfaces(relpath, content)

    def contains(self, path):
        assert "*" not in path
        base, relpath = self._get_base(path)
        target = self.copier if base is None else self._sub_formatter[base]
        return target.contains(relpath)
+
+
class FlatFormatter(PiecemealFormatter):
    """
    Formatter for the flat package format.
    """

    def _add_base(self, base, addon=False):
        # Each base gets its own flat sub-formatter writing into the
        # corresponding subtree of the shared copier; `addon` is irrelevant
        # for the flat layout.
        subtree = FileRegistrySubtree(base, self.copier)
        self._sub_formatter[base] = FlatSubFormatter(subtree)
+
+
class FlatSubFormatter(object):
    """
    Sub-formatter for the flat package format.

    Files are copied as-is; manifest entries are aggregated into one
    manifest file per directory, and chrome entries are checked for
    duplicates/overrides.
    """

    def __init__(self, copier):
        assert isinstance(copier, (FileRegistry, FileRegistrySubtree))
        self.copier = copier
        # Per chrome-package-name record of registered chrome entries, used
        # to detect duplicate and conflicting registrations.
        self._chrome_db = {}

    def add(self, path, content):
        """Add the given content at the given path, verbatim."""
        self.copier.add(path, content)

    def add_manifest(self, entry):
        """
        Add a manifest entry to the aggregated manifest file for its
        directory, creating (and linking) that manifest file on first use.
        """
        # Store manifest entries in a single manifest per directory, named
        # after their parent directory, except for root manifests, all named
        # chrome.manifest.
        if entry.base:
            name = mozpath.basename(entry.base)
        else:
            name = "chrome"
        path = mozpath.normpath(mozpath.join(entry.base, "%s.manifest" % name))
        if not self.copier.contains(path):
            # Add a reference to the manifest file in the parent manifest, if
            # the manifest file is not a root manifest.
            # NOTE: this recurses up through add_manifest until an existing
            # (or root) manifest is reached.
            if entry.base:
                parent = mozpath.dirname(entry.base)
                relbase = mozpath.basename(entry.base)
                relpath = mozpath.join(relbase, mozpath.basename(path))
                self.add_manifest(Manifest(parent, relpath))
            self.copier.add(path, ManifestFile(entry.base))

        if isinstance(entry, ManifestChrome):
            data = self._chrome_db.setdefault(entry.name, {})
            if isinstance(entry, ManifestMultiContent):
                # Multi-content entries are additionally keyed by their id.
                entries = data.setdefault(entry.type, {}).setdefault(entry.id, [])
            else:
                entries = data.setdefault(entry.type, [])
            for e in entries:
                # Ideally, we'd actually check whether entry.flags are more
                # specific than e.flags, but in practice the following test
                # is enough for now.
                if entry == e:
                    errors.warn('"%s" is duplicated. Skipping.' % entry)
                    return
                if not entry.flags or e.flags and entry.flags == e.flags:
                    errors.fatal('"%s" overrides "%s"' % (entry, e))
            entries.append(entry)

        self.copier[path].add(entry)

    def add_interfaces(self, path, content):
        """Add an interfaces file and register it in the manifest."""
        self.copier.add(path, content)
        self.add_manifest(
            ManifestInterfaces(mozpath.dirname(path), mozpath.basename(path))
        )

    def contains(self, path):
        """Return whether the given (non-wildcard) path has been added."""
        assert "*" not in path
        return self.copier.contains(path)
+
+
class JarFormatter(PiecemealFormatter):
    """
    Formatter for the jar package format. Assumes manifest entries related to
    chrome are registered before the chrome data files are added. Also assumes
    manifest entries for resources are registered after chrome manifest
    entries.
    """

    def __init__(self, copier, compress=True):
        PiecemealFormatter.__init__(self, copier)
        self._compress = compress

    def _add_base(self, base, addon=False):
        # `addon` may be True (packed), 'unpacked', or False; only a packed
        # addon (exactly True) is stored whole as a single .xpi jar.
        if addon is not True:
            self._sub_formatter[base] = JarSubFormatter(
                FileRegistrySubtree(base, self.copier), self._compress
            )
        else:
            xpi = Jarrer(self._compress)
            self.copier.add(base + ".xpi", xpi)
            self._sub_formatter[base] = FlatSubFormatter(xpi)
+
+
class JarSubFormatter(PiecemealFormatter):
    """
    Sub-formatter for the jar package format. It is a PiecemealFormatter that
    dispatches between further sub-formatter for each of the jar files it
    dispatches the chrome data to, and a FlatSubFormatter for the non-chrome
    files.
    """

    def __init__(self, copier, compress=True):
        PiecemealFormatter.__init__(self, copier)
        self._frozen_chrome = False
        self._compress = compress
        # The "" base catches everything not claimed by a chrome jar.
        self._sub_formatter[""] = FlatSubFormatter(copier)

    def _jarize(self, entry, relpath):
        """
        Transform a manifest entry in one pointing to chrome data in a jar.
        Return the corresponding chrome path and the new entry.

        The entry is rebased so its relative path becomes
        jar:<dir>.jar!/<rest>.
        """
        base = entry.base
        # First path segment of the entry's relative path names the jar.
        basepath = mozpath.split(relpath)[0]
        chromepath = mozpath.join(base, basepath)
        entry = (
            entry.rebase(chromepath)
            .move(mozpath.join(base, "jar:%s.jar!" % basepath))
            .rebase(base)
        )
        return chromepath, entry

    def add_manifest(self, entry):
        """
        Add a manifest entry, redirecting local chrome (and matching
        resource) entries into per-directory jars.
        """
        # Only local (scheme-less) chrome paths are jarized; absolute URLs
        # are left untouched.
        if isinstance(entry, ManifestChrome) and not urlparse(entry.relpath).scheme:
            chromepath, entry = self._jarize(entry, entry.relpath)
            assert not self._frozen_chrome
            if chromepath not in self._sub_formatter:
                jarrer = Jarrer(self._compress)
                self.copier.add(chromepath + ".jar", jarrer)
                self._sub_formatter[chromepath] = FlatSubFormatter(jarrer)
        elif isinstance(entry, ManifestResource) and not urlparse(entry.target).scheme:
            # Resources are only jarized when they point inside an already
            # registered chrome jar.
            chromepath, new_entry = self._jarize(entry, entry.target)
            if chromepath in self._sub_formatter:
                entry = new_entry
        PiecemealFormatter.add_manifest(self, entry)
+
+
class OmniJarFormatter(JarFormatter):
    """
    Formatter for the omnijar package format.

    Non-addon bases get an OmniJarSubFormatter that splits content between
    an omnijar archive and plain files; addon bases fall back to the
    JarFormatter behavior, except addons living under a resource directory,
    which are packed inside the omnijar itself.
    """

    def __init__(self, copier, omnijar_name, compress=True, non_resources=()):
        JarFormatter.__init__(self, copier, compress)
        # Name of the omnijar archive within each base (e.g. "omni.ja").
        self._omnijar_name = omnijar_name
        # Path patterns that must stay outside the omnijar.
        self._non_resources = non_resources

    def _add_base(self, base, addon=False):
        if addon:
            # Because add_base is always called with parents before children,
            # all the possible ancestry of `base` is already present in
            # `_sub_formatter`.
            parent_base = mozpath.basedir(base, self._sub_formatter.keys())
            rel_base = mozpath.relpath(base, parent_base)
            # If the addon is under a resource directory, package it in the
            # omnijar.
            parent_sub_formatter = self._sub_formatter[parent_base]
            if parent_sub_formatter.is_resource(rel_base):
                omnijar_sub_formatter = parent_sub_formatter._sub_formatter[
                    self._omnijar_name
                ]
                self._sub_formatter[base] = FlatSubFormatter(
                    FileRegistrySubtree(rel_base, omnijar_sub_formatter.copier)
                )
                return
            JarFormatter._add_base(self, base, addon)
        else:
            self._sub_formatter[base] = OmniJarSubFormatter(
                FileRegistrySubtree(base, self.copier),
                self._omnijar_name,
                self._compress,
                self._non_resources,
            )
+
+
class OmniJarSubFormatter(PiecemealFormatter):
    """
    Sub-formatter for the omnijar package format. It is a PiecemealFormatter
    that dispatches between a FlatSubFormatter for the resources data and
    another FlatSubFormatter for the other files.
    """

    def __init__(self, copier, omnijar_name, compress=True, non_resources=()):
        PiecemealFormatter.__init__(self, copier)
        self._omnijar_name = omnijar_name
        self._compress = compress
        self._non_resources = non_resources
        # "" handles non-resource files; omnijar_name handles resources,
        # which end up inside a Jarrer that is only registered with the
        # copier once something is actually added to it (see _get_base).
        self._sub_formatter[""] = FlatSubFormatter(copier)
        jarrer = Jarrer(self._compress)
        self._sub_formatter[omnijar_name] = FlatSubFormatter(jarrer)

    def _get_base(self, path):
        """
        Dispatch the path to the omnijar if it is a resource, otherwise to
        the plain sub-formatter. Note: unlike the parent class, the path is
        returned unmodified (sub-formatters see base-relative paths already).
        """
        base = self._omnijar_name if self.is_resource(path) else ""
        # Only add the omnijar file if something ends up in it.
        if base and not self.copier.contains(base):
            self.copier.add(base, self._sub_formatter[base].copier)
        return base, path

    def add_manifest(self, entry):
        """
        Add a manifest entry; binary component entries must stay outside the
        omnijar, everything else goes inside it.
        """
        base = ""
        if not isinstance(entry, ManifestBinaryComponent):
            base = self._omnijar_name
        formatter = self._sub_formatter[base]
        return formatter.add_manifest(entry)

    def is_resource(self, path):
        """
        Return whether the given path corresponds to a resource to be put in an
        omnijar archive.
        """
        if any(mozpath.match(path, p.replace("*", "**")) for p in self._non_resources):
            return False
        path = mozpath.split(path)
        if path[0] == "chrome":
            # chrome/icons stays outside the omnijar.
            return len(path) == 1 or path[1] != "icons"
        if path[0] == "components":
            # Only scriptable components; binary components stay outside.
            return path[-1].endswith((".js", ".xpt"))
        if path[0] == "res":
            return len(path) == 1 or (
                path[1] != "cursors"
                and path[1] != "touchbar"
                and path[1] != "MainMenu.nib"
            )
        if path[0] == "defaults":
            # channel-prefs.js must remain editable outside the omnijar.
            return len(path) != 3 or not (
                path[2] == "channel-prefs.js" and path[1] in ["pref", "preferences"]
            )
        if len(path) <= 2 and path[-1] == "greprefs.js":
            # Accommodate `greprefs.js` and `$ANDROID_CPU_ARCH/greprefs.js`.
            return True
        return path[0] in [
            "modules",
            "actors",
            "dictionaries",
            "hyphenation",
            "localization",
            "update.locale",
            "contentaccessible",
        ]
diff --git a/python/mozbuild/mozpack/packager/l10n.py b/python/mozbuild/mozpack/packager/l10n.py
new file mode 100644
index 0000000000..76871e15cd
--- /dev/null
+++ b/python/mozbuild/mozpack/packager/l10n.py
@@ -0,0 +1,304 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Replace localized parts of a packaged directory with data from a langpack
+directory.
+"""
+
+import json
+import os
+
+import six
+from createprecomplete import generate_precomplete
+
+import mozpack.path as mozpath
+from mozpack.chrome.manifest import (
+ Manifest,
+ ManifestChrome,
+ ManifestEntryWithRelPath,
+ ManifestLocale,
+ is_manifest,
+)
+from mozpack.copier import FileCopier, Jarrer
+from mozpack.errors import errors
+from mozpack.files import ComposedFinder, GeneratedFile, ManifestFile
+from mozpack.mozjar import JAR_DEFLATED
+from mozpack.packager import Component, SimpleManifestSink, SimplePackager
+from mozpack.packager.formats import FlatFormatter, JarFormatter, OmniJarFormatter
+from mozpack.packager.unpack import UnpackFinder
+
+
class LocaleManifestFinder(object):
    """
    Scan a finder for localized manifest entries and base directories,
    exposing them as `entries`, `bases` and the derived `locales` list.
    """

    def __init__(self, finder):
        # Closed over by MockFormatter below; filled during sink.close().
        entries = self.entries = []
        bases = self.bases = []

        # Formatter that records localized manifest entries and bases
        # instead of packaging anything.
        class MockFormatter(object):
            def add_interfaces(self, path, content):
                pass

            def add(self, path, content):
                pass

            def add_manifest(self, entry):
                if entry.localized:
                    entries.append(entry)

            def add_base(self, base, addon=False):
                bases.append(base)

        # SimplePackager rejects "manifest foo.manifest" entries with
        # additional flags (such as "manifest foo.manifest application=bar").
        # Those type of entries are used by language packs to work as addons,
        # but are not necessary for the purpose of l10n repacking. So we wrap
        # the finder in order to remove those entries.
        class WrapFinder(object):
            def __init__(self, finder):
                self._finder = finder

            def find(self, pattern):
                for p, f in self._finder.find(pattern):
                    if isinstance(f, ManifestFile):
                        unwanted = [
                            e for e in f._entries if isinstance(e, Manifest) and e.flags
                        ]
                        if unwanted:
                            # Rebuild the manifest without flagged entries.
                            f = ManifestFile(
                                f._base, [e for e in f._entries if e not in unwanted]
                            )
                    yield p, f

        sink = SimpleManifestSink(WrapFinder(finder), MockFormatter())
        sink.add(Component(""), "*")
        sink.close(False)

        # Find unique locales used in these manifest entries.
        self.locales = list(
            set(e.id for e in self.entries if isinstance(e, ManifestLocale))
        )
+
+
class L10NRepackFormatterMixin(object):
    """
    Formatter mixin that adjusts files during l10n repacks: tracks
    dictionaries, rewrites built_in_addons.json accordingly, and drops
    omnijar signatures.
    """

    def __init__(self, *args, **kwargs):
        super(L10NRepackFormatterMixin, self).__init__(*args, **kwargs)
        # Maps dictionary base name -> full path, collected from .dic files.
        self._dictionaries = {}

    def add(self, path, file):
        base, relpath = self._get_base(path)
        if path.endswith(".dic"):
            if relpath.startswith("dictionaries/"):
                root, ext = mozpath.splitext(mozpath.basename(path))
                self._dictionaries[root] = path
        elif path.endswith("/built_in_addons.json"):
            data = json.loads(six.ensure_text(file.open().read()))
            data["dictionaries"] = self._dictionaries
            # The GeneratedFile content is only really generated after
            # all calls to formatter.add.
            file = GeneratedFile(lambda: json.dumps(data))
        elif relpath.startswith("META-INF/"):
            # Ignore signatures inside omnijars. We drop these items: if we
            # don't treat them as omnijar resources, they will be included in
            # the top-level package, and that's not how omnijars are signed (Bug
            # 1750676). If we treat them as omnijar resources, they will stay
            # in the omnijar, as expected -- but the signatures won't be valid
            # after repacking. Therefore, drop them.
            return
        super(L10NRepackFormatterMixin, self).add(path, file)
+
+
def L10NRepackFormatter(klass):
    """
    Return a formatter class combining L10NRepackFormatterMixin with the
    given formatter class.
    """

    class L10NRepackFormatter(L10NRepackFormatterMixin, klass):
        pass

    return L10NRepackFormatter


# Shadow the imported formatter classes with l10n-repack-aware variants so
# the rest of this module transparently uses the mixin behavior.
FlatFormatter = L10NRepackFormatter(FlatFormatter)
JarFormatter = L10NRepackFormatter(JarFormatter)
OmniJarFormatter = L10NRepackFormatter(OmniJarFormatter)
+
+
def _repack(app_finder, l10n_finder, copier, formatter, non_chrome=set()):
    """
    Core of the l10n repack: replace localized files/manifest entries from
    `app_finder` with their counterparts from `l10n_finder`, pushing the
    result through `formatter` via a SimplePackager.
    """
    app = LocaleManifestFinder(app_finder)
    l10n = LocaleManifestFinder(l10n_finder)

    # The code further below assumes there's only one locale replaced with
    # another one.
    if len(app.locales) > 1:
        errors.fatal("Multiple app locales aren't supported: " + ",".join(app.locales))
    if len(l10n.locales) > 1:
        errors.fatal(
            "Multiple l10n locales aren't supported: " + ",".join(l10n.locales)
        )
    locale = app.locales[0]
    l10n_locale = l10n.locales[0]

    # For each base directory, store what path a locale chrome package name
    # corresponds to.
    # e.g., for the following entry under app/chrome:
    #     locale foo en-US path/to/files
    # keep track that the locale path for foo in app is
    # app/chrome/path/to/files.
    # As there may be multiple locale entries with the same base, but with
    # different flags, that tracking takes the flags into account when there
    # are some. Example:
    #     locale foo en-US path/to/files/win os=Win
    #     locale foo en-US path/to/files/mac os=Darwin
    def key(entry):
        if entry.flags:
            return "%s %s" % (entry.name, entry.flags)
        return entry.name

    l10n_paths = {}
    for e in l10n.entries:
        if isinstance(e, ManifestChrome):
            base = mozpath.basedir(e.path, app.bases)
            l10n_paths.setdefault(base, {})
            l10n_paths[base][key(e)] = e.path

    # For chrome and non chrome files or directories, store what langpack path
    # corresponds to a package path.
    paths = {}
    for e in app.entries:
        if isinstance(e, ManifestEntryWithRelPath):
            base = mozpath.basedir(e.path, app.bases)
            if base not in l10n_paths:
                errors.fatal("Locale doesn't contain %s/" % base)
                # Allow errors to accumulate
                continue
            if key(e) not in l10n_paths[base]:
                errors.fatal("Locale doesn't have a manifest entry for '%s'" % e.name)
                # Allow errors to accumulate
                continue
            paths[e.path] = l10n_paths[base][key(e)]

    # non_chrome patterns map langpack files onto themselves; files present
    # only in the app are mapped to None (i.e. dropped).
    for pattern in non_chrome:
        for base in app.bases:
            path = mozpath.join(base, pattern)
            left = set(p for p, f in app_finder.find(path))
            right = set(p for p, f in l10n_finder.find(path))
            for p in right:
                paths[p] = p
            for p in left - right:
                paths[p] = None

    # Create a new package, with non localized bits coming from the original
    # package, and localized bits coming from the langpack.
    packager = SimplePackager(formatter)
    for p, f in app_finder:
        if is_manifest(p):
            # Remove localized manifest entries.
            for e in [e for e in f if e.localized]:
                f.remove(e)
        # If the path is one that needs a locale replacement, use the
        # corresponding file from the langpack.
        path = None
        if p in paths:
            path = paths[p]
            if not path:
                continue
        else:
            base = mozpath.basedir(p, paths.keys())
            if base:
                subpath = mozpath.relpath(p, base)
                path = mozpath.normpath(mozpath.join(paths[base], subpath))

        if path:
            files = [f for p, f in l10n_finder.find(path)]
            if not len(files):
                # NOTE(review): `base` here may carry over from a previous
                # loop iteration (it is only assigned in the else branch
                # above) — confirm intended for the `p in paths` case.
                if base not in non_chrome:
                    finderBase = ""
                    if hasattr(l10n_finder, "base"):
                        finderBase = l10n_finder.base
                    errors.error("Missing file: %s" % os.path.join(finderBase, path))
            else:
                packager.add(path, files[0])
        else:
            packager.add(p, f)

    # Add localized manifest entries from the langpack.
    l10n_manifests = []
    for base in set(e.base for e in l10n.entries):
        m = ManifestFile(base, [e for e in l10n.entries if e.base == base])
        path = mozpath.join(base, "chrome.%s.manifest" % l10n_locale)
        l10n_manifests.append((path, m))
    bases = packager.get_bases()
    for path, m in l10n_manifests:
        base = mozpath.basedir(path, bases)
        packager.add(path, m)
        # Add a "manifest $path" entry in the top manifest under that base.
        m = ManifestFile(base)
        m.add(Manifest(base, mozpath.relpath(path, base)))
        packager.add(mozpath.join(base, "chrome.manifest"), m)

    packager.close()

    # Add any remaining non chrome files.
    for pattern in non_chrome:
        for base in bases:
            for p, f in l10n_finder.find(mozpath.join(base, pattern)):
                if not formatter.contains(p):
                    formatter.add(p, f)

    # Resources in `localization` directories are packaged from the source and then
    # if localized versions are present in the l10n dir, we package them as well
    # keeping the source dir resources as a runtime fallback.
    for p, f in l10n_finder.find("**/localization"):
        if not formatter.contains(p):
            formatter.add(p, f)

    # Transplant jar preloading information.
    for path, log in six.iteritems(app_finder.jarlogs):
        assert isinstance(copier[path], Jarrer)
        copier[path].preload([l.replace(locale, l10n_locale) for l in log])
+
+
def repack(
    source, l10n, extra_l10n=None, non_resources=None, non_chrome=None, minify=False
):
    """
    Replace localized data from the `source` directory with localized data
    from `l10n` and `extra_l10n`.

    The `source` argument points to a directory containing a packaged
    application (in omnijar, jar or flat form).
    The `l10n` argument points to a directory containing the main localized
    data (usually in the form of a language pack addon) to use to replace
    in the packaged application.
    The `extra_l10n` argument contains a dict associating relative paths in
    the source to separate directories containing localized data for them.
    This can be used to point at different language pack addons for different
    parts of the package application.
    The `non_resources` argument gives a list of relative paths in the source
    that should not be added in an omnijar in case the packaged application
    is in that format.
    The `non_chrome` argument gives a list of file/directory patterns for
    localized files that are not listed in a chrome.manifest.
    If `minify`, `.properties` files are minified.
    """
    # Avoid mutable default arguments; None stands in for empty collections.
    extra_l10n = extra_l10n or {}
    non_resources = non_resources or []
    non_chrome = non_chrome or set()

    app_finder = UnpackFinder(source, minify=minify)
    l10n_finder = UnpackFinder(l10n, minify=minify)
    if extra_l10n:
        # Compose the main langpack with per-subdirectory extra langpacks.
        finders = {
            "": l10n_finder,
        }
        for base, path in six.iteritems(extra_l10n):
            finders[base] = UnpackFinder(path, minify=minify)
        l10n_finder = ComposedFinder(finders)
    copier = FileCopier()
    # Keep the original package's compression, capped at deflate.
    compress = min(app_finder.compressed, JAR_DEFLATED)
    if app_finder.kind == "flat":
        formatter = FlatFormatter(copier)
    elif app_finder.kind == "jar":
        formatter = JarFormatter(copier, compress=compress)
    elif app_finder.kind == "omni":
        formatter = OmniJarFormatter(
            copier, app_finder.omnijar, compress=compress, non_resources=non_resources
        )
    else:
        # Fail with a clear message instead of a NameError on `formatter`
        # below when the package kind is unrecognized.
        errors.fatal("Unsupported package kind: %s" % app_finder.kind)

    with errors.accumulate():
        _repack(app_finder, l10n_finder, copier, formatter, non_chrome)
    copier.copy(source, skip_if_older=False)
    generate_precomplete(source)
diff --git a/python/mozbuild/mozpack/packager/unpack.py b/python/mozbuild/mozpack/packager/unpack.py
new file mode 100644
index 0000000000..dff295eb9b
--- /dev/null
+++ b/python/mozbuild/mozpack/packager/unpack.py
@@ -0,0 +1,200 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import codecs
+
+from six.moves.urllib.parse import urlparse
+
+import mozpack.path as mozpath
+from mozpack.chrome.manifest import (
+ ManifestEntryWithRelPath,
+ ManifestResource,
+ is_manifest,
+ parse_manifest,
+)
+from mozpack.copier import FileCopier, FileRegistry
+from mozpack.files import BaseFinder, DeflatedFile, FileFinder, ManifestFile
+from mozpack.mozjar import JarReader
+from mozpack.packager import SimplePackager
+from mozpack.packager.formats import FlatFormatter
+
+
class UnpackFinder(BaseFinder):
    """
    Special Finder object that treats the source package directory as if it
    were in the flat chrome format, whatever chrome format it actually is in.

    This means that for example, paths like chrome/browser/content/... match
    files under jar:chrome/browser.jar!/content/... in case of jar chrome
    format.

    The only argument to the constructor is a Finder instance or a path.
    The UnpackFinder is populated with files from this Finder instance,
    or with files from a FileFinder using the given path as its root.
    """

    def __init__(self, source, omnijar_name=None, unpack_xpi=True, **kwargs):
        # `omnijar_name` overrides the omnijar file name (default comes from
        # the build's OMNIJAR_NAME subst); `unpack_xpi` controls whether .xpi
        # archives found in the package are expanded in place. Extra kwargs
        # are forwarded to FileFinder and only allowed when `source` is a
        # path rather than an existing finder.
        if isinstance(source, BaseFinder):
            assert not kwargs
            self._finder = source
        else:
            self._finder = FileFinder(source, **kwargs)
        self.base = self._finder.base
        self.files = FileRegistry()
        # Detected chrome format; upgraded to "jar" or "omni" while scanning.
        self.kind = "flat"
        if omnijar_name:
            self.omnijar = omnijar_name
        else:
            # Can't include globally because of bootstrapping issues.
            from buildconfig import substs

            self.omnijar = substs.get("OMNIJAR_NAME", "omni.ja")
        # Maps jar path -> list of entry names up to the last preloaded one.
        self.jarlogs = {}
        # Highest compression level seen across all jars opened so far.
        self.compressed = False
        self._unpack_xpi = unpack_xpi

        # Paths of jar files already expanded via manifest entries; they must
        # not be re-registered as plain files.
        jars = set()

        for p, f in self._finder.find("*"):
            # Skip the precomplete file, which is generated at packaging time.
            if p == "precomplete":
                continue
            base = mozpath.dirname(p)
            # If the file matches the omnijar pattern, it is an omnijar.
            # All the files it contains go in the directory containing the full
            # pattern. Manifests are merged if there is a corresponding manifest
            # in the directory.
            if self._maybe_zip(f) and mozpath.match(p, "**/%s" % self.omnijar):
                jar = self._open_jar(p, f)
                if "chrome.manifest" in jar:
                    self.kind = "omni"
                    self._fill_with_jar(p[: -len(self.omnijar) - 1], jar)
                # NOTE(review): this `continue` runs even when the zip has no
                # chrome.manifest, so such a file is dropped entirely rather
                # than registered as-is — confirm this is intended.
                continue
            # If the file is a manifest, scan its entries for some referencing
            # jar: urls. If there are some, the files contained in the jar they
            # point to, go under a directory named after the jar.
            if is_manifest(p):
                m = self.files[p] if self.files.contains(p) else ManifestFile(base)
                for e in parse_manifest(
                    self.base, p, codecs.getreader("utf-8")(f.open())
                ):
                    m.add(self._handle_manifest_entry(e, jars))
                if self.files.contains(p):
                    continue
                f = m
            # If we're unpacking packed addons and the file is a packed addon,
            # unpack it under a directory named after the xpi.
            if self._unpack_xpi and p.endswith(".xpi") and self._maybe_zip(f):
                self._fill_with_jar(p[:-4], self._open_jar(p, f))
                continue
            if p not in jars:
                self.files.add(p, f)

    def _fill_with_jar(self, base, jar):
        """
        Register every entry of `jar` under the `base` directory, merging
        manifest entries into any manifest already registered at that path.
        """
        for j in jar:
            path = mozpath.join(base, j.filename)
            if is_manifest(j.filename):
                m = (
                    self.files[path]
                    if self.files.contains(path)
                    else ManifestFile(mozpath.dirname(path))
                )
                for e in parse_manifest(None, path, j):
                    m.add(e)
                if not self.files.contains(path):
                    self.files.add(path, m)
                continue
            else:
                # Entries are kept as DeflatedFile so they can be re-deflated
                # without recompressing.
                self.files.add(path, DeflatedFile(j))

    def _handle_manifest_entry(self, entry, jars):
        """
        Rewrite a manifest entry that points into a jar: url so it points at
        the unpacked location instead, expanding the referenced jar's
        contents into the registry the first time it is seen.
        """
        jarpath = None
        if (
            isinstance(entry, ManifestEntryWithRelPath)
            and urlparse(entry.relpath).scheme == "jar"
        ):
            jarpath, entry = self._unjarize(entry, entry.relpath)
        elif (
            isinstance(entry, ManifestResource)
            and urlparse(entry.target).scheme == "jar"
        ):
            jarpath, entry = self._unjarize(entry, entry.target)
        if jarpath:
            # Don't defer unpacking the jar file. If we already saw
            # it, take (and remove) it from the registry. If we
            # haven't, try to find it now.
            if self.files.contains(jarpath):
                jar = self.files[jarpath]
                self.files.remove(jarpath)
            else:
                jar = [f for p, f in self._finder.find(jarpath)]
                assert len(jar) == 1
                jar = jar[0]
            if jarpath not in jars:
                base = mozpath.splitext(jarpath)[0]
                for j in self._open_jar(jarpath, jar):
                    self.files.add(mozpath.join(base, j.filename), DeflatedFile(j))
            jars.add(jarpath)
            # Seeing any jar: manifest entry means the package uses the jar
            # chrome format.
            self.kind = "jar"
        return entry

    def _open_jar(self, path, file):
        """
        Return a JarReader for the given BaseFile instance, keeping a log of
        the preloaded entries it has.
        """
        jar = JarReader(fileobj=file.open())
        self.compressed = max(self.compressed, jar.compression)
        if jar.last_preloaded:
            jarlog = list(jar.entries.keys())
            self.jarlogs[path] = jarlog[: jarlog.index(jar.last_preloaded) + 1]
        return jar

    def find(self, path):
        """Yield (path, file) pairs from the registry matching `path`."""
        for p in self.files.match(path):
            yield p, self.files[p]

    def _maybe_zip(self, file):
        """
        Return whether the given BaseFile looks like a ZIP/Jar.
        """
        # Check both offset 0 and offset 4 for the "PK" magic; the latter
        # accounts for jars with a preload header before the zip data.
        header = file.open().read(8)
        return len(header) == 8 and (header[0:2] == b"PK" or header[4:6] == b"PK")

    def _unjarize(self, entry, relpath):
        """
        Transform a manifest entry pointing to chrome data in a jar in one
        pointing to the corresponding unpacked path. Return the jar path and
        the new entry.
        """
        base = entry.base
        jar, relpath = urlparse(relpath).path.split("!", 1)
        entry = (
            entry.rebase(mozpath.join(base, "jar:%s!" % jar))
            .move(mozpath.join(base, mozpath.splitext(jar)[0]))
            .rebase(base)
        )
        return mozpath.join(base, jar), entry
+
+
def unpack_to_registry(source, registry, omnijar_name=None):
    """
    Transform a jar chrome or omnijar packaged directory into a flat package.

    The given registry is filled with the flat package.
    """
    packager = SimplePackager(FlatFormatter(registry))
    finder = UnpackFinder(source, omnijar_name)
    for path, file in finder.find("*"):
        packager.add(path, file)
    packager.close()
+
+
def unpack(source, omnijar_name=None):
    """
    Transform a jar chrome or omnijar packaged directory into a flat package,
    rewriting the directory in place.
    """
    registry = FileCopier()
    unpack_to_registry(source, registry, omnijar_name)
    registry.copy(source, skip_if_older=False)
diff --git a/python/mozbuild/mozpack/path.py b/python/mozbuild/mozpack/path.py
new file mode 100644
index 0000000000..3e5af0a06b
--- /dev/null
+++ b/python/mozbuild/mozpack/path.py
@@ -0,0 +1,246 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Like :py:mod:`os.path`, with a reduced set of functions, and with normalized path
+separators (always use forward slashes).
+Also contains a few additional utilities not found in :py:mod:`os.path`.
+"""
+
+import ctypes
+import os
+import posixpath
+import re
+import sys
+
+
def normsep(path):
    """
    Normalize path separators, by using forward slashes instead of whatever
    :py:const:`os.sep` is. Accepts both str and bytes paths.
    """
    # Replace both the primary and the alternate separator, when they exist
    # and are not already the forward slash.
    for sep in (os.sep, os.altsep):
        if not sep or sep == "/":
            continue
        if isinstance(path, bytes):
            # Python 2 is happy to do things like byte_string.replace(u'foo',
            # u'bar'), but not Python 3.
            path = path.replace(sep.encode("ascii"), b"/")
        else:
            path = path.replace(sep, "/")
    return path
+
+
def cargo_workaround(path):
    """Strip a leading normalized '//?/' extended-length prefix, if present."""
    prefix = "//?/"
    return path[len(prefix) :] if path.startswith(prefix) else path
+
+
def relpath(path, start):
    """
    Return a separator-normalized relative path from `start` to `path`,
    or '' when the two are the same. When no relative path exists (e.g.
    different drives on Windows), return the absolute path instead.
    """
    path = normsep(path)
    start = normsep(start)
    if sys.platform == "win32":
        # os.path.relpath can't handle relative paths between UNC and non-UNC
        # paths, so strip a //?/ prefix if present (bug 1581248)
        path = cargo_workaround(path)
        start = cargo_workaround(start)
    try:
        result = normsep(os.path.relpath(path, start))
    except ValueError:
        # On Windows this can throw a ValueError if the two paths are on
        # different drives. In that case, just return the path.
        return abspath(path)
    return "" if result == "." else result
+
+
def realpath(path):
    """Like os.path.realpath, but with normalized separators."""
    resolved = os.path.realpath(path)
    return normsep(resolved)
+
+
def abspath(path):
    """Like os.path.abspath, but with normalized separators."""
    absolute = os.path.abspath(path)
    return normsep(absolute)
+
+
def join(*paths):
    """Like os.path.join, but with normalized separators."""
    joined = os.path.join(*paths)
    return normsep(joined)
+
+
def normpath(path):
    """Like os.path.normpath, operating on separator-normalized paths."""
    normalized = normsep(path)
    return posixpath.normpath(normalized)
+
+
def dirname(path):
    """Like os.path.dirname, operating on separator-normalized paths."""
    normalized = normsep(path)
    return posixpath.dirname(normalized)
+
+
def commonprefix(paths):
    """Like os.path.commonprefix, over separator-normalized paths."""
    normalized = [normsep(p) for p in paths]
    return posixpath.commonprefix(normalized)
+
+
def basename(path):
    # Delegates straight to os.path.basename; unlike the other helpers here
    # the result is not separator-normalized. NOTE(review): presumably this
    # is deliberate, since os.path.basename handles platform specifics like
    # Windows drive letters that posixpath would not — confirm.
    return os.path.basename(path)
+
+
def splitext(path):
    """Like os.path.splitext, operating on separator-normalized paths."""
    normalized = normsep(path)
    return posixpath.splitext(normalized)
+
+
def split(path):
    """
    Return the normalized path as a list of its components.

    ``split('foo/bar/baz')`` returns ``['foo', 'bar', 'baz']``
    """
    normalized = normsep(path)
    return normalized.split("/")
+
+
def basedir(path, bases):
    """
    Given a list of directories (`bases`), return which one contains the given
    path. If several matches are found, the deepest base directory is returned.

    ``basedir('foo/bar/baz', ['foo', 'baz', 'foo/bar'])`` returns ``'foo/bar'``
    (`'foo'` and `'foo/bar'` both match, but `'foo/bar'` is the deepest match)
    """
    path = normsep(path)
    candidates = [normsep(b) for b in bases]
    # The path being itself one of the bases counts as a match.
    if path in candidates:
        return path
    # Reverse-sorted order guarantees the deepest matching base comes first.
    for candidate in sorted(candidates, reverse=True):
        if candidate == "" or path.startswith(candidate + "/"):
            return candidate
+
+
# Cache of compiled pattern regexes, keyed by the original glob pattern.
re_cache = {}
# Regexes locating an escaped '**' component (mid-pattern and pattern-final)
# inside an re.escape()d glob, so match() can substitute them.
# Python versions < 3.7 return r'\/' for re.escape('/').
if re.escape("/") == "/":
    MATCH_STAR_STAR_RE = re.compile(r"(^|/)\\\*\\\*/")
    MATCH_STAR_STAR_END_RE = re.compile(r"(^|/)\\\*\\\*$")
else:
    MATCH_STAR_STAR_RE = re.compile(r"(^|\\\/)\\\*\\\*\\\/")
    MATCH_STAR_STAR_END_RE = re.compile(r"(^|\\\/)\\\*\\\*$")
+
+
def match(path, pattern):
    """
    Return whether the given path matches the given pattern.
    An asterisk can be used to match any string, including the null string, in
    one part of the path:

    ``foo`` matches ``*``, ``f*`` or ``fo*o``

    However, an asterisk matching a subdirectory may not match the null string:

    ``foo/bar`` does *not* match ``foo/*/bar``

    If the pattern matches one of the ancestor directories of the path, the
    patch is considered matching:

    ``foo/bar`` matches ``foo``

    Two adjacent asterisks can be used to match files and zero or more
    directories and subdirectories.

    ``foo/bar`` matches ``foo/**/bar``, or ``**/bar``
    """
    # The empty pattern matches everything.
    if not pattern:
        return True
    compiled = re_cache.get(pattern)
    if compiled is None:
        # Translate the glob into a regex: '**' components may swallow any
        # number of directories, '*' stays within a single component, and a
        # trailing '(?:/.*)?' makes ancestor-directory patterns match.
        escaped = re.escape(pattern)
        escaped = MATCH_STAR_STAR_RE.sub(r"\1(?:.+/)?", escaped)
        escaped = MATCH_STAR_STAR_END_RE.sub(r"(?:\1.+)?", escaped)
        compiled = re.compile(escaped.replace(r"\*", "[^/]*") + "(?:/.*)?$")
        re_cache[pattern] = compiled
    return compiled.match(path) is not None
+
+
def rebase(oldbase, base, relativepath):
    """
    Return `relativepath` relative to `base` instead of `oldbase`.
    """
    if base == oldbase:
        return relativepath
    if len(base) < len(oldbase):
        # Rebasing onto an ancestor: prefix with the path from base down to
        # oldbase.
        assert basedir(oldbase, [base]) == base
        result = join(relpath(oldbase, base), relativepath)
    else:
        # Rebasing onto a descendant: strip the path from oldbase down to
        # base.
        assert basedir(base, [oldbase]) == oldbase
        result = relpath(relativepath, relpath(base, oldbase))
    result = normpath(result)
    # normpath drops trailing slashes; restore one if the input had it.
    if relativepath.endswith("/") and not result.endswith("/"):
        result += "/"
    return result
+
+
def readlink(path):
    """
    Return the target of the symbolic link `path`, with normalized
    separators, falling back to the Win32 API on platforms whose Python
    lacks os.readlink.
    """
    if hasattr(os, "readlink"):
        return normsep(os.readlink(path))

    # Unfortunately os.path.realpath doesn't support symlinks on Windows, and os.readlink
    # is only available on Windows with Python 3.2+. We have to resort to ctypes...

    assert sys.platform == "win32"

    # NOTE(review): this file only does `import ctypes`; accessing
    # ctypes.wintypes usually requires `import ctypes.wintypes` first —
    # confirm this path has been exercised.
    CreateFileW = ctypes.windll.kernel32.CreateFileW
    CreateFileW.argtypes = [
        ctypes.wintypes.LPCWSTR,
        ctypes.wintypes.DWORD,
        ctypes.wintypes.DWORD,
        ctypes.wintypes.LPVOID,
        ctypes.wintypes.DWORD,
        ctypes.wintypes.DWORD,
        ctypes.wintypes.HANDLE,
    ]
    CreateFileW.restype = ctypes.wintypes.HANDLE

    GENERIC_READ = 0x80000000
    FILE_SHARE_READ = 0x00000001
    OPEN_EXISTING = 3
    # Required to open a directory handle rather than its contents.
    FILE_FLAG_BACKUP_SEMANTICS = 0x02000000

    handle = CreateFileW(
        path,
        GENERIC_READ,
        FILE_SHARE_READ,
        0,
        OPEN_EXISTING,
        FILE_FLAG_BACKUP_SEMANTICS,
        0,
    )
    # NOTE(review): CreateFileW reports failure with INVALID_HANDLE_VALUE
    # (-1); comparing against 1 looks like a typo — confirm.
    assert handle != 1, "Failed getting a handle to: {}".format(path)

    MAX_PATH = 260

    buf = ctypes.create_unicode_buffer(MAX_PATH)
    GetFinalPathNameByHandleW = ctypes.windll.kernel32.GetFinalPathNameByHandleW
    GetFinalPathNameByHandleW.argtypes = [
        ctypes.wintypes.HANDLE,
        ctypes.wintypes.LPWSTR,
        ctypes.wintypes.DWORD,
        ctypes.wintypes.DWORD,
    ]
    GetFinalPathNameByHandleW.restype = ctypes.wintypes.DWORD

    FILE_NAME_NORMALIZED = 0x0

    # Returns the length of the resolved path; 0 means failure, and a value
    # larger than the buffer means the buffer was too small.
    rv = GetFinalPathNameByHandleW(handle, buf, MAX_PATH, FILE_NAME_NORMALIZED)
    assert rv != 0 and rv <= MAX_PATH, "Failed getting final path for: {}".format(path)

    CloseHandle = ctypes.windll.kernel32.CloseHandle
    CloseHandle.argtypes = [ctypes.wintypes.HANDLE]
    CloseHandle.restype = ctypes.wintypes.BOOL

    rv = CloseHandle(handle)
    assert rv != 0, "Failed closing handle"

    # Remove leading '\\?\' from the result.
    return normsep(buf.value[4:])
diff --git a/python/mozbuild/mozpack/pkg.py b/python/mozbuild/mozpack/pkg.py
new file mode 100644
index 0000000000..75a63b9746
--- /dev/null
+++ b/python/mozbuild/mozpack/pkg.py
@@ -0,0 +1,299 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import concurrent.futures
+import lzma
+import os
+import plistlib
+import struct
+import subprocess
+from pathlib import Path
+from string import Template
+from typing import List
+from urllib.parse import quote
+
+import mozfile
+
# Directory holding the PackageInfo/Distribution templates shipped next to
# this module.
TEMPLATE_DIRECTORY = Path(__file__).parent / "apple_pkg"
PBZX_CHUNK_SIZE = 16 * 1024 * 1024  # 16MB chunks
+
+
def get_apple_template(name: str) -> Template:
    """
    Load <TEMPLATE_DIRECTORY>/<name> and return its contents as a Template.

    Args:
        name: str, Filename for the template

    Returns:
        Template, loaded from file

    Raises:
        Exception: if no file exists at the computed path
    """
    tmpl_path = TEMPLATE_DIRECTORY / name
    if not tmpl_path.is_file():
        raise Exception(f"Could not find template: {tmpl_path}")
    return Template(tmpl_path.read_text())
+
+
def save_text_file(content: str, destination: Path):
    """
    Saves a text file to <destination> with provided <content>
    Note: Overwrites contents

    Args:
        content: str, The desired contents of the file
        destination: Path, The file path
    """
    destination.write_text(content)
    print(f"Created text file at {destination}")
    print(f"Created text file size: {destination.stat().st_size} bytes")
+
+
def get_app_info_plist(app_path: Path) -> dict:
    """
    Parse <app_path>/Contents/Info.plist and return its properties.

    Args:
        app_path: Path, the .app file/directory path

    Returns:
        dict, the dictionary of properties found in Info.plist

    Raises:
        Exception: if Contents/Info.plist does not exist under app_path
    """
    info_plist = app_path / "Contents/Info.plist"
    if not info_plist.is_file():
        raise Exception(f"Could not find Info.plist in {info_plist}")

    print(f"Reading app Info.plist from: {info_plist}")

    with info_plist.open("rb") as plist_fd:
        return plistlib.load(plist_fd)
+
+
def create_payload(destination: Path, root_path: Path, cpio_tool: str):
    """
    Creates a payload at <destination> based on <root_path>

    The payload is a cpio archive of <root_path>, wrapped in Apple's pbzx
    container with lzma-compressed chunks.

    Args:
        destination: Path, the destination Path
        root_path: Path, the root directory Path
        cpio_tool: str, path of the cpio executable to archive with
    """
    # Files to be cpio'd are root folder + contents
    file_list = ["./"] + get_relative_glob_list(root_path, "**/*")

    with mozfile.TemporaryDirectory() as tmp_dir:
        tmp_payload_path = Path(tmp_dir) / "Payload"
        print(f"Creating Payload with cpio from {root_path} to {tmp_payload_path}")
        print(f"Found {len(file_list)} files")
        with tmp_payload_path.open("wb") as tmp_payload:
            process = subprocess.run(
                [
                    cpio_tool,
                    "-o",  # copy-out mode
                    "--format",
                    "odc",  # old POSIX .1 portable format
                    "--owner",
                    "0:80",  # clean ownership
                ],
                stdout=tmp_payload,
                stderr=subprocess.PIPE,
                # The file names to archive are fed on stdin, one per line.
                input="\n".join(file_list) + "\n",
                encoding="ascii",
                cwd=root_path,
            )
        # cpio outputs number of blocks to stderr
        print(f"[CPIO]: {process.stderr}")
        if process.returncode:
            raise Exception(f"CPIO error {process.returncode}")

        tmp_payload_size = tmp_payload_path.stat().st_size
        print(f"Uncompressed Payload size: {tmp_payload_size // 1024}kb")

        def compress_chunk(chunk):
            # Return (original size, compressed bytes) so the writer can emit
            # the per-chunk pbzx header.
            compressed_chunk = lzma.compress(chunk)
            return len(chunk), compressed_chunk

        def chunker(fileobj, chunk_size):
            # Yield successive chunk_size reads from fileobj until EOF.
            while True:
                chunk = fileobj.read(chunk_size)
                if not chunk:
                    break
                yield chunk

        with tmp_payload_path.open("rb") as f_in, destination.open(
            "wb"
        ) as f_out, concurrent.futures.ThreadPoolExecutor(
            max_workers=os.cpu_count()
        ) as executor:
            # pbzx container header: magic bytes then the chunk size as a
            # 64-bit big-endian integer.
            f_out.write(b"pbzx")
            f_out.write(struct.pack(">Q", PBZX_CHUNK_SIZE))
            chunks = chunker(f_in, PBZX_CHUNK_SIZE)
            # executor.map preserves chunk order, so chunks are compressed in
            # parallel but written sequentially.
            for uncompressed_size, compressed_chunk in executor.map(
                compress_chunk, chunks
            ):
                f_out.write(struct.pack(">Q", uncompressed_size))
                if len(compressed_chunk) < uncompressed_size:
                    f_out.write(struct.pack(">Q", len(compressed_chunk)))
                    f_out.write(compressed_chunk)
                else:
                    # Considering how unlikely this is, we prefer to just decompress
                    # here than to keep the original uncompressed chunk around
                    f_out.write(struct.pack(">Q", uncompressed_size))
                    f_out.write(lzma.decompress(compressed_chunk))

    print(f"Compressed Payload file to {destination}")
    print(f"Compressed Payload size: {destination.stat().st_size // 1024}kb")
+
+
def create_bom(bom_path: Path, root_path: Path, mkbom_tool: Path):
    """
    Creates a Bill Of Materials file at <bom_path> based on <root_path>

    Args:
        bom_path: Path, destination Path for the BOM file
        root_path: Path, root directory Path
        mkbom_tool: Path, mkbom tool Path
    """
    print(f"Creating BOM file from {root_path} to {bom_path}")
    # -u 0 / -g 80: record clean root:admin ownership in the BOM.
    command = [mkbom_tool, "-u", "0", "-g", "80", str(root_path), str(bom_path)]
    subprocess.check_call(command)
    print(f"Created BOM File size: {bom_path.stat().st_size // 1024}kb")
+
+
def get_relative_glob_list(source: Path, glob: str) -> List[str]:
    """
    Given a source path, return a list of relative path based on glob

    Args:
        source: Path, source directory Path
        glob: str, unix style glob

    Returns:
        list[str], paths found in source directory, each prefixed with './'
    """
    matches = source.glob(glob)
    return ["./" + str(found.relative_to(source)) for found in matches]
+
+
def xar_package_folder(source_path: Path, destination: Path, xar_tool: Path):
    """
    Create a pkg from <source_path> to <destination>
    The command is issued with <source_path> as cwd

    Args:
        source_path: Path, source absolute Path
        destination: Path, destination absolute Path
        xar_tool: Path, xar tool Path

    Raises:
        Exception: if either path is not absolute
    """
    if not (source_path.is_absolute() and destination.is_absolute()):
        raise Exception("Source and destination should be absolute.")

    print(f"Creating pkg from {source_path} to {destination}")
    # Create a list of ./<file> - noting xar takes care of <file>/**
    top_level = get_relative_glob_list(source_path, "*")

    command = [xar_tool, "--compression", "none", "-vcf", destination]
    command.extend(top_level)
    subprocess.check_call(command, cwd=source_path)
    print(f"Created PKG file to {destination}")
    print(f"Created PKG size: {destination.stat().st_size // 1024}kb")
+
+
def create_pkg(
    source_app: Path,
    output_pkg: Path,
    mkbom_tool: Path,
    xar_tool: Path,
    cpio_tool: Path,
):
    """
    Create a mac PKG installer from <source_app> to <output_pkg>

    Args:
        source_app: Path, source .app file/directory Path
        output_pkg: Path, destination .pkg file
        mkbom_tool: Path, mkbom tool Path
        xar_tool: Path, xar tool Path
        cpio_tool: Path, cpio tool Path
    """

    # "Firefox.app" -> "Firefox"
    app_name = source_app.name.rsplit(".", maxsplit=1)[0]

    with mozfile.TemporaryDirectory() as tmpdir:
        # root: the files to install; flat: the pkg metadata layout.
        root_path = Path(tmpdir) / "darwin/root"
        flat_path = Path(tmpdir) / "darwin/flat"

        # Create required directories
        # TODO: Investigate Resources folder contents for other lproj?
        (flat_path / "Resources/en.lproj").mkdir(parents=True, exist_ok=True)
        (flat_path / f"{app_name}.pkg").mkdir(parents=True, exist_ok=True)
        root_path.mkdir(parents=True, exist_ok=True)

        # Copy files over
        subprocess.check_call(
            [
                "cp",
                "-R",
                str(source_app),
                str(root_path),
            ]
        )

        # Count all files (innards + itself)
        file_count = len(list(source_app.glob("**/*"))) + 1
        print(f"Calculated source files count: {file_count}")
        # Get package contents size
        package_size = sum(f.stat().st_size for f in source_app.glob("**/*")) // 1024
        print(f"Calculated source package size: {package_size}kb")

        # The templates are filled from the app's Info.plist values plus the
        # derived entries added below.
        app_info = get_app_info_plist(source_app)
        app_info["numberOfFiles"] = file_count
        app_info["installKBytes"] = package_size
        app_info["app_name"] = app_name
        app_info["app_name_url_encoded"] = quote(app_name)

        # This seems arbitrary, there might be another way of doing it,
        # but Info.plist doesn't provide the simple version we need
        major_version = app_info["CFBundleShortVersionString"].split(".")[0]
        app_info["simple_version"] = f"{major_version}.0.0"

        pkg_info_tmpl = get_apple_template("PackageInfo.template")
        pkg_info = pkg_info_tmpl.substitute(app_info)
        save_text_file(pkg_info, flat_path / f"{app_name}.pkg/PackageInfo")

        distribution_tmp = get_apple_template("Distribution.template")
        distribution = distribution_tmp.substitute(app_info)
        save_text_file(distribution, flat_path / "Distribution")

        payload_path = flat_path / f"{app_name}.pkg/Payload"
        create_payload(payload_path, root_path, cpio_tool)

        bom_path = flat_path / f"{app_name}.pkg/Bom"
        create_bom(bom_path, root_path, mkbom_tool)

        # Finally wrap the flat layout into the xar archive that is the .pkg.
        xar_package_folder(flat_path, output_pkg, xar_tool)
diff --git a/python/mozbuild/mozpack/test/__init__.py b/python/mozbuild/mozpack/test/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozbuild/mozpack/test/__init__.py
diff --git a/python/mozbuild/mozpack/test/data/test_data b/python/mozbuild/mozpack/test/data/test_data
new file mode 100644
index 0000000000..fb7f0c4fc2
--- /dev/null
+++ b/python/mozbuild/mozpack/test/data/test_data
@@ -0,0 +1 @@
+test_data \ No newline at end of file
diff --git a/python/mozbuild/mozpack/test/python.ini b/python/mozbuild/mozpack/test/python.ini
new file mode 100644
index 0000000000..2b229de945
--- /dev/null
+++ b/python/mozbuild/mozpack/test/python.ini
@@ -0,0 +1,18 @@
+[DEFAULT]
+subsuite = mozbuild
+
+[test_archive.py]
+[test_chrome_flags.py]
+[test_chrome_manifest.py]
+[test_copier.py]
+[test_errors.py]
+[test_files.py]
+[test_manifests.py]
+[test_mozjar.py]
+[test_packager.py]
+[test_packager_formats.py]
+[test_packager_l10n.py]
+[test_packager_unpack.py]
+[test_path.py]
+[test_pkg.py]
+[test_unify.py]
diff --git a/python/mozbuild/mozpack/test/support/minify_js_verify.py b/python/mozbuild/mozpack/test/support/minify_js_verify.py
new file mode 100644
index 0000000000..88cc0ece0c
--- /dev/null
+++ b/python/mozbuild/mozpack/test/support/minify_js_verify.py
@@ -0,0 +1,15 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import sys
+
# Test-support script: invoked as `minify_js_verify <exitcode> <orig>
# <minified>`. It exits with the requested code, printing a canned message
# to stderr for non-zero codes; the <orig>/<minified> arguments are accepted
# but unused.
if len(sys.argv) != 4:
    raise Exception("Usage: minify_js_verify <exitcode> <orig> <minified>")

retcode = int(sys.argv[1])

if retcode:
    print("Error message", file=sys.stderr)

sys.exit(retcode)
diff --git a/python/mozbuild/mozpack/test/test_archive.py b/python/mozbuild/mozpack/test/test_archive.py
new file mode 100644
index 0000000000..3417f279df
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_archive.py
@@ -0,0 +1,197 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import hashlib
+import os
+import shutil
+import stat
+import tarfile
+import tempfile
+import unittest
+
+import pytest
+from mozunit import main
+
+from mozpack.archive import (
+ DEFAULT_MTIME,
+ create_tar_bz2_from_files,
+ create_tar_from_files,
+ create_tar_gz_from_files,
+)
+from mozpack.files import GeneratedFile
+
+MODE_STANDARD = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH
+
+
def file_hash(path):
    """Return the SHA-1 hex digest of the file at `path`."""
    digest = hashlib.sha1()
    with open(path, "rb") as fh:
        # Read in fixed-size chunks to keep memory bounded.
        for chunk in iter(lambda: fh.read(8192), b""):
            digest.update(chunk)
    return digest.hexdigest()
+
+
class TestArchive(unittest.TestCase):
    """Tests for mozpack.archive's deterministic tar creation helpers."""

    def _create_files(self, root):
        """Create 10 on-disk files under `root` plus 10 GeneratedFiles."""
        files = {}
        for i in range(10):
            p = os.path.join(root, "file%02d" % i)
            with open(p, "wb") as fh:
                fh.write(b"file%02d" % i)
            # Need to set permissions or umask may influence testing.
            os.chmod(p, MODE_STANDARD)
            files["file%02d" % i] = p

        for i in range(10):
            files["file%02d" % (i + 10)] = GeneratedFile(b"file%02d" % (i + 10))

        return files

    def _verify_basic_tarfile(self, tf):
        """Check that `tf` holds the 20 files with normalized metadata."""
        self.assertEqual(len(tf.getmembers()), 20)

        names = ["file%02d" % i for i in range(20)]
        self.assertEqual(tf.getnames(), names)

        for ti in tf.getmembers():
            self.assertEqual(ti.uid, 0)
            self.assertEqual(ti.gid, 0)
            self.assertEqual(ti.uname, "")
            self.assertEqual(ti.gname, "")
            self.assertEqual(ti.mode, MODE_STANDARD)
            self.assertEqual(ti.mtime, DEFAULT_MTIME)

    @pytest.mark.xfail(
        reason="ValueError is not thrown despite being provided directory."
    )
    def test_dirs_refused(self):
        """Archiving a directory entry should be rejected."""
        d = tempfile.mkdtemp()
        try:
            tp = os.path.join(d, "test.tar")
            with open(tp, "wb") as fh:
                # assertRaisesRegexp was a deprecated alias removed in
                # Python 3.12; use the canonical name.
                with self.assertRaisesRegex(ValueError, "not a regular"):
                    create_tar_from_files(fh, {"test": d})
        finally:
            shutil.rmtree(d)

    @pytest.mark.xfail(reason="ValueError is not thrown despite uid/gid being set.")
    def test_setuid_setgid_refused(self):
        """Archiving setuid/setgid files should be rejected."""
        d = tempfile.mkdtemp()
        try:
            uid = os.path.join(d, "setuid")
            gid = os.path.join(d, "setgid")
            with open(uid, "a"):
                pass
            with open(gid, "a"):
                pass

            os.chmod(uid, MODE_STANDARD | stat.S_ISUID)
            os.chmod(gid, MODE_STANDARD | stat.S_ISGID)

            tp = os.path.join(d, "test.tar")
            with open(tp, "wb") as fh:
                with self.assertRaisesRegex(ValueError, "cannot add file with setuid"):
                    create_tar_from_files(fh, {"test": uid})
                with self.assertRaisesRegex(ValueError, "cannot add file with setuid"):
                    create_tar_from_files(fh, {"test": gid})
        finally:
            shutil.rmtree(d)

    def test_create_tar_basic(self):
        """An uncompressed tar is written deterministically."""
        d = tempfile.mkdtemp()
        try:
            files = self._create_files(d)

            tp = os.path.join(d, "test.tar")
            with open(tp, "wb") as fh:
                create_tar_from_files(fh, files)

            # Output should be deterministic.
            self.assertEqual(file_hash(tp), "01cd314e277f060e98c7de6c8ea57f96b3a2065c")

            with tarfile.open(tp, "r") as tf:
                self._verify_basic_tarfile(tf)

        finally:
            shutil.rmtree(d)

    @pytest.mark.xfail(reason="hash mismatch")
    def test_executable_preserved(self):
        """The executable bit survives archiving."""
        d = tempfile.mkdtemp()
        try:
            p = os.path.join(d, "exec")
            with open(p, "wb") as fh:
                # The file is opened in binary mode, so the content must be
                # bytes; a str here raised TypeError before it could even
                # reach the hash comparison.
                fh.write(b"#!/bin/bash\n")
            os.chmod(p, MODE_STANDARD | stat.S_IXUSR)

            tp = os.path.join(d, "test.tar")
            with open(tp, "wb") as fh:
                create_tar_from_files(fh, {"exec": p})

            self.assertEqual(file_hash(tp), "357e1b81c0b6cfdfa5d2d118d420025c3c76ee93")

            with tarfile.open(tp, "r") as tf:
                m = tf.getmember("exec")
                self.assertEqual(m.mode, MODE_STANDARD | stat.S_IXUSR)

        finally:
            shutil.rmtree(d)

    def test_create_tar_gz_basic(self):
        """A gzip-compressed tar is written deterministically."""
        d = tempfile.mkdtemp()
        try:
            files = self._create_files(d)

            gp = os.path.join(d, "test.tar.gz")
            with open(gp, "wb") as fh:
                create_tar_gz_from_files(fh, files)

            self.assertEqual(file_hash(gp), "7c4da5adc5088cdf00911d5daf9a67b15de714b7")

            with tarfile.open(gp, "r:gz") as tf:
                self._verify_basic_tarfile(tf)

        finally:
            shutil.rmtree(d)

    def test_tar_gz_name(self):
        """The gzip `filename` field changes the output hash predictably."""
        d = tempfile.mkdtemp()
        try:
            files = self._create_files(d)

            gp = os.path.join(d, "test.tar.gz")
            with open(gp, "wb") as fh:
                create_tar_gz_from_files(fh, files, filename="foobar")

            self.assertEqual(file_hash(gp), "721e00083c17d16df2edbddf40136298c06d0c49")

            with tarfile.open(gp, "r:gz") as tf:
                self._verify_basic_tarfile(tf)

        finally:
            shutil.rmtree(d)

    def test_create_tar_bz2_basic(self):
        """A bzip2-compressed tar is written deterministically."""
        d = tempfile.mkdtemp()
        try:
            files = self._create_files(d)

            bp = os.path.join(d, "test.tar.bz2")
            with open(bp, "wb") as fh:
                create_tar_bz2_from_files(fh, files)

            self.assertEqual(file_hash(bp), "eb5096d2fbb71df7b3d690001a6f2e82a5aad6a7")

            with tarfile.open(bp, "r:bz2") as tf:
                self._verify_basic_tarfile(tf)
        finally:
            shutil.rmtree(d)
+
+
# Allow running this test file directly via mozunit's runner.
if __name__ == "__main__":
    main()
diff --git a/python/mozbuild/mozpack/test/test_chrome_flags.py b/python/mozbuild/mozpack/test/test_chrome_flags.py
new file mode 100644
index 0000000000..4f1a968dc2
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_chrome_flags.py
@@ -0,0 +1,150 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+
+import mozunit
+
+from mozpack.chrome.flags import Flag, Flags, StringFlag, VersionFlag
+from mozpack.errors import ErrorMessage
+
+
class TestFlag(unittest.TestCase):
    """Tests for the individual chrome-manifest flag classes."""

    def test_flag(self):
        """Boolean Flag: matching, redefinition, and invalid definitions."""
        flag = Flag("flag")
        self.assertEqual(str(flag), "")
        # An undefined boolean flag matches only falsy values.
        self.assertTrue(flag.matches(False))
        self.assertTrue(flag.matches("false"))
        self.assertFalse(flag.matches("true"))
        # Empty, non-boolean, and negated definitions are all rejected.
        self.assertRaises(ErrorMessage, flag.add_definition, "flag=")
        self.assertRaises(ErrorMessage, flag.add_definition, "flag=42")
        self.assertRaises(ErrorMessage, flag.add_definition, "flag!=false")

        flag.add_definition("flag=1")
        self.assertEqual(str(flag), "flag=1")
        self.assertTrue(flag.matches(True))
        self.assertTrue(flag.matches("1"))
        self.assertFalse(flag.matches("no"))

        flag.add_definition("flag=true")
        self.assertEqual(str(flag), "flag=true")
        self.assertTrue(flag.matches(True))
        self.assertTrue(flag.matches("true"))
        self.assertFalse(flag.matches("0"))

        flag.add_definition("flag=no")
        self.assertEqual(str(flag), "flag=no")
        self.assertTrue(flag.matches("false"))
        self.assertFalse(flag.matches("1"))

        # A bare "flag" definition means the flag is set (truthy).
        flag.add_definition("flag")
        self.assertEqual(str(flag), "flag")
        self.assertFalse(flag.matches("false"))
        self.assertTrue(flag.matches("true"))
        self.assertFalse(flag.matches(False))

    def test_string_flag(self):
        """StringFlag: equality/inequality definitions and matching."""
        flag = StringFlag("flag")
        self.assertEqual(str(flag), "")
        # An undefined string flag matches anything.
        self.assertTrue(flag.matches("foo"))
        # Ordered comparisons are not valid for string flags.
        self.assertRaises(ErrorMessage, flag.add_definition, "flag>=2")

        flag.add_definition("flag=foo")
        self.assertEqual(str(flag), "flag=foo")
        self.assertTrue(flag.matches("foo"))
        self.assertFalse(flag.matches("bar"))

        # Multiple equality definitions are OR'ed together.
        flag.add_definition("flag=bar")
        self.assertEqual(str(flag), "flag=foo flag=bar")
        self.assertTrue(flag.matches("foo"))
        self.assertTrue(flag.matches("bar"))
        self.assertFalse(flag.matches("baz"))

        flag = StringFlag("flag")
        flag.add_definition("flag!=bar")
        self.assertEqual(str(flag), "flag!=bar")
        self.assertTrue(flag.matches("foo"))
        self.assertFalse(flag.matches("bar"))

    def test_version_flag(self):
        """VersionFlag: version comparisons and invalid definitions."""
        flag = VersionFlag("flag")
        self.assertEqual(str(flag), "")
        # An undefined version flag matches anything.
        self.assertTrue(flag.matches("1.0"))
        # Inequality is not valid for version flags.
        self.assertRaises(ErrorMessage, flag.add_definition, "flag!=2")

        flag.add_definition("flag=1.0")
        self.assertEqual(str(flag), "flag=1.0")
        self.assertTrue(flag.matches("1.0"))
        self.assertFalse(flag.matches("2.0"))

        # Multiple equality definitions are OR'ed together.
        flag.add_definition("flag=2.0")
        self.assertEqual(str(flag), "flag=1.0 flag=2.0")
        self.assertTrue(flag.matches("1.0"))
        self.assertTrue(flag.matches("2.0"))
        self.assertFalse(flag.matches("3.0"))

        flag = VersionFlag("flag")
        flag.add_definition("flag>=2.0")
        self.assertEqual(str(flag), "flag>=2.0")
        self.assertFalse(flag.matches("1.0"))
        self.assertTrue(flag.matches("2.0"))
        self.assertTrue(flag.matches("3.0"))

        # Ranges are OR'ed: matching either >=2.0 or <1.10 succeeds.
        flag.add_definition("flag<1.10")
        self.assertEqual(str(flag), "flag>=2.0 flag<1.10")
        self.assertTrue(flag.matches("1.0"))
        self.assertTrue(flag.matches("1.9"))
        self.assertFalse(flag.matches("1.10"))
        self.assertFalse(flag.matches("1.20"))
        self.assertTrue(flag.matches("2.0"))
        self.assertTrue(flag.matches("3.0"))
        # Comparison operators require a version operand.
        self.assertRaises(Exception, flag.add_definition, "flag<")
        self.assertRaises(Exception, flag.add_definition, "flag>")
        self.assertRaises(Exception, flag.add_definition, "flag>=")
        self.assertRaises(Exception, flag.add_definition, "flag<=")
        self.assertRaises(Exception, flag.add_definition, "flag!=1.0")
+
+
class TestFlags(unittest.TestCase):
    """Tests for the Flags aggregate over a mixed set of flag definitions."""

    def setUp(self):
        self.flags = Flags(
            "contentaccessible=yes",
            "appversion>=3.5",
            "application=foo",
            "application=bar",
            "appversion<2.0",
            "platform",
            "abi!=Linux_x86-gcc3",
        )

    def test_flags_str(self):
        """Definitions are grouped by flag name in the string form."""
        self.assertEqual(
            str(self.flags),
            "contentaccessible=yes "
            + "appversion>=3.5 appversion<2.0 application=foo "
            + "application=bar platform abi!=Linux_x86-gcc3",
        )

    def test_flags_match_unset(self):
        """A flag with no definition matches any value."""
        self.assertTrue(self.flags.match(os="WINNT"))

    def test_flags_match(self):
        self.assertTrue(self.flags.match(application="foo"))
        self.assertFalse(self.flags.match(application="qux"))

    def test_flags_match_different(self):
        """Negated definitions match everything except the given value."""
        self.assertTrue(self.flags.match(abi="WINNT_x86-MSVC"))
        self.assertFalse(self.flags.match(abi="Linux_x86-gcc3"))

    def test_flags_match_version(self):
        """Version ranges are OR'ed: <2.0 or >=3.5 both match."""
        self.assertTrue(self.flags.match(appversion="1.0"))
        self.assertTrue(self.flags.match(appversion="1.5"))
        self.assertFalse(self.flags.match(appversion="2.0"))
        self.assertFalse(self.flags.match(appversion="3.0"))
        self.assertTrue(self.flags.match(appversion="3.5"))
        self.assertTrue(self.flags.match(appversion="3.10"))
+
+
# Allow running this test file directly via mozunit's runner.
if __name__ == "__main__":
    mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_chrome_manifest.py b/python/mozbuild/mozpack/test/test_chrome_manifest.py
new file mode 100644
index 0000000000..c1d5826bbc
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_chrome_manifest.py
@@ -0,0 +1,176 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import unittest
+
+import mozunit
+
+from mozpack.chrome.manifest import (
+ MANIFESTS_TYPES,
+ Manifest,
+ ManifestBinaryComponent,
+ ManifestCategory,
+ ManifestComponent,
+ ManifestContent,
+ ManifestContract,
+ ManifestInterfaces,
+ ManifestLocale,
+ ManifestOverlay,
+ ManifestOverride,
+ ManifestResource,
+ ManifestSkin,
+ ManifestStyle,
+ parse_manifest,
+ parse_manifest_line,
+)
+from mozpack.errors import AccumulatedErrors, errors
+from test_errors import TestErrors
+
+
+class TestManifest(unittest.TestCase):
+ def test_parse_manifest(self):
+ manifest = [
+ "content global content/global/",
+ "content global content/global/ application=foo application=bar"
+ + " platform",
+ "locale global en-US content/en-US/",
+ "locale global en-US content/en-US/ application=foo",
+ "skin global classic/1.0 content/skin/classic/",
+ "skin global classic/1.0 content/skin/classic/ application=foo"
+ + " os=WINNT",
+ "",
+ "manifest pdfjs/chrome.manifest",
+ "resource gre-resources toolkit/res/",
+ "override chrome://global/locale/netError.dtd"
+ + " chrome://browser/locale/netError.dtd",
+ "# Comment",
+ "component {b2bba4df-057d-41ea-b6b1-94a10a8ede68} foo.js",
+ "contract @mozilla.org/foo;1" + " {b2bba4df-057d-41ea-b6b1-94a10a8ede68}",
+ "interfaces foo.xpt",
+ "binary-component bar.so",
+ "category command-line-handler m-browser"
+ + " @mozilla.org/browser/clh;1"
+ + " application={ec8030f7-c20a-464f-9b0e-13a3a9e97384}",
+ "style chrome://global/content/viewSource.xul" + " chrome://browser/skin/",
+ "overlay chrome://global/content/viewSource.xul"
+ + " chrome://browser/content/viewSourceOverlay.xul",
+ ]
+ other_manifest = ["content global content/global/"]
+ expected_result = [
+ ManifestContent("", "global", "content/global/"),
+ ManifestContent(
+ "",
+ "global",
+ "content/global/",
+ "application=foo",
+ "application=bar",
+ "platform",
+ ),
+ ManifestLocale("", "global", "en-US", "content/en-US/"),
+ ManifestLocale("", "global", "en-US", "content/en-US/", "application=foo"),
+ ManifestSkin("", "global", "classic/1.0", "content/skin/classic/"),
+ ManifestSkin(
+ "",
+ "global",
+ "classic/1.0",
+ "content/skin/classic/",
+ "application=foo",
+ "os=WINNT",
+ ),
+ Manifest("", "pdfjs/chrome.manifest"),
+ ManifestResource("", "gre-resources", "toolkit/res/"),
+ ManifestOverride(
+ "",
+ "chrome://global/locale/netError.dtd",
+ "chrome://browser/locale/netError.dtd",
+ ),
+ ManifestComponent("", "{b2bba4df-057d-41ea-b6b1-94a10a8ede68}", "foo.js"),
+ ManifestContract(
+ "", "@mozilla.org/foo;1", "{b2bba4df-057d-41ea-b6b1-94a10a8ede68}"
+ ),
+ ManifestInterfaces("", "foo.xpt"),
+ ManifestBinaryComponent("", "bar.so"),
+ ManifestCategory(
+ "",
+ "command-line-handler",
+ "m-browser",
+ "@mozilla.org/browser/clh;1",
+ "application=" + "{ec8030f7-c20a-464f-9b0e-13a3a9e97384}",
+ ),
+ ManifestStyle(
+ "", "chrome://global/content/viewSource.xul", "chrome://browser/skin/"
+ ),
+ ManifestOverlay(
+ "",
+ "chrome://global/content/viewSource.xul",
+ "chrome://browser/content/viewSourceOverlay.xul",
+ ),
+ ]
+ with mozunit.MockedOpen(
+ {
+ "manifest": "\n".join(manifest),
+ "other/manifest": "\n".join(other_manifest),
+ }
+ ):
+ # Ensure we have tests for all types of manifests.
+ self.assertEqual(
+ set(type(e) for e in expected_result), set(MANIFESTS_TYPES.values())
+ )
+ self.assertEqual(
+ list(parse_manifest(os.curdir, "manifest")), expected_result
+ )
+ self.assertEqual(
+ list(parse_manifest(os.curdir, "other/manifest")),
+ [ManifestContent("other", "global", "content/global/")],
+ )
+
+ def test_manifest_rebase(self):
+ m = parse_manifest_line("chrome", "content global content/global/")
+ m = m.rebase("")
+ self.assertEqual(str(m), "content global chrome/content/global/")
+ m = m.rebase("chrome")
+ self.assertEqual(str(m), "content global content/global/")
+
+ m = parse_manifest_line("chrome/foo", "content global content/global/")
+ m = m.rebase("chrome")
+ self.assertEqual(str(m), "content global foo/content/global/")
+ m = m.rebase("chrome/foo")
+ self.assertEqual(str(m), "content global content/global/")
+
+ m = parse_manifest_line("modules/foo", "resource foo ./")
+ m = m.rebase("modules")
+ self.assertEqual(str(m), "resource foo foo/")
+ m = m.rebase("modules/foo")
+ self.assertEqual(str(m), "resource foo ./")
+
+ m = parse_manifest_line("chrome", "content browser browser/content/")
+ m = m.rebase("chrome/browser").move("jar:browser.jar!").rebase("")
+ self.assertEqual(str(m), "content browser jar:browser.jar!/content/")
+
+
+class TestManifestErrors(TestErrors, unittest.TestCase):
+ def test_parse_manifest_errors(self):
+ manifest = [
+ "skin global classic/1.0 content/skin/classic/ platform",
+ "",
+ "binary-component bar.so",
+ "unsupported foo",
+ ]
+ with mozunit.MockedOpen({"manifest": "\n".join(manifest)}):
+ with self.assertRaises(AccumulatedErrors):
+ with errors.accumulate():
+ list(parse_manifest(os.curdir, "manifest"))
+ out = self.get_output()
+ # Expecting 2 errors
+ self.assertEqual(len(out), 2)
+ path = os.path.abspath("manifest")
+ # First on line 1
+ self.assertTrue(out[0].startswith("error: %s:1: " % path))
+ # Second on line 4
+ self.assertTrue(out[1].startswith("error: %s:4: " % path))
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_copier.py b/python/mozbuild/mozpack/test/test_copier.py
new file mode 100644
index 0000000000..60ebd2c1e9
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_copier.py
@@ -0,0 +1,548 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import stat
+import unittest
+
+import mozunit
+import six
+
+import mozpack.path as mozpath
+from mozpack.copier import FileCopier, FileRegistry, FileRegistrySubtree, Jarrer
+from mozpack.errors import ErrorMessage
+from mozpack.files import ExistingFile, GeneratedFile
+from mozpack.mozjar import JarReader
+from mozpack.test.test_files import MatchTestTemplate, MockDest, TestWithTmpDir
+
+
+class BaseTestFileRegistry(MatchTestTemplate):
+ def add(self, path):
+ self.registry.add(path, GeneratedFile(path))
+
+ def do_check(self, pattern, result):
+ self.checked = True
+ if result:
+ self.assertTrue(self.registry.contains(pattern))
+ else:
+ self.assertFalse(self.registry.contains(pattern))
+ self.assertEqual(self.registry.match(pattern), result)
+
+ def do_test_file_registry(self, registry):
+ self.registry = registry
+ self.registry.add("foo", GeneratedFile(b"foo"))
+ bar = GeneratedFile(b"bar")
+ self.registry.add("bar", bar)
+ self.assertEqual(self.registry.paths(), ["foo", "bar"])
+ self.assertEqual(self.registry["bar"], bar)
+
+ self.assertRaises(
+ ErrorMessage, self.registry.add, "foo", GeneratedFile(b"foo2")
+ )
+
+ self.assertRaises(ErrorMessage, self.registry.remove, "qux")
+
+ self.assertRaises(
+ ErrorMessage, self.registry.add, "foo/bar", GeneratedFile(b"foobar")
+ )
+ self.assertRaises(
+ ErrorMessage, self.registry.add, "foo/bar/baz", GeneratedFile(b"foobar")
+ )
+
+ self.assertEqual(self.registry.paths(), ["foo", "bar"])
+
+ self.registry.remove("foo")
+ self.assertEqual(self.registry.paths(), ["bar"])
+ self.registry.remove("bar")
+ self.assertEqual(self.registry.paths(), [])
+
+ self.prepare_match_test()
+ self.do_match_test()
+ self.assertTrue(self.checked)
+ self.assertEqual(
+ self.registry.paths(),
+ [
+ "bar",
+ "foo/bar",
+ "foo/baz",
+ "foo/qux/1",
+ "foo/qux/bar",
+ "foo/qux/2/test",
+ "foo/qux/2/test2",
+ ],
+ )
+
+ self.registry.remove("foo/qux")
+ self.assertEqual(self.registry.paths(), ["bar", "foo/bar", "foo/baz"])
+
+ self.registry.add("foo/qux", GeneratedFile(b"fooqux"))
+ self.assertEqual(
+ self.registry.paths(), ["bar", "foo/bar", "foo/baz", "foo/qux"]
+ )
+ self.registry.remove("foo/b*")
+ self.assertEqual(self.registry.paths(), ["bar", "foo/qux"])
+
+ self.assertEqual([f for f, c in self.registry], ["bar", "foo/qux"])
+ self.assertEqual(len(self.registry), 2)
+
+ self.add("foo/.foo")
+ self.assertTrue(self.registry.contains("foo/.foo"))
+
+ def do_test_registry_paths(self, registry):
+ self.registry = registry
+
+ # Can't add a file if it requires a directory in place of a
+ # file we also require.
+ self.registry.add("foo", GeneratedFile(b"foo"))
+ self.assertRaises(
+ ErrorMessage, self.registry.add, "foo/bar", GeneratedFile(b"foobar")
+ )
+
+ # Can't add a file if we already have a directory there.
+ self.registry.add("bar/baz", GeneratedFile(b"barbaz"))
+ self.assertRaises(ErrorMessage, self.registry.add, "bar", GeneratedFile(b"bar"))
+
+ # Bump the count of things that require bar/ to 2.
+ self.registry.add("bar/zot", GeneratedFile(b"barzot"))
+ self.assertRaises(ErrorMessage, self.registry.add, "bar", GeneratedFile(b"bar"))
+
+ # Drop the count of things that require bar/ to 1.
+ self.registry.remove("bar/baz")
+ self.assertRaises(ErrorMessage, self.registry.add, "bar", GeneratedFile(b"bar"))
+
+ # Drop the count of things that require bar/ to 0.
+ self.registry.remove("bar/zot")
+ self.registry.add("bar/zot", GeneratedFile(b"barzot"))
+
+
+class TestFileRegistry(BaseTestFileRegistry, unittest.TestCase):
+ def test_partial_paths(self):
+ cases = {
+ "foo/bar/baz/zot": ["foo/bar/baz", "foo/bar", "foo"],
+ "foo/bar": ["foo"],
+ "bar": [],
+ }
+ reg = FileRegistry()
+ for path, parts in six.iteritems(cases):
+ self.assertEqual(reg._partial_paths(path), parts)
+
+ def test_file_registry(self):
+ self.do_test_file_registry(FileRegistry())
+
+ def test_registry_paths(self):
+ self.do_test_registry_paths(FileRegistry())
+
+ def test_required_directories(self):
+ self.registry = FileRegistry()
+
+ self.registry.add("foo", GeneratedFile(b"foo"))
+ self.assertEqual(self.registry.required_directories(), set())
+
+ self.registry.add("bar/baz", GeneratedFile(b"barbaz"))
+ self.assertEqual(self.registry.required_directories(), {"bar"})
+
+ self.registry.add("bar/zot", GeneratedFile(b"barzot"))
+ self.assertEqual(self.registry.required_directories(), {"bar"})
+
+ self.registry.add("bar/zap/zot", GeneratedFile(b"barzapzot"))
+ self.assertEqual(self.registry.required_directories(), {"bar", "bar/zap"})
+
+ self.registry.remove("bar/zap/zot")
+ self.assertEqual(self.registry.required_directories(), {"bar"})
+
+ self.registry.remove("bar/baz")
+ self.assertEqual(self.registry.required_directories(), {"bar"})
+
+ self.registry.remove("bar/zot")
+ self.assertEqual(self.registry.required_directories(), set())
+
+ self.registry.add("x/y/z", GeneratedFile(b"xyz"))
+ self.assertEqual(self.registry.required_directories(), {"x", "x/y"})
+
+
+class TestFileRegistrySubtree(BaseTestFileRegistry, unittest.TestCase):
+ def test_file_registry_subtree_base(self):
+ registry = FileRegistry()
+ self.assertEqual(registry, FileRegistrySubtree("", registry))
+ self.assertNotEqual(registry, FileRegistrySubtree("base", registry))
+
+ def create_registry(self):
+ registry = FileRegistry()
+ registry.add("foo/bar", GeneratedFile(b"foo/bar"))
+ registry.add("baz/qux", GeneratedFile(b"baz/qux"))
+ return FileRegistrySubtree("base/root", registry)
+
+ def test_file_registry_subtree(self):
+ self.do_test_file_registry(self.create_registry())
+
+ def test_registry_paths_subtree(self):
+ FileRegistry()
+ self.do_test_registry_paths(self.create_registry())
+
+
+class TestFileCopier(TestWithTmpDir):
+ def all_dirs(self, base):
+ all_dirs = set()
+ for root, dirs, files in os.walk(base):
+ if not dirs:
+ all_dirs.add(mozpath.relpath(root, base))
+ return all_dirs
+
+ def all_files(self, base):
+ all_files = set()
+ for root, dirs, files in os.walk(base):
+ for f in files:
+ all_files.add(mozpath.join(mozpath.relpath(root, base), f))
+ return all_files
+
+ def test_file_copier(self):
+ copier = FileCopier()
+ copier.add("foo/bar", GeneratedFile(b"foobar"))
+ copier.add("foo/qux", GeneratedFile(b"fooqux"))
+ copier.add("foo/deep/nested/directory/file", GeneratedFile(b"fooz"))
+ copier.add("bar", GeneratedFile(b"bar"))
+ copier.add("qux/foo", GeneratedFile(b"quxfoo"))
+ copier.add("qux/bar", GeneratedFile(b""))
+
+ result = copier.copy(self.tmpdir)
+ self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
+ self.assertEqual(
+ self.all_dirs(self.tmpdir), set(["foo/deep/nested/directory", "qux"])
+ )
+
+ self.assertEqual(
+ result.updated_files,
+ set(self.tmppath(p) for p in self.all_files(self.tmpdir)),
+ )
+ self.assertEqual(result.existing_files, set())
+ self.assertEqual(result.removed_files, set())
+ self.assertEqual(result.removed_directories, set())
+
+ copier.remove("foo")
+ copier.add("test", GeneratedFile(b"test"))
+ result = copier.copy(self.tmpdir)
+ self.assertEqual(self.all_files(self.tmpdir), set(copier.paths()))
+ self.assertEqual(self.all_dirs(self.tmpdir), set(["qux"]))
+ self.assertEqual(
+ result.removed_files,
+ set(
+ self.tmppath(p)
+ for p in ("foo/bar", "foo/qux", "foo/deep/nested/directory/file")
+ ),
+ )
+
+ def test_symlink_directory_replaced(self):
+ """Directory symlinks in destination are replaced if they need to be
+ real directories."""
+ if not self.symlink_supported:
+ return
+
+ dest = self.tmppath("dest")
+
+ copier = FileCopier()
+ copier.add("foo/bar/baz", GeneratedFile(b"foobarbaz"))
+
+ os.makedirs(self.tmppath("dest/foo"))
+ dummy = self.tmppath("dummy")
+ os.mkdir(dummy)
+ link = self.tmppath("dest/foo/bar")
+ os.symlink(dummy, link)
+
+ result = copier.copy(dest)
+
+ st = os.lstat(link)
+ self.assertFalse(stat.S_ISLNK(st.st_mode))
+ self.assertTrue(stat.S_ISDIR(st.st_mode))
+
+ self.assertEqual(self.all_files(dest), set(copier.paths()))
+
+ self.assertEqual(result.removed_directories, set())
+ self.assertEqual(len(result.updated_files), 1)
+
+ def test_remove_unaccounted_directory_symlinks(self):
+ """Directory symlinks in destination that are not in the way are
+ deleted according to remove_unaccounted and
+ remove_all_directory_symlinks.
+ """
+ if not self.symlink_supported:
+ return
+
+ dest = self.tmppath("dest")
+
+ copier = FileCopier()
+ copier.add("foo/bar/baz", GeneratedFile(b"foobarbaz"))
+
+ os.makedirs(self.tmppath("dest/foo"))
+ dummy = self.tmppath("dummy")
+ os.mkdir(dummy)
+
+ os.mkdir(self.tmppath("dest/zot"))
+ link = self.tmppath("dest/zot/zap")
+ os.symlink(dummy, link)
+
+ # If not remove_unaccounted but remove_empty_directories, then
+ # the symlinked directory remains (as does its containing
+ # directory).
+ result = copier.copy(
+ dest,
+ remove_unaccounted=False,
+ remove_empty_directories=True,
+ remove_all_directory_symlinks=False,
+ )
+
+ st = os.lstat(link)
+ self.assertTrue(stat.S_ISLNK(st.st_mode))
+ self.assertFalse(stat.S_ISDIR(st.st_mode))
+
+ self.assertEqual(self.all_files(dest), set(copier.paths()))
+ self.assertEqual(self.all_dirs(dest), set(["foo/bar"]))
+
+ self.assertEqual(result.removed_directories, set())
+ self.assertEqual(len(result.updated_files), 1)
+
+ # If remove_unaccounted but not remove_empty_directories, then
+ # only the symlinked directory is removed.
+ result = copier.copy(
+ dest,
+ remove_unaccounted=True,
+ remove_empty_directories=False,
+ remove_all_directory_symlinks=False,
+ )
+
+ st = os.lstat(self.tmppath("dest/zot"))
+ self.assertFalse(stat.S_ISLNK(st.st_mode))
+ self.assertTrue(stat.S_ISDIR(st.st_mode))
+
+ self.assertEqual(result.removed_files, set([link]))
+ self.assertEqual(result.removed_directories, set())
+
+ self.assertEqual(self.all_files(dest), set(copier.paths()))
+ self.assertEqual(self.all_dirs(dest), set(["foo/bar", "zot"]))
+
+ # If remove_unaccounted and remove_empty_directories, then
+ # both the symlink and its containing directory are removed.
+ link = self.tmppath("dest/zot/zap")
+ os.symlink(dummy, link)
+
+ result = copier.copy(
+ dest,
+ remove_unaccounted=True,
+ remove_empty_directories=True,
+ remove_all_directory_symlinks=False,
+ )
+
+ self.assertEqual(result.removed_files, set([link]))
+ self.assertEqual(result.removed_directories, set([self.tmppath("dest/zot")]))
+
+ self.assertEqual(self.all_files(dest), set(copier.paths()))
+ self.assertEqual(self.all_dirs(dest), set(["foo/bar"]))
+
+ def test_permissions(self):
+ """Ensure files without write permission can be deleted."""
+ with open(self.tmppath("dummy"), "a"):
+ pass
+
+ p = self.tmppath("no_perms")
+ with open(p, "a"):
+ pass
+
+ # Make file and directory unwritable. Reminder: making a directory
+ # unwritable prevents modifications (including deletes) from the list
+ # of files in that directory.
+ os.chmod(p, 0o400)
+ os.chmod(self.tmpdir, 0o400)
+
+ copier = FileCopier()
+ copier.add("dummy", GeneratedFile(b"content"))
+ result = copier.copy(self.tmpdir)
+ self.assertEqual(result.removed_files_count, 1)
+ self.assertFalse(os.path.exists(p))
+
+ def test_no_remove(self):
+ copier = FileCopier()
+ copier.add("foo", GeneratedFile(b"foo"))
+
+ with open(self.tmppath("bar"), "a"):
+ pass
+
+ os.mkdir(self.tmppath("emptydir"))
+ d = self.tmppath("populateddir")
+ os.mkdir(d)
+
+ with open(self.tmppath("populateddir/foo"), "a"):
+ pass
+
+ result = copier.copy(self.tmpdir, remove_unaccounted=False)
+
+ self.assertEqual(
+ self.all_files(self.tmpdir), set(["foo", "bar", "populateddir/foo"])
+ )
+ self.assertEqual(self.all_dirs(self.tmpdir), set(["populateddir"]))
+ self.assertEqual(result.removed_files, set())
+ self.assertEqual(result.removed_directories, set([self.tmppath("emptydir")]))
+
+ def test_no_remove_empty_directories(self):
+ copier = FileCopier()
+ copier.add("foo", GeneratedFile(b"foo"))
+
+ with open(self.tmppath("bar"), "a"):
+ pass
+
+ os.mkdir(self.tmppath("emptydir"))
+ d = self.tmppath("populateddir")
+ os.mkdir(d)
+
+ with open(self.tmppath("populateddir/foo"), "a"):
+ pass
+
+ result = copier.copy(
+ self.tmpdir, remove_unaccounted=False, remove_empty_directories=False
+ )
+
+ self.assertEqual(
+ self.all_files(self.tmpdir), set(["foo", "bar", "populateddir/foo"])
+ )
+ self.assertEqual(self.all_dirs(self.tmpdir), set(["emptydir", "populateddir"]))
+ self.assertEqual(result.removed_files, set())
+ self.assertEqual(result.removed_directories, set())
+
+ def test_optional_exists_creates_unneeded_directory(self):
+ """Demonstrate that a directory not strictly required, but specified
+ as the path to an optional file, will be unnecessarily created.
+
+ This behaviour is wrong; fixing it is tracked by Bug 972432;
+ and this test exists to guard against unexpected changes in
+ behaviour.
+ """
+
+ dest = self.tmppath("dest")
+
+ copier = FileCopier()
+ copier.add("foo/bar", ExistingFile(required=False))
+
+ result = copier.copy(dest)
+
+ st = os.lstat(self.tmppath("dest/foo"))
+ self.assertFalse(stat.S_ISLNK(st.st_mode))
+ self.assertTrue(stat.S_ISDIR(st.st_mode))
+
+ # What's worse, we have no record that dest was created.
+ self.assertEqual(len(result.updated_files), 0)
+
+ # But we do have an erroneous record of an optional file
+ # existing when it does not.
+ self.assertIn(self.tmppath("dest/foo/bar"), result.existing_files)
+
+ def test_remove_unaccounted_file_registry(self):
+ """Test FileCopier.copy(remove_unaccounted=FileRegistry())"""
+
+ dest = self.tmppath("dest")
+
+ copier = FileCopier()
+ copier.add("foo/bar/baz", GeneratedFile(b"foobarbaz"))
+ copier.add("foo/bar/qux", GeneratedFile(b"foobarqux"))
+ copier.add("foo/hoge/fuga", GeneratedFile(b"foohogefuga"))
+ copier.add("foo/toto/tata", GeneratedFile(b"footototata"))
+
+ os.makedirs(os.path.join(dest, "bar"))
+ with open(os.path.join(dest, "bar", "bar"), "w") as fh:
+ fh.write("barbar")
+ os.makedirs(os.path.join(dest, "foo", "toto"))
+ with open(os.path.join(dest, "foo", "toto", "toto"), "w") as fh:
+ fh.write("foototototo")
+
+ result = copier.copy(dest, remove_unaccounted=False)
+
+ self.assertEqual(
+ self.all_files(dest), set(copier.paths()) | {"foo/toto/toto", "bar/bar"}
+ )
+ self.assertEqual(
+ self.all_dirs(dest), {"foo/bar", "foo/hoge", "foo/toto", "bar"}
+ )
+
+ copier2 = FileCopier()
+ copier2.add("foo/hoge/fuga", GeneratedFile(b"foohogefuga"))
+
+ # We expect only files copied from the first copier to be removed,
+ # not the extra file that was there beforehand.
+ result = copier2.copy(dest, remove_unaccounted=copier)
+
+ self.assertEqual(
+ self.all_files(dest), set(copier2.paths()) | {"foo/toto/toto", "bar/bar"}
+ )
+ self.assertEqual(self.all_dirs(dest), {"foo/hoge", "foo/toto", "bar"})
+ self.assertEqual(result.updated_files, {self.tmppath("dest/foo/hoge/fuga")})
+ self.assertEqual(result.existing_files, set())
+ self.assertEqual(
+ result.removed_files,
+ {
+ self.tmppath(p)
+ for p in ("dest/foo/bar/baz", "dest/foo/bar/qux", "dest/foo/toto/tata")
+ },
+ )
+ self.assertEqual(result.removed_directories, {self.tmppath("dest/foo/bar")})
+
+
+class TestJarrer(unittest.TestCase):
+ def check_jar(self, dest, copier):
+ jar = JarReader(fileobj=dest)
+ self.assertEqual([f.filename for f in jar], copier.paths())
+ for f in jar:
+ self.assertEqual(f.uncompressed_data.read(), copier[f.filename].content)
+
+ def test_jarrer(self):
+ copier = Jarrer()
+ copier.add("foo/bar", GeneratedFile(b"foobar"))
+ copier.add("foo/qux", GeneratedFile(b"fooqux"))
+ copier.add("foo/deep/nested/directory/file", GeneratedFile(b"fooz"))
+ copier.add("bar", GeneratedFile(b"bar"))
+ copier.add("qux/foo", GeneratedFile(b"quxfoo"))
+ copier.add("qux/bar", GeneratedFile(b""))
+
+ dest = MockDest()
+ copier.copy(dest)
+ self.check_jar(dest, copier)
+
+ copier.remove("foo")
+ copier.add("test", GeneratedFile(b"test"))
+ copier.copy(dest)
+ self.check_jar(dest, copier)
+
+ copier.remove("test")
+ copier.add("test", GeneratedFile(b"replaced-content"))
+ copier.copy(dest)
+ self.check_jar(dest, copier)
+
+ copier.copy(dest)
+ self.check_jar(dest, copier)
+
+ preloaded = ["qux/bar", "bar"]
+ copier.preload(preloaded)
+ copier.copy(dest)
+
+ dest.seek(0)
+ jar = JarReader(fileobj=dest)
+ self.assertEqual(
+ [f.filename for f in jar],
+ preloaded + [p for p in copier.paths() if p not in preloaded],
+ )
+ self.assertEqual(jar.last_preloaded, preloaded[-1])
+
+ def test_jarrer_compress(self):
+ copier = Jarrer()
+ copier.add("foo/bar", GeneratedFile(b"ffffff"))
+ copier.add("foo/qux", GeneratedFile(b"ffffff"), compress=False)
+
+ dest = MockDest()
+ copier.copy(dest)
+ self.check_jar(dest, copier)
+
+ dest.seek(0)
+ jar = JarReader(fileobj=dest)
+ self.assertTrue(jar["foo/bar"].compressed)
+ self.assertFalse(jar["foo/qux"].compressed)
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_errors.py b/python/mozbuild/mozpack/test/test_errors.py
new file mode 100644
index 0000000000..411b1b54c3
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_errors.py
@@ -0,0 +1,95 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import sys
+import unittest
+
+import mozunit
+import six
+
+from mozpack.errors import AccumulatedErrors, ErrorMessage, errors
+
+
+class TestErrors(object):
+ def setUp(self):
+ errors.out = six.moves.cStringIO()
+ errors.ignore_errors(False)
+
+ def tearDown(self):
+ errors.out = sys.stderr
+
+ def get_output(self):
+ return [l.strip() for l in errors.out.getvalue().splitlines()]
+
+
+class TestErrorsImpl(TestErrors, unittest.TestCase):
+ def test_plain_error(self):
+ errors.warn("foo")
+ self.assertRaises(ErrorMessage, errors.error, "foo")
+ self.assertRaises(ErrorMessage, errors.fatal, "foo")
+ self.assertEqual(self.get_output(), ["warning: foo"])
+
+ def test_ignore_errors(self):
+ errors.ignore_errors()
+ errors.warn("foo")
+ errors.error("bar")
+ self.assertRaises(ErrorMessage, errors.fatal, "foo")
+ self.assertEqual(self.get_output(), ["warning: foo", "warning: bar"])
+
+ def test_no_error(self):
+ with errors.accumulate():
+ errors.warn("1")
+
+ def test_simple_error(self):
+ with self.assertRaises(AccumulatedErrors):
+ with errors.accumulate():
+ errors.error("1")
+ self.assertEqual(self.get_output(), ["error: 1"])
+
+ def test_error_loop(self):
+ with self.assertRaises(AccumulatedErrors):
+ with errors.accumulate():
+ for i in range(3):
+ errors.error("%d" % i)
+ self.assertEqual(self.get_output(), ["error: 0", "error: 1", "error: 2"])
+
+ def test_multiple_errors(self):
+ with self.assertRaises(AccumulatedErrors):
+ with errors.accumulate():
+ errors.error("foo")
+ for i in range(3):
+ if i == 2:
+ errors.warn("%d" % i)
+ else:
+ errors.error("%d" % i)
+ errors.error("bar")
+ self.assertEqual(
+ self.get_output(),
+ ["error: foo", "error: 0", "error: 1", "warning: 2", "error: bar"],
+ )
+
+ def test_errors_context(self):
+ with self.assertRaises(AccumulatedErrors):
+ with errors.accumulate():
+ self.assertEqual(errors.get_context(), None)
+ with errors.context("foo", 1):
+ self.assertEqual(errors.get_context(), ("foo", 1))
+ errors.error("a")
+ with errors.context("bar", 2):
+ self.assertEqual(errors.get_context(), ("bar", 2))
+ errors.error("b")
+ self.assertEqual(errors.get_context(), ("foo", 1))
+ errors.error("c")
+ self.assertEqual(
+ self.get_output(),
+ [
+ "error: foo:1: a",
+ "error: bar:2: b",
+ "error: foo:1: c",
+ ],
+ )
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_files.py b/python/mozbuild/mozpack/test/test_files.py
new file mode 100644
index 0000000000..1c86f2e0cc
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_files.py
@@ -0,0 +1,1362 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozbuild.util import ensure_bytes, ensureParentDir
+from mozpack.errors import ErrorMessage, errors
+from mozpack.files import (
+ AbsoluteSymlinkFile,
+ ComposedFinder,
+ DeflatedFile,
+ Dest,
+ ExistingFile,
+ ExtractedTarFile,
+ File,
+ FileFinder,
+ GeneratedFile,
+ HardlinkFile,
+ JarFinder,
+ ManifestFile,
+ MercurialFile,
+ MercurialRevisionFinder,
+ MinifiedCommentStripped,
+ MinifiedJavaScript,
+ PreprocessedFile,
+ TarFinder,
+)
+
+# We don't have hglib installed everywhere.
+try:
+ import hglib
+except ImportError:
+ hglib = None
+
+import os
+import platform
+import random
+import sys
+import tarfile
+import unittest
+from io import BytesIO
+from tempfile import mkdtemp
+
+import mozfile
+import mozunit
+import six
+
+import mozpack.path as mozpath
+from mozpack.chrome.manifest import (
+ ManifestContent,
+ ManifestLocale,
+ ManifestOverride,
+ ManifestResource,
+)
+from mozpack.mozjar import JarReader, JarWriter
+
+
+class TestWithTmpDir(unittest.TestCase):
+ def setUp(self):
+ self.tmpdir = mkdtemp()
+
+ self.symlink_supported = False
+ self.hardlink_supported = False
+
+ # See comment in mozpack.files.AbsoluteSymlinkFile
+ if hasattr(os, "symlink") and platform.system() != "Windows":
+ dummy_path = self.tmppath("dummy_file")
+ with open(dummy_path, "a"):
+ pass
+
+ try:
+ os.symlink(dummy_path, self.tmppath("dummy_symlink"))
+ os.remove(self.tmppath("dummy_symlink"))
+ except EnvironmentError:
+ pass
+ finally:
+ os.remove(dummy_path)
+
+ self.symlink_supported = True
+
+ if hasattr(os, "link"):
+ dummy_path = self.tmppath("dummy_file")
+ with open(dummy_path, "a"):
+ pass
+
+ try:
+ os.link(dummy_path, self.tmppath("dummy_hardlink"))
+ os.remove(self.tmppath("dummy_hardlink"))
+ except EnvironmentError:
+ pass
+ finally:
+ os.remove(dummy_path)
+
+ self.hardlink_supported = True
+
+ def tearDown(self):
+ mozfile.rmtree(self.tmpdir)
+
+ def tmppath(self, relpath):
+ return os.path.normpath(os.path.join(self.tmpdir, relpath))
+
+
+class MockDest(BytesIO, Dest):
+ def __init__(self):
+ BytesIO.__init__(self)
+ self.mode = None
+
+ def read(self, length=-1):
+ if self.mode != "r":
+ self.seek(0)
+ self.mode = "r"
+ return BytesIO.read(self, length)
+
+ def write(self, data):
+ if self.mode != "w":
+ self.seek(0)
+ self.truncate(0)
+ self.mode = "w"
+ return BytesIO.write(self, data)
+
+ def exists(self):
+ return True
+
+ def close(self):
+ if self.mode:
+ self.mode = None
+
+
+class DestNoWrite(Dest):
+ def write(self, data):
+ raise RuntimeError
+
+
+class TestDest(TestWithTmpDir):
+ def test_dest(self):
+ dest = Dest(self.tmppath("dest"))
+ self.assertFalse(dest.exists())
+ dest.write(b"foo")
+ self.assertTrue(dest.exists())
+ dest.write(b"foo")
+ self.assertEqual(dest.read(4), b"foof")
+ self.assertEqual(dest.read(), b"oo")
+ self.assertEqual(dest.read(), b"")
+ dest.write(b"bar")
+ self.assertEqual(dest.read(4), b"bar")
+ dest.close()
+ self.assertEqual(dest.read(), b"bar")
+ dest.write(b"foo")
+ dest.close()
+ dest.write(b"qux")
+ self.assertEqual(dest.read(), b"qux")
+
+
+rand = bytes(
+ random.choice(b"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ")
+ for i in six.moves.xrange(131597)
+)
+samples = [
+ b"",
+ b"test",
+ b"fooo",
+ b"same",
+ b"same",
+ b"Different and longer",
+ rand,
+ rand,
+ rand[:-1] + b"_",
+ b"test",
+]
+
+
+class TestFile(TestWithTmpDir):
+ def test_file(self):
+ """
+ Check that File.copy yields the proper content in the destination file
+ in all situations that trigger different code paths:
+ - different content
+ - different content of the same size
+ - same content
+ - long content
+ """
+ src = self.tmppath("src")
+ dest = self.tmppath("dest")
+
+ for content in samples:
+ with open(src, "wb") as tmp:
+ tmp.write(content)
+ # Ensure the destination file, when it exists, is older than the
+ # source
+ if os.path.exists(dest):
+ time = os.path.getmtime(src) - 1
+ os.utime(dest, (time, time))
+ f = File(src)
+ f.copy(dest)
+ self.assertEqual(content, open(dest, "rb").read())
+ self.assertEqual(content, f.open().read())
+ self.assertEqual(content, f.open().read())
+
+ def test_file_dest(self):
+ """
+ Similar to test_file, but for a destination object instead of
+ a destination file. This ensures the destination object is being
+ used properly by File.copy, ensuring that other subclasses of Dest
+ will work.
+ """
+ src = self.tmppath("src")
+ dest = MockDest()
+
+ for content in samples:
+ with open(src, "wb") as tmp:
+ tmp.write(content)
+ f = File(src)
+ f.copy(dest)
+ self.assertEqual(content, dest.getvalue())
+
+ def test_file_open(self):
+ """
+ Test whether File.open returns an appropriately reset file object.
+ """
+ src = self.tmppath("src")
+ content = b"".join(samples)
+ with open(src, "wb") as tmp:
+ tmp.write(content)
+
+ f = File(src)
+ self.assertEqual(content[:42], f.open().read(42))
+ self.assertEqual(content, f.open().read())
+
+ def test_file_no_write(self):
+ """
+ Test various conditions where File.copy is expected not to write
+ in the destination file.
+ """
+ src = self.tmppath("src")
+ dest = self.tmppath("dest")
+
+ with open(src, "wb") as tmp:
+ tmp.write(b"test")
+
+ # Initial copy
+ f = File(src)
+ f.copy(dest)
+
+ # Ensure subsequent copies won't trigger writes
+ f.copy(DestNoWrite(dest))
+ self.assertEqual(b"test", open(dest, "rb").read())
+
+ # When the source file is newer, but with the same content, no copy
+ # should occur
+ time = os.path.getmtime(src) - 1
+ os.utime(dest, (time, time))
+ f.copy(DestNoWrite(dest))
+ self.assertEqual(b"test", open(dest, "rb").read())
+
+ # When the source file is older than the destination file, even with
+ # different content, no copy should occur.
+ with open(src, "wb") as tmp:
+ tmp.write(b"fooo")
+ time = os.path.getmtime(dest) - 1
+ os.utime(src, (time, time))
+ f.copy(DestNoWrite(dest))
+ self.assertEqual(b"test", open(dest, "rb").read())
+
+ # Double check that under conditions where a copy occurs, we would get
+ # an exception.
+ time = os.path.getmtime(src) - 1
+ os.utime(dest, (time, time))
+ self.assertRaises(RuntimeError, f.copy, DestNoWrite(dest))
+
+ # skip_if_older=False is expected to force a copy in this situation.
+ f.copy(dest, skip_if_older=False)
+ self.assertEqual(b"fooo", open(dest, "rb").read())
+
+
+class TestAbsoluteSymlinkFile(TestWithTmpDir):
+ def test_absolute_relative(self):
+ AbsoluteSymlinkFile("/foo")
+
+ with self.assertRaisesRegexp(ValueError, "Symlink target not absolute"):
+ AbsoluteSymlinkFile("./foo")
+
+ def test_symlink_file(self):
+ source = self.tmppath("test_path")
+ with open(source, "wt") as fh:
+ fh.write("Hello world")
+
+ s = AbsoluteSymlinkFile(source)
+ dest = self.tmppath("symlink")
+ self.assertTrue(s.copy(dest))
+
+ if self.symlink_supported:
+ self.assertTrue(os.path.islink(dest))
+ link = os.readlink(dest)
+ self.assertEqual(link, source)
+ else:
+ self.assertTrue(os.path.isfile(dest))
+ content = open(dest).read()
+ self.assertEqual(content, "Hello world")
+
+ def test_replace_file_with_symlink(self):
+ # If symlinks are supported, an existing file should be replaced by a
+ # symlink.
+ source = self.tmppath("test_path")
+ with open(source, "wt") as fh:
+ fh.write("source")
+
+ dest = self.tmppath("dest")
+ with open(dest, "a"):
+ pass
+
+ s = AbsoluteSymlinkFile(source)
+ s.copy(dest, skip_if_older=False)
+
+ if self.symlink_supported:
+ self.assertTrue(os.path.islink(dest))
+ link = os.readlink(dest)
+ self.assertEqual(link, source)
+ else:
+ self.assertTrue(os.path.isfile(dest))
+ content = open(dest).read()
+ self.assertEqual(content, "source")
+
+ def test_replace_symlink(self):
+ if not self.symlink_supported:
+ return
+
+ source = self.tmppath("source")
+ with open(source, "a"):
+ pass
+
+ dest = self.tmppath("dest")
+
+ os.symlink(self.tmppath("bad"), dest)
+ self.assertTrue(os.path.islink(dest))
+
+ s = AbsoluteSymlinkFile(source)
+ self.assertTrue(s.copy(dest))
+
+ self.assertTrue(os.path.islink(dest))
+ link = os.readlink(dest)
+ self.assertEqual(link, source)
+
+ def test_noop(self):
+ if not hasattr(os, "symlink") or sys.platform == "win32":
+ return
+
+ source = self.tmppath("source")
+ dest = self.tmppath("dest")
+
+ with open(source, "a"):
+ pass
+
+ os.symlink(source, dest)
+ link = os.readlink(dest)
+ self.assertEqual(link, source)
+
+ s = AbsoluteSymlinkFile(source)
+ self.assertFalse(s.copy(dest))
+
+ link = os.readlink(dest)
+ self.assertEqual(link, source)
+
+
+class TestHardlinkFile(TestWithTmpDir):
+ def test_absolute_relative(self):
+ HardlinkFile("/foo")
+ HardlinkFile("./foo")
+
+ def test_hardlink_file(self):
+ source = self.tmppath("test_path")
+ with open(source, "wt") as fh:
+ fh.write("Hello world")
+
+ s = HardlinkFile(source)
+ dest = self.tmppath("hardlink")
+ self.assertTrue(s.copy(dest))
+
+ if self.hardlink_supported:
+ source_stat = os.stat(source)
+ dest_stat = os.stat(dest)
+ self.assertEqual(source_stat.st_dev, dest_stat.st_dev)
+ self.assertEqual(source_stat.st_ino, dest_stat.st_ino)
+ else:
+ self.assertTrue(os.path.isfile(dest))
+ with open(dest) as f:
+ content = f.read()
+ self.assertEqual(content, "Hello world")
+
+ def test_replace_file_with_hardlink(self):
+ # If hardlinks are supported, an existing file should be replaced by a
+ # hardlink.
+ source = self.tmppath("test_path")
+ with open(source, "wt") as fh:
+ fh.write("source")
+
+ dest = self.tmppath("dest")
+ with open(dest, "a"):
+ pass
+
+ s = HardlinkFile(source)
+ s.copy(dest, skip_if_older=False)
+
+ if self.hardlink_supported:
+ source_stat = os.stat(source)
+ dest_stat = os.stat(dest)
+ self.assertEqual(source_stat.st_dev, dest_stat.st_dev)
+ self.assertEqual(source_stat.st_ino, dest_stat.st_ino)
+ else:
+ self.assertTrue(os.path.isfile(dest))
+ with open(dest) as f:
+ content = f.read()
+ self.assertEqual(content, "source")
+
+ def test_replace_hardlink(self):
+ if not self.hardlink_supported:
+ raise unittest.SkipTest("hardlink not supported")
+
+ source = self.tmppath("source")
+ with open(source, "a"):
+ pass
+
+ dest = self.tmppath("dest")
+
+ os.link(source, dest)
+
+ s = HardlinkFile(source)
+ self.assertFalse(s.copy(dest))
+
+ source_stat = os.lstat(source)
+ dest_stat = os.lstat(dest)
+ self.assertEqual(source_stat.st_dev, dest_stat.st_dev)
+ self.assertEqual(source_stat.st_ino, dest_stat.st_ino)
+
+ def test_noop(self):
+ if not self.hardlink_supported:
+ raise unittest.SkipTest("hardlink not supported")
+
+ source = self.tmppath("source")
+ dest = self.tmppath("dest")
+
+ with open(source, "a"):
+ pass
+
+ os.link(source, dest)
+
+ s = HardlinkFile(source)
+ self.assertFalse(s.copy(dest))
+
+ source_stat = os.lstat(source)
+ dest_stat = os.lstat(dest)
+ self.assertEqual(source_stat.st_dev, dest_stat.st_dev)
+ self.assertEqual(source_stat.st_ino, dest_stat.st_ino)
+
+
+class TestPreprocessedFile(TestWithTmpDir):
+ def test_preprocess(self):
+ """
+ Test that copying the file invokes the preprocessor
+ """
+ src = self.tmppath("src")
+ dest = self.tmppath("dest")
+
+ with open(src, "wb") as tmp:
+ tmp.write(b"#ifdef FOO\ntest\n#endif")
+
+ f = PreprocessedFile(src, depfile_path=None, marker="#", defines={"FOO": True})
+ self.assertTrue(f.copy(dest))
+
+ self.assertEqual(b"test\n", open(dest, "rb").read())
+
+ def test_preprocess_file_no_write(self):
+ """
+ Test various conditions where PreprocessedFile.copy is expected not to
+ write in the destination file.
+ """
+ src = self.tmppath("src")
+ dest = self.tmppath("dest")
+ depfile = self.tmppath("depfile")
+
+ with open(src, "wb") as tmp:
+ tmp.write(b"#ifdef FOO\ntest\n#endif")
+
+ # Initial copy
+ f = PreprocessedFile(
+ src, depfile_path=depfile, marker="#", defines={"FOO": True}
+ )
+ self.assertTrue(f.copy(dest))
+
+ # Ensure subsequent copies won't trigger writes
+ self.assertFalse(f.copy(DestNoWrite(dest)))
+ self.assertEqual(b"test\n", open(dest, "rb").read())
+
+ # When the source file is older than the destination file, even with
+ # different content, no copy should occur.
+ with open(src, "wb") as tmp:
+ tmp.write(b"#ifdef FOO\nfooo\n#endif")
+ time = os.path.getmtime(dest) - 1
+ os.utime(src, (time, time))
+ self.assertFalse(f.copy(DestNoWrite(dest)))
+ self.assertEqual(b"test\n", open(dest, "rb").read())
+
+ # skip_if_older=False is expected to force a copy in this situation.
+ self.assertTrue(f.copy(dest, skip_if_older=False))
+ self.assertEqual(b"fooo\n", open(dest, "rb").read())
+
+ def test_preprocess_file_dependencies(self):
+ """
+ Test that the preprocess runs if the dependencies of the source change
+ """
+ src = self.tmppath("src")
+ dest = self.tmppath("dest")
+ incl = self.tmppath("incl")
+ deps = self.tmppath("src.pp")
+
+ with open(src, "wb") as tmp:
+ tmp.write(b"#ifdef FOO\ntest\n#endif")
+
+ with open(incl, "wb") as tmp:
+ tmp.write(b"foo bar")
+
+ # Initial copy
+ f = PreprocessedFile(src, depfile_path=deps, marker="#", defines={"FOO": True})
+ self.assertTrue(f.copy(dest))
+
+ # Update the source so it #includes the include file.
+ with open(src, "wb") as tmp:
+ tmp.write(b"#include incl\n")
+ time = os.path.getmtime(dest) + 1
+ os.utime(src, (time, time))
+ self.assertTrue(f.copy(dest))
+ self.assertEqual(b"foo bar", open(dest, "rb").read())
+
+ # If one of the dependencies changes, the file should be updated. The
+ # mtime of the dependency is set after the destination file, to avoid
+ # both files having the same time.
+ with open(incl, "wb") as tmp:
+ tmp.write(b"quux")
+ time = os.path.getmtime(dest) + 1
+ os.utime(incl, (time, time))
+ self.assertTrue(f.copy(dest))
+ self.assertEqual(b"quux", open(dest, "rb").read())
+
+ # Perform one final copy to confirm that we don't run the preprocessor
+ # again. We update the mtime of the destination so it's newer than the
+ # input files. This would "just work" if we weren't changing the mtimes.
+ time = os.path.getmtime(incl) + 1
+ os.utime(dest, (time, time))
+ self.assertFalse(f.copy(DestNoWrite(dest)))
+
+ def test_replace_symlink(self):
+ """
+ Test that if the destination exists, and is a symlink, the target of
+ the symlink is not overwritten by the preprocessor output.
+ """
+ if not self.symlink_supported:
+ return
+
+ source = self.tmppath("source")
+ dest = self.tmppath("dest")
+ pp_source = self.tmppath("pp_in")
+ deps = self.tmppath("deps")
+
+ with open(source, "a"):
+ pass
+
+ os.symlink(source, dest)
+ self.assertTrue(os.path.islink(dest))
+
+ with open(pp_source, "wb") as tmp:
+ tmp.write(b"#define FOO\nPREPROCESSED")
+
+ f = PreprocessedFile(
+ pp_source, depfile_path=deps, marker="#", defines={"FOO": True}
+ )
+ self.assertTrue(f.copy(dest))
+
+ self.assertEqual(b"PREPROCESSED", open(dest, "rb").read())
+ self.assertFalse(os.path.islink(dest))
+ self.assertEqual(b"", open(source, "rb").read())
+
+
+class TestExistingFile(TestWithTmpDir):
+ def test_required_missing_dest(self):
+ with self.assertRaisesRegexp(ErrorMessage, "Required existing file"):
+ f = ExistingFile(required=True)
+ f.copy(self.tmppath("dest"))
+
+ def test_required_existing_dest(self):
+ p = self.tmppath("dest")
+ with open(p, "a"):
+ pass
+
+ f = ExistingFile(required=True)
+ f.copy(p)
+
+ def test_optional_missing_dest(self):
+ f = ExistingFile(required=False)
+ f.copy(self.tmppath("dest"))
+
+ def test_optional_existing_dest(self):
+ p = self.tmppath("dest")
+ with open(p, "a"):
+ pass
+
+ f = ExistingFile(required=False)
+ f.copy(p)
+
+
+class TestGeneratedFile(TestWithTmpDir):
+ def test_generated_file(self):
+ """
+ Check that GeneratedFile.copy yields the proper content in the
+ destination file in all situations that trigger different code paths
+ (see TestFile.test_file)
+ """
+ dest = self.tmppath("dest")
+
+ for content in samples:
+ f = GeneratedFile(content)
+ f.copy(dest)
+ self.assertEqual(content, open(dest, "rb").read())
+
+ def test_generated_file_open(self):
+ """
+ Test whether GeneratedFile.open returns an appropriately reset file
+ object.
+ """
+ content = b"".join(samples)
+ f = GeneratedFile(content)
+ self.assertEqual(content[:42], f.open().read(42))
+ self.assertEqual(content, f.open().read())
+
+ def test_generated_file_no_write(self):
+ """
+ Test various conditions where GeneratedFile.copy is expected not to
+ write in the destination file.
+ """
+ dest = self.tmppath("dest")
+
+ # Initial copy
+ f = GeneratedFile(b"test")
+ f.copy(dest)
+
+ # Ensure subsequent copies won't trigger writes
+ f.copy(DestNoWrite(dest))
+ self.assertEqual(b"test", open(dest, "rb").read())
+
+ # When using a new instance with the same content, no copy should occur
+ f = GeneratedFile(b"test")
+ f.copy(DestNoWrite(dest))
+ self.assertEqual(b"test", open(dest, "rb").read())
+
+ # Double check that under conditions where a copy occurs, we would get
+ # an exception.
+ f = GeneratedFile(b"fooo")
+ self.assertRaises(RuntimeError, f.copy, DestNoWrite(dest))
+
+ def test_generated_file_function(self):
+ """
+ Test GeneratedFile behavior with functions.
+ """
+ dest = self.tmppath("dest")
+ data = {
+ "num_calls": 0,
+ }
+
+ def content():
+ data["num_calls"] += 1
+ return b"content"
+
+ f = GeneratedFile(content)
+ self.assertEqual(data["num_calls"], 0)
+ f.copy(dest)
+ self.assertEqual(data["num_calls"], 1)
+ self.assertEqual(b"content", open(dest, "rb").read())
+ self.assertEqual(b"content", f.open().read())
+ self.assertEqual(b"content", f.read())
+ self.assertEqual(len(b"content"), f.size())
+ self.assertEqual(data["num_calls"], 1)
+
+ f.content = b"modified"
+ f.copy(dest)
+ self.assertEqual(data["num_calls"], 1)
+ self.assertEqual(b"modified", open(dest, "rb").read())
+ self.assertEqual(b"modified", f.open().read())
+ self.assertEqual(b"modified", f.read())
+ self.assertEqual(len(b"modified"), f.size())
+
+ f.content = content
+ self.assertEqual(data["num_calls"], 1)
+ self.assertEqual(b"content", f.read())
+ self.assertEqual(data["num_calls"], 2)
+
+
+class TestDeflatedFile(TestWithTmpDir):
+ def test_deflated_file(self):
+ """
+ Check that DeflatedFile.copy yields the proper content in the
+ destination file in all situations that trigger different code paths
+ (see TestFile.test_file)
+ """
+ src = self.tmppath("src.jar")
+ dest = self.tmppath("dest")
+
+ contents = {}
+ with JarWriter(src) as jar:
+ for content in samples:
+ name = "".join(
+ random.choice(
+ "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
+ )
+ for i in range(8)
+ )
+ jar.add(name, content, compress=True)
+ contents[name] = content
+
+ for j in JarReader(src):
+ f = DeflatedFile(j)
+ f.copy(dest)
+ self.assertEqual(contents[j.filename], open(dest, "rb").read())
+
+ def test_deflated_file_open(self):
+ """
+ Test whether DeflatedFile.open returns an appropriately reset file
+ object.
+ """
+ src = self.tmppath("src.jar")
+ content = b"".join(samples)
+ with JarWriter(src) as jar:
+ jar.add("content", content)
+
+ f = DeflatedFile(JarReader(src)["content"])
+ self.assertEqual(content[:42], f.open().read(42))
+ self.assertEqual(content, f.open().read())
+
+ def test_deflated_file_no_write(self):
+ """
+ Test various conditions where DeflatedFile.copy is expected not to
+ write in the destination file.
+ """
+ src = self.tmppath("src.jar")
+ dest = self.tmppath("dest")
+
+ with JarWriter(src) as jar:
+ jar.add("test", b"test")
+ jar.add("test2", b"test")
+ jar.add("fooo", b"fooo")
+
+ jar = JarReader(src)
+ # Initial copy
+ f = DeflatedFile(jar["test"])
+ f.copy(dest)
+
+ # Ensure subsequent copies won't trigger writes
+ f.copy(DestNoWrite(dest))
+ self.assertEqual(b"test", open(dest, "rb").read())
+
+ # When using a different file with the same content, no copy should
+ # occur
+ f = DeflatedFile(jar["test2"])
+ f.copy(DestNoWrite(dest))
+ self.assertEqual(b"test", open(dest, "rb").read())
+
+ # Double check that under conditions where a copy occurs, we would get
+ # an exception.
+ f = DeflatedFile(jar["fooo"])
+ self.assertRaises(RuntimeError, f.copy, DestNoWrite(dest))
+
+
+class TestManifestFile(TestWithTmpDir):
+ def test_manifest_file(self):
+ f = ManifestFile("chrome")
+ f.add(ManifestContent("chrome", "global", "toolkit/content/global/"))
+ f.add(ManifestResource("chrome", "gre-resources", "toolkit/res/"))
+ f.add(ManifestResource("chrome/pdfjs", "pdfjs", "./"))
+ f.add(ManifestContent("chrome/pdfjs", "pdfjs", "pdfjs"))
+ f.add(ManifestLocale("chrome", "browser", "en-US", "en-US/locale/browser/"))
+
+ f.copy(self.tmppath("chrome.manifest"))
+ self.assertEqual(
+ open(self.tmppath("chrome.manifest")).readlines(),
+ [
+ "content global toolkit/content/global/\n",
+ "resource gre-resources toolkit/res/\n",
+ "resource pdfjs pdfjs/\n",
+ "content pdfjs pdfjs/pdfjs\n",
+ "locale browser en-US en-US/locale/browser/\n",
+ ],
+ )
+
+ self.assertRaises(
+ ValueError,
+ f.remove,
+ ManifestContent("", "global", "toolkit/content/global/"),
+ )
+ self.assertRaises(
+ ValueError,
+ f.remove,
+ ManifestOverride(
+ "chrome",
+ "chrome://global/locale/netError.dtd",
+ "chrome://browser/locale/netError.dtd",
+ ),
+ )
+
+ f.remove(ManifestContent("chrome", "global", "toolkit/content/global/"))
+ self.assertRaises(
+ ValueError,
+ f.remove,
+ ManifestContent("chrome", "global", "toolkit/content/global/"),
+ )
+
+ f.copy(self.tmppath("chrome.manifest"))
+ content = open(self.tmppath("chrome.manifest"), "rb").read()
+ self.assertEqual(content[:42], f.open().read(42))
+ self.assertEqual(content, f.open().read())
+
+
+# Compiled typelib for the following IDL:
+# interface foo;
+# [scriptable, uuid(5f70da76-519c-4858-b71e-e3c92333e2d6)]
+# interface bar {
+# void bar(in foo f);
+# };
+# We need to make this [scriptable] so it doesn't get deleted from the
+# typelib. We don't need to make the foo interfaces below [scriptable],
+# because they will be automatically included by virtue of being an
+# argument to a method of |bar|.
+bar_xpt = GeneratedFile(
+ b"\x58\x50\x43\x4F\x4D\x0A\x54\x79\x70\x65\x4C\x69\x62\x0D\x0A\x1A"
+ + b"\x01\x02\x00\x02\x00\x00\x00\x7B\x00\x00\x00\x24\x00\x00\x00\x5C"
+ + b"\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
+ + b"\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x5F"
+ + b"\x70\xDA\x76\x51\x9C\x48\x58\xB7\x1E\xE3\xC9\x23\x33\xE2\xD6\x00"
+ + b"\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x0D\x00\x66\x6F\x6F\x00"
+ + b"\x62\x61\x72\x00\x62\x61\x72\x00\x00\x00\x00\x01\x00\x00\x00\x00"
+ + b"\x09\x01\x80\x92\x00\x01\x80\x06\x00\x00\x80"
+)
+
+# Compiled typelib for the following IDL:
+# [uuid(3271bebc-927e-4bef-935e-44e0aaf3c1e5)]
+# interface foo {
+# void foo();
+# };
+foo_xpt = GeneratedFile(
+ b"\x58\x50\x43\x4F\x4D\x0A\x54\x79\x70\x65\x4C\x69\x62\x0D\x0A\x1A"
+ + b"\x01\x02\x00\x01\x00\x00\x00\x57\x00\x00\x00\x24\x00\x00\x00\x40"
+ + b"\x80\x00\x00\x32\x71\xBE\xBC\x92\x7E\x4B\xEF\x93\x5E\x44\xE0\xAA"
+ + b"\xF3\xC1\xE5\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x09\x00"
+ + b"\x66\x6F\x6F\x00\x66\x6F\x6F\x00\x00\x00\x00\x01\x00\x00\x00\x00"
+ + b"\x05\x00\x80\x06\x00\x00\x00"
+)
+
+# Compiled typelib for the following IDL:
+# [uuid(7057f2aa-fdc2-4559-abde-08d939f7e80d)]
+# interface foo {
+# void foo();
+# };
+foo2_xpt = GeneratedFile(
+ b"\x58\x50\x43\x4F\x4D\x0A\x54\x79\x70\x65\x4C\x69\x62\x0D\x0A\x1A"
+ + b"\x01\x02\x00\x01\x00\x00\x00\x57\x00\x00\x00\x24\x00\x00\x00\x40"
+ + b"\x80\x00\x00\x70\x57\xF2\xAA\xFD\xC2\x45\x59\xAB\xDE\x08\xD9\x39"
+ + b"\xF7\xE8\x0D\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x09\x00"
+ + b"\x66\x6F\x6F\x00\x66\x6F\x6F\x00\x00\x00\x00\x01\x00\x00\x00\x00"
+ + b"\x05\x00\x80\x06\x00\x00\x00"
+)
+
+
+class TestMinifiedCommentStripped(TestWithTmpDir):
+ def test_minified_comment_stripped(self):
+ propLines = [
+ "# Comments are removed",
+ "foo = bar",
+ "",
+ "# Another comment",
+ ]
+ prop = GeneratedFile("\n".join(propLines))
+ self.assertEqual(
+ MinifiedCommentStripped(prop).open().readlines(), [b"foo = bar\n", b"\n"]
+ )
+ open(self.tmppath("prop"), "w").write("\n".join(propLines))
+ MinifiedCommentStripped(File(self.tmppath("prop"))).copy(self.tmppath("prop2"))
+ self.assertEqual(open(self.tmppath("prop2")).readlines(), ["foo = bar\n", "\n"])
+
+
+class TestMinifiedJavaScript(TestWithTmpDir):
+ orig_lines = [
+ "// Comment line",
+ 'let foo = "bar";',
+ "var bar = true;",
+ "",
+ "// Another comment",
+ ]
+
+ def test_minified_javascript(self):
+ orig_f = GeneratedFile("\n".join(self.orig_lines))
+ min_f = MinifiedJavaScript(orig_f)
+
+ mini_lines = min_f.open().readlines()
+ self.assertTrue(mini_lines)
+ self.assertTrue(len(mini_lines) < len(self.orig_lines))
+
+ def _verify_command(self, code):
+ our_dir = os.path.abspath(os.path.dirname(__file__))
+ return [
+ sys.executable,
+ os.path.join(our_dir, "support", "minify_js_verify.py"),
+ code,
+ ]
+
+ def test_minified_verify_success(self):
+ orig_f = GeneratedFile("\n".join(self.orig_lines))
+ min_f = MinifiedJavaScript(orig_f, verify_command=self._verify_command("0"))
+
+ mini_lines = [six.ensure_text(s) for s in min_f.open().readlines()]
+ self.assertTrue(mini_lines)
+ self.assertTrue(len(mini_lines) < len(self.orig_lines))
+
+ def test_minified_verify_failure(self):
+ orig_f = GeneratedFile("\n".join(self.orig_lines))
+ errors.out = six.StringIO()
+ min_f = MinifiedJavaScript(orig_f, verify_command=self._verify_command("1"))
+
+ mini_lines = min_f.open().readlines()
+ output = errors.out.getvalue()
+ errors.out = sys.stderr
+ self.assertEqual(
+ output,
+ "warning: JS minification verification failed for <unknown>:\n"
+ "warning: Error message\n",
+ )
+ self.assertEqual(mini_lines, orig_f.open().readlines())
+
+
+class MatchTestTemplate(object):
+ def prepare_match_test(self, with_dotfiles=False):
+ self.add("bar")
+ self.add("foo/bar")
+ self.add("foo/baz")
+ self.add("foo/qux/1")
+ self.add("foo/qux/bar")
+ self.add("foo/qux/2/test")
+ self.add("foo/qux/2/test2")
+ if with_dotfiles:
+ self.add("foo/.foo")
+ self.add("foo/.bar/foo")
+
+ def do_match_test(self):
+ self.do_check(
+ "",
+ [
+ "bar",
+ "foo/bar",
+ "foo/baz",
+ "foo/qux/1",
+ "foo/qux/bar",
+ "foo/qux/2/test",
+ "foo/qux/2/test2",
+ ],
+ )
+ self.do_check(
+ "*",
+ [
+ "bar",
+ "foo/bar",
+ "foo/baz",
+ "foo/qux/1",
+ "foo/qux/bar",
+ "foo/qux/2/test",
+ "foo/qux/2/test2",
+ ],
+ )
+ self.do_check(
+ "foo/qux", ["foo/qux/1", "foo/qux/bar", "foo/qux/2/test", "foo/qux/2/test2"]
+ )
+ self.do_check("foo/b*", ["foo/bar", "foo/baz"])
+ self.do_check("baz", [])
+ self.do_check("foo/foo", [])
+ self.do_check("foo/*ar", ["foo/bar"])
+ self.do_check("*ar", ["bar"])
+ self.do_check("*/bar", ["foo/bar"])
+ self.do_check(
+ "foo/*ux", ["foo/qux/1", "foo/qux/bar", "foo/qux/2/test", "foo/qux/2/test2"]
+ )
+ self.do_check(
+ "foo/q*ux",
+ ["foo/qux/1", "foo/qux/bar", "foo/qux/2/test", "foo/qux/2/test2"],
+ )
+ self.do_check("foo/*/2/test*", ["foo/qux/2/test", "foo/qux/2/test2"])
+ self.do_check("**/bar", ["bar", "foo/bar", "foo/qux/bar"])
+ self.do_check("foo/**/test", ["foo/qux/2/test"])
+ self.do_check(
+ "foo",
+ [
+ "foo/bar",
+ "foo/baz",
+ "foo/qux/1",
+ "foo/qux/bar",
+ "foo/qux/2/test",
+ "foo/qux/2/test2",
+ ],
+ )
+ self.do_check(
+ "foo/**",
+ [
+ "foo/bar",
+ "foo/baz",
+ "foo/qux/1",
+ "foo/qux/bar",
+ "foo/qux/2/test",
+ "foo/qux/2/test2",
+ ],
+ )
+ self.do_check("**/2/test*", ["foo/qux/2/test", "foo/qux/2/test2"])
+ self.do_check(
+ "**/foo",
+ [
+ "foo/bar",
+ "foo/baz",
+ "foo/qux/1",
+ "foo/qux/bar",
+ "foo/qux/2/test",
+ "foo/qux/2/test2",
+ ],
+ )
+ self.do_check("**/barbaz", [])
+ self.do_check("f**/bar", ["foo/bar"])
+
+ def do_finder_test(self, finder):
+ self.assertTrue(finder.contains("foo/.foo"))
+ self.assertTrue(finder.contains("foo/.bar"))
+ self.assertTrue("foo/.foo" in [f for f, c in finder.find("foo/.foo")])
+ self.assertTrue("foo/.bar/foo" in [f for f, c in finder.find("foo/.bar")])
+ self.assertEqual(
+ sorted([f for f, c in finder.find("foo/.*")]), ["foo/.bar/foo", "foo/.foo"]
+ )
+ for pattern in ["foo", "**", "**/*", "**/foo", "foo/*"]:
+ self.assertFalse("foo/.foo" in [f for f, c in finder.find(pattern)])
+ self.assertFalse("foo/.bar/foo" in [f for f, c in finder.find(pattern)])
+ self.assertEqual(
+ sorted([f for f, c in finder.find(pattern)]),
+ sorted([f for f, c in finder if mozpath.match(f, pattern)]),
+ )
+
+
+def do_check(test, finder, pattern, result):
+ if result:
+ test.assertTrue(finder.contains(pattern))
+ else:
+ test.assertFalse(finder.contains(pattern))
+ test.assertEqual(sorted(list(f for f, c in finder.find(pattern))), sorted(result))
+
+
+class TestFileFinder(MatchTestTemplate, TestWithTmpDir):
+ def add(self, path):
+ ensureParentDir(self.tmppath(path))
+ open(self.tmppath(path), "wb").write(six.ensure_binary(path))
+
+ def do_check(self, pattern, result):
+ do_check(self, self.finder, pattern, result)
+
+ def test_file_finder(self):
+ self.prepare_match_test(with_dotfiles=True)
+ self.finder = FileFinder(self.tmpdir)
+ self.do_match_test()
+ self.do_finder_test(self.finder)
+
+ def test_get(self):
+ self.prepare_match_test()
+ finder = FileFinder(self.tmpdir)
+
+ self.assertIsNone(finder.get("does-not-exist"))
+ res = finder.get("bar")
+ self.assertIsInstance(res, File)
+ self.assertEqual(mozpath.normpath(res.path), mozpath.join(self.tmpdir, "bar"))
+
+ def test_ignored_dirs(self):
+ """Ignored directories should not have results returned."""
+ self.prepare_match_test()
+ self.add("fooz")
+
+ # Present to ensure prefix matching doesn't exclude.
+ self.add("foo/quxz")
+
+ self.finder = FileFinder(self.tmpdir, ignore=["foo/qux"])
+
+ self.do_check("**", ["bar", "foo/bar", "foo/baz", "foo/quxz", "fooz"])
+ self.do_check("foo/*", ["foo/bar", "foo/baz", "foo/quxz"])
+ self.do_check("foo/**", ["foo/bar", "foo/baz", "foo/quxz"])
+ self.do_check("foo/qux/**", [])
+ self.do_check("foo/qux/*", [])
+ self.do_check("foo/qux/bar", [])
+ self.do_check("foo/quxz", ["foo/quxz"])
+ self.do_check("fooz", ["fooz"])
+
+ def test_ignored_files(self):
+ """Ignored files should not have results returned."""
+ self.prepare_match_test()
+
+ # Be sure prefix match doesn't get ignored.
+ self.add("barz")
+
+ self.finder = FileFinder(self.tmpdir, ignore=["foo/bar", "bar"])
+ self.do_check(
+ "**",
+ [
+ "barz",
+ "foo/baz",
+ "foo/qux/1",
+ "foo/qux/2/test",
+ "foo/qux/2/test2",
+ "foo/qux/bar",
+ ],
+ )
+ self.do_check(
+ "foo/**",
+ [
+ "foo/baz",
+ "foo/qux/1",
+ "foo/qux/2/test",
+ "foo/qux/2/test2",
+ "foo/qux/bar",
+ ],
+ )
+
+ def test_ignored_patterns(self):
+ """Ignore entries with patterns should be honored."""
+ self.prepare_match_test()
+
+ self.add("foo/quxz")
+
+ self.finder = FileFinder(self.tmpdir, ignore=["foo/qux/*"])
+ self.do_check("**", ["foo/bar", "foo/baz", "foo/quxz", "bar"])
+ self.do_check("foo/**", ["foo/bar", "foo/baz", "foo/quxz"])
+
+ def test_dotfiles(self):
+ """Finder can find files beginning with . if configured."""
+ self.prepare_match_test(with_dotfiles=True)
+ self.finder = FileFinder(self.tmpdir, find_dotfiles=True)
+ self.do_check(
+ "**",
+ [
+ "bar",
+ "foo/.foo",
+ "foo/.bar/foo",
+ "foo/bar",
+ "foo/baz",
+ "foo/qux/1",
+ "foo/qux/bar",
+ "foo/qux/2/test",
+ "foo/qux/2/test2",
+ ],
+ )
+
+ def test_dotfiles_plus_ignore(self):
+ self.prepare_match_test(with_dotfiles=True)
+ self.finder = FileFinder(
+ self.tmpdir, find_dotfiles=True, ignore=["foo/.bar/**"]
+ )
+ self.do_check(
+ "foo/**",
+ [
+ "foo/.foo",
+ "foo/bar",
+ "foo/baz",
+ "foo/qux/1",
+ "foo/qux/bar",
+ "foo/qux/2/test",
+ "foo/qux/2/test2",
+ ],
+ )
+
+
+class TestJarFinder(MatchTestTemplate, TestWithTmpDir):
+ def add(self, path):
+ self.jar.add(path, ensure_bytes(path), compress=True)
+
+ def do_check(self, pattern, result):
+ do_check(self, self.finder, pattern, result)
+
+ def test_jar_finder(self):
+ self.jar = JarWriter(file=self.tmppath("test.jar"))
+ self.prepare_match_test()
+ self.jar.finish()
+ reader = JarReader(file=self.tmppath("test.jar"))
+ self.finder = JarFinder(self.tmppath("test.jar"), reader)
+ self.do_match_test()
+
+ self.assertIsNone(self.finder.get("does-not-exist"))
+ self.assertIsInstance(self.finder.get("bar"), DeflatedFile)
+
+
+class TestTarFinder(MatchTestTemplate, TestWithTmpDir):
+ def add(self, path):
+ self.tar.addfile(tarfile.TarInfo(name=path))
+
+ def do_check(self, pattern, result):
+ do_check(self, self.finder, pattern, result)
+
+ def test_tar_finder(self):
+ self.tar = tarfile.open(name=self.tmppath("test.tar.bz2"), mode="w:bz2")
+ self.prepare_match_test()
+ self.tar.close()
+ with tarfile.open(name=self.tmppath("test.tar.bz2"), mode="r:bz2") as tarreader:
+ self.finder = TarFinder(self.tmppath("test.tar.bz2"), tarreader)
+ self.do_match_test()
+
+ self.assertIsNone(self.finder.get("does-not-exist"))
+ self.assertIsInstance(self.finder.get("bar"), ExtractedTarFile)
+
+
+class TestComposedFinder(MatchTestTemplate, TestWithTmpDir):
+ def add(self, path, content=None):
+ # Put foo/qux files under $tmp/b.
+ if path.startswith("foo/qux/"):
+ real_path = mozpath.join("b", path[8:])
+ else:
+ real_path = mozpath.join("a", path)
+ ensureParentDir(self.tmppath(real_path))
+ if not content:
+ content = six.ensure_binary(path)
+ open(self.tmppath(real_path), "wb").write(content)
+
+ def do_check(self, pattern, result):
+ if "*" in pattern:
+ return
+ do_check(self, self.finder, pattern, result)
+
+ def test_composed_finder(self):
+ self.prepare_match_test()
+ # Also add files in $tmp/a/foo/qux because ComposedFinder is
+ # expected to mask foo/qux entirely with content from $tmp/b.
+ ensureParentDir(self.tmppath("a/foo/qux/hoge"))
+ open(self.tmppath("a/foo/qux/hoge"), "wb").write(b"hoge")
+ open(self.tmppath("a/foo/qux/bar"), "wb").write(b"not the right content")
+ self.finder = ComposedFinder(
+ {
+ "": FileFinder(self.tmppath("a")),
+ "foo/qux": FileFinder(self.tmppath("b")),
+ }
+ )
+ self.do_match_test()
+
+ self.assertIsNone(self.finder.get("does-not-exist"))
+ self.assertIsInstance(self.finder.get("bar"), File)
+
+
+@unittest.skipUnless(hglib, "hglib not available")
+@unittest.skipIf(
+ six.PY3 and os.name == "nt", "Does not currently work in Python3 on Windows"
+)
+class TestMercurialRevisionFinder(MatchTestTemplate, TestWithTmpDir):
+ def setUp(self):
+ super(TestMercurialRevisionFinder, self).setUp()
+ hglib.init(self.tmpdir)
+ self._clients = []
+
+ def tearDown(self):
+ # Ensure the hg client process is closed. Otherwise, Windows
+ # may have trouble removing the repo directory because the process
+ # has an open handle on it.
+ for client in getattr(self, "_clients", []):
+ if client.server:
+ client.close()
+
+ self._clients[:] = []
+
+ super(TestMercurialRevisionFinder, self).tearDown()
+
+ def _client(self):
+ configs = (
+ # b'' because py2 needs bytes, not unicode
+ b'ui.username="Dummy User <dummy@example.com>"',
+ )
+ client = hglib.open(
+ six.ensure_binary(self.tmpdir),
+ encoding=b"UTF-8",  # b'' because py2 needs bytes, not unicode
+ configs=configs,
+ )
+ self._clients.append(client)
+ return client
+
+ def add(self, path):
+ with self._client() as c:
+ ensureParentDir(self.tmppath(path))
+ with open(self.tmppath(path), "wb") as fh:
+ fh.write(six.ensure_binary(path))
+ c.add(six.ensure_binary(self.tmppath(path)))
+
+ def do_check(self, pattern, result):
+ do_check(self, self.finder, pattern, result)
+
+ def _get_finder(self, *args, **kwargs):
+ f = MercurialRevisionFinder(*args, **kwargs)
+ self._clients.append(f._client)
+ return f
+
+ def test_default_revision(self):
+ self.prepare_match_test()
+ with self._client() as c:
+ c.commit("initial commit")
+
+ self.finder = self._get_finder(self.tmpdir)
+ self.do_match_test()
+
+ self.assertIsNone(self.finder.get("does-not-exist"))
+ self.assertIsInstance(self.finder.get("bar"), MercurialFile)
+
+ def test_old_revision(self):
+ with self._client() as c:
+ with open(self.tmppath("foo"), "wb") as fh:
+ fh.write(b"foo initial")
+ c.add(six.ensure_binary(self.tmppath("foo")))
+ c.commit("initial")
+
+ with open(self.tmppath("foo"), "wb") as fh:
+ fh.write(b"foo second")
+ with open(self.tmppath("bar"), "wb") as fh:
+ fh.write(b"bar second")
+ c.add(six.ensure_binary(self.tmppath("bar")))
+ c.commit("second")
+ # This wipes out the working directory, ensuring the finder isn't
+ # finding anything from the filesystem.
+ c.rawcommand([b"update", b"null"])
+
+ finder = self._get_finder(self.tmpdir, "0")
+ f = finder.get("foo")
+ self.assertEqual(f.read(), b"foo initial")
+ self.assertEqual(f.read(), b"foo initial", "read again for good measure")
+ self.assertIsNone(finder.get("bar"))
+
+ finder = self._get_finder(self.tmpdir, rev="1")
+ f = finder.get("foo")
+ self.assertEqual(f.read(), b"foo second")
+ f = finder.get("bar")
+ self.assertEqual(f.read(), b"bar second")
+ f = None
+
+ def test_recognize_repo_paths(self):
+ with self._client() as c:
+ with open(self.tmppath("foo"), "wb") as fh:
+ fh.write(b"initial")
+ c.add(six.ensure_binary(self.tmppath("foo")))
+ c.commit("initial")
+ c.rawcommand([b"update", b"null"])
+
+ finder = self._get_finder(self.tmpdir, "0", recognize_repo_paths=True)
+ with self.assertRaises(NotImplementedError):
+ list(finder.find(""))
+
+ with self.assertRaises(ValueError):
+ finder.get("foo")
+ with self.assertRaises(ValueError):
+ finder.get("")
+
+ f = finder.get(self.tmppath("foo"))
+ self.assertIsInstance(f, MercurialFile)
+ self.assertEqual(f.read(), b"initial")
+ f = None
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_manifests.py b/python/mozbuild/mozpack/test/test_manifests.py
new file mode 100644
index 0000000000..a5db53b58c
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_manifests.py
@@ -0,0 +1,465 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+
+import mozunit
+
+from mozpack.copier import FileCopier, FileRegistry
+from mozpack.manifests import InstallManifest, UnreadableInstallManifest
+from mozpack.test.test_files import TestWithTmpDir
+
+
+class TestInstallManifest(TestWithTmpDir):
+ def test_construct(self):
+ m = InstallManifest()
+ self.assertEqual(len(m), 0)
+
+ def test_malformed(self):
+ f = self.tmppath("manifest")
+ open(f, "wt").write("junk\n")
+ with self.assertRaises(UnreadableInstallManifest):
+ InstallManifest(f)
+
+ def test_adds(self):
+ m = InstallManifest()
+ m.add_link("s_source", "s_dest")
+ m.add_copy("c_source", "c_dest")
+ m.add_required_exists("e_dest")
+ m.add_optional_exists("o_dest")
+ m.add_pattern_link("ps_base", "ps/*", "ps_dest")
+ m.add_pattern_copy("pc_base", "pc/**", "pc_dest")
+ m.add_preprocess("p_source", "p_dest", "p_source.pp")
+ m.add_content("content", "content")
+
+ self.assertEqual(len(m), 8)
+ self.assertIn("s_dest", m)
+ self.assertIn("c_dest", m)
+ self.assertIn("p_dest", m)
+ self.assertIn("e_dest", m)
+ self.assertIn("o_dest", m)
+ self.assertIn("content", m)
+
+ with self.assertRaises(ValueError):
+ m.add_link("s_other", "s_dest")
+
+ with self.assertRaises(ValueError):
+ m.add_copy("c_other", "c_dest")
+
+ with self.assertRaises(ValueError):
+ m.add_preprocess("p_other", "p_dest", "p_other.pp")
+
+ with self.assertRaises(ValueError):
+ m.add_required_exists("e_dest")
+
+ with self.assertRaises(ValueError):
+ m.add_optional_exists("o_dest")
+
+ with self.assertRaises(ValueError):
+ m.add_pattern_link("ps_base", "ps/*", "ps_dest")
+
+ with self.assertRaises(ValueError):
+ m.add_pattern_copy("pc_base", "pc/**", "pc_dest")
+
+ with self.assertRaises(ValueError):
+ m.add_content("content", "content")
+
    def _get_test_manifest(self):
        """Build a manifest with one entry of every type; shared fixture for
        the serialization/registry/copier tests below."""
        m = InstallManifest()
        m.add_link(self.tmppath("s_source"), "s_dest")
        m.add_copy(self.tmppath("c_source"), "c_dest")
        m.add_preprocess(
            self.tmppath("p_source"),
            "p_dest",
            self.tmppath("p_source.pp"),
            "#",
            {"FOO": "BAR", "BAZ": "QUX"},
        )
        m.add_required_exists("e_dest")
        m.add_optional_exists("o_dest")
        m.add_pattern_link("ps_base", "*", "ps_dest")
        m.add_pattern_copy("pc_base", "**", "pc_dest")
        m.add_content("the content\non\nmultiple lines", "content")

        return m
+
    def test_serialization(self):
        """A manifest round-trips through write() and the path constructor,
        and re-serializing the loaded copy is byte-identical."""
        m = self._get_test_manifest()

        p = self.tmppath("m")
        m.write(path=p)
        self.assertTrue(os.path.isfile(p))

        with open(p, "r") as fh:
            c = fh.read()

        # Header line plus one line per entry (8 entries), all
        # newline-terminated.
        self.assertEqual(c.count("\n"), 9)

        lines = c.splitlines()
        self.assertEqual(len(lines), 9)

        # The first line carries the serialization format version.
        self.assertEqual(lines[0], "5")

        m2 = InstallManifest(path=p)
        self.assertEqual(m, m2)
        p2 = self.tmppath("m2")
        m2.write(path=p2)

        with open(p2, "r") as fh:
            c2 = fh.read()

        self.assertEqual(c, c2)
+
+ def test_populate_registry(self):
+ m = self._get_test_manifest()
+ r = FileRegistry()
+ m.populate_registry(r)
+
+ self.assertEqual(len(r), 6)
+ self.assertEqual(
+ r.paths(), ["c_dest", "content", "e_dest", "o_dest", "p_dest", "s_dest"]
+ )
+
+ def test_pattern_expansion(self):
+ source = self.tmppath("source")
+ os.mkdir(source)
+ os.mkdir("%s/base" % source)
+ os.mkdir("%s/base/foo" % source)
+
+ with open("%s/base/foo/file1" % source, "a"):
+ pass
+
+ with open("%s/base/foo/file2" % source, "a"):
+ pass
+
+ m = InstallManifest()
+ m.add_pattern_link("%s/base" % source, "**", "dest")
+
+ c = FileCopier()
+ m.populate_registry(c)
+ self.assertEqual(c.paths(), ["dest/foo/file1", "dest/foo/file2"])
+
+ def test_write_expand_pattern(self):
+ source = self.tmppath("source")
+ os.mkdir(source)
+ os.mkdir("%s/base" % source)
+ os.mkdir("%s/base/foo" % source)
+
+ with open("%s/base/foo/file1" % source, "a"):
+ pass
+
+ with open("%s/base/foo/file2" % source, "a"):
+ pass
+
+ m = InstallManifest()
+ m.add_pattern_link("%s/base" % source, "**", "dest")
+
+ track = self.tmppath("track")
+ m.write(path=track, expand_pattern=True)
+
+ m = InstallManifest(path=track)
+ self.assertEqual(
+ sorted(dest for dest in m._dests), ["dest/foo/file1", "dest/foo/file2"]
+ )
+
+ def test_or(self):
+ m1 = self._get_test_manifest()
+ orig_length = len(m1)
+ m2 = InstallManifest()
+ m2.add_link("s_source2", "s_dest2")
+ m2.add_copy("c_source2", "c_dest2")
+
+ m1 |= m2
+
+ self.assertEqual(len(m2), 2)
+ self.assertEqual(len(m1), orig_length + 2)
+
+ self.assertIn("s_dest2", m1)
+ self.assertIn("c_dest2", m1)
+
    def test_copier_application(self):
        """Applying the manifest through a FileCopier materializes every
        entry type and removes files not accounted for by the manifest."""
        dest = self.tmppath("dest")
        os.mkdir(dest)

        # A stale file not named by the manifest; copy() must delete it.
        to_delete = self.tmppath("dest/to_delete")
        with open(to_delete, "a"):
            pass

        with open(self.tmppath("s_source"), "wt") as fh:
            fh.write("symlink!")

        with open(self.tmppath("c_source"), "wt") as fh:
            fh.write("copy!")

        with open(self.tmppath("p_source"), "wt") as fh:
            fh.write("#define FOO 1\npreprocess!")

        # Pre-create the exists-type destinations so copy() reports them as
        # existing rather than missing.
        with open(self.tmppath("dest/e_dest"), "a"):
            pass

        with open(self.tmppath("dest/o_dest"), "a"):
            pass

        m = self._get_test_manifest()
        c = FileCopier()
        m.populate_registry(c)
        result = c.copy(dest)

        self.assertTrue(os.path.exists(self.tmppath("dest/s_dest")))
        self.assertTrue(os.path.exists(self.tmppath("dest/c_dest")))
        self.assertTrue(os.path.exists(self.tmppath("dest/p_dest")))
        self.assertTrue(os.path.exists(self.tmppath("dest/e_dest")))
        self.assertTrue(os.path.exists(self.tmppath("dest/o_dest")))
        self.assertTrue(os.path.exists(self.tmppath("dest/content")))
        self.assertFalse(os.path.exists(to_delete))

        with open(self.tmppath("dest/s_dest"), "rt") as fh:
            self.assertEqual(fh.read(), "symlink!")

        with open(self.tmppath("dest/c_dest"), "rt") as fh:
            self.assertEqual(fh.read(), "copy!")

        # The preprocessed output has the #define line consumed.
        with open(self.tmppath("dest/p_dest"), "rt") as fh:
            self.assertEqual(fh.read(), "preprocess!")

        self.assertEqual(
            result.updated_files,
            set(
                self.tmppath(p)
                for p in ("dest/s_dest", "dest/c_dest", "dest/p_dest", "dest/content")
            ),
        )
        self.assertEqual(
            result.existing_files,
            set([self.tmppath("dest/e_dest"), self.tmppath("dest/o_dest")]),
        )
        self.assertEqual(result.removed_files, {to_delete})
        self.assertEqual(result.removed_directories, set())
+
    def test_preprocessor(self):
        """End-to-end preprocessing through InstallManifest + FileCopier,
        including regeneration when the manifest or an #include changes.
        The mtime backdating below keeps the comparisons deterministic."""
        manifest = self.tmppath("m")
        deps = self.tmppath("m.pp")
        dest = self.tmppath("dest")
        include = self.tmppath("p_incl")

        with open(include, "wt") as fh:
            fh.write("#define INCL\n")
        # Backdate the input so later outputs are newer than it.
        time = os.path.getmtime(include) - 3
        os.utime(include, (time, time))

        with open(self.tmppath("p_source"), "wt") as fh:
            fh.write("#ifdef FOO\n#if BAZ == QUX\nPASS1\n#endif\n#endif\n")
            fh.write("#ifdef DEPTEST\nPASS2\n#endif\n")
            fh.write("#include p_incl\n#ifdef INCLTEST\nPASS3\n#endif\n")
        time = os.path.getmtime(self.tmppath("p_source")) - 3
        os.utime(self.tmppath("p_source"), (time, time))

        # Create and write a manifest with the preprocessed file, then apply it.
        # This should write out our preprocessed file.
        m = InstallManifest()
        m.add_preprocess(
            self.tmppath("p_source"), "p_dest", deps, "#", {"FOO": "BAR", "BAZ": "QUX"}
        )
        m.write(path=manifest)

        m = InstallManifest(path=manifest)
        c = FileCopier()
        m.populate_registry(c)
        c.copy(dest)

        self.assertTrue(os.path.exists(self.tmppath("dest/p_dest")))

        with open(self.tmppath("dest/p_dest"), "rt") as fh:
            self.assertEqual(fh.read(), "PASS1\n")

        # Create a second manifest with the preprocessed file, then apply it.
        # Since this manifest does not exist on the disk, there should not be a
        # dependency on it, and the preprocessed file should not be modified.
        m2 = InstallManifest()
        m2.add_preprocess(
            self.tmppath("p_source"), "p_dest", deps, "#", {"DEPTEST": True}
        )
        c = FileCopier()
        m2.populate_registry(c)
        result = c.copy(dest)

        self.assertFalse(self.tmppath("dest/p_dest") in result.updated_files)
        self.assertTrue(self.tmppath("dest/p_dest") in result.existing_files)

        # Write out the second manifest, then load it back in from the disk.
        # This should add the dependency on the manifest file, so our
        # preprocessed file should be regenerated with the new defines.
        # We also set the mtime on the destination file back, so it will be
        # older than the manifest file.
        m2.write(path=manifest)
        time = os.path.getmtime(manifest) - 1
        os.utime(self.tmppath("dest/p_dest"), (time, time))
        m2 = InstallManifest(path=manifest)
        c = FileCopier()
        m2.populate_registry(c)
        self.assertTrue(c.copy(dest))

        with open(self.tmppath("dest/p_dest"), "rt") as fh:
            self.assertEqual(fh.read(), "PASS2\n")

        # Set the time on the manifest back, so it won't be picked up as
        # modified in the next test
        time = os.path.getmtime(manifest) - 1
        os.utime(manifest, (time, time))

        # Update the contents of a file included by the source file. This should
        # cause the destination to be regenerated.
        with open(include, "wt") as fh:
            fh.write("#define INCLTEST\n")

        time = os.path.getmtime(include) - 1
        os.utime(self.tmppath("dest/p_dest"), (time, time))
        c = FileCopier()
        m2.populate_registry(c)
        self.assertTrue(c.copy(dest))

        with open(self.tmppath("dest/p_dest"), "rt") as fh:
            self.assertEqual(fh.read(), "PASS2\nPASS3\n")
+
    def test_preprocessor_dependencies(self):
        """Changes to the preprocessor source and to files it #includes must
        both trigger regeneration of the destination."""
        manifest = self.tmppath("m")
        deps = self.tmppath("m.pp")
        dest = self.tmppath("dest")
        source = self.tmppath("p_source")
        destfile = self.tmppath("dest/p_dest")
        include = self.tmppath("p_incl")
        os.mkdir(dest)

        with open(source, "wt") as fh:
            fh.write("#define SRC\nSOURCE\n")
        # Backdate inputs so the outputs written below are newer.
        time = os.path.getmtime(source) - 3
        os.utime(source, (time, time))

        with open(include, "wt") as fh:
            fh.write("INCLUDE\n")
        time = os.path.getmtime(source) - 3
        os.utime(include, (time, time))

        # Create and write a manifest with the preprocessed file.
        m = InstallManifest()
        m.add_preprocess(source, "p_dest", deps, "#", {"FOO": "BAR", "BAZ": "QUX"})
        m.write(path=manifest)

        time = os.path.getmtime(source) - 5
        os.utime(manifest, (time, time))

        # Now read the manifest back in, and apply it. This should write out
        # our preprocessed file.
        m = InstallManifest(path=manifest)
        c = FileCopier()
        m.populate_registry(c)
        self.assertTrue(c.copy(dest))

        with open(destfile, "rt") as fh:
            self.assertEqual(fh.read(), "SOURCE\n")

        # Next, modify the source to #include another file.
        with open(source, "wt") as fh:
            fh.write("SOURCE\n#include p_incl\n")
        time = os.path.getmtime(source) - 1
        os.utime(destfile, (time, time))

        # Apply the manifest, and confirm that it also reads the newly included
        # file.
        m = InstallManifest(path=manifest)
        c = FileCopier()
        m.populate_registry(c)
        c.copy(dest)

        with open(destfile, "rt") as fh:
            self.assertEqual(fh.read(), "SOURCE\nINCLUDE\n")

        # Set the time on the source file back, so it won't be picked up as
        # modified in the next test.
        time = os.path.getmtime(source) - 1
        os.utime(source, (time, time))

        # Now, modify the include file (but not the original source).
        with open(include, "wt") as fh:
            fh.write("INCLUDE MODIFIED\n")
        time = os.path.getmtime(include) - 1
        os.utime(destfile, (time, time))

        # Apply the manifest, and confirm that the change to the include file
        # is detected. That should cause the preprocessor to run again.
        m = InstallManifest(path=manifest)
        c = FileCopier()
        m.populate_registry(c)
        c.copy(dest)

        with open(destfile, "rt") as fh:
            self.assertEqual(fh.read(), "SOURCE\nINCLUDE MODIFIED\n")

        # ORing an InstallManifest should copy file dependencies
        m = InstallManifest()
        m |= InstallManifest(path=manifest)
        c = FileCopier()
        m.populate_registry(c)
        e = c._files["p_dest"]
        self.assertEqual(e.extra_depends, [manifest])
+
+ def test_add_entries_from(self):
+ source = self.tmppath("source")
+ os.mkdir(source)
+ os.mkdir("%s/base" % source)
+ os.mkdir("%s/base/foo" % source)
+
+ with open("%s/base/foo/file1" % source, "a"):
+ pass
+
+ with open("%s/base/foo/file2" % source, "a"):
+ pass
+
+ m = InstallManifest()
+ m.add_pattern_link("%s/base" % source, "**", "dest")
+
+ p = InstallManifest()
+ p.add_entries_from(m)
+ self.assertEqual(len(p), 1)
+
+ c = FileCopier()
+ p.populate_registry(c)
+ self.assertEqual(c.paths(), ["dest/foo/file1", "dest/foo/file2"])
+
+ q = InstallManifest()
+ q.add_entries_from(m, base="target")
+ self.assertEqual(len(q), 1)
+
+ d = FileCopier()
+ q.populate_registry(d)
+ self.assertEqual(d.paths(), ["target/dest/foo/file1", "target/dest/foo/file2"])
+
+ # Some of the values in an InstallManifest include destination
+ # information that is present in the keys. Verify that we can
+ # round-trip serialization.
+ r = InstallManifest()
+ r.add_entries_from(m)
+ r.add_entries_from(m, base="target")
+ self.assertEqual(len(r), 2)
+
+ temp_path = self.tmppath("temp_path")
+ r.write(path=temp_path)
+
+ s = InstallManifest(path=temp_path)
+ e = FileCopier()
+ s.populate_registry(e)
+
+ self.assertEqual(
+ e.paths(),
+ [
+ "dest/foo/file1",
+ "dest/foo/file2",
+ "target/dest/foo/file1",
+ "target/dest/foo/file2",
+ ],
+ )
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_mozjar.py b/python/mozbuild/mozpack/test/test_mozjar.py
new file mode 100644
index 0000000000..e96c59238f
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_mozjar.py
@@ -0,0 +1,350 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import unittest
+from collections import OrderedDict
+
+import mozunit
+import six
+
+import mozpack.path as mozpath
+from mozpack.files import FileFinder
+from mozpack.mozjar import (
+ Deflater,
+ JarLog,
+ JarReader,
+ JarReaderError,
+ JarStruct,
+ JarWriter,
+ JarWriterError,
+)
+from mozpack.test.test_files import MockDest
+
+test_data_path = mozpath.abspath(mozpath.dirname(__file__))
+test_data_path = mozpath.join(test_data_path, "data")
+
+
class TestJarStruct(unittest.TestCase):
    """Tests for JarStruct (de)serialization via a synthetic subclass with
    fixed-width fields and two variable-length strings."""

    class Foo(JarStruct):
        # 4-byte signature, three fixed-width fields, and two strings whose
        # byte counts are carried by the length/length2 fields.
        MAGIC = 0x01020304
        STRUCT = OrderedDict(
            [
                ("foo", "uint32"),
                ("bar", "uint16"),
                ("qux", "uint16"),
                ("length", "uint16"),
                ("length2", "uint16"),
                ("string", "length"),
                ("string2", "length2"),
            ]
        )

    def test_jar_struct(self):
        """A default-constructed struct is zeroed and serializes with the
        little-endian magic first."""
        foo = TestJarStruct.Foo()
        self.assertEqual(foo.signature, TestJarStruct.Foo.MAGIC)
        self.assertEqual(foo["foo"], 0)
        self.assertEqual(foo["bar"], 0)
        self.assertEqual(foo["qux"], 0)
        # Size fields are implicit — not exposed as keys.
        self.assertFalse("length" in foo)
        self.assertFalse("length2" in foo)
        self.assertEqual(foo["string"], "")
        self.assertEqual(foo["string2"], "")

        self.assertEqual(foo.size, 16)

        foo["foo"] = 0x42434445
        foo["bar"] = 0xABCD
        foo["qux"] = 0xEF01
        foo["string"] = "abcde"
        foo["string2"] = "Arbitrarily long string"

        # Magic, fields and string lengths little-endian, then the raw
        # string payloads.
        serialized = (
            b"\x04\x03\x02\x01\x45\x44\x43\x42\xcd\xab\x01\xef"
            + b"\x05\x00\x17\x00abcdeArbitrarily long string"
        )
        self.assertEqual(foo.size, len(serialized))
        foo_serialized = foo.serialize()
        self.assertEqual(foo_serialized, serialized)

    def do_test_read_jar_struct(self, data):
        """Shared body: parsing fails when the signature does not match and
        succeeds one byte in, where the magic lines up."""
        self.assertRaises(JarReaderError, TestJarStruct.Foo, data)
        self.assertRaises(JarReaderError, TestJarStruct.Foo, data[2:])

        foo = TestJarStruct.Foo(data[1:])
        self.assertEqual(foo["foo"], 0x45444342)
        self.assertEqual(foo["bar"], 0xCDAB)
        self.assertEqual(foo["qux"], 0x01EF)
        self.assertFalse("length" in foo)
        self.assertFalse("length2" in foo)
        self.assertEqual(foo["string"], b"012345")
        self.assertEqual(foo["string2"], b"67")

    def test_read_jar_struct(self):
        data = (
            b"\x00\x04\x03\x02\x01\x42\x43\x44\x45\xab\xcd\xef"
            + b"\x01\x06\x00\x02\x0001234567890"
        )
        self.do_test_read_jar_struct(data)

    def test_read_jar_struct_memoryview(self):
        # Same bytes, fed through a zero-copy memoryview.
        data = (
            b"\x00\x04\x03\x02\x01\x42\x43\x44\x45\xab\xcd\xef"
            + b"\x01\x06\x00\x02\x0001234567890"
        )
        self.do_test_read_jar_struct(memoryview(data))
+
+
class TestDeflater(unittest.TestCase):
    """Exercise Deflater with and without compression enabled."""

    def wrap(self, data):
        # Subclasses override this to feed other buffer types to Deflater.
        return data

    def test_deflater_no_compress(self):
        d = Deflater(False)
        d.write(self.wrap(b"abc"))
        self.assertFalse(d.compressed)
        self.assertEqual(d.uncompressed_size, 3)
        self.assertEqual(d.compressed_size, d.uncompressed_size)
        self.assertEqual(d.compressed_data, b"abc")
        self.assertEqual(d.crc32, 0x352441C2)

    def test_deflater_compress_no_gain(self):
        # Compression requested, but the input is too small to shrink: the
        # data is kept uncompressed (compressed flag stays False).
        d = Deflater(True)
        d.write(self.wrap(b"abc"))
        self.assertFalse(d.compressed)
        self.assertEqual(d.uncompressed_size, 3)
        self.assertEqual(d.compressed_size, d.uncompressed_size)
        self.assertEqual(d.compressed_data, b"abc")
        self.assertEqual(d.crc32, 0x352441C2)

    def test_deflater_compress(self):
        d = Deflater(True)
        d.write(self.wrap(b"aaaaaaaaaaaaanopqrstuvwxyz"))
        self.assertTrue(d.compressed)
        self.assertEqual(d.uncompressed_size, 26)
        self.assertNotEqual(d.compressed_size, d.uncompressed_size)
        self.assertEqual(d.crc32, 0xD46B97ED)
        # The CRC is the same as when not compressed
        d = Deflater(False)
        self.assertFalse(d.compressed)
        d.write(self.wrap(b"aaaaaaaaaaaaanopqrstuvwxyz"))
        self.assertEqual(d.crc32, 0xD46B97ED)

    def test_deflater_empty(self):
        d = Deflater(False)
        self.assertFalse(d.compressed)
        self.assertEqual(d.uncompressed_size, 0)
        self.assertEqual(d.compressed_size, d.uncompressed_size)
        self.assertEqual(d.compressed_data, b"")
        self.assertEqual(d.crc32, 0)
+
+
class TestDeflaterMemoryView(TestDeflater):
    # Re-run every TestDeflater case with memoryview inputs instead of bytes.
    def wrap(self, data):
        return memoryview(data)
+
+
+class TestJar(unittest.TestCase):
+ def test_jar(self):
+ s = MockDest()
+ with JarWriter(fileobj=s) as jar:
+ jar.add("foo", b"foo")
+ self.assertRaises(JarWriterError, jar.add, "foo", b"bar")
+ jar.add("bar", b"aaaaaaaaaaaaanopqrstuvwxyz")
+ jar.add("baz/qux", b"aaaaaaaaaaaaanopqrstuvwxyz", False)
+ jar.add("baz\\backslash", b"aaaaaaaaaaaaaaa")
+
+ files = [j for j in JarReader(fileobj=s)]
+
+ self.assertEqual(files[0].filename, "foo")
+ self.assertFalse(files[0].compressed)
+ self.assertEqual(files[0].read(), b"foo")
+
+ self.assertEqual(files[1].filename, "bar")
+ self.assertTrue(files[1].compressed)
+ self.assertEqual(files[1].read(), b"aaaaaaaaaaaaanopqrstuvwxyz")
+
+ self.assertEqual(files[2].filename, "baz/qux")
+ self.assertFalse(files[2].compressed)
+ self.assertEqual(files[2].read(), b"aaaaaaaaaaaaanopqrstuvwxyz")
+
+ if os.sep == "\\":
+ self.assertEqual(
+ files[3].filename,
+ "baz/backslash",
+ "backslashes in filenames on Windows should get normalized",
+ )
+ else:
+ self.assertEqual(
+ files[3].filename,
+ "baz\\backslash",
+ "backslashes in filenames on POSIX platform are untouched",
+ )
+
+ s = MockDest()
+ with JarWriter(fileobj=s, compress=False) as jar:
+ jar.add("bar", b"aaaaaaaaaaaaanopqrstuvwxyz")
+ jar.add("foo", b"foo")
+ jar.add("baz/qux", b"aaaaaaaaaaaaanopqrstuvwxyz", True)
+
+ jar = JarReader(fileobj=s)
+ files = [j for j in jar]
+
+ self.assertEqual(files[0].filename, "bar")
+ self.assertFalse(files[0].compressed)
+ self.assertEqual(files[0].read(), b"aaaaaaaaaaaaanopqrstuvwxyz")
+
+ self.assertEqual(files[1].filename, "foo")
+ self.assertFalse(files[1].compressed)
+ self.assertEqual(files[1].read(), b"foo")
+
+ self.assertEqual(files[2].filename, "baz/qux")
+ self.assertTrue(files[2].compressed)
+ self.assertEqual(files[2].read(), b"aaaaaaaaaaaaanopqrstuvwxyz")
+
+ self.assertTrue("bar" in jar)
+ self.assertTrue("foo" in jar)
+ self.assertFalse("baz" in jar)
+ self.assertTrue("baz/qux" in jar)
+ self.assertTrue(jar["bar"], files[1])
+ self.assertTrue(jar["foo"], files[0])
+ self.assertTrue(jar["baz/qux"], files[2])
+
+ s.seek(0)
+ jar = JarReader(fileobj=s)
+ self.assertTrue("bar" in jar)
+ self.assertTrue("foo" in jar)
+ self.assertFalse("baz" in jar)
+ self.assertTrue("baz/qux" in jar)
+
+ files[0].seek(0)
+ self.assertEqual(jar["bar"].filename, files[0].filename)
+ self.assertEqual(jar["bar"].compressed, files[0].compressed)
+ self.assertEqual(jar["bar"].read(), files[0].read())
+
+ files[1].seek(0)
+ self.assertEqual(jar["foo"].filename, files[1].filename)
+ self.assertEqual(jar["foo"].compressed, files[1].compressed)
+ self.assertEqual(jar["foo"].read(), files[1].read())
+
+ files[2].seek(0)
+ self.assertEqual(jar["baz/qux"].filename, files[2].filename)
+ self.assertEqual(jar["baz/qux"].compressed, files[2].compressed)
+ self.assertEqual(jar["baz/qux"].read(), files[2].read())
+
+ def test_rejar(self):
+ s = MockDest()
+ with JarWriter(fileobj=s) as jar:
+ jar.add("foo", b"foo")
+ jar.add("bar", b"aaaaaaaaaaaaanopqrstuvwxyz")
+ jar.add("baz/qux", b"aaaaaaaaaaaaanopqrstuvwxyz", False)
+
+ new = MockDest()
+ with JarWriter(fileobj=new) as jar:
+ for j in JarReader(fileobj=s):
+ jar.add(j.filename, j)
+
+ jar = JarReader(fileobj=new)
+ files = [j for j in jar]
+
+ self.assertEqual(files[0].filename, "foo")
+ self.assertFalse(files[0].compressed)
+ self.assertEqual(files[0].read(), b"foo")
+
+ self.assertEqual(files[1].filename, "bar")
+ self.assertTrue(files[1].compressed)
+ self.assertEqual(files[1].read(), b"aaaaaaaaaaaaanopqrstuvwxyz")
+
+ self.assertEqual(files[2].filename, "baz/qux")
+ self.assertTrue(files[2].compressed)
+ self.assertEqual(files[2].read(), b"aaaaaaaaaaaaanopqrstuvwxyz")
+
+ def test_add_from_finder(self):
+ s = MockDest()
+ with JarWriter(fileobj=s) as jar:
+ finder = FileFinder(test_data_path)
+ for p, f in finder.find("test_data"):
+ jar.add("test_data", f)
+
+ jar = JarReader(fileobj=s)
+ files = [j for j in jar]
+
+ self.assertEqual(files[0].filename, "test_data")
+ self.assertFalse(files[0].compressed)
+ self.assertEqual(files[0].read(), b"test_data")
+
+
class TestPreload(unittest.TestCase):
    """Validate the preload() directive on written jars."""

    def test_preload(self):
        dest = MockDest()
        with JarWriter(fileobj=dest) as writer:
            writer.add("foo", b"foo")
            writer.add("bar", b"abcdefghijklmnopqrstuvwxyz")
            writer.add("baz/qux", b"aaaaaaaaaaaaanopqrstuvwxyz")

        # Without a preload() call nothing is marked as preloaded.
        self.assertEqual(JarReader(fileobj=dest).last_preloaded, None)

        with JarWriter(fileobj=dest) as writer:
            writer.add("foo", b"foo")
            writer.add("bar", b"abcdefghijklmnopqrstuvwxyz")
            writer.add("baz/qux", b"aaaaaaaaaaaaanopqrstuvwxyz")
            writer.preload(["baz/qux", "bar"])

        reader = JarReader(fileobj=dest)
        self.assertEqual(reader.last_preloaded, "bar")
        entries = [e for e in reader]

        # Preloaded entries come first, in the requested order.
        self.assertEqual(entries[0].filename, "baz/qux")
        self.assertEqual(entries[1].filename, "bar")
        self.assertEqual(entries[2].filename, "foo")
+
+
class TestJarLog(unittest.TestCase):
    """JarLog parsing: lines are grouped per jar, deduplicated, order kept."""

    def test_jarlog(self):
        access_lines = [
            "bar/baz.jar first",
            "bar/baz.jar second",
            "bar/baz.jar third",
            "bar/baz.jar second",
            "bar/baz.jar second",
            "omni.ja stuff",
            "bar/baz.jar first",
            "omni.ja other/stuff",
            "omni.ja stuff",
            "bar/baz.jar third",
        ]
        log = JarLog(fileobj=six.moves.cStringIO("\n".join(access_lines)))

        self.assertEqual(set(log.keys()), {"bar/baz.jar", "omni.ja"})
        # Repeated accesses collapse to the first occurrence per jar.
        self.assertEqual(log["bar/baz.jar"], ["first", "second", "third"])
        self.assertEqual(log["omni.ja"], ["stuff", "other/stuff"])
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_packager.py b/python/mozbuild/mozpack/test/test_packager.py
new file mode 100644
index 0000000000..266902ebb2
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_packager.py
@@ -0,0 +1,630 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import unittest
+
+import mozunit
+from buildconfig import topobjdir
+from mozunit import MockedOpen
+
+import mozpack.path as mozpath
+from mozbuild.preprocessor import Preprocessor
+from mozpack.chrome.manifest import (
+ ManifestBinaryComponent,
+ ManifestContent,
+ ManifestResource,
+)
+from mozpack.errors import ErrorMessage, errors
+from mozpack.files import GeneratedFile
+from mozpack.packager import (
+ CallDeque,
+ Component,
+ SimpleManifestSink,
+ SimplePackager,
+ preprocess_manifest,
+)
+
# Sample packaging manifest exercised by the tests below: a bare glob, named
# component sections ([foo], [zot destdir=...]), a removal entry (-foo/bar),
# a ';' comment, and a #ifdef-guarded section using @SUFFIX@ substitution.
MANIFEST = """
bar/*
[foo]
foo/*
-foo/bar
chrome.manifest
[zot destdir="destdir"]
foo/zot
; comment
#ifdef baz
[baz]
baz@SUFFIX@
#endif
"""
+
+
class TestPreprocessManifest(unittest.TestCase):
    """Tests for preprocess_manifest: parsing MANIFEST and replaying
    add/remove calls into a sink, with and without preprocessor defines."""

    MANIFEST_PATH = mozpath.join("$OBJDIR", "manifest")

    # (context, action, component-repr, path) tuples expected when no
    # defines are set (the #ifdef baz section is skipped).
    EXPECTED_LOG = [
        ((MANIFEST_PATH, 2), "add", "", "bar/*"),
        ((MANIFEST_PATH, 4), "add", "foo", "foo/*"),
        ((MANIFEST_PATH, 5), "remove", "foo", "foo/bar"),
        ((MANIFEST_PATH, 6), "add", "foo", "chrome.manifest"),
        ((MANIFEST_PATH, 8), "add", 'zot destdir="destdir"', "foo/zot"),
    ]

    def setUp(self):
        class MockSink(object):
            # Records (context, action, component, path) for every call.
            def __init__(self):
                self.log = []

            def add(self, component, path):
                self._log(errors.get_context(), "add", repr(component), path)

            def remove(self, component, path):
                self._log(errors.get_context(), "remove", repr(component), path)

            def _log(self, *args):
                self.log.append(args)

        self.sink = MockSink()
        # Run with cwd = the object directory; restored in tearDown.
        self.cwd = os.getcwd()
        os.chdir(topobjdir)

    def tearDown(self):
        os.chdir(self.cwd)

    def test_preprocess_manifest(self):
        with MockedOpen({"manifest": MANIFEST}):
            preprocess_manifest(self.sink, "manifest")
        self.assertEqual(self.sink.log, self.EXPECTED_LOG)

    def test_preprocess_manifest_missing_define(self):
        # baz is defined but SUFFIX is not, so expanding baz@SUFFIX@ fails.
        with MockedOpen({"manifest": MANIFEST}):
            self.assertRaises(
                Preprocessor.Error,
                preprocess_manifest,
                self.sink,
                "manifest",
                {"baz": 1},
            )

    def test_preprocess_manifest_defines(self):
        with MockedOpen({"manifest": MANIFEST}):
            preprocess_manifest(self.sink, "manifest", {"baz": 1, "SUFFIX": ".exe"})
        self.assertEqual(
            self.sink.log,
            self.EXPECTED_LOG + [((self.MANIFEST_PATH, 12), "add", "baz", "baz.exe")],
        )
+
+
class MockFinder(object):
    """Finder stub serving an in-memory path->file mapping; records every
    pattern it is asked to find."""

    def __init__(self, files):
        self.files = files
        self.log = []

    def find(self, path):
        self.log.append(path)
        # Yield matches in deterministic (sorted) order.
        for name in sorted(self.files):
            if mozpath.match(name, path):
                yield name, self.files[name]

    def __iter__(self):
        return self.find("")
+
+
class MockFormatter(object):
    """Formatter stub recording every call together with the current error
    context."""

    def __init__(self):
        self.log = []

    def _log(self, *args):
        self.log.append(args)

    def add_base(self, *args):
        self._log(errors.get_context(), "add_base", *args)

    def add_manifest(self, *args):
        self._log(errors.get_context(), "add_manifest", *args)

    def add_interfaces(self, *args):
        self._log(errors.get_context(), "add_interfaces", *args)

    def add(self, *args):
        self._log(errors.get_context(), "add", *args)
+
+
+class TestSimplePackager(unittest.TestCase):
+ def test_simple_packager(self):
+ class GeneratedFileWithPath(GeneratedFile):
+ def __init__(self, path, content):
+ GeneratedFile.__init__(self, content)
+ self.path = path
+
+ formatter = MockFormatter()
+ packager = SimplePackager(formatter)
+ curdir = os.path.abspath(os.curdir)
+ file = GeneratedFileWithPath(
+ os.path.join(curdir, "foo", "bar.manifest"),
+ b"resource bar bar/\ncontent bar bar/",
+ )
+ with errors.context("manifest", 1):
+ packager.add("foo/bar.manifest", file)
+
+ file = GeneratedFileWithPath(
+ os.path.join(curdir, "foo", "baz.manifest"), b"resource baz baz/"
+ )
+ with errors.context("manifest", 2):
+ packager.add("bar/baz.manifest", file)
+
+ with errors.context("manifest", 3):
+ packager.add(
+ "qux/qux.manifest",
+ GeneratedFile(
+ b"".join(
+ [
+ b"resource qux qux/\n",
+ b"binary-component qux.so\n",
+ ]
+ )
+ ),
+ )
+ bar_xpt = GeneratedFile(b"bar.xpt")
+ qux_xpt = GeneratedFile(b"qux.xpt")
+ foo_html = GeneratedFile(b"foo_html")
+ bar_html = GeneratedFile(b"bar_html")
+ with errors.context("manifest", 4):
+ packager.add("foo/bar.xpt", bar_xpt)
+ with errors.context("manifest", 5):
+ packager.add("foo/bar/foo.html", foo_html)
+ packager.add("foo/bar/bar.html", bar_html)
+
+ file = GeneratedFileWithPath(
+ os.path.join(curdir, "foo.manifest"),
+ b"".join(
+ [
+ b"manifest foo/bar.manifest\n",
+ b"manifest bar/baz.manifest\n",
+ ]
+ ),
+ )
+ with errors.context("manifest", 6):
+ packager.add("foo.manifest", file)
+ with errors.context("manifest", 7):
+ packager.add("foo/qux.xpt", qux_xpt)
+
+ file = GeneratedFileWithPath(
+ os.path.join(curdir, "addon", "chrome.manifest"), b"resource hoge hoge/"
+ )
+ with errors.context("manifest", 8):
+ packager.add("addon/chrome.manifest", file)
+
+ install_rdf = GeneratedFile(b"<RDF></RDF>")
+ with errors.context("manifest", 9):
+ packager.add("addon/install.rdf", install_rdf)
+
+ with errors.context("manifest", 10):
+ packager.add("addon2/install.rdf", install_rdf)
+ packager.add(
+ "addon2/chrome.manifest", GeneratedFile(b"binary-component addon2.so")
+ )
+
+ with errors.context("manifest", 11):
+ packager.add("addon3/install.rdf", install_rdf)
+ packager.add(
+ "addon3/chrome.manifest",
+ GeneratedFile(b"manifest components/components.manifest"),
+ )
+ packager.add(
+ "addon3/components/components.manifest",
+ GeneratedFile(b"binary-component addon3.so"),
+ )
+
+ with errors.context("manifest", 12):
+ install_rdf_addon4 = GeneratedFile(
+ b"<RDF>\n<...>\n<em:unpack>true</em:unpack>\n<...>\n</RDF>"
+ )
+ packager.add("addon4/install.rdf", install_rdf_addon4)
+
+ with errors.context("manifest", 13):
+ install_rdf_addon5 = GeneratedFile(
+ b"<RDF>\n<...>\n<em:unpack>false</em:unpack>\n<...>\n</RDF>"
+ )
+ packager.add("addon5/install.rdf", install_rdf_addon5)
+
+ with errors.context("manifest", 14):
+ install_rdf_addon6 = GeneratedFile(
+ b"<RDF>\n<... em:unpack=true>\n<...>\n</RDF>"
+ )
+ packager.add("addon6/install.rdf", install_rdf_addon6)
+
+ with errors.context("manifest", 15):
+ install_rdf_addon7 = GeneratedFile(
+ b"<RDF>\n<... em:unpack=false>\n<...>\n</RDF>"
+ )
+ packager.add("addon7/install.rdf", install_rdf_addon7)
+
+ with errors.context("manifest", 16):
+ install_rdf_addon8 = GeneratedFile(
+ b'<RDF>\n<... em:unpack="true">\n<...>\n</RDF>'
+ )
+ packager.add("addon8/install.rdf", install_rdf_addon8)
+
+ with errors.context("manifest", 17):
+ install_rdf_addon9 = GeneratedFile(
+ b'<RDF>\n<... em:unpack="false">\n<...>\n</RDF>'
+ )
+ packager.add("addon9/install.rdf", install_rdf_addon9)
+
+ with errors.context("manifest", 18):
+ install_rdf_addon10 = GeneratedFile(
+ b"<RDF>\n<... em:unpack='true'>\n<...>\n</RDF>"
+ )
+ packager.add("addon10/install.rdf", install_rdf_addon10)
+
+ with errors.context("manifest", 19):
+ install_rdf_addon11 = GeneratedFile(
+ b"<RDF>\n<... em:unpack='false'>\n<...>\n</RDF>"
+ )
+ packager.add("addon11/install.rdf", install_rdf_addon11)
+
+ we_manifest = GeneratedFile(
+ b'{"manifest_version": 2, "name": "Test WebExtension", "version": "1.0"}'
+ )
+ # hybrid and hybrid2 are both bootstrapped extensions with
+ # embedded webextensions, they differ in the order in which
+ # the manifests are added to the packager.
+ with errors.context("manifest", 20):
+ packager.add("hybrid/install.rdf", install_rdf)
+
+ with errors.context("manifest", 21):
+ packager.add("hybrid/webextension/manifest.json", we_manifest)
+
+ with errors.context("manifest", 22):
+ packager.add("hybrid2/webextension/manifest.json", we_manifest)
+
+ with errors.context("manifest", 23):
+ packager.add("hybrid2/install.rdf", install_rdf)
+
+ with errors.context("manifest", 24):
+ packager.add("webextension/manifest.json", we_manifest)
+
+ non_we_manifest = GeneratedFile(b'{"not a webextension": true}')
+ with errors.context("manifest", 25):
+ packager.add("nonwebextension/manifest.json", non_we_manifest)
+
+ self.assertEqual(formatter.log, [])
+
+ with errors.context("dummy", 1):
+ packager.close()
+ self.maxDiff = None
+ # The formatter is expected to reorder the manifest entries so that
+ # chrome entries appear before the others.
+ self.assertEqual(
+ formatter.log,
+ [
+ (("dummy", 1), "add_base", "", False),
+ (("dummy", 1), "add_base", "addon", True),
+ (("dummy", 1), "add_base", "addon10", "unpacked"),
+ (("dummy", 1), "add_base", "addon11", True),
+ (("dummy", 1), "add_base", "addon2", "unpacked"),
+ (("dummy", 1), "add_base", "addon3", "unpacked"),
+ (("dummy", 1), "add_base", "addon4", "unpacked"),
+ (("dummy", 1), "add_base", "addon5", True),
+ (("dummy", 1), "add_base", "addon6", "unpacked"),
+ (("dummy", 1), "add_base", "addon7", True),
+ (("dummy", 1), "add_base", "addon8", "unpacked"),
+ (("dummy", 1), "add_base", "addon9", True),
+ (("dummy", 1), "add_base", "hybrid", True),
+ (("dummy", 1), "add_base", "hybrid2", True),
+ (("dummy", 1), "add_base", "qux", False),
+ (("dummy", 1), "add_base", "webextension", True),
+ (
+ (os.path.join(curdir, "foo", "bar.manifest"), 2),
+ "add_manifest",
+ ManifestContent("foo", "bar", "bar/"),
+ ),
+ (
+ (os.path.join(curdir, "foo", "bar.manifest"), 1),
+ "add_manifest",
+ ManifestResource("foo", "bar", "bar/"),
+ ),
+ (
+ ("bar/baz.manifest", 1),
+ "add_manifest",
+ ManifestResource("bar", "baz", "baz/"),
+ ),
+ (
+ ("qux/qux.manifest", 1),
+ "add_manifest",
+ ManifestResource("qux", "qux", "qux/"),
+ ),
+ (
+ ("qux/qux.manifest", 2),
+ "add_manifest",
+ ManifestBinaryComponent("qux", "qux.so"),
+ ),
+ (("manifest", 4), "add_interfaces", "foo/bar.xpt", bar_xpt),
+ (("manifest", 7), "add_interfaces", "foo/qux.xpt", qux_xpt),
+ (
+ (os.path.join(curdir, "addon", "chrome.manifest"), 1),
+ "add_manifest",
+ ManifestResource("addon", "hoge", "hoge/"),
+ ),
+ (
+ ("addon2/chrome.manifest", 1),
+ "add_manifest",
+ ManifestBinaryComponent("addon2", "addon2.so"),
+ ),
+ (
+ ("addon3/components/components.manifest", 1),
+ "add_manifest",
+ ManifestBinaryComponent("addon3/components", "addon3.so"),
+ ),
+ (("manifest", 5), "add", "foo/bar/foo.html", foo_html),
+ (("manifest", 5), "add", "foo/bar/bar.html", bar_html),
+ (("manifest", 9), "add", "addon/install.rdf", install_rdf),
+ (("manifest", 10), "add", "addon2/install.rdf", install_rdf),
+ (("manifest", 11), "add", "addon3/install.rdf", install_rdf),
+ (("manifest", 12), "add", "addon4/install.rdf", install_rdf_addon4),
+ (("manifest", 13), "add", "addon5/install.rdf", install_rdf_addon5),
+ (("manifest", 14), "add", "addon6/install.rdf", install_rdf_addon6),
+ (("manifest", 15), "add", "addon7/install.rdf", install_rdf_addon7),
+ (("manifest", 16), "add", "addon8/install.rdf", install_rdf_addon8),
+ (("manifest", 17), "add", "addon9/install.rdf", install_rdf_addon9),
+ (("manifest", 18), "add", "addon10/install.rdf", install_rdf_addon10),
+ (("manifest", 19), "add", "addon11/install.rdf", install_rdf_addon11),
+ (("manifest", 20), "add", "hybrid/install.rdf", install_rdf),
+ (
+ ("manifest", 21),
+ "add",
+ "hybrid/webextension/manifest.json",
+ we_manifest,
+ ),
+ (
+ ("manifest", 22),
+ "add",
+ "hybrid2/webextension/manifest.json",
+ we_manifest,
+ ),
+ (("manifest", 23), "add", "hybrid2/install.rdf", install_rdf),
+ (("manifest", 24), "add", "webextension/manifest.json", we_manifest),
+ (
+ ("manifest", 25),
+ "add",
+ "nonwebextension/manifest.json",
+ non_we_manifest,
+ ),
+ ],
+ )
+
+ self.assertEqual(
+ packager.get_bases(),
+ set(
+ [
+ "",
+ "addon",
+ "addon2",
+ "addon3",
+ "addon4",
+ "addon5",
+ "addon6",
+ "addon7",
+ "addon8",
+ "addon9",
+ "addon10",
+ "addon11",
+ "qux",
+ "hybrid",
+ "hybrid2",
+ "webextension",
+ ]
+ ),
+ )
+ self.assertEqual(packager.get_bases(addons=False), set(["", "qux"]))
+
+    def test_simple_packager_manifest_consistency(self):
+        """Check SimplePackager.close() consistency rules: a manifest living
+        under a sub-base (addon or standalone chrome.manifest directory) must
+        not be included from a manifest outside that sub-base."""
+        formatter = MockFormatter()
+        # bar/ is detected as an addon because of install.rdf, but top-level
+        # includes a manifest inside bar/.
+        packager = SimplePackager(formatter)
+        packager.add(
+            "base.manifest",
+            GeneratedFile(
+                b"manifest foo/bar.manifest\n" b"manifest bar/baz.manifest\n"
+            ),
+        )
+        packager.add("foo/bar.manifest", GeneratedFile(b"resource bar bar"))
+        packager.add("bar/baz.manifest", GeneratedFile(b"resource baz baz"))
+        packager.add("bar/install.rdf", GeneratedFile(b""))
+
+        # Closing the packager is where cross-base inclusion is validated.
+        with self.assertRaises(ErrorMessage) as e:
+            packager.close()
+
+        self.assertEqual(
+            str(e.exception),
+            'error: "bar/baz.manifest" is included from "base.manifest", '
+            'which is outside "bar"',
+        )
+
+        # bar/ is detected as a separate base because of chrome.manifest that
+        # is included nowhere, but top-level includes another manifest inside
+        # bar/.
+        packager = SimplePackager(formatter)
+        packager.add(
+            "base.manifest",
+            GeneratedFile(
+                b"manifest foo/bar.manifest\n" b"manifest bar/baz.manifest\n"
+            ),
+        )
+        packager.add("foo/bar.manifest", GeneratedFile(b"resource bar bar"))
+        packager.add("bar/baz.manifest", GeneratedFile(b"resource baz baz"))
+        packager.add("bar/chrome.manifest", GeneratedFile(b"resource baz baz"))
+
+        with self.assertRaises(ErrorMessage) as e:
+            packager.close()
+
+        self.assertEqual(
+            str(e.exception),
+            'error: "bar/baz.manifest" is included from "base.manifest", '
+            'which is outside "bar"',
+        )
+
+        # bar/ is detected as a separate base because of chrome.manifest that
+        # is included nowhere, but chrome.manifest includes baz.manifest from
+        # the same directory. This shouldn't error out.
+        packager = SimplePackager(formatter)
+        packager.add("base.manifest", GeneratedFile(b"manifest foo/bar.manifest\n"))
+        packager.add("foo/bar.manifest", GeneratedFile(b"resource bar bar"))
+        packager.add("bar/baz.manifest", GeneratedFile(b"resource baz baz"))
+        packager.add("bar/chrome.manifest", GeneratedFile(b"manifest baz.manifest"))
+        packager.close()
+
+
+class TestSimpleManifestSink(unittest.TestCase):
+    """Tests for SimpleManifestSink: files found through a finder are routed
+    to a formatter, with per-component destdir prefixes applied."""
+
+    def test_simple_manifest_parser(self):
+        """Add components with glob patterns, then verify both the exact
+        formatter call log and the order of finder queries."""
+        formatter = MockFormatter()
+        foobar = GeneratedFile(b"foobar")
+        foobaz = GeneratedFile(b"foobaz")
+        fooqux = GeneratedFile(b"fooqux")
+        foozot = GeneratedFile(b"foozot")
+        finder = MockFinder(
+            {
+                "bin/foo/bar": foobar,
+                "bin/foo/baz": foobaz,
+                "bin/foo/qux": fooqux,
+                "bin/foo/zot": foozot,
+                "bin/foo/chrome.manifest": GeneratedFile(b"resource foo foo/"),
+                "bin/chrome.manifest": GeneratedFile(b"manifest foo/chrome.manifest"),
+            }
+        )
+        parser = SimpleManifestSink(finder, formatter)
+        component0 = Component("component0")
+        component1 = Component("component1")
+        # component2 carries a destdir, so its files end up under destdir/.
+        component2 = Component("component2", destdir="destdir")
+        parser.add(component0, "bin/foo/b*")
+        parser.add(component1, "bin/foo/qux")
+        parser.add(component1, "bin/foo/chrome.manifest")
+        parser.add(component2, "bin/foo/zot")
+        # NOTE(review): this add raises — presumably because "bin/bar"
+        # matches nothing in the finder; confirm against SimpleManifestSink.
+        self.assertRaises(ErrorMessage, parser.add, "component1", "bin/bar")
+
+        # Nothing reaches the formatter until close().
+        self.assertEqual(formatter.log, [])
+        parser.close()
+        self.assertEqual(
+            formatter.log,
+            [
+                (None, "add_base", "", False),
+                (
+                    ("foo/chrome.manifest", 1),
+                    "add_manifest",
+                    ManifestResource("foo", "foo", "foo/"),
+                ),
+                (None, "add", "foo/bar", foobar),
+                (None, "add", "foo/baz", foobaz),
+                (None, "add", "foo/qux", fooqux),
+                (None, "add", "destdir/foo/zot", foozot),
+            ],
+        )
+
+        # The finder was queried once per add() call, in order, plus the
+        # top-level chrome.manifest lookup done by close().
+        self.assertEqual(
+            finder.log,
+            [
+                "bin/foo/b*",
+                "bin/foo/qux",
+                "bin/foo/chrome.manifest",
+                "bin/foo/zot",
+                "bin/bar",
+                "bin/chrome.manifest",
+            ],
+        )
+
+
+class TestCallDeque(unittest.TestCase):
+ def test_call_deque(self):
+ class Logger(object):
+ def __init__(self):
+ self._log = []
+
+ def log(self, str):
+ self._log.append(str)
+
+ @staticmethod
+ def staticlog(logger, str):
+ logger.log(str)
+
+ def do_log(logger, str):
+ logger.log(str)
+
+ logger = Logger()
+ d = CallDeque()
+ d.append(logger.log, "foo")
+ d.append(logger.log, "bar")
+ d.append(logger.staticlog, logger, "baz")
+ d.append(do_log, logger, "qux")
+ self.assertEqual(logger._log, [])
+ d.execute()
+ self.assertEqual(logger._log, ["foo", "bar", "baz", "qux"])
+
+
+class TestComponent(unittest.TestCase):
+    """Tests for Component parsing: splitting a component line into a name
+    and quoted key="value" options, and building Component instances."""
+
+    def do_split(self, string, name, options):
+        # Helper: assert _split_component_and_options(string) yields
+        # exactly (name, options).
+        n, o = Component._split_component_and_options(string)
+        self.assertEqual(name, n)
+        self.assertEqual(options, o)
+
+    def test_component_split_component_and_options(self):
+        """Valid inputs: surrounding whitespace is trimmed, option values keep
+        their quoted content (including spaces and empty strings)."""
+        self.do_split("component", "component", {})
+        self.do_split("trailingspace ", "trailingspace", {})
+        self.do_split(" leadingspace", "leadingspace", {})
+        self.do_split(" trim ", "trim", {})
+        self.do_split(' trim key="value"', "trim", {"key": "value"})
+        self.do_split(' trim empty=""', "trim", {"empty": ""})
+        self.do_split(' trim space=" "', "trim", {"space": " "})
+        self.do_split(
+            'component key="value" key2="second" ',
+            "component",
+            {"key": "value", "key2": "second"},
+        )
+        self.do_split(
+            'trim key=" value with spaces " key2="spaces again"',
+            "trim",
+            {"key": " value with spaces ", "key2": "spaces again"},
+        )
+
+    def do_split_error(self, string):
+        # Helper: assert the string is rejected with ValueError.
+        self.assertRaises(ValueError, Component._split_component_and_options, string)
+
+    def test_component_split_component_and_options_errors(self):
+        """Malformed inputs: stray quotes or '=' in the name, unterminated or
+        unquoted option values, and bare tokens between options."""
+        self.do_split_error('"component')
+        self.do_split_error('comp"onent')
+        self.do_split_error('component"')
+        self.do_split_error('"component"')
+        self.do_split_error("=component")
+        self.do_split_error("comp=onent")
+        self.do_split_error("component=")
+        self.do_split_error('key="val"')
+        self.do_split_error("component key=")
+        self.do_split_error('component key="val')
+        self.do_split_error('component key=val"')
+        self.do_split_error('component key="val" x')
+        self.do_split_error('component x key="val"')
+        self.do_split_error('component key1="val" x key2="val"')
+
+    def do_from_string(self, string, name, destdir=""):
+        # Helper: assert Component.from_string(string) produces the expected
+        # name and destdir.
+        component = Component.from_string(string)
+        self.assertEqual(name, component.name)
+        self.assertEqual(destdir, component.destdir)
+
+    def test_component_from_string(self):
+        """from_string accepts an optional destdir option; empty strings,
+        valueless options and unknown options raise ErrorMessage."""
+        self.do_from_string("component", "component")
+        self.do_from_string("component-with-hyphen", "component-with-hyphen")
+        self.do_from_string('component destdir="foo/bar"', "component", "foo/bar")
+        self.do_from_string('component destdir="bar spc"', "component", "bar spc")
+        self.assertRaises(ErrorMessage, Component.from_string, "")
+        self.assertRaises(ErrorMessage, Component.from_string, "component novalue=")
+        self.assertRaises(
+            ErrorMessage, Component.from_string, "component badoption=badvalue"
+        )
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_packager_formats.py b/python/mozbuild/mozpack/test/test_packager_formats.py
new file mode 100644
index 0000000000..b09971a102
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_packager_formats.py
@@ -0,0 +1,537 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+from itertools import chain
+
+import mozunit
+import six
+
+import mozpack.path as mozpath
+from mozpack.chrome.manifest import (
+ ManifestBinaryComponent,
+ ManifestComponent,
+ ManifestContent,
+ ManifestLocale,
+ ManifestResource,
+ ManifestSkin,
+)
+from mozpack.copier import FileRegistry
+from mozpack.errors import ErrorMessage
+from mozpack.files import GeneratedFile, ManifestFile
+from mozpack.packager.formats import FlatFormatter, JarFormatter, OmniJarFormatter
+from mozpack.test.test_files import bar_xpt, foo2_xpt, foo_xpt
+from test_errors import TestErrors
+
+# Shared package description used by all formatter tests: the bases (and
+# whether each is an addon, a packed addon, or "unpacked"), the chrome
+# manifest entries, and the files to put into the package.
+CONTENTS = {
+    "bases": {
+        # base_path: is_addon?
+        "": False,
+        "app": False,
+        "addon0": "unpacked",
+        "addon1": True,
+        "app/chrome/addons/addon2": True,
+    },
+    "manifests": [
+        ManifestContent("chrome/f", "oo", "oo/"),
+        ManifestContent("chrome/f", "bar", "oo/bar/"),
+        ManifestResource("chrome/f", "foo", "resource://bar/"),
+        ManifestBinaryComponent("components", "foo.so"),
+        ManifestContent("app/chrome", "content", "foo/"),
+        ManifestComponent("app/components", "{foo-id}", "foo.js"),
+        ManifestContent("addon0/chrome", "addon0", "foo/bar/"),
+        ManifestContent("addon1/chrome", "addon1", "foo/bar/"),
+        ManifestContent("app/chrome/addons/addon2/chrome", "addon2", "foo/bar/"),
+    ],
+    "files": {
+        "chrome/f/oo/bar/baz": GeneratedFile(b"foobarbaz"),
+        "chrome/f/oo/baz": GeneratedFile(b"foobaz"),
+        "chrome/f/oo/qux": GeneratedFile(b"fooqux"),
+        "components/foo.so": GeneratedFile(b"foo.so"),
+        "components/foo.xpt": foo_xpt,
+        "components/bar.xpt": bar_xpt,
+        "foo": GeneratedFile(b"foo"),
+        "app/chrome/foo/foo": GeneratedFile(b"appfoo"),
+        "app/components/foo.js": GeneratedFile(b"foo.js"),
+        "addon0/chrome/foo/bar/baz": GeneratedFile(b"foobarbaz"),
+        "addon0/components/foo.xpt": foo2_xpt,
+        "addon0/components/bar.xpt": bar_xpt,
+        "addon1/chrome/foo/bar/baz": GeneratedFile(b"foobarbaz"),
+        "addon1/components/foo.xpt": foo2_xpt,
+        "addon1/components/bar.xpt": bar_xpt,
+        "app/chrome/addons/addon2/chrome/foo/bar/baz": GeneratedFile(b"foobarbaz"),
+        "app/chrome/addons/addon2/components/foo.xpt": foo2_xpt,
+        "app/chrome/addons/addon2/components/bar.xpt": bar_xpt,
+    },
+}
+
+# Shorthand used by the RESULT_* expectation tables below.
+FILES = CONTENTS["files"]
+
+# Expected output of FlatFormatter for CONTENTS: every file at its flat path,
+# with generated chrome.manifest / components.manifest entries per directory.
+RESULT_FLAT = {
+    "chrome.manifest": [
+        "manifest chrome/chrome.manifest",
+        "manifest components/components.manifest",
+    ],
+    "chrome/chrome.manifest": [
+        "manifest f/f.manifest",
+    ],
+    "chrome/f/f.manifest": [
+        "content oo oo/",
+        "content bar oo/bar/",
+        "resource foo resource://bar/",
+    ],
+    "chrome/f/oo/bar/baz": FILES["chrome/f/oo/bar/baz"],
+    "chrome/f/oo/baz": FILES["chrome/f/oo/baz"],
+    "chrome/f/oo/qux": FILES["chrome/f/oo/qux"],
+    "components/components.manifest": [
+        "binary-component foo.so",
+        "interfaces bar.xpt",
+        "interfaces foo.xpt",
+    ],
+    "components/foo.so": FILES["components/foo.so"],
+    "components/foo.xpt": foo_xpt,
+    "components/bar.xpt": bar_xpt,
+    "foo": FILES["foo"],
+    "app/chrome.manifest": [
+        "manifest chrome/chrome.manifest",
+        "manifest components/components.manifest",
+    ],
+    "app/chrome/chrome.manifest": [
+        "content content foo/",
+    ],
+    "app/chrome/foo/foo": FILES["app/chrome/foo/foo"],
+    "app/components/components.manifest": [
+        "component {foo-id} foo.js",
+    ],
+    "app/components/foo.js": FILES["app/components/foo.js"],
+}
+
+# Each addon base gets the same manifest/file layout, rooted at its own path.
+for addon in ("addon0", "addon1", "app/chrome/addons/addon2"):
+    RESULT_FLAT.update(
+        {
+            mozpath.join(addon, p): f
+            for p, f in six.iteritems(
+                {
+                    "chrome.manifest": [
+                        "manifest chrome/chrome.manifest",
+                        "manifest components/components.manifest",
+                    ],
+                    "chrome/chrome.manifest": [
+                        "content %s foo/bar/" % mozpath.basename(addon),
+                    ],
+                    "chrome/foo/bar/baz": FILES[
+                        mozpath.join(addon, "chrome/foo/bar/baz")
+                    ],
+                    "components/components.manifest": [
+                        "interfaces bar.xpt",
+                        "interfaces foo.xpt",
+                    ],
+                    "components/bar.xpt": bar_xpt,
+                    "components/foo.xpt": foo2_xpt,
+                }
+            )
+        }
+    )
+
+# Expected output of JarFormatter: starts from the flat result for the paths
+# that stay outside jars...
+RESULT_JAR = {
+    p: RESULT_FLAT[p]
+    for p in (
+        "chrome.manifest",
+        "chrome/chrome.manifest",
+        "components/components.manifest",
+        "components/foo.so",
+        "components/foo.xpt",
+        "components/bar.xpt",
+        "foo",
+        "app/chrome.manifest",
+        "app/components/components.manifest",
+        "app/components/foo.js",
+        "addon0/chrome.manifest",
+        "addon0/components/components.manifest",
+        "addon0/components/foo.xpt",
+        "addon0/components/bar.xpt",
+    )
+}
+
+# ...then chrome content moves into .jar files (with jar: manifest URLs), and
+# packed addons (addon1, addon2) become whole .xpi archives.
+RESULT_JAR.update(
+    {
+        "chrome/f/f.manifest": [
+            "content oo jar:oo.jar!/",
+            "content bar jar:oo.jar!/bar/",
+            "resource foo resource://bar/",
+        ],
+        "chrome/f/oo.jar": {
+            "bar/baz": FILES["chrome/f/oo/bar/baz"],
+            "baz": FILES["chrome/f/oo/baz"],
+            "qux": FILES["chrome/f/oo/qux"],
+        },
+        "app/chrome/chrome.manifest": [
+            "content content jar:foo.jar!/",
+        ],
+        "app/chrome/foo.jar": {
+            "foo": FILES["app/chrome/foo/foo"],
+        },
+        "addon0/chrome/chrome.manifest": [
+            "content addon0 jar:foo.jar!/bar/",
+        ],
+        "addon0/chrome/foo.jar": {
+            "bar/baz": FILES["addon0/chrome/foo/bar/baz"],
+        },
+        "addon1.xpi": {
+            mozpath.relpath(p, "addon1"): f
+            for p, f in six.iteritems(RESULT_FLAT)
+            if p.startswith("addon1/")
+        },
+        "app/chrome/addons/addon2.xpi": {
+            mozpath.relpath(p, "app/chrome/addons/addon2"): f
+            for p, f in six.iteritems(RESULT_FLAT)
+            if p.startswith("app/chrome/addons/addon2/")
+        },
+    }
+)
+
+# Expected output of OmniJarFormatter: binary components and non-resource
+# files stay outside the omnijar...
+RESULT_OMNIJAR = {
+    p: RESULT_FLAT[p]
+    for p in (
+        "components/foo.so",
+        "foo",
+    )
+}
+
+# ...addons are packaged the same way as with JarFormatter...
+RESULT_OMNIJAR.update({p: RESULT_JAR[p] for p in RESULT_JAR if p.startswith("addon")})
+
+# ...and the resource files of each base ("" and "app") move into its
+# respective omni.foo archive.
+RESULT_OMNIJAR.update(
+    {
+        "omni.foo": {
+            "components/components.manifest": [
+                "interfaces bar.xpt",
+                "interfaces foo.xpt",
+            ],
+        },
+        "chrome.manifest": [
+            "manifest components/components.manifest",
+        ],
+        "components/components.manifest": [
+            "binary-component foo.so",
+        ],
+        "app/omni.foo": {
+            p: RESULT_FLAT["app/" + p]
+            for p in chain(
+                (
+                    "chrome.manifest",
+                    "chrome/chrome.manifest",
+                    "chrome/foo/foo",
+                    "components/components.manifest",
+                    "components/foo.js",
+                ),
+                (
+                    mozpath.relpath(p, "app")
+                    for p in six.iterkeys(RESULT_FLAT)
+                    if p.startswith("app/chrome/addons/addon2/")
+                ),
+            )
+        },
+    }
+)
+
+RESULT_OMNIJAR["omni.foo"].update(
+    {
+        p: RESULT_FLAT[p]
+        for p in (
+            "chrome.manifest",
+            "chrome/chrome.manifest",
+            "chrome/f/f.manifest",
+            "chrome/f/oo/bar/baz",
+            "chrome/f/oo/baz",
+            "chrome/f/oo/qux",
+            "components/foo.xpt",
+            "components/bar.xpt",
+        )
+    }
+)
+
+# Same expectations when the omnijar lives at a sub-path (bar/omni.foo).
+RESULT_OMNIJAR_WITH_SUBPATH = {
+    k.replace("omni.foo", "bar/omni.foo"): v for k, v in RESULT_OMNIJAR.items()
+}
+
+# CONTENTS with everything (bases, manifests, files) re-rooted under
+# base/root, plus one extra file outside that root.
+CONTENTS_WITH_BASE = {
+    "bases": {
+        mozpath.join("base/root", b) if b else "base/root": a
+        for b, a in six.iteritems(CONTENTS["bases"])
+    },
+    "manifests": [
+        m.move(mozpath.join("base/root", m.base)) for m in CONTENTS["manifests"]
+    ],
+    "files": {
+        mozpath.join("base/root", p): f for p, f in six.iteritems(CONTENTS["files"])
+    },
+}
+
+# A file that is not under base/root, to check it is left alone.
+EXTRA_CONTENTS = {
+    "extra/file": GeneratedFile(b"extra file"),
+}
+
+CONTENTS_WITH_BASE["files"].update(EXTRA_CONTENTS)
+
+
+def result_with_base(results):
+ result = {mozpath.join("base/root", p): v for p, v in six.iteritems(results)}
+ result.update(EXTRA_CONTENTS)
+ return result
+
+
+# Re-rooted expectations matching CONTENTS_WITH_BASE for each formatter.
+RESULT_FLAT_WITH_BASE = result_with_base(RESULT_FLAT)
+RESULT_JAR_WITH_BASE = result_with_base(RESULT_JAR)
+RESULT_OMNIJAR_WITH_BASE = result_with_base(RESULT_OMNIJAR)
+
+
+def fill_formatter(formatter, contents):
+ for base, is_addon in sorted(contents["bases"].items()):
+ formatter.add_base(base, is_addon)
+
+ for manifest in contents["manifests"]:
+ formatter.add_manifest(manifest)
+
+ for k, v in sorted(six.iteritems(contents["files"])):
+ if k.endswith(".xpt"):
+ formatter.add_interfaces(k, v)
+ else:
+ formatter.add(k, v)
+
+
+def get_contents(registry, read_all=False, mode="rt"):
+    """Return a dict describing *registry*'s contents.
+
+    Nested FileRegistry values (e.g. jar contents) are recursed into.
+    ManifestFile entries — and, with read_all=True, every entry — are read:
+    as raw bytes when "b" is in *mode*, otherwise as a list of text lines.
+    Remaining entries are kept as the file objects themselves.
+    """
+    result = {}
+    for k, v in registry:
+        if isinstance(v, FileRegistry):
+            # NOTE(review): the recursive call does not forward read_all/mode,
+            # so nested registries are always read with the defaults — confirm
+            # this is intended.
+            result[k] = get_contents(v)
+        elif isinstance(v, ManifestFile) or read_all:
+            if "b" in mode:
+                result[k] = v.open().read()
+            else:
+                result[k] = six.ensure_text(v.open().read()).splitlines()
+        else:
+            result[k] = v
+    return result
+
+
+class TestFormatters(TestErrors, unittest.TestCase):
+    """Tests for the Flat/Jar/OmniJar packager formatters, comparing their
+    registry output against the RESULT_* expectation tables above.
+    TestErrors provides get_output() for checking accumulated warnings."""
+
+    maxDiff = None
+
+    def test_bases(self):
+        """_get_base splits a path into its registered base and the remainder."""
+        formatter = FlatFormatter(FileRegistry())
+        formatter.add_base("")
+        formatter.add_base("addon0", addon=True)
+        formatter.add_base("browser")
+        self.assertEqual(formatter._get_base("platform.ini"), ("", "platform.ini"))
+        self.assertEqual(
+            formatter._get_base("browser/application.ini"),
+            ("browser", "application.ini"),
+        )
+        self.assertEqual(
+            formatter._get_base("addon0/install.rdf"), ("addon0", "install.rdf")
+        )
+
+    def do_test_contents(self, formatter, contents):
+        # Every packaged file must be reported as contained by the formatter.
+        for f in contents["files"]:
+            # .xpt files are merged, so skip them.
+            if not f.endswith(".xpt"):
+                self.assertTrue(formatter.contains(f))
+
+    def test_flat_formatter(self):
+        registry = FileRegistry()
+        formatter = FlatFormatter(registry)
+
+        fill_formatter(formatter, CONTENTS)
+        self.assertEqual(get_contents(registry), RESULT_FLAT)
+        self.do_test_contents(formatter, CONTENTS)
+
+    def test_jar_formatter(self):
+        registry = FileRegistry()
+        formatter = JarFormatter(registry)
+
+        fill_formatter(formatter, CONTENTS)
+        self.assertEqual(get_contents(registry), RESULT_JAR)
+        self.do_test_contents(formatter, CONTENTS)
+
+    def test_omnijar_formatter(self):
+        registry = FileRegistry()
+        formatter = OmniJarFormatter(registry, "omni.foo")
+
+        fill_formatter(formatter, CONTENTS)
+        self.assertEqual(get_contents(registry), RESULT_OMNIJAR)
+        self.do_test_contents(formatter, CONTENTS)
+
+    def test_flat_formatter_with_base(self):
+        registry = FileRegistry()
+        formatter = FlatFormatter(registry)
+
+        fill_formatter(formatter, CONTENTS_WITH_BASE)
+        self.assertEqual(get_contents(registry), RESULT_FLAT_WITH_BASE)
+        self.do_test_contents(formatter, CONTENTS_WITH_BASE)
+
+    def test_jar_formatter_with_base(self):
+        registry = FileRegistry()
+        formatter = JarFormatter(registry)
+
+        fill_formatter(formatter, CONTENTS_WITH_BASE)
+        self.assertEqual(get_contents(registry), RESULT_JAR_WITH_BASE)
+        self.do_test_contents(formatter, CONTENTS_WITH_BASE)
+
+    def test_omnijar_formatter_with_base(self):
+        registry = FileRegistry()
+        formatter = OmniJarFormatter(registry, "omni.foo")
+
+        fill_formatter(formatter, CONTENTS_WITH_BASE)
+        self.assertEqual(get_contents(registry), RESULT_OMNIJAR_WITH_BASE)
+        self.do_test_contents(formatter, CONTENTS_WITH_BASE)
+
+    def test_omnijar_formatter_with_subpath(self):
+        registry = FileRegistry()
+        formatter = OmniJarFormatter(registry, "bar/omni.foo")
+
+        fill_formatter(formatter, CONTENTS)
+        self.assertEqual(get_contents(registry), RESULT_OMNIJAR_WITH_SUBPATH)
+        self.do_test_contents(formatter, CONTENTS)
+
+    def test_omnijar_is_resource(self):
+        def is_resource(base, path):
+            # Returns True when the formatter routed the file into the
+            # omnijar (i.e. treated it as a resource), False when it stayed
+            # outside, given the non_resources patterns below.
+            registry = FileRegistry()
+            f = OmniJarFormatter(
+                registry,
+                "omni.foo",
+                non_resources=[
+                    "defaults/messenger/mailViews.dat",
+                    "defaults/foo/*",
+                    "*/dummy",
+                ],
+            )
+            f.add_base("")
+            f.add_base("app")
+            f.add(mozpath.join(base, path), GeneratedFile(b""))
+            if f.copier.contains(mozpath.join(base, path)):
+                return False
+            self.assertTrue(f.copier.contains(mozpath.join(base, "omni.foo")))
+            self.assertTrue(f.copier[mozpath.join(base, "omni.foo")].contains(path))
+            return True
+
+        for base in ["", "app/"]:
+            self.assertTrue(is_resource(base, "chrome"))
+            self.assertTrue(is_resource(base, "chrome/foo/bar/baz.properties"))
+            self.assertFalse(is_resource(base, "chrome/icons/foo.png"))
+            self.assertTrue(is_resource(base, "components/foo.js"))
+            self.assertFalse(is_resource(base, "components/foo.so"))
+            self.assertTrue(is_resource(base, "res/foo.css"))
+            self.assertFalse(is_resource(base, "res/cursors/foo.png"))
+            self.assertFalse(is_resource(base, "res/MainMenu.nib/foo"))
+            self.assertTrue(is_resource(base, "defaults/pref/foo.js"))
+            self.assertFalse(is_resource(base, "defaults/pref/channel-prefs.js"))
+            self.assertTrue(is_resource(base, "defaults/preferences/foo.js"))
+            self.assertFalse(is_resource(base, "defaults/preferences/channel-prefs.js"))
+            self.assertTrue(is_resource(base, "modules/foo.jsm"))
+            self.assertTrue(is_resource(base, "greprefs.js"))
+            self.assertTrue(is_resource(base, "hyphenation/foo"))
+            self.assertTrue(is_resource(base, "update.locale"))
+            self.assertFalse(is_resource(base, "foo"))
+            self.assertFalse(is_resource(base, "foo/bar/greprefs.js"))
+            self.assertTrue(is_resource(base, "defaults/messenger/foo.dat"))
+            self.assertFalse(is_resource(base, "defaults/messenger/mailViews.dat"))
+            self.assertTrue(is_resource(base, "defaults/pref/foo.js"))
+            self.assertFalse(is_resource(base, "defaults/foo/bar.dat"))
+            self.assertFalse(is_resource(base, "defaults/foo/bar/baz.dat"))
+            self.assertTrue(is_resource(base, "chrome/foo/bar/baz/dummy_"))
+            self.assertFalse(is_resource(base, "chrome/foo/bar/baz/dummy"))
+            self.assertTrue(is_resource(base, "chrome/foo/bar/dummy_"))
+            self.assertFalse(is_resource(base, "chrome/foo/bar/dummy"))
+
+    def test_chrome_override(self):
+        """Chrome registration conflict rules: flag-qualified entries may
+        specialize an existing name, but re-registering the same name (with
+        the same or no flags) is an error, and exact duplicates only warn."""
+        registry = FileRegistry()
+        f = FlatFormatter(registry)
+        f.add_base("")
+        f.add_manifest(ManifestContent("chrome", "foo", "foo/unix"))
+        # A more specific entry for a given chrome name can override a more
+        # generic one.
+        f.add_manifest(ManifestContent("chrome", "foo", "foo/win", "os=WINNT"))
+        f.add_manifest(ManifestContent("chrome", "foo", "foo/osx", "os=Darwin"))
+
+        # Chrome with the same name overrides the previous registration.
+        with self.assertRaises(ErrorMessage) as e:
+            f.add_manifest(ManifestContent("chrome", "foo", "foo/"))
+
+        self.assertEqual(
+            str(e.exception),
+            'error: "content foo foo/" overrides ' '"content foo foo/unix"',
+        )
+
+        # Chrome with the same name and same flags overrides the previous
+        # registration.
+        with self.assertRaises(ErrorMessage) as e:
+            f.add_manifest(ManifestContent("chrome", "foo", "foo/", "os=WINNT"))
+
+        self.assertEqual(
+            str(e.exception),
+            'error: "content foo foo/ os=WINNT" overrides '
+            '"content foo foo/win os=WINNT"',
+        )
+
+        # We may start with the more specific entry first
+        f.add_manifest(ManifestContent("chrome", "bar", "bar/win", "os=WINNT"))
+        # Then adding a more generic one overrides it.
+        with self.assertRaises(ErrorMessage) as e:
+            f.add_manifest(ManifestContent("chrome", "bar", "bar/unix"))
+
+        self.assertEqual(
+            str(e.exception),
+            'error: "content bar bar/unix" overrides ' '"content bar bar/win os=WINNT"',
+        )
+
+        # Adding something more specific still works.
+        f.add_manifest(
+            ManifestContent("chrome", "bar", "bar/win", "os=WINNT osversion>=7.0")
+        )
+
+        # Variations of skin/locales are allowed.
+        f.add_manifest(
+            ManifestSkin("chrome", "foo", "classic/1.0", "foo/skin/classic/")
+        )
+        f.add_manifest(ManifestSkin("chrome", "foo", "modern/1.0", "foo/skin/modern/"))
+
+        f.add_manifest(ManifestLocale("chrome", "foo", "en-US", "foo/locale/en-US/"))
+        f.add_manifest(ManifestLocale("chrome", "foo", "ja-JP", "foo/locale/ja-JP/"))
+
+        # But same-skin/locale still error out.
+        with self.assertRaises(ErrorMessage) as e:
+            f.add_manifest(
+                ManifestSkin("chrome", "foo", "classic/1.0", "foo/skin/classic/foo")
+            )
+
+        self.assertEqual(
+            str(e.exception),
+            'error: "skin foo classic/1.0 foo/skin/classic/foo" overrides '
+            '"skin foo classic/1.0 foo/skin/classic/"',
+        )
+
+        with self.assertRaises(ErrorMessage) as e:
+            f.add_manifest(
+                ManifestLocale("chrome", "foo", "en-US", "foo/locale/en-US/foo")
+            )
+
+        self.assertEqual(
+            str(e.exception),
+            'error: "locale foo en-US foo/locale/en-US/foo" overrides '
+            '"locale foo en-US foo/locale/en-US/"',
+        )
+
+        # Duplicating existing manifest entries is not an error.
+        f.add_manifest(ManifestContent("chrome", "foo", "foo/unix"))
+
+        self.assertEqual(
+            self.get_output(),
+            [
+                'warning: "content foo foo/unix" is duplicated. Skipping.',
+            ],
+        )
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_packager_l10n.py b/python/mozbuild/mozpack/test/test_packager_l10n.py
new file mode 100644
index 0000000000..0714ae3252
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_packager_l10n.py
@@ -0,0 +1,153 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+
+import mozunit
+import six
+
+from mozpack.chrome.manifest import Manifest, ManifestContent, ManifestLocale
+from mozpack.copier import FileRegistry
+from mozpack.files import GeneratedFile, ManifestFile
+from mozpack.packager import l10n
+from test_packager import MockFinder
+
+
+class TestL10NRepack(unittest.TestCase):
+    """Test for l10n._repack: replacing the localized (en-US) parts of a
+    packaged application with files from an x-test locale package."""
+
+    def test_l10n_repack(self):
+        """Repack an app finder against an l10n finder and verify that locale
+        entries, dictionaries and matching extra paths come from l10n while
+        non-localized content is preserved."""
+        foo = GeneratedFile(b"foo")
+        foobar = GeneratedFile(b"foobar")
+        qux = GeneratedFile(b"qux")
+        bar = GeneratedFile(b"bar")
+        baz = GeneratedFile(b"baz")
+        dict_aa = GeneratedFile(b"dict_aa")
+        dict_bb = GeneratedFile(b"dict_bb")
+        dict_cc = GeneratedFile(b"dict_cc")
+        barbaz = GeneratedFile(b"barbaz")
+        lst = GeneratedFile(b"foo\nbar")
+        # The original application package, localized in en-US.
+        app_finder = MockFinder(
+            {
+                "bar/foo": foo,
+                "chrome/foo/foobar": foobar,
+                "chrome/qux/qux.properties": qux,
+                "chrome/qux/baz/baz.properties": baz,
+                "chrome/chrome.manifest": ManifestFile(
+                    "chrome",
+                    [
+                        ManifestContent("chrome", "foo", "foo/"),
+                        ManifestLocale("chrome", "qux", "en-US", "qux/"),
+                    ],
+                ),
+                "chrome.manifest": ManifestFile(
+                    "", [Manifest("", "chrome/chrome.manifest")]
+                ),
+                "dict/aa": dict_aa,
+                "app/chrome/bar/barbaz.dtd": barbaz,
+                "app/chrome/chrome.manifest": ManifestFile(
+                    "app/chrome", [ManifestLocale("app/chrome", "bar", "en-US", "bar/")]
+                ),
+                "app/chrome.manifest": ManifestFile(
+                    "app", [Manifest("app", "chrome/chrome.manifest")]
+                ),
+                "app/dict/bb": dict_bb,
+                "app/dict/cc": dict_cc,
+                "app/chrome/bar/search/foo.xml": foo,
+                "app/chrome/bar/search/bar.xml": bar,
+                "app/chrome/bar/search/lst.txt": lst,
+                "META-INF/foo": foo,  # Stripped.
+                "inner/META-INF/foo": foo,  # Not stripped.
+                "app/META-INF/foo": foo,  # Stripped.
+                "app/inner/META-INF/foo": foo,  # Not stripped.
+            }
+        )
+        app_finder.jarlogs = {}
+        app_finder.base = "app"
+        foo_l10n = GeneratedFile(b"foo_l10n")
+        qux_l10n = GeneratedFile(b"qux_l10n")
+        baz_l10n = GeneratedFile(b"baz_l10n")
+        barbaz_l10n = GeneratedFile(b"barbaz_l10n")
+        lst_l10n = GeneratedFile(b"foo\nqux")
+        # The x-test locale package providing replacement files.
+        l10n_finder = MockFinder(
+            {
+                "chrome/qux-l10n/qux.properties": qux_l10n,
+                "chrome/qux-l10n/baz/baz.properties": baz_l10n,
+                "chrome/chrome.manifest": ManifestFile(
+                    "chrome",
+                    [
+                        ManifestLocale("chrome", "qux", "x-test", "qux-l10n/"),
+                    ],
+                ),
+                "chrome.manifest": ManifestFile(
+                    "", [Manifest("", "chrome/chrome.manifest")]
+                ),
+                "dict/bb": dict_bb,
+                "dict/cc": dict_cc,
+                "app/chrome/bar-l10n/barbaz.dtd": barbaz_l10n,
+                "app/chrome/chrome.manifest": ManifestFile(
+                    "app/chrome",
+                    [ManifestLocale("app/chrome", "bar", "x-test", "bar-l10n/")],
+                ),
+                "app/chrome.manifest": ManifestFile(
+                    "app", [Manifest("app", "chrome/chrome.manifest")]
+                ),
+                "app/dict/aa": dict_aa,
+                "app/chrome/bar-l10n/search/foo.xml": foo_l10n,
+                "app/chrome/bar-l10n/search/qux.xml": qux_l10n,
+                "app/chrome/bar-l10n/search/lst.txt": lst_l10n,
+            }
+        )
+        l10n_finder.base = "l10n"
+        copier = FileRegistry()
+        formatter = l10n.FlatFormatter(copier)
+
+        # "dict" and the search xml files are extra localizable paths beyond
+        # what the locale manifest entries cover.
+        l10n._repack(
+            app_finder,
+            l10n_finder,
+            copier,
+            formatter,
+            ["dict", "chrome/**/search/*.xml"],
+        )
+        self.maxDiff = None
+
+        # Expected result: localized pieces swapped for their l10n versions,
+        # top-level META-INF stripped, everything else untouched.
+        repacked = {
+            "bar/foo": foo,
+            "chrome/foo/foobar": foobar,
+            "chrome/qux-l10n/qux.properties": qux_l10n,
+            "chrome/qux-l10n/baz/baz.properties": baz_l10n,
+            "chrome/chrome.manifest": ManifestFile(
+                "chrome",
+                [
+                    ManifestContent("chrome", "foo", "foo/"),
+                    ManifestLocale("chrome", "qux", "x-test", "qux-l10n/"),
+                ],
+            ),
+            "chrome.manifest": ManifestFile(
+                "", [Manifest("", "chrome/chrome.manifest")]
+            ),
+            "dict/bb": dict_bb,
+            "dict/cc": dict_cc,
+            "app/chrome/bar-l10n/barbaz.dtd": barbaz_l10n,
+            "app/chrome/chrome.manifest": ManifestFile(
+                "app/chrome",
+                [ManifestLocale("app/chrome", "bar", "x-test", "bar-l10n/")],
+            ),
+            "app/chrome.manifest": ManifestFile(
+                "app", [Manifest("app", "chrome/chrome.manifest")]
+            ),
+            "app/dict/aa": dict_aa,
+            "app/chrome/bar-l10n/search/foo.xml": foo_l10n,
+            "app/chrome/bar-l10n/search/qux.xml": qux_l10n,
+            "app/chrome/bar-l10n/search/lst.txt": lst_l10n,
+            "inner/META-INF/foo": foo,
+            "app/inner/META-INF/foo": foo,
+        }
+
+        # Compare by content, since ManifestFile instances are regenerated.
+        self.assertEqual(
+            dict((p, f.open().read()) for p, f in copier),
+            dict((p, f.open().read()) for p, f in six.iteritems(repacked)),
+        )
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_packager_unpack.py b/python/mozbuild/mozpack/test/test_packager_unpack.py
new file mode 100644
index 0000000000..57a2d71eda
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_packager_unpack.py
@@ -0,0 +1,67 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mozunit
+
+from mozpack.copier import FileCopier, FileRegistry
+from mozpack.packager.formats import FlatFormatter, JarFormatter, OmniJarFormatter
+from mozpack.packager.unpack import unpack_to_registry
+from mozpack.test.test_files import TestWithTmpDir
+from mozpack.test.test_packager_formats import CONTENTS, fill_formatter, get_contents
+
+
+class TestUnpack(TestWithTmpDir):
+ maxDiff = None
+
+ @staticmethod
+ def _get_copier(cls):
+ copier = FileCopier()
+ formatter = cls(copier)
+ fill_formatter(formatter, CONTENTS)
+ return copier
+
+ @classmethod
+ def setUpClass(cls):
+ cls.contents = get_contents(
+ cls._get_copier(FlatFormatter), read_all=True, mode="rb"
+ )
+
+ def _unpack_test(self, cls):
+ # Format a package with the given formatter class
+ copier = self._get_copier(cls)
+ copier.copy(self.tmpdir)
+
+ # Unpack that package. Its content is expected to match that of a Flat
+ # formatted package.
+ registry = FileRegistry()
+ unpack_to_registry(self.tmpdir, registry, getattr(cls, "OMNIJAR_NAME", None))
+ self.assertEqual(
+ get_contents(registry, read_all=True, mode="rb"), self.contents
+ )
+
+ def test_flat_unpack(self):
+ self._unpack_test(FlatFormatter)
+
+ def test_jar_unpack(self):
+ self._unpack_test(JarFormatter)
+
+ @staticmethod
+ def _omni_foo_formatter(name):
+ class OmniFooFormatter(OmniJarFormatter):
+ OMNIJAR_NAME = name
+
+ def __init__(self, registry):
+ super(OmniFooFormatter, self).__init__(registry, name)
+
+ return OmniFooFormatter
+
+ def test_omnijar_unpack(self):
+ self._unpack_test(self._omni_foo_formatter("omni.foo"))
+
+ def test_omnijar_subpath_unpack(self):
+ self._unpack_test(self._omni_foo_formatter("bar/omni.foo"))
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_path.py b/python/mozbuild/mozpack/test/test_path.py
new file mode 100644
index 0000000000..6c7aeb5400
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_path.py
@@ -0,0 +1,152 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import unittest
+
+import mozunit
+
+from mozpack.path import (
+ basedir,
+ basename,
+ commonprefix,
+ dirname,
+ join,
+ match,
+ normpath,
+ rebase,
+ relpath,
+ split,
+ splitext,
+)
+
+
+class TestPath(unittest.TestCase):
+ SEP = os.sep
+
+ def test_relpath(self):
+ self.assertEqual(relpath("foo", "foo"), "")
+ self.assertEqual(relpath(self.SEP.join(("foo", "bar")), "foo/bar"), "")
+ self.assertEqual(relpath(self.SEP.join(("foo", "bar")), "foo"), "bar")
+ self.assertEqual(
+ relpath(self.SEP.join(("foo", "bar", "baz")), "foo"), "bar/baz"
+ )
+ self.assertEqual(relpath(self.SEP.join(("foo", "bar")), "foo/bar/baz"), "..")
+ self.assertEqual(relpath(self.SEP.join(("foo", "bar")), "foo/baz"), "../bar")
+ self.assertEqual(relpath("foo/", "foo"), "")
+ self.assertEqual(relpath("foo/bar/", "foo"), "bar")
+
+ def test_join(self):
+ self.assertEqual(join("foo", "bar", "baz"), "foo/bar/baz")
+ self.assertEqual(join("foo", "", "bar"), "foo/bar")
+ self.assertEqual(join("", "foo", "bar"), "foo/bar")
+ self.assertEqual(join("", "foo", "/bar"), "/bar")
+
+ def test_normpath(self):
+ self.assertEqual(
+ normpath(self.SEP.join(("foo", "bar", "baz", "..", "qux"))), "foo/bar/qux"
+ )
+
+ def test_dirname(self):
+ self.assertEqual(dirname("foo/bar/baz"), "foo/bar")
+ self.assertEqual(dirname("foo/bar"), "foo")
+ self.assertEqual(dirname("foo"), "")
+ self.assertEqual(dirname("foo/bar/"), "foo/bar")
+
+ def test_commonprefix(self):
+ self.assertEqual(
+ commonprefix(
+ [self.SEP.join(("foo", "bar", "baz")), "foo/qux", "foo/baz/qux"]
+ ),
+ "foo/",
+ )
+ self.assertEqual(
+ commonprefix([self.SEP.join(("foo", "bar", "baz")), "foo/qux", "baz/qux"]),
+ "",
+ )
+
+ def test_basename(self):
+ self.assertEqual(basename("foo/bar/baz"), "baz")
+ self.assertEqual(basename("foo/bar"), "bar")
+ self.assertEqual(basename("foo"), "foo")
+ self.assertEqual(basename("foo/bar/"), "")
+
+ def test_split(self):
+ self.assertEqual(
+ split(self.SEP.join(("foo", "bar", "baz"))), ["foo", "bar", "baz"]
+ )
+
+ def test_splitext(self):
+ self.assertEqual(
+ splitext(self.SEP.join(("foo", "bar", "baz.qux"))), ("foo/bar/baz", ".qux")
+ )
+
+ def test_basedir(self):
+ foobarbaz = self.SEP.join(("foo", "bar", "baz"))
+ self.assertEqual(basedir(foobarbaz, ["foo", "bar", "baz"]), "foo")
+ self.assertEqual(basedir(foobarbaz, ["foo", "foo/bar", "baz"]), "foo/bar")
+ self.assertEqual(basedir(foobarbaz, ["foo/bar", "foo", "baz"]), "foo/bar")
+ self.assertEqual(basedir(foobarbaz, ["foo", "bar", ""]), "foo")
+ self.assertEqual(basedir(foobarbaz, ["bar", "baz", ""]), "")
+
+ def test_match(self):
+ self.assertTrue(match("foo", ""))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/bar"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo"))
+ self.assertTrue(match("foo", "*"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/bar/*"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/bar/*"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/bar/*"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/bar/*"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/*/baz.qux"))
+ self.assertTrue(match("foo/bar/baz.qux", "*/bar/baz.qux"))
+ self.assertTrue(match("foo/bar/baz.qux", "*/*/baz.qux"))
+ self.assertTrue(match("foo/bar/baz.qux", "*/*/*"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/*/*"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/*/*.qux"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/b*/*z.qux"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/b*r/ba*z.qux"))
+ self.assertFalse(match("foo/bar/baz.qux", "foo/b*z/ba*r.qux"))
+ self.assertTrue(match("foo/bar/baz.qux", "**"))
+ self.assertTrue(match("foo/bar/baz.qux", "**/baz.qux"))
+ self.assertTrue(match("foo/bar/baz.qux", "**/bar/baz.qux"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/**/baz.qux"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/**/*.qux"))
+ self.assertTrue(match("foo/bar/baz.qux", "**/foo/bar/baz.qux"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/**/bar/baz.qux"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/**/bar/*.qux"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/**/*.qux"))
+ self.assertTrue(match("foo/bar/baz.qux", "**/*.qux"))
+ self.assertFalse(match("foo/bar/baz.qux", "**.qux"))
+ self.assertFalse(match("foo/bar", "foo/*/bar"))
+ self.assertTrue(match("foo/bar/baz.qux", "foo/**/bar/**"))
+ self.assertFalse(match("foo/nobar/baz.qux", "foo/**/bar/**"))
+ self.assertTrue(match("foo/bar", "foo/**/bar/**"))
+
+ def test_rebase(self):
+ self.assertEqual(rebase("foo", "foo/bar", "bar/baz"), "baz")
+ self.assertEqual(rebase("foo", "foo", "bar/baz"), "bar/baz")
+ self.assertEqual(rebase("foo/bar", "foo", "baz"), "bar/baz")
+
+
+if os.altsep:
+
+ class TestAltPath(TestPath):
+ SEP = os.altsep
+
+ class TestReverseAltPath(TestPath):
+ def setUp(self):
+ sep = os.sep
+ os.sep = os.altsep
+ os.altsep = sep
+
+ def tearDown(self):
+ self.setUp()
+
+ class TestAltReverseAltPath(TestReverseAltPath):
+ SEP = os.altsep
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_pkg.py b/python/mozbuild/mozpack/test/test_pkg.py
new file mode 100644
index 0000000000..f1febbbae0
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_pkg.py
@@ -0,0 +1,138 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from pathlib import Path
+from string import Template
+from unittest.mock import patch
+
+import mozunit
+
+import mozpack.pkg
+from mozpack.pkg import (
+ create_bom,
+ create_payload,
+ create_pkg,
+ get_app_info_plist,
+ get_apple_template,
+ get_relative_glob_list,
+ save_text_file,
+ xar_package_folder,
+)
+from mozpack.test.test_files import TestWithTmpDir
+
+
+class TestPkg(TestWithTmpDir):
+ maxDiff = None
+
+ class MockSubprocessRun:
+ stderr = ""
+ stdout = ""
+ returncode = 0
+
+ def __init__(self, returncode=0):
+ self.returncode = returncode
+
+ def _mk_test_file(self, name, mode=0o777):
+ tool = Path(self.tmpdir) / f"{name}"
+ tool.touch()
+ tool.chmod(mode)
+ return tool
+
+ def test_get_apple_template(self):
+ tmpl = get_apple_template("Distribution.template")
+ assert type(tmpl) == Template
+
+ def test_get_apple_template_not_file(self):
+ with self.assertRaises(Exception):
+ get_apple_template("tmpl-should-not-exist")
+
+ def test_save_text_file(self):
+ content = "Hello"
+ destination = Path(self.tmpdir) / "test_save_text_file"
+ save_text_file(content, destination)
+ with destination.open("r") as file:
+ assert content == file.read()
+
+ def test_get_app_info_plist(self):
+ app_path = Path(self.tmpdir) / "app"
+ (app_path / "Contents").mkdir(parents=True)
+ (app_path / "Contents/Info.plist").touch()
+ data = {"foo": "bar"}
+ with patch.object(mozpack.pkg.plistlib, "load", lambda x: data):
+ assert data == get_app_info_plist(app_path)
+
+ def test_get_app_info_plist_not_file(self):
+ app_path = Path(self.tmpdir) / "app-does-not-exist"
+ with self.assertRaises(Exception):
+ get_app_info_plist(app_path)
+
+ def _mock_payload(self, returncode):
+ def _mock_run(*args, **kwargs):
+ return self.MockSubprocessRun(returncode)
+
+ return _mock_run
+
+ def test_create_payload(self):
+ destination = Path(self.tmpdir) / "mockPayload"
+ with patch.object(mozpack.pkg.subprocess, "run", self._mock_payload(0)):
+ create_payload(destination, Path(self.tmpdir), "cpio")
+
+ def test_create_bom(self):
+ bom_path = Path(self.tmpdir) / "Bom"
+ bom_path.touch()
+ root_path = Path(self.tmpdir)
+ tool_path = Path(self.tmpdir) / "not-really-used-during-test"
+ with patch.object(mozpack.pkg.subprocess, "check_call", lambda *x: None):
+ create_bom(bom_path, root_path, tool_path)
+
+ def get_relative_glob_list(self):
+ source = Path(self.tmpdir)
+ (source / "testfile").touch()
+ glob = "*"
+ assert len(get_relative_glob_list(source, glob)) == 1
+
+ def test_xar_package_folder(self):
+ source = Path(self.tmpdir)
+ dest = source / "fakedestination"
+ dest.touch()
+ tool = source / "faketool"
+ with patch.object(mozpack.pkg.subprocess, "check_call", lambda *x, **y: None):
+ xar_package_folder(source, dest, tool)
+
+ def test_xar_package_folder_not_absolute(self):
+ source = Path("./some/relative/path")
+ dest = Path("./some/other/relative/path")
+ tool = source / "faketool"
+ with patch.object(mozpack.pkg.subprocess, "check_call", lambda: None):
+ with self.assertRaises(Exception):
+ xar_package_folder(source, dest, tool)
+
+ def test_create_pkg(self):
+ def noop(*x, **y):
+ pass
+
+ def mock_get_app_info_plist(*args):
+ return {"CFBundleShortVersionString": "1.0.0"}
+
+ def mock_get_apple_template(*args):
+ return Template("fake template")
+
+ source = Path(self.tmpdir) / "FakeApp.app"
+ source.mkdir()
+ output = Path(self.tmpdir) / "output.pkg"
+ fake_tool = Path(self.tmpdir) / "faketool"
+ with patch.multiple(
+ mozpack.pkg,
+ get_app_info_plist=mock_get_app_info_plist,
+ get_apple_template=mock_get_apple_template,
+ save_text_file=noop,
+ create_payload=noop,
+ create_bom=noop,
+ xar_package_folder=noop,
+ ):
+ create_pkg(source, output, fake_tool, fake_tool, fake_tool)
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/test/test_unify.py b/python/mozbuild/mozpack/test/test_unify.py
new file mode 100644
index 0000000000..15de50dccc
--- /dev/null
+++ b/python/mozbuild/mozpack/test/test_unify.py
@@ -0,0 +1,250 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+from io import StringIO
+
+import mozunit
+
+from mozbuild.util import ensureParentDir
+from mozpack.errors import AccumulatedErrors, ErrorMessage, errors
+from mozpack.files import FileFinder
+from mozpack.mozjar import JarWriter
+from mozpack.test.test_files import MockDest, TestWithTmpDir
+from mozpack.unify import UnifiedBuildFinder, UnifiedFinder
+
+
+class TestUnified(TestWithTmpDir):
+ def create_one(self, which, path, content):
+ file = self.tmppath(os.path.join(which, path))
+ ensureParentDir(file)
+ if isinstance(content, str):
+ content = content.encode("utf-8")
+ open(file, "wb").write(content)
+
+ def create_both(self, path, content):
+ for p in ["a", "b"]:
+ self.create_one(p, path, content)
+
+
+class TestUnifiedFinder(TestUnified):
+ def test_unified_finder(self):
+ self.create_both("foo/bar", "foobar")
+ self.create_both("foo/baz", "foobaz")
+ self.create_one("a", "bar", "bar")
+ self.create_one("b", "baz", "baz")
+ self.create_one("a", "qux", "foobar")
+ self.create_one("b", "qux", "baz")
+ self.create_one("a", "test/foo", "a\nb\nc\n")
+ self.create_one("b", "test/foo", "b\nc\na\n")
+ self.create_both("test/bar", "a\nb\nc\n")
+
+ finder = UnifiedFinder(
+ FileFinder(self.tmppath("a")),
+ FileFinder(self.tmppath("b")),
+ sorted=["test"],
+ )
+ self.assertEqual(
+ sorted(
+ [(f, c.open().read().decode("utf-8")) for f, c in finder.find("foo")]
+ ),
+ [("foo/bar", "foobar"), ("foo/baz", "foobaz")],
+ )
+ self.assertRaises(ErrorMessage, any, finder.find("bar"))
+ self.assertRaises(ErrorMessage, any, finder.find("baz"))
+ self.assertRaises(ErrorMessage, any, finder.find("qux"))
+ self.assertEqual(
+ sorted(
+ [(f, c.open().read().decode("utf-8")) for f, c in finder.find("test")]
+ ),
+ [("test/bar", "a\nb\nc\n"), ("test/foo", "a\nb\nc\n")],
+ )
+
+
+class TestUnifiedBuildFinder(TestUnified):
+ def test_unified_build_finder(self):
+ finder = UnifiedBuildFinder(
+ FileFinder(self.tmppath("a")), FileFinder(self.tmppath("b"))
+ )
+
+ # Test chrome.manifest unification
+ self.create_both("chrome.manifest", "a\nb\nc\n")
+ self.create_one("a", "chrome/chrome.manifest", "a\nb\nc\n")
+ self.create_one("b", "chrome/chrome.manifest", "b\nc\na\n")
+ self.assertEqual(
+ sorted(
+ [
+ (f, c.open().read().decode("utf-8"))
+ for f, c in finder.find("**/chrome.manifest")
+ ]
+ ),
+ [("chrome.manifest", "a\nb\nc\n"), ("chrome/chrome.manifest", "a\nb\nc\n")],
+ )
+
+ # Test buildconfig.html unification
+ self.create_one(
+ "a",
+ "chrome/browser/foo/buildconfig.html",
+ "\n".join(
+ [
+ "<html>",
+ " <body>",
+ " <div>",
+ " <h1>Build Configuration</h1>",
+ " <div>foo</div>",
+ " </div>",
+ " </body>",
+ "</html>",
+ ]
+ ),
+ )
+ self.create_one(
+ "b",
+ "chrome/browser/foo/buildconfig.html",
+ "\n".join(
+ [
+ "<html>",
+ " <body>",
+ " <div>",
+ " <h1>Build Configuration</h1>",
+ " <div>bar</div>",
+ " </div>",
+ " </body>",
+ "</html>",
+ ]
+ ),
+ )
+ self.assertEqual(
+ sorted(
+ [
+ (f, c.open().read().decode("utf-8"))
+ for f, c in finder.find("**/buildconfig.html")
+ ]
+ ),
+ [
+ (
+ "chrome/browser/foo/buildconfig.html",
+ "\n".join(
+ [
+ "<html>",
+ " <body>",
+ " <div>",
+ " <h1>Build Configuration</h1>",
+ " <div>foo</div>",
+ " <hr> </hr>",
+ " <div>bar</div>",
+ " </div>",
+ " </body>",
+ "</html>",
+ ]
+ ),
+ )
+ ],
+ )
+
+ # Test xpi file unification
+ xpi = MockDest()
+ with JarWriter(fileobj=xpi, compress=True) as jar:
+ jar.add("foo", "foo")
+ jar.add("bar", "bar")
+ foo_xpi = xpi.read()
+ self.create_both("foo.xpi", foo_xpi)
+
+ with JarWriter(fileobj=xpi, compress=True) as jar:
+ jar.add("foo", "bar")
+ self.create_one("a", "bar.xpi", foo_xpi)
+ self.create_one("b", "bar.xpi", xpi.read())
+
+ errors.out = StringIO()
+ with self.assertRaises(AccumulatedErrors), errors.accumulate():
+ self.assertEqual(
+ [(f, c.open().read()) for f, c in finder.find("*.xpi")],
+ [("foo.xpi", foo_xpi)],
+ )
+ errors.out = sys.stderr
+
+ # Test install.rdf unification
+ x86_64 = "Darwin_x86_64-gcc3"
+ x86 = "Darwin_x86-gcc3"
+ target_tag = "<{em}targetPlatform>{platform}</{em}targetPlatform>"
+ target_attr = '{em}targetPlatform="{platform}" '
+
+ rdf_tag = "".join(
+ [
+ '<{RDF}Description {em}bar="bar" {em}qux="qux">',
+ "<{em}foo>foo</{em}foo>",
+ "{targets}",
+ "<{em}baz>baz</{em}baz>",
+ "</{RDF}Description>",
+ ]
+ )
+ rdf_attr = "".join(
+ [
+ '<{RDF}Description {em}bar="bar" {attr}{em}qux="qux">',
+ "{targets}",
+ "<{em}foo>foo</{em}foo><{em}baz>baz</{em}baz>",
+ "</{RDF}Description>",
+ ]
+ )
+
+ for descr_ns, target_ns in (("RDF:", ""), ("", "em:"), ("RDF:", "em:")):
+ # First we need to infuse the above strings with our namespaces and
+ # platform values.
+ ns = {"RDF": descr_ns, "em": target_ns}
+ target_tag_x86_64 = target_tag.format(platform=x86_64, **ns)
+ target_tag_x86 = target_tag.format(platform=x86, **ns)
+ target_attr_x86_64 = target_attr.format(platform=x86_64, **ns)
+ target_attr_x86 = target_attr.format(platform=x86, **ns)
+
+ tag_x86_64 = rdf_tag.format(targets=target_tag_x86_64, **ns)
+ tag_x86 = rdf_tag.format(targets=target_tag_x86, **ns)
+ tag_merged = rdf_tag.format(
+ targets=target_tag_x86_64 + target_tag_x86, **ns
+ )
+ tag_empty = rdf_tag.format(targets="", **ns)
+
+ attr_x86_64 = rdf_attr.format(attr=target_attr_x86_64, targets="", **ns)
+ attr_x86 = rdf_attr.format(attr=target_attr_x86, targets="", **ns)
+ attr_merged = rdf_attr.format(
+ attr="", targets=target_tag_x86_64 + target_tag_x86, **ns
+ )
+
+ # This table defines the test cases, columns "a" and "b" being the
+ # contents of the install.rdf of the respective platform and
+ # "result" the exepected merged content after unification.
+ testcases = (
+ # _____a_____ _____b_____ ___result___#
+ (tag_x86_64, tag_x86, tag_merged),
+ (tag_x86_64, tag_empty, tag_empty),
+ (tag_empty, tag_x86, tag_empty),
+ (tag_empty, tag_empty, tag_empty),
+ (attr_x86_64, attr_x86, attr_merged),
+ (tag_x86_64, attr_x86, tag_merged),
+ (attr_x86_64, tag_x86, attr_merged),
+ (attr_x86_64, tag_empty, tag_empty),
+ (tag_empty, attr_x86, tag_empty),
+ )
+
+ # Now create the files from the above table and compare
+ results = []
+ for emid, (rdf_a, rdf_b, result) in enumerate(testcases):
+ filename = "ext/id{0}/install.rdf".format(emid)
+ self.create_one("a", filename, rdf_a)
+ self.create_one("b", filename, rdf_b)
+ results.append((filename, result))
+
+ self.assertEqual(
+ sorted(
+ [
+ (f, c.open().read().decode("utf-8"))
+ for f, c in finder.find("**/install.rdf")
+ ]
+ ),
+ results,
+ )
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozbuild/mozpack/unify.py b/python/mozbuild/mozpack/unify.py
new file mode 100644
index 0000000000..ca4d0017a9
--- /dev/null
+++ b/python/mozbuild/mozpack/unify.py
@@ -0,0 +1,265 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import re
+import struct
+import subprocess
+from collections import OrderedDict
+from tempfile import mkstemp
+
+import buildconfig
+
+import mozpack.path as mozpath
+from mozbuild.util import hexdump
+from mozpack.errors import errors
+from mozpack.executables import MACHO_SIGNATURES
+from mozpack.files import BaseFile, BaseFinder, ExecutableFile, GeneratedFile
+
+# Regular expressions for unifying install.rdf
+FIND_TARGET_PLATFORM = re.compile(
+ r"""
+ <(?P<ns>[-._0-9A-Za-z]+:)?targetPlatform> # The targetPlatform tag, with any namespace
+ (?P<platform>[^<]*) # The actual platform value
+ </(?P=ns)?targetPlatform> # The closing tag
+ """,
+ re.X,
+)
+FIND_TARGET_PLATFORM_ATTR = re.compile(
+ r"""
+ (?P<tag><(?:[-._0-9A-Za-z]+:)?Description) # The opening part of the <Description> tag
+ (?P<attrs>[^>]*?)\s+ # The initial attributes
+ (?P<ns>[-._0-9A-Za-z]+:)?targetPlatform= # The targetPlatform attribute, with any namespace
+ [\'"](?P<platform>[^\'"]+)[\'"] # The actual platform value
+ (?P<otherattrs>[^>]*?>) # The remaining attributes and closing angle bracket
+ """,
+ re.X,
+)
+
+
+def may_unify_binary(file):
+ """
+ Return whether the given BaseFile instance is an ExecutableFile that
+ may be unified. Only non-fat Mach-O binaries are to be unified.
+ """
+ if isinstance(file, ExecutableFile):
+ signature = file.open().read(4)
+ if len(signature) < 4:
+ return False
+ signature = struct.unpack(">L", signature)[0]
+ if signature in MACHO_SIGNATURES:
+ return True
+ return False
+
+
+class UnifiedExecutableFile(BaseFile):
+ """
+    File class for executable and library files that are to be unified with 'lipo'.
+ """
+
+ def __init__(self, executable1, executable2):
+ """
+ Initialize a UnifiedExecutableFile with a pair of ExecutableFiles to
+ be unified. They are expected to be non-fat Mach-O executables.
+ """
+ assert isinstance(executable1, ExecutableFile)
+ assert isinstance(executable2, ExecutableFile)
+ self._executables = (executable1, executable2)
+
+ def copy(self, dest, skip_if_older=True):
+ """
+        Create a fat executable from the two Mach-O executables given when
+ creating the instance.
+ skip_if_older is ignored.
+ """
+ assert isinstance(dest, str)
+ tmpfiles = []
+ try:
+ for e in self._executables:
+ fd, f = mkstemp()
+ os.close(fd)
+ tmpfiles.append(f)
+ e.copy(f, skip_if_older=False)
+ lipo = buildconfig.substs.get("LIPO") or "lipo"
+ subprocess.check_call([lipo, "-create"] + tmpfiles + ["-output", dest])
+ except Exception as e:
+ errors.error(
+ "Failed to unify %s and %s: %s"
+ % (self._executables[0].path, self._executables[1].path, str(e))
+ )
+ finally:
+ for f in tmpfiles:
+ os.unlink(f)
+
+
+class UnifiedFinder(BaseFinder):
+ """
+ Helper to get unified BaseFile instances from two distinct trees on the
+ file system.
+ """
+
+ def __init__(self, finder1, finder2, sorted=[], **kargs):
+ """
+ Initialize a UnifiedFinder. finder1 and finder2 are BaseFinder
+ instances from which files are picked. UnifiedFinder.find() will act as
+ FileFinder.find() but will error out when matches can only be found in
+ one of the two trees and not the other. It will also error out if
+ matches can be found on both ends but their contents are not identical.
+
+ The sorted argument gives a list of mozpath.match patterns. File
+ paths matching one of these patterns will have their contents compared
+ with their lines sorted.
+ """
+ assert isinstance(finder1, BaseFinder)
+ assert isinstance(finder2, BaseFinder)
+ self._finder1 = finder1
+ self._finder2 = finder2
+ self._sorted = sorted
+ BaseFinder.__init__(self, finder1.base, **kargs)
+
+ def _find(self, path):
+ """
+ UnifiedFinder.find() implementation.
+ """
+ # There is no `OrderedSet`. Operator `|` was added only in
+ # Python 3.9, so we merge by hand.
+ all_paths = OrderedDict()
+
+ files1 = OrderedDict()
+ for p, f in self._finder1.find(path):
+ files1[p] = f
+ all_paths[p] = True
+ files2 = OrderedDict()
+ for p, f in self._finder2.find(path):
+ files2[p] = f
+ all_paths[p] = True
+
+ for p in all_paths:
+ err = errors.count
+ unified = self.unify_file(p, files1.get(p), files2.get(p))
+ if unified:
+ yield p, unified
+            elif err == errors.count:  # unify_file itself reported no error, so report the difference here.
+ self._report_difference(p, files1.get(p), files2.get(p))
+
+ def _report_difference(self, path, file1, file2):
+ """
+ Report differences between files in both trees.
+ """
+ if not file1:
+ errors.error("File missing in %s: %s" % (self._finder1.base, path))
+ return
+ if not file2:
+ errors.error("File missing in %s: %s" % (self._finder2.base, path))
+ return
+
+ errors.error(
+ "Can't unify %s: file differs between %s and %s"
+ % (path, self._finder1.base, self._finder2.base)
+ )
+ if not isinstance(file1, ExecutableFile) and not isinstance(
+ file2, ExecutableFile
+ ):
+ from difflib import unified_diff
+
+ try:
+ lines1 = [l.decode("utf-8") for l in file1.open().readlines()]
+ lines2 = [l.decode("utf-8") for l in file2.open().readlines()]
+ except UnicodeDecodeError:
+ lines1 = hexdump(file1.open().read())
+ lines2 = hexdump(file2.open().read())
+
+ for line in unified_diff(
+ lines1,
+ lines2,
+ os.path.join(self._finder1.base, path),
+ os.path.join(self._finder2.base, path),
+ ):
+ errors.out.write(line)
+
+ def unify_file(self, path, file1, file2):
+ """
+ Given two BaseFiles and the path they were found at, return a
+ unified version of the files. If the files match, the first BaseFile
+ may be returned.
+ If the files don't match or one of them is `None`, the method returns
+ `None`.
+ Subclasses may decide to unify by using one of the files in that case.
+ """
+ if not file1 or not file2:
+ return None
+
+ if may_unify_binary(file1) and may_unify_binary(file2):
+ return UnifiedExecutableFile(file1, file2)
+
+ content1 = file1.open().readlines()
+ content2 = file2.open().readlines()
+ if content1 == content2:
+ return file1
+ for pattern in self._sorted:
+ if mozpath.match(path, pattern):
+ if sorted(content1) == sorted(content2):
+ return file1
+ break
+ return None
+
+
+class UnifiedBuildFinder(UnifiedFinder):
+ """
+ Specialized UnifiedFinder for Mozilla applications packaging. It allows
+ ``*.manifest`` files to differ in their order, and unifies ``buildconfig.html``
+ files by merging their content.
+ """
+
+ def __init__(self, finder1, finder2, **kargs):
+ UnifiedFinder.__init__(
+ self, finder1, finder2, sorted=["**/*.manifest"], **kargs
+ )
+
+ def unify_file(self, path, file1, file2):
+ """
+ Unify files taking Mozilla application special cases into account.
+ Otherwise defer to UnifiedFinder.unify_file.
+ """
+ basename = mozpath.basename(path)
+ if file1 and file2 and basename == "buildconfig.html":
+ content1 = file1.open().readlines()
+ content2 = file2.open().readlines()
+ # Copy everything from the first file up to the end of its <div>,
+ # insert a <hr> between the two files and copy the second file's
+ # content beginning after its leading <h1>.
+ return GeneratedFile(
+ b"".join(
+ content1[: content1.index(b" </div>\n")]
+ + [b" <hr> </hr>\n"]
+ + content2[
+ content2.index(b" <h1>Build Configuration</h1>\n") + 1 :
+ ]
+ )
+ )
+ elif file1 and file2 and basename == "install.rdf":
+ # install.rdf files often have em:targetPlatform (either as
+ # attribute or as tag) that will differ between platforms. The
+ # unified install.rdf should contain both em:targetPlatforms if
+ # they exist, or strip them if only one file has a target platform.
+ content1, content2 = (
+ FIND_TARGET_PLATFORM_ATTR.sub(
+ lambda m: m.group("tag")
+ + m.group("attrs")
+ + m.group("otherattrs")
+ + "<%stargetPlatform>%s</%stargetPlatform>"
+ % (m.group("ns") or "", m.group("platform"), m.group("ns") or ""),
+ f.open().read().decode("utf-8"),
+ )
+ for f in (file1, file2)
+ )
+
+ platform2 = FIND_TARGET_PLATFORM.search(content2)
+ return GeneratedFile(
+ FIND_TARGET_PLATFORM.sub(
+ lambda m: m.group(0) + platform2.group(0) if platform2 else "",
+ content1,
+ )
+ )
+ return UnifiedFinder.unify_file(self, path, file1, file2)
diff --git a/python/mozbuild/setup.py b/python/mozbuild/setup.py
new file mode 100644
index 0000000000..30785493b0
--- /dev/null
+++ b/python/mozbuild/setup.py
@@ -0,0 +1,29 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from setuptools import find_packages, setup
+
+VERSION = "0.2"
+
+setup(
+ author="Mozilla Foundation",
+ author_email="dev-builds@lists.mozilla.org",
+ name="mozbuild",
+ description="Mozilla build system functionality.",
+ license="MPL 2.0",
+ packages=find_packages(),
+ version=VERSION,
+ install_requires=[
+ "jsmin",
+ "mozfile",
+ ],
+ classifiers=[
+ "Development Status :: 3 - Alpha",
+ "Topic :: Software Development :: Build Tools",
+ "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
+ "Programming Language :: Python :: 2.7",
+ "Programming Language :: Python :: Implementation :: CPython",
+ ],
+ keywords="mozilla build",
+)
diff --git a/python/mozlint/.ruff.toml b/python/mozlint/.ruff.toml
new file mode 100644
index 0000000000..11e713da73
--- /dev/null
+++ b/python/mozlint/.ruff.toml
@@ -0,0 +1,4 @@
+extend = "../../pyproject.toml"
+
+[isort]
+known-first-party = ["mozlint"]
diff --git a/python/mozlint/mozlint/__init__.py b/python/mozlint/mozlint/__init__.py
new file mode 100644
index 0000000000..bcab4a48b1
--- /dev/null
+++ b/python/mozlint/mozlint/__init__.py
@@ -0,0 +1,7 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+# flake8: noqa
+
+from .result import Issue
+from .roller import LintRoller
diff --git a/python/mozlint/mozlint/cli.py b/python/mozlint/mozlint/cli.py
new file mode 100644
index 0000000000..0262173367
--- /dev/null
+++ b/python/mozlint/mozlint/cli.py
@@ -0,0 +1,445 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+from argparse import REMAINDER, SUPPRESS, ArgumentParser
+from pathlib import Path
+
+from mozlint.errors import NoValidLinter
+from mozlint.formatters import all_formatters
+
+
class MozlintParser(ArgumentParser):
    # NOTE: deliberately no class docstring -- __init__ passes self.__doc__
    # to argparse as the `usage` string, so adding one here would change the
    # CLI's usage output.

    # Declarative table of [flags, kwargs] pairs, registered with
    # add_argument() in __init__.
    arguments = [
        [
            ["paths"],
            {
                "nargs": "*",
                "default": None,
                "help": "Paths to file or directories to lint, like "
                "'browser/components/loop' or 'mobile/android'. "
                "If not provided, defaults to the files changed according "
                "to --outgoing and --workdir.",
            },
        ],
        [
            ["-l", "--linter"],
            {
                "dest": "linters",
                "default": [],
                "action": "append",
                "help": "Linters to run, e.g 'eslint'. By default all linters "
                "are run for all the appropriate files.",
            },
        ],
        [
            ["--list"],
            {
                "dest": "list_linters",
                "default": False,
                "action": "store_true",
                "help": "List all available linters and exit.",
            },
        ],
        [
            ["-W", "--warnings"],
            {
                "const": True,
                "nargs": "?",
                "choices": ["soft"],
                "dest": "show_warnings",
                "help": "Display and fail on warnings in addition to errors. "
                "--warnings=soft can be used to report warnings but only fail "
                "on errors.",
            },
        ],
        [
            ["-v", "--verbose"],
            {
                "dest": "show_verbose",
                "default": False,
                "action": "store_true",
                "help": "Enable verbose logging.",
            },
        ],
        [
            ["-f", "--format"],
            {
                "dest": "formats",
                "action": "append",
                "help": "Formatter to use. Defaults to 'stylish' on stdout. "
                "You can specify an optional path as --format formatter:path "
                "that will be used instead of stdout. "
                "You can also use multiple formatters at the same time. "
                "Formatters available: {}.".format(", ".join(all_formatters.keys())),
            },
        ],
        [
            ["-n", "--no-filter"],
            {
                "dest": "use_filters",
                "default": True,
                "action": "store_false",
                "help": "Ignore all filtering. This is useful for quickly "
                "testing a directory that otherwise wouldn't be run, "
                "without needing to modify the config file.",
            },
        ],
        [
            ["--include-third-party"],
            {
                # Fix: dest previously contained a hyphen
                # ("include_third-party"), which is not a valid Python
                # identifier -- the flag's value could not be read as an
                # attribute and did not match the underscore naming used by
                # every other dest, so the option was silently ignored.
                "dest": "include_thirdparty",
                "default": False,
                "action": "store_true",
                "help": "Also run the linter(s) on third-party code",
            },
        ],
        [
            ["-o", "--outgoing"],
            {
                "const": True,
                "nargs": "?",
                "help": "Lint files touched by commits that are not on the remote repository. "
                "Without arguments, finds the default remote that would be pushed to. "
                "The remote branch can also be specified manually. Works with "
                "mercurial or git.",
            },
        ],
        [
            ["-w", "--workdir"],
            {
                "const": "all",
                "nargs": "?",
                "choices": ["staged", "all"],
                "help": "Lint files touched by changes in the working directory "
                "(i.e haven't been committed yet). On git, --workdir=staged "
                "can be used to only consider staged files. Works with "
                "mercurial or git.",
            },
        ],
        [
            ["-r", "--rev"],
            {
                "default": None,
                "type": str,
                "help": "Lint files touched by changes in revisions described by REV. "
                "For mercurial, it may be any revset. For git, it is a single tree-ish.",
            },
        ],
        [
            ["--fix"],
            {
                "action": "store_true",
                "default": False,
                "help": "Fix lint errors if possible. Any errors that could not be fixed "
                "will be printed as normal.",
            },
        ],
        [
            ["--edit"],
            {
                "action": "store_true",
                "default": False,
                "help": "Each file containing lint errors will be opened in $EDITOR one after "
                "the other.",
            },
        ],
        [
            ["--setup"],
            {
                "action": "store_true",
                "default": False,
                "help": "Bootstrap linter dependencies without running any of the linters.",
            },
        ],
        [
            ["-j", "--jobs"],
            {
                "default": None,
                "dest": "num_procs",
                "type": int,
                "help": "Number of worker processes to spawn when running linters. "
                "Defaults to the number of cores in your CPU.",
            },
        ],
        # Paths to check for linter configurations.
        # Default: tools/lint set in tools/lint/mach_commands.py
        [
            ["--config-path"],
            {
                "action": "append",
                "default": [],
                "dest": "config_paths",
                "help": SUPPRESS,
            },
        ],
        [
            ["--check-exclude-list"],
            {
                "dest": "check_exclude_list",
                "default": False,
                "action": "store_true",
                "help": "Run linters for all the paths in the exclude list.",
            },
        ],
        [
            ["extra_args"],
            {
                "nargs": REMAINDER,
                "help": "Extra arguments that will be forwarded to the underlying linter.",
            },
        ],
    ]

    def __init__(self, **kwargs):
        ArgumentParser.__init__(self, usage=self.__doc__, **kwargs)

        for cli, args in self.arguments:
            self.add_argument(*cli, **args)

    def parse_known_args(self, *args, **kwargs):
        """Parse argv, stashing any unrecognized tokens in ``args.extra_args``."""
        # Allow '-wo' or '-ow' as shorthand for both --workdir and --outgoing.
        for token in ("-wo", "-ow"):
            if token in args[0]:
                i = args[0].index(token)
                args[0].pop(i)
                args[0][i:i] = [token[:2], "-" + token[2]]

        # This is here so the eslint mach command doesn't lose 'extra_args'
        # when using mach's dispatch functionality.
        args, extra = ArgumentParser.parse_known_args(self, *args, **kwargs)
        args.extra_args = extra

        self.validate(args)
        return args, extra

    def validate(self, args):
        """Post-parse sanity checks; exits via self.error() on invalid input."""
        if args.edit and not os.environ.get("EDITOR"):
            self.error("must set the $EDITOR environment variable to use --edit")

        if args.paths:
            invalid = [p for p in args.paths if not os.path.exists(p)]
            if invalid:
                self.error(
                    "the following paths do not exist:\n{}".format("\n".join(invalid))
                )

        if args.formats:
            formats = []
            for fmt in args.formats:
                if isinstance(fmt, tuple):  # format is already processed
                    formats.append(fmt)
                    continue

                path = None
                if ":" in fmt:
                    # Detect optional formatter path
                    pos = fmt.index(":")
                    fmt, path = fmt[:pos], os.path.realpath(fmt[pos + 1 :])

                    # Check path is writable
                    fmt_dir = os.path.dirname(path)
                    if not os.access(fmt_dir, os.W_OK | os.X_OK):
                        self.error(
                            "the following directory is not writable: {}".format(
                                fmt_dir
                            )
                        )

                if fmt not in all_formatters.keys():
                    self.error(
                        "the following formatter is not available: {}".format(fmt)
                    )

                formats.append((fmt, path))
            args.formats = formats
        else:
            # Can't use argparse default or this choice will be always present
            args.formats = [("stylish", None)]
+
+
def find_linters(config_paths, linters=None):
    """Locate lint definition files (``*.yml``) under *config_paths*.

    :param config_paths: iterable of directories to search; directories that
        don't exist are skipped.
    :param linters: optional list of linter names to restrict the search to.
        If omitted or None, every definition found is returned.
    :returns: dict with keys ``lint_paths`` (the found definition file paths)
        and ``linters_not_found`` (requested names with no definition file).
    """
    # Fix: normalize None to [] -- set(None) below raised TypeError whenever
    # the default argument was used.
    linters = linters or []
    lints = {}
    for search_path in config_paths:
        if not os.path.isdir(search_path):
            continue

        # Lint definitions may import helper modules living beside them.
        sys.path.insert(0, search_path)
        for f in os.listdir(search_path):
            name = os.path.basename(f)

            if not name.endswith(".yml"):
                continue

            name = name.rsplit(".", 1)[0]

            if linters and name not in linters:
                continue

            lints[name] = os.path.join(search_path, f)

    linters_not_found = list(set(linters).difference(set(lints.keys())))
    return {"lint_paths": lints.values(), "linters_not_found": linters_not_found}
+
+
def get_exclude_list_output(result, paths):
    """Build a report of exclude-list paths that are now lint-clean.

    A path is "red" if it is a file with issues or an ancestor directory of
    one; everything else in *paths* is "green" and can be dropped from the
    exclude list.
    """
    # Files with issues, plus every ancestor directory of each.
    red = set()
    for issues in result.issues.values():
        current = issues[0].relpath
        red.add(current)
        current = os.path.dirname(current)
        while current:
            red.add(current)
            current = os.path.dirname(current)

    # Normalize trailing-slash entries so they compare like the error paths.
    normalized = [os.path.dirname(p) if p[-1] == "/" else p for p in paths]
    green_paths = sorted(set(normalized).difference(red))

    if not green_paths:
        return "No path in the exclude list is green."

    header = (
        "The following list of paths are now green "
        "and can be removed from the exclude list:\n\n"
    )
    return header + "\n".join(green_paths)
+
+
def run(
    paths,
    linters,
    formats,
    outgoing,
    workdir,
    rev,
    edit,
    check_exclude_list,
    setup=False,
    list_linters=False,
    num_procs=None,
    virtualenv_manager=None,
    setupargs=None,
    **lintargs
):
    """Run lint over the requested paths and emit formatted results.

    :param paths: files/directories to lint; if empty and run from the repo
        root, falls back to --outgoing/--workdir selection.
    :param linters: linter names to run (empty means all applicable).
    :param formats: list of (formatter_name, output_path_or_None) tuples.
    :param outgoing: lint files touched by commits not on the remote.
    :param workdir: lint files with uncommitted changes ("staged"/"all").
    :param rev: lint files touched by the given revision(s).
    :param edit: open files with issues in $EDITOR, then re-lint them.
    :param check_exclude_list: report exclude-list paths that are now clean.
    :param setup: bootstrap linter dependencies only, then return.
    :param list_linters: print available linters and return.
    :param num_procs: worker process count forwarded to LintRoller.roll.
    :param lintargs: extra options forwarded to LintRoller; expected to
        contain at least "root", "config_paths" and "use_filters".
    :returns: the aggregate lint return code (0 on success).
    """
    # Imported here (not at module level) -- presumably to keep CLI startup
    # cheap and avoid import cycles; confirm before moving to the top.
    from mozlint import LintRoller, formatters
    from mozlint.editor import edit_issues

    # Resolve config paths relative to the repository root.
    lintargs["config_paths"] = [
        os.path.join(lintargs["root"], p) for p in lintargs["config_paths"]
    ]

    # Always perform exhaustive linting for exclude list paths
    lintargs["use_filters"] = lintargs["use_filters"] and not check_exclude_list

    if list_linters:
        lint_paths = find_linters(lintargs["config_paths"], linters)
        linters = [
            os.path.splitext(os.path.basename(l))[0] for l in lint_paths["lint_paths"]
        ]
        print("\n".join(sorted(linters)))
        print(
            "\nNote that clang-tidy checks are not run as part of this "
            "command, but using the static-analysis command."
        )
        return 0

    lint = LintRoller(setupargs=setupargs or {}, **lintargs)
    linters_info = find_linters(lintargs["config_paths"], linters)

    result = None

    try:

        lint.read(linters_info["lint_paths"])

        if check_exclude_list:
            if len(lint.linters) > 1:
                print("error: specify a single linter to check with `-l/--linter`")
                return 1
            # Lint exactly the paths this linter normally excludes.
            paths = lint.linters[0]["local_exclude"]

        if (
            not paths
            and Path.cwd() == Path(lint.root)
            and not (outgoing or workdir or rev)
        ):
            print(
                "warning: linting the entire repo takes a long time, using --outgoing and "
                "--workdir instead. If you want to lint the entire repo, run `./mach lint .`"
            )
            # Setting the default values
            outgoing = True
            workdir = "all"

        # Always run bootstrapping, but return early if --setup was passed in.
        ret = lint.setup(virtualenv_manager=virtualenv_manager)
        if setup:
            return ret

        if linters_info["linters_not_found"] != []:
            raise NoValidLinter

        # run all linters
        result = lint.roll(
            paths, outgoing=outgoing, workdir=workdir, rev=rev, num_procs=num_procs
        )
    except NoValidLinter as e:
        result = lint.result
        print(str(e))

    if edit and result.issues:
        # Let the user fix issues interactively, then re-lint only the
        # previously-failing files.
        edit_issues(result)
        result = lint.roll(result.issues.keys(), num_procs=num_procs)

    for every in linters_info["linters_not_found"]:
        result.failed_setup.add(every)

    if check_exclude_list:
        # Get and display all those paths in the exclude list which are
        # now green and can be safely removed from the list
        out = get_exclude_list_output(result, paths)
        print(out, file=sys.stdout)
        return result.returncode

    for formatter_name, path in formats:
        formatter = formatters.get(formatter_name)

        out = formatter(result)
        # We do this only for `json` that is mostly used in automation
        if not out and formatter_name == "json":
            out = "{}"

        if out:
            fh = open(path, "w") if path else sys.stdout

            if not path and fh.encoding == "ascii":
                # If sys.stdout.encoding is ascii, printing output will fail
                # due to the stylish formatter's use of unicode characters.
                # Ideally the user should fix their environment by setting
                # `LC_ALL=C.UTF-8` or similar. But this is a common enough
                # problem that we help them out a little here by manually
                # encoding and writing to the stdout buffer directly.
                out += "\n"
                fh.buffer.write(out.encode("utf-8", errors="replace"))
                fh.buffer.flush()
            else:
                print(out, file=fh)

            if path:
                fh.close()

    return result.returncode
+
+
if __name__ == "__main__":
    # Allow running this module directly: parse argv and exit with the
    # aggregate lint return code.
    parser = MozlintParser()
    args = vars(parser.parse_args())
    sys.exit(run(**args))
diff --git a/python/mozlint/mozlint/editor.py b/python/mozlint/mozlint/editor.py
new file mode 100644
index 0000000000..1738892f93
--- /dev/null
+++ b/python/mozlint/mozlint/editor.py
@@ -0,0 +1,57 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import subprocess
+import tempfile
+
+from mozlint import formatters
+
+
def get_editor():
    """Return the editor named by the $EDITOR environment variable, or None."""
    editor = os.environ.get("EDITOR")
    return editor
+
+
def edit_issues(result):
    """Open each file that has lint issues in the user's $EDITOR.

    For vim-like editors, all issues are loaded into the quickfix list via a
    temporary file in 'compact' format; other editors are invoked once per
    file containing issues.

    :param result: result object with an ``issues`` mapping of
        path -> list of issues.
    """
    if not result.issues:
        return

    editor = get_editor()
    if not editor:
        print("warning: could not find a default editor")
        return

    name = os.path.basename(editor)
    if name in ("vim", "nvim", "gvim"):
        cmd = [
            editor,
            # need errorformat to match both Error and Warning, with or without a column
            "--cmd",
            "set errorformat+=%f:\\ line\\ %l\\\\,\\ col\\ %c\\\\,\\ %trror\\ -\\ %m",
            "--cmd",
            "set errorformat+=%f:\\ line\\ %l\\\\,\\ col\\ %c\\\\,\\ %tarning\\ -\\ %m",
            "--cmd",
            "set errorformat+=%f:\\ line\\ %l\\\\,\\ %trror\\ -\\ %m",
            "--cmd",
            "set errorformat+=%f:\\ line\\ %l\\\\,\\ %tarning\\ -\\ %m",
            # start with quickfix window opened
            "-c",
            "copen",
            # running with -q seems to open an empty buffer in addition to the
            # first file, this removes that empty buffer
            "-c",
            "1bd",
        ]

        with tempfile.NamedTemporaryFile(mode="w") as fh:
            s = formatters.get("compact", summary=False)(result)
            fh.write(s)
            fh.flush()

            cmd.extend(["-q", fh.name])
            subprocess.call(cmd)

    else:
        # Only the file paths are needed; the per-file issue lists are unused
        # (previously iterated .items() and discarded the values).
        for path in result.issues:
            subprocess.call([editor, path])
diff --git a/python/mozlint/mozlint/errors.py b/python/mozlint/mozlint/errors.py
new file mode 100644
index 0000000000..4b36f00f69
--- /dev/null
+++ b/python/mozlint/mozlint/errors.py
@@ -0,0 +1,33 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
class LintException(Exception):
    """Base class for all mozlint exceptions; catch this to handle any of
    the more specific errors below."""

    pass
+
+
class LinterNotFound(LintException):
    """Raised when a lint definition file does not exist."""

    def __init__(self, path):
        super().__init__(f"Could not find lint file '{path}'")
+
+
class NoValidLinter(LintException):
    """Raised when none of the requested linters could be resolved."""

    def __init__(self):
        message = "Invalid linters given, run again using valid linters or no linters"
        super().__init__(message)
+
+
class LinterParseError(LintException):
    """Raised when a lint definition file is malformed or invalid."""

    def __init__(self, path, message):
        super().__init__(f"{path}: {message}")
+
+
class LintersNotConfigured(LintException):
    """Raised when an operation needs registered linters but none exist."""

    def __init__(self):
        message = "No linters registered! Use `LintRoller.read` to register a linter."
        super().__init__(message)
diff --git a/python/mozlint/mozlint/formatters/__init__.py b/python/mozlint/mozlint/formatters/__init__.py
new file mode 100644
index 0000000000..e50616216f
--- /dev/null
+++ b/python/mozlint/mozlint/formatters/__init__.py
@@ -0,0 +1,31 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+
+from ..result import IssueEncoder
+from .compact import CompactFormatter
+from .stylish import StylishFormatter
+from .summary import SummaryFormatter
+from .treeherder import TreeherderFormatter
+from .unix import UnixFormatter
+
+
class JSONFormatter(object):
    """Formatter emitting the full issue mapping as machine-readable JSON."""

    def __call__(self, result):
        serialized = json.dumps(result.issues, cls=IssueEncoder)
        return serialized
+
+
# Registry mapping formatter name (as accepted by the CLI's --format option)
# to its implementation class.
all_formatters = {
    "compact": CompactFormatter,
    "json": JSONFormatter,
    "stylish": StylishFormatter,
    "summary": SummaryFormatter,
    "treeherder": TreeherderFormatter,
    "unix": UnixFormatter,
}
+
+
def get(name, **fmtargs):
    """Instantiate and return the formatter registered under *name*.

    :param name: key into ``all_formatters``.
    :param fmtargs: keyword arguments forwarded to the formatter constructor.
    :raises KeyError: if no formatter is registered under *name*.
    """
    formatter_cls = all_formatters[name]
    return formatter_cls(**fmtargs)
diff --git a/python/mozlint/mozlint/formatters/compact.py b/python/mozlint/mozlint/formatters/compact.py
new file mode 100644
index 0000000000..54ee194215
--- /dev/null
+++ b/python/mozlint/mozlint/formatters/compact.py
@@ -0,0 +1,41 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import attr
+
+from ..result import Issue
+
+
class CompactFormatter(object):
    """Formatter for compact output.

    Emits a single line per issue, mimicking eslint's 'compact' formatter.
    """

    # If modifying this format, please also update the vim errorformats in editor.py
    fmt = "{path}: line {lineno}{column}, {level} - {message} ({rule})"

    def __init__(self, summary=True):
        # When True, a trailing "<N> problem(s)" line is appended.
        self.summary = summary

    def __call__(self, result):
        lines = []
        total = 0
        for path, issues in sorted(result.issues.items()):
            total += len(issues)
            for issue in issues:
                assert isinstance(issue, Issue)

                fields = attr.asdict(issue)
                fields["column"] = (
                    ", col %s" % fields["column"] if fields["column"] else ""
                )
                fields["level"] = fields["level"].capitalize()
                fields["rule"] = fields["rule"] or fields["linter"]
                lines.append(self.fmt.format(**fields))

        if self.summary and total:
            suffix = "" if total == 1 else "s"
            lines.append("\n{} problem{}".format(total, suffix))
        return "\n".join(lines)
diff --git a/python/mozlint/mozlint/formatters/stylish.py b/python/mozlint/mozlint/formatters/stylish.py
new file mode 100644
index 0000000000..3f80bc7ad2
--- /dev/null
+++ b/python/mozlint/mozlint/formatters/stylish.py
@@ -0,0 +1,156 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozterm import Terminal
+
+from ..result import Issue
+from ..util.string import pluralize
+
+
class StylishFormatter(object):
    """Formatter based on the eslint default."""

    # Prefix for each diff line in colored-diff output.
    _indent_ = " "

    # Colors later on in the list are fallbacks in case the terminal
    # doesn't support colors earlier in the list.
    # See http://www.calmar.ws/vim/256-xterm-24bit-rgb-color-chart.html
    _colors = {
        "grey": [247, 8, 7],
        "red": [1],
        "green": [2],
        "yellow": [3],
        "brightred": [9, 1],
        "brightyellow": [11, 3],
    }

    fmt = """
  {c1}{lineno}{column} {c2}{level}{normal} {message} {c1}{rule}({linter}){normal}
{diff}""".lstrip(
        "\n"
    )
    fmt_summary = (
        "{t.bold}{c}\u2716 {problem} ({error}, {warning}{failure}, {fixed}){t.normal}"
    )

    def __init__(self, disable_colors=False):
        self.term = Terminal(disable_styling=disable_colors)
        self.num_colors = self.term.number_of_colors

    def color(self, color):
        """Return the escape sequence for *color*, using the first entry in
        ``_colors[color]`` the terminal actually supports ("" if none)."""
        for num in self._colors[color]:
            if num < self.num_colors:
                return self.term.color(num)
        return ""

    def _reset_max(self):
        # Per-file column widths; recomputed for every file's issue list.
        self.max_lineno = 0
        self.max_column = 0
        self.max_level = 0
        self.max_message = 0

    def _update_max(self, err):
        """Calculates the longest length of each token for spacing."""
        self.max_lineno = max(self.max_lineno, len(str(err.lineno)))
        if err.column:
            self.max_column = max(self.max_column, len(str(err.column)))
        self.max_level = max(self.max_level, len(str(err.level)))
        self.max_message = max(self.max_message, len(err.message))

    def _get_colored_diff(self, diff):
        """Return *diff* with '+' lines colored green and '-' lines red."""
        if not diff:
            return ""

        new_diff = ""
        for line in diff.split("\n"):
            if line.startswith("+"):
                new_diff += self.color("green")
            elif line.startswith("-"):
                new_diff += self.color("red")
            else:
                new_diff += self.term.normal
            new_diff += self._indent_ + line + "\n"
        return new_diff

    def __call__(self, result):
        """Render *result* as a colored, aligned, per-file issue listing
        followed by failure notices and a one-line summary."""
        message = []
        failed = result.failed

        num_errors = 0
        num_warnings = 0
        num_fixed = result.fixed
        for path, errors in sorted(result.issues.items()):
            self._reset_max()

            message.append(self.term.underline(path))
            # Do a first pass to calculate required padding
            for err in errors:
                assert isinstance(err, Issue)
                self._update_max(err)
                if err.level == "error":
                    num_errors += 1
                else:
                    num_warnings += 1

            # Second pass: emit issues ordered by line then column.
            for err in sorted(
                errors, key=lambda e: (int(e.lineno), int(e.column or 0))
            ):
                if err.column:
                    col = ":" + str(err.column).ljust(self.max_column)
                else:
                    col = "".ljust(self.max_column + 1)

                args = {
                    "normal": self.term.normal,
                    "c1": self.color("grey"),
                    "c2": self.color("red")
                    if err.level == "error"
                    else self.color("yellow"),
                    "lineno": str(err.lineno).rjust(self.max_lineno),
                    "column": col,
                    "level": err.level.ljust(self.max_level),
                    "rule": "{} ".format(err.rule) if err.rule else "",
                    "linter": err.linter.lower(),
                    "message": err.message.ljust(self.max_message),
                    "diff": self._get_colored_diff(err.diff).ljust(self.max_message),
                }
                message.append(self.fmt.format(**args).rstrip().rstrip("\n"))

            message.append("")  # newline

        # If there were failures, make it clear which linters failed
        for fail in failed:
            message.append(
                "{c}A failure occurred in the {name} linter.".format(
                    c=self.color("brightred"), name=fail
                )
            )

        # Print a summary
        message.append(
            self.fmt_summary.format(
                t=self.term,
                c=self.color("brightred")
                if num_errors or failed
                else self.color("brightyellow"),
                problem=pluralize("problem", num_errors + num_warnings + len(failed)),
                error=pluralize("error", num_errors),
                warning=pluralize(
                    "warning", num_warnings or result.total_suppressed_warnings
                ),
                failure=", {}".format(pluralize("failure", len(failed)))
                if failed
                else "",
                fixed="{} fixed".format(num_fixed),
            )
        )

        if result.total_suppressed_warnings > 0 and num_errors == 0:
            message.append(
                "(pass {c1}-W/--warnings{c2} to see warnings.)".format(
                    c1=self.color("grey"), c2=self.term.normal
                )
            )

        return "\n".join(message)
diff --git a/python/mozlint/mozlint/formatters/summary.py b/python/mozlint/mozlint/formatters/summary.py
new file mode 100644
index 0000000000..e6ecf37508
--- /dev/null
+++ b/python/mozlint/mozlint/formatters/summary.py
@@ -0,0 +1,50 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+from collections import defaultdict
+
+import mozpack.path as mozpath
+
+from ..util.string import pluralize
+
+
class SummaryFormatter(object):
    """Formatter printing per-directory error/warning counts."""

    def __init__(self, depth=None):
        # Directory depth (relative to the common prefix) at which counts
        # are aggregated; MOZLINT_SUMMARY_DEPTH overrides, default 1.
        self.depth = depth or int(os.environ.get("MOZLINT_SUMMARY_DEPTH", 1))

    def __call__(self, result):
        # NOTE(review): this looks up result.issues[path] and
        # result.suppressed_warnings[path] for paths that may only exist in
        # the other mapping -- presumably both are defaultdict-like; verify
        # against mozlint.result before changing these accesses.
        paths = set(
            list(result.issues.keys()) + list(result.suppressed_warnings.keys())
        )

        commonprefix = mozpath.commonprefix([mozpath.abspath(p) for p in paths])
        commonprefix = commonprefix.rsplit("/", 1)[0] + "/"

        # Map aggregated path -> [num_errors, num_warnings].
        summary = defaultdict(lambda: [0, 0])
        for path in paths:
            abspath = mozpath.abspath(path)
            assert abspath.startswith(commonprefix)

            if abspath != commonprefix:
                # Truncate the path to self.depth components below the prefix.
                parts = mozpath.split(mozpath.relpath(abspath, commonprefix))[
                    : self.depth
                ]
                abspath = mozpath.join(commonprefix, *parts)

            summary[abspath][0] += len(
                [r for r in result.issues[path] if r.level == "error"]
            )
            summary[abspath][1] += len(
                [r for r in result.issues[path] if r.level == "warning"]
            )
            summary[abspath][1] += result.suppressed_warnings[path]

        msg = []
        for path, (errors, warnings) in sorted(summary.items()):
            warning_str = (
                ", {}".format(pluralize("warning", warnings)) if warnings else ""
            )
            msg.append("{}: {}{}".format(path, pluralize("error", errors), warning_str))
        return "\n".join(msg)
diff --git a/python/mozlint/mozlint/formatters/treeherder.py b/python/mozlint/mozlint/formatters/treeherder.py
new file mode 100644
index 0000000000..66c7c59eee
--- /dev/null
+++ b/python/mozlint/mozlint/formatters/treeherder.py
@@ -0,0 +1,34 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import attr
+
+from ..result import Issue
+
+
class TreeherderFormatter(object):
    """Formatter for treeherder friendly output.

    The output is deliberately ugly: lines of this shape are what
    treeherder highlights as errors and warnings. This is a stop-gap
    until bug 1276486 is fixed.
    """

    fmt = "TEST-UNEXPECTED-{level} | {path}:{lineno}{column} | {message} ({rule})"

    def __call__(self, result):
        lines = []
        for _path, issues in sorted(result.issues.items()):
            for issue in issues:
                assert isinstance(issue, Issue)

                fields = attr.asdict(issue)
                fields["column"] = ":%s" % fields["column"] if fields["column"] else ""
                fields["level"] = fields["level"].upper()
                fields["rule"] = fields["rule"] or fields["linter"]
                lines.append(self.fmt.format(**fields))

        return "\n".join(lines) if lines else "No lint issues found."
diff --git a/python/mozlint/mozlint/formatters/unix.py b/python/mozlint/mozlint/formatters/unix.py
new file mode 100644
index 0000000000..ae096f3e2e
--- /dev/null
+++ b/python/mozlint/mozlint/formatters/unix.py
@@ -0,0 +1,33 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import attr
+
+from ..result import Issue
+
+
class UnixFormatter(object):
    """Formatter that respects Unix output conventions frequently
    employed by preprocessors and compilers. The format is
    `<FILENAME>:<LINE>[:<COL>]: <RULE> <LEVEL>: <MESSAGE>`.

    """

    fmt = "{path}:{lineno}:{column} {rule} {level}: {message}"

    def __call__(self, result):
        lines = []
        for _path, issues in sorted(result.issues.items()):
            for issue in issues:
                assert isinstance(issue, Issue)

                fields = attr.asdict(issue)
                # Paths are reported relative to the repo root.
                fields["path"] = fields["relpath"]
                fields["column"] = "%d:" % fields["column"] if fields["column"] else ""
                fields["rule"] = fields["rule"] or fields["linter"]
                lines.append(self.fmt.format(**fields))

        return "\n".join(lines)
diff --git a/python/mozlint/mozlint/parser.py b/python/mozlint/mozlint/parser.py
new file mode 100644
index 0000000000..eac502495b
--- /dev/null
+++ b/python/mozlint/mozlint/parser.py
@@ -0,0 +1,130 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+
+import yaml
+
+from .errors import LinterNotFound, LinterParseError
+from .types import supported_types
+
+GLOBAL_SUPPORT_FILES = []
+
+
class Parser(object):
    """Reads and validates lint configuration files."""

    # Keys every linter definition must provide.
    required_attributes = (
        "name",
        "description",
        "type",
        "payload",
    )

    def __init__(self, root):
        # Repository root; relative include/exclude paths resolve against it.
        self.root = root

    def __call__(self, path):
        return self.parse(path)

    def _validate(self, linter):
        """Raise LinterParseError if *linter* is not a valid definition."""
        relpath = os.path.relpath(linter["path"], self.root)

        missing_attrs = []
        for attr in self.required_attributes:
            if attr not in linter:
                missing_attrs.append(attr)

        if missing_attrs:
            raise LinterParseError(
                relpath,
                "Missing required attribute(s): " "{}".format(",".join(missing_attrs)),
            )

        if linter["type"] not in supported_types:
            raise LinterParseError(relpath, "Invalid type '{}'".format(linter["type"]))

        for attr in ("include", "exclude", "support-files"):
            if attr not in linter:
                continue

            if not isinstance(linter[attr], list) or not all(
                isinstance(a, str) for a in linter[attr]
            ):
                raise LinterParseError(
                    relpath,
                    "The {} directive must be a " "list of strings!".format(attr),
                )
            invalid_paths = set()
            for path in linter[attr]:
                if "*" in path:
                    # Globs are allowed in exclude/support-files, but not in
                    # include.
                    if attr == "include":
                        raise LinterParseError(
                            relpath,
                            "Paths in the include directive cannot "
                            "contain globs:\n {}".format(path),
                        )
                    continue

                abspath = path
                if not os.path.isabs(abspath):
                    abspath = os.path.join(self.root, path)

                if not os.path.exists(abspath):
                    invalid_paths.add(" " + path)

            if invalid_paths:
                raise LinterParseError(
                    relpath,
                    "The {} directive contains the following "
                    "paths that don't exist:\n{}".format(
                        attr, "\n".join(sorted(invalid_paths))
                    ),
                )

        if "setup" in linter:
            if linter["setup"].count(":") != 1:
                raise LinterParseError(
                    relpath,
                    "The setup attribute '{!r}' must have the "
                    "form 'module:object'".format(linter["setup"]),
                )

        if "extensions" in linter:
            # Normalize so both ".py" and "py" style extensions are accepted.
            linter["extensions"] = [e.strip(".") for e in linter["extensions"]]

    def parse(self, path):
        """Read a linter and return its LINTER definition.

        :param path: Path to the linter.
        :returns: List of linter definitions ([dict])
        :raises: LinterNotFound, LinterParseError
        """
        if not os.path.isfile(path):
            raise LinterNotFound(path)

        if not path.endswith(".yml"):
            raise LinterParseError(
                path, "Invalid filename, linters must end with '.yml'!"
            )

        with open(path) as fh:
            configs = list(yaml.safe_load_all(fh))

        if not configs:
            raise LinterParseError(path, "No lint definitions found!")

        linters = []
        for config in configs:
            for name, linter in config.items():
                linter["name"] = name
                linter["path"] = path
                self._validate(linter)
                # The definition file itself counts as a support file so that
                # editing it re-triggers the linter.
                linter.setdefault("support-files", []).extend(
                    GLOBAL_SUPPORT_FILES + [path]
                )
                linter.setdefault("include", ["."])
                linters.append(linter)

        return linters
diff --git a/python/mozlint/mozlint/pathutils.py b/python/mozlint/mozlint/pathutils.py
new file mode 100644
index 0000000000..b1b4b644bc
--- /dev/null
+++ b/python/mozlint/mozlint/pathutils.py
@@ -0,0 +1,313 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+
+from mozpack import path as mozpath
+from mozpack.files import FileFinder
+
+
class FilterPath(object):
    """Helper class to make comparing and matching file paths easier."""

    def __init__(self, path):
        self.path = os.path.normpath(path)
        self._finder = None

    @property
    def finder(self):
        # Lazily build (and cache) a FileFinder rooted at this path.
        if self._finder is None:
            self._finder = FileFinder(mozpath.normsep(self.path))
        return self._finder

    @property
    def ext(self):
        # File extension with any surrounding dots removed.
        root, extension = os.path.splitext(self.path)
        return extension.strip(".")

    @property
    def exists(self):
        return os.path.exists(self.path)

    @property
    def isfile(self):
        return os.path.isfile(self.path)

    @property
    def isdir(self):
        return os.path.isdir(self.path)

    def join(self, *args):
        return FilterPath(os.path.join(self.path, *args))

    def match(self, patterns):
        """Return True if this path matches any pattern in *patterns*."""
        candidate = mozpath.normsep(self.path)
        for pattern in patterns:
            raw = pattern.path if isinstance(pattern, FilterPath) else pattern
            if mozpath.match(candidate, mozpath.normsep(raw)):
                return True
        return False

    def contains(self, other):
        """Return True if other is a subdirectory of self or equals self."""
        if isinstance(other, FilterPath):
            other = other.path

        parts_self = os.path.abspath(self.path).split(os.sep)
        parts_other = os.path.normpath(os.path.abspath(other)).split(os.sep)

        # self contains other iff self's components are a prefix of other's.
        if len(parts_self) > len(parts_other):
            return False
        return parts_other[: len(parts_self)] == parts_self

    def __repr__(self):
        return repr(self.path)
+
+
def collapse(paths, base=None, dotfiles=False):
    """Given an iterable of paths, collapse them into the smallest possible set
    of paths that contain the original set (without containing any extra paths).

    For example, if directory 'a' contains two files b.txt and c.txt, calling:

        collapse(['a/b.txt', 'a/c.txt'])

    returns ['a']. But if a third file d.txt also exists, then it will return
    ['a/b.txt', 'a/c.txt'] since ['a'] would also include that extra file.

    :param paths: An iterable of paths (files and directories) to collapse.
    :param base: Directory the paths are collapsed against; derived from the
        paths' common prefix when omitted (also used internally for recursion).
    :param dotfiles: When True, dotfiles under ``base`` are also considered
        when deciding whether a directory is fully covered (default False).
    :returns: The smallest set of paths (files and directories) that contain
              the original set of paths and only the original set.
    """
    if not paths:
        if not base:
            return []

        # Need to test whether directory chain is empty. If it is then bubble
        # the base back up so that it counts as 'covered'.
        for _, _, names in os.walk(base):
            if names:
                return []
        return [base]

    if not base:
        paths = list(map(mozpath.abspath, paths))
        base = mozpath.commonprefix(paths).rstrip("/")

        # Make sure `commonprefix` factors in sibling directories that have the
        # same prefix in their basenames.
        parent = mozpath.dirname(base)
        same_prefix = [
            p for p in os.listdir(parent) if p.startswith(mozpath.basename(base))
        ]
        if not os.path.isdir(base) or len(same_prefix) > 1:
            base = parent

    if base in paths:
        return [base]

    covered = set()
    full = set()
    for name in os.listdir(base):
        if not dotfiles and name[0] == ".":
            continue

        path = mozpath.join(base, name)
        full.add(path)

        if path in paths:
            # This path was explicitly specified, so just bubble it back up
            # without recursing down into it (if it was a directory).
            covered.add(path)
        elif os.path.isdir(path):
            # NOTE(review): startswith() is a raw prefix test, so "a/bc" would
            # match base "a/b"; presumably inputs are normalized directory
            # children so this doesn't bite -- confirm.
            new_paths = [p for p in paths if p.startswith(path)]
            covered.update(collapse(new_paths, base=path, dotfiles=dotfiles))

    if full == covered:
        # Every file under this base was covered, so we can collapse them all
        # up into the base path.
        return [base]
    return list(covered)
+
+
def filterpaths(root, paths, include, exclude=None, extensions=None):
    """Filters a list of paths.

    Given a list of paths and some filtering rules, return the set of paths
    that should be linted.

    :param root: Root directory against which relative paths are resolved.
    :param paths: A starting list of paths to possibly lint.
    :param include: A list of paths that should be included (required).
    :param exclude: A list of paths that should be excluded (optional).
    :param extensions: A list of file extensions which should be considered (optional).
    :returns: A tuple containing a list of file paths to lint and a list of
              paths to exclude.
    """

    def normalize(path):
        # Globs are left untouched; plain relative paths are anchored at root.
        if "*" not in path and not os.path.isabs(path):
            path = os.path.join(root, path)
        return FilterPath(path)

    # Includes are always paths and should always exist.
    include = list(map(normalize, include))

    # Exclude paths with and without globs will be handled separately,
    # pull them apart now.
    exclude = list(map(normalize, exclude or []))
    excludepaths = [p for p in exclude if p.exists]
    excludeglobs = [p.path for p in exclude if not p.exists]

    keep = set()
    discard = set()
    for path in list(map(normalize, paths)):
        # Exclude bad file extensions
        if extensions and path.isfile and path.ext not in extensions:
            continue

        if path.match(excludeglobs):
            continue

        # First handle include/exclude directives
        # that exist (i.e don't have globs)
        for inc in include:
            # Only excludes that are subdirectories of the include
            # path matter.
            excs = [e for e in excludepaths if inc.contains(e)]

            if path.contains(inc):
                # If specified path is an ancestor of include path,
                # then lint the include path.
                keep.add(inc)

                # We can't apply these exclude paths without explicitly
                # including every sibling file. Rather than do that,
                # just return them and hope the underlying linter will
                # deal with them.
                discard.update(excs)

            elif inc.contains(path):
                # If the include path is an ancestor of the specified
                # path, then add the specified path only if there are
                # no exclude paths in-between them.
                if not any(e.contains(path) for e in excs):
                    keep.add(path)
                    discard.update([e for e in excs if path.contains(e)])

        # Next expand excludes with globs in them so we can add them to
        # the set of files to discard.
        # NOTE(review): `pattern` may be an absolute path here, while
        # FileFinder.find expects patterns relative to its base -- confirm
        # this branch behaves as intended for absolute glob excludes.
        for pattern in excludeglobs:
            for p, f in path.finder.find(pattern):
                discard.add(path.join(p))

    # Only return paths that still exist on disk; excludes are collapsed to
    # the smallest covering set before being handed to the linter.
    return (
        [f.path for f in keep if f.exists],
        collapse([f.path for f in discard if f.exists]),
    )
+
+
def findobject(path):
    """Find a Python object given a path of the form <modulepath>:<objectpath>.

    Conceptually equivalent to

        def find_object(modulepath, objectpath):
            import <modulepath> as mod
            return mod.<objectpath>

    :raises ValueError: if *path* does not contain exactly one colon.
    """
    if path.count(":") != 1:
        raise ValueError(
            'python path {!r} does not have the form "module:object"'.format(path)
        )

    modulepath, objectpath = path.split(":")

    # __import__ returns the top-level package; walk down to the requested
    # submodule first, then attribute by attribute to the target object.
    target = __import__(modulepath)
    for attr_name in modulepath.split(".")[1:]:
        target = getattr(target, attr_name)
    for attr_name in objectpath.split("."):
        target = getattr(target, attr_name)
    return target
+
+
def ancestors(path):
    """Yield *path* followed by each of its parent directories."""
    while path:
        yield path
        parent, tail = os.path.split(path)
        if tail == "":
            # Splitting the filesystem root yields an empty tail; stop here
            # rather than looping on the root forever.
            break
        path = parent


def get_ancestors_by_name(name, path, root):
    """Returns a list of files called `name` in `path`'s ancestors,
    sorted from closest->furthest. This can be useful for finding
    relevant configuration files.
    """
    found = []
    for ancestor in ancestors(path):
        candidate = os.path.join(ancestor, name)
        if os.path.isfile(candidate):
            found.append(candidate)
        if ancestor == root:
            # Never walk above the repository root.
            break
    return found
+
+
def expand_exclusions(paths, config, root):
    """Returns all files that match patterns and aren't excluded.

    This is used by some external linters who receive 'batch' files (e.g dirs)
    but aren't capable of applying their own exclusions. There is an argument
    to be made that this step should just apply to all linters no matter what.

    Args:
        paths (list): List of candidate paths to lint.
        config (dict): Linter's config object.
        root (str): Root of the repository.

    Returns:
        Generator which generates list of paths that weren't excluded.
    """
    extensions = [e.lstrip(".") for e in config.get("extensions", [])]
    find_dotfiles = config.get("find-dotfiles", False)

    def normalize(path):
        # Normalize separators and anchor relative paths at the repo root.
        path = mozpath.normpath(path)
        if os.path.isabs(path):
            return path
        return mozpath.join(root, path)

    exclude = list(map(normalize, config.get("exclude", [])))
    for path in paths:
        path = mozpath.normsep(path)
        if os.path.isfile(path):
            # Literal (glob-free) excludes act as path prefixes; glob
            # excludes are matched as patterns.
            # NOTE(review): startswith() also matches siblings that merely
            # share the prefix (e.g. "foo" vs "foobar") -- confirm intended.
            if any(path.startswith(e) for e in exclude if "*" not in e):
                continue

            if any(mozpath.match(path, e) for e in exclude if "*" in e):
                continue

            yield path
            continue

        # `path` is a directory: let FileFinder walk it, ignoring any
        # excludes that live underneath it (rewritten relative to it).
        ignore = [
            e[len(path) :].lstrip("/")
            for e in exclude
            if mozpath.commonprefix((path, e)) == path
        ]
        finder = FileFinder(path, ignore=ignore, find_dotfiles=find_dotfiles)

        # Fix: the original computed `os.path.splitext(path)` and
        # `ext.lstrip(".")` here and discarded both results (dead code).
        for ext in extensions:
            for p, f in finder.find("**/*.{}".format(ext)):
                yield os.path.join(path, p)
diff --git a/python/mozlint/mozlint/result.py b/python/mozlint/mozlint/result.py
new file mode 100644
index 0000000000..01b04afee6
--- /dev/null
+++ b/python/mozlint/mozlint/result.py
@@ -0,0 +1,163 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+from collections import defaultdict
+from itertools import chain
+from json import JSONEncoder
+
+import attr
+import mozpack.path as mozpath
+
+
class ResultSummary(object):
    """Represents overall result state from an entire lint run."""

    # Repository root shared by every summary; set once by the first instance
    # and read by Issue to compute relative paths.
    root = None

    def __init__(self, root, fail_on_warnings=True):
        self.fail_on_warnings = fail_on_warnings
        self.reset()

        # Store the repository root folder to be able to build
        # Issues relative paths to that folder
        if ResultSummary.root is None:
            ResultSummary.root = mozpath.normpath(root)

    def reset(self):
        """Clear all accumulated state so this summary can be reused."""
        self.issues = defaultdict(list)
        self.failed_run = set()
        self.failed_setup = set()
        self.suppressed_warnings = defaultdict(int)
        self.fixed = 0

    def has_issues_failure(self):
        """Returns true in case issues were detected during the lint run. Do not
        consider warning issues in case `self.fail_on_warnings` is set to False.
        """
        if self.fail_on_warnings is False:
            every_issue = chain(*self.issues.values())
            return any(issue.level != "warning" for issue in every_issue)
        return len(self.issues) >= 1

    @property
    def returncode(self):
        return 1 if (self.has_issues_failure() or self.failed) else 0

    @property
    def failed(self):
        return self.failed_setup | self.failed_run

    @property
    def total_issues(self):
        return sum(len(issues) for issues in self.issues.values())

    @property
    def total_suppressed_warnings(self):
        return sum(self.suppressed_warnings.values())

    @property
    def total_fixed(self):
        return self.fixed

    def update(self, other):
        """Merge results from another ResultSummary into this one."""
        for path, issues in other.issues.items():
            self.issues[path].extend(issues)

        self.failed_run |= other.failed_run
        self.failed_setup |= other.failed_setup
        self.fixed += other.fixed
        for path, count in other.suppressed_warnings.items():
            self.suppressed_warnings[path] += count
+
+
@attr.s(slots=True, kw_only=True)
class Issue(object):
    """Represents a single lint issue and its related metadata.

    :param linter: name of the linter that flagged this error
    :param path: path to the file containing the error
    :param message: text describing the error
    :param lineno: line number that contains the error
    :param column: column containing the error
    :param level: severity of the error, either 'warning' or 'error' (default 'error')
    :param hint: suggestion for fixing the error (optional)
    :param source: source code context of the error (optional)
    :param rule: name of the rule that was violated (optional)
    :param lineoffset: denotes an error spans multiple lines, of the form
                       (<lineno offset>, <num lines>) (optional)
    :param diff: a diff describing the changes that need to be made to the code
    """

    linter = attr.ib()
    path = attr.ib()
    # A missing/falsy line number is normalized to 0.
    lineno = attr.ib(
        default=None, converter=lambda lineno: int(lineno) if lineno else 0
    )
    # Unlike lineno, a falsy column is kept as-is (e.g. None stays None).
    column = attr.ib(
        default=None, converter=lambda column: int(column) if column else column
    )
    message = attr.ib()
    hint = attr.ib(default=None)
    source = attr.ib(default=None)
    # Any falsy level collapses to "error".
    level = attr.ib(default=None, converter=lambda level: level or "error")
    rule = attr.ib(default=None)
    lineoffset = attr.ib(default=None)
    diff = attr.ib(default=None)
    # Derived in __attrs_post_init__; not settable by callers (init=False).
    relpath = attr.ib(init=False, default=None)

    def __attrs_post_init__(self):
        # Normalize so that `path` is always absolute and `relpath` is always
        # relative to the repository root recorded on ResultSummary.
        root = ResultSummary.root
        assert root is not None, "Missing ResultSummary.root"
        if os.path.isabs(self.path):
            self.path = mozpath.normpath(self.path)
            self.relpath = mozpath.relpath(self.path, root)
        else:
            self.relpath = mozpath.normpath(self.path)
            self.path = mozpath.join(root, self.path)
+
+
class IssueEncoder(JSONEncoder):
    """Class for encoding :class:`~result.Issue` to json.

    Usage:

    .. code-block:: python

        json.dumps(results, cls=IssueEncoder)

    """

    def default(self, o):
        # Serialize Issue instances via their attrs fields; defer everything
        # else to the base encoder (which raises TypeError).
        if not isinstance(o, Issue):
            return JSONEncoder.default(self, o)
        return attr.asdict(o)
+
+
def from_config(config, **kwargs):
    """Create a :class:`~result.Issue` from a linter config.

    Convenience method that pulls defaults from a linter
    config and forwards them.

    :param config: linter config as defined in a .yml file
    :param kwargs: same as :class:`~result.Issue`
    :returns: :class:`~result.Issue` object
    """
    # Explicit kwargs win; otherwise fall back to the linter config. Only
    # constructor-visible (init=True) fields are forwarded.
    args = {
        field.name: kwargs.get(field.name, config.get(field.name))
        for field in attr.fields(Issue)
        if field.init
    }

    # These two have alternate spellings in the linter config.
    args["linter"] = args["linter"] or config.get("name")
    args["message"] = args["message"] or config.get("description")

    return Issue(**args)
diff --git a/python/mozlint/mozlint/roller.py b/python/mozlint/mozlint/roller.py
new file mode 100644
index 0000000000..1425178114
--- /dev/null
+++ b/python/mozlint/mozlint/roller.py
@@ -0,0 +1,421 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import atexit
+import copy
+import logging
+import os
+import signal
+import sys
+import time
+import traceback
+from concurrent.futures import ProcessPoolExecutor
+from concurrent.futures.process import _python_exit as futures_atexit
+from itertools import chain
+from math import ceil
+from multiprocessing import cpu_count, get_context
+from multiprocessing.queues import Queue
+from subprocess import CalledProcessError
+from typing import Dict, Set
+
+import mozpack.path as mozpath
+from mozversioncontrol import (
+ InvalidRepoPath,
+ MissingUpstreamRepo,
+ get_repository_object,
+)
+
+from .errors import LintersNotConfigured, NoValidLinter
+from .parser import Parser
+from .pathutils import findobject
+from .result import ResultSummary
+from .types import supported_types
+
# Flipped to True in worker processes once a SIGINT is seen, so queued jobs
# can bail out early instead of starting new work.
SHUTDOWN = False
# The SIGINT handler installed before we touch signals; chained by the worker
# handler and restored by roll() when the run finishes.
orig_sigint = signal.getsignal(signal.SIGINT)

logger = logging.getLogger("mozlint")
handler = logging.StreamHandler()
# `lintname` and `pid` are supplied via LoggerAdapter extras below.
formatter = logging.Formatter(
    "%(asctime)s.%(msecs)d %(lintname)s (%(pid)s) | %(message)s", "%H:%M:%S"
)
handler.setFormatter(formatter)
logger.addHandler(handler)
+
+
def _run_worker(config, paths, **lintargs):
    """Run one linter (defined by `config`) over `paths` in a worker process.

    :returns: A :class:`~result.ResultSummary` holding this job's issues,
              suppressed warnings, fix count, and/or failure state.
    """
    log = logging.LoggerAdapter(
        logger, {"lintname": config.get("name"), "pid": os.getpid()}
    )
    lintargs["log"] = log
    result = ResultSummary(lintargs["root"])

    # A SIGINT already arrived; skip this queued job and return empty results.
    if SHUTDOWN:
        return result

    # Override warnings setup for code review
    # Only deactivated when code_review_warnings is set to False on a linter.yml in use
    if os.environ.get("CODE_REVIEW") == "1" and config.get(
        "code_review_warnings", True
    ):
        lintargs["show_warnings"] = True

    # Override ignore thirdparty
    # Only done when include_thirdparty is set on a linter.yml in use
    if config.get("include_thirdparty", False):
        lintargs["include_thirdparty"] = True

    func = supported_types[config["type"]]
    start_time = time.monotonic()
    try:
        res = func(paths, config, **lintargs)
        # Some linters support fixed operations
        # dict returned - {"results":results,"fixed":fixed}
        if isinstance(res, dict):
            result.fixed += res["fixed"]
            res = res["results"] or []
        elif isinstance(res, list):
            res = res or []
        else:
            print("Unexpected type received")
            assert False
    except Exception:
        # `res = 1` acts as an error sentinel; handled below.
        traceback.print_exc()
        res = 1
    except (KeyboardInterrupt, SystemExit):
        return result
    finally:
        end_time = time.monotonic()
        log.debug("Finished in {:.2f} seconds".format(end_time - start_time))
        sys.stdout.flush()

    # A non-list/tuple `res` here is the error sentinel from the except above.
    if not isinstance(res, (list, tuple)):
        if res:
            result.failed_run.add(config["name"])
    else:
        for r in res:
            # Warnings are counted (not reported) unless show_warnings is set.
            if not lintargs.get("show_warnings") and r.level == "warning":
                result.suppressed_warnings[r.path] += 1
                continue

            result.issues[r.path].append(r)

    return result
+
+
class InterruptableQueue(Queue):
    """A multiprocessing.Queue that catches KeyboardInterrupt when a worker is
    blocking on it and returns None.

    This is needed to gracefully handle KeyboardInterrupts when a worker is
    blocking on ProcessPoolExecutor's call queue.
    """

    def __init__(self, *args, **kwargs):
        # multiprocessing.queues.Queue requires an explicit context.
        kwargs["ctx"] = get_context()
        super(InterruptableQueue, self).__init__(*args, **kwargs)

    def get(self, *args, **kwargs):
        try:
            return super(InterruptableQueue, self).get(*args, **kwargs)
        except KeyboardInterrupt:
            # Treat the interrupt as "no more work" instead of crashing.
            return None
+
+
def _worker_sigint_handler(signum, frame):
    """Sigint handler for the worker subprocesses.

    Tells workers not to process the extra jobs on the call queue that couldn't
    be canceled by the parent process.
    """
    global SHUTDOWN
    SHUTDOWN = True
    # Chain to the handler that was installed before ours (normally raises
    # KeyboardInterrupt in this process).
    orig_sigint(signum, frame)
+
+
def wrap_futures_atexit():
    """Sometimes futures' atexit handler can spew tracebacks. This wrapper
    suppresses them."""
    try:
        # Delegate to concurrent.futures' original process-pool cleanup.
        futures_atexit()
    except Exception:
        # Generally `atexit` handlers aren't supposed to raise exceptions, but the
        # futures' handler can sometimes raise when the user presses `CTRL-C`. We
        # suppress all possible exceptions here so users have a nice experience
        # when canceling their lint run. Any exceptions raised by this function
        # won't be useful anyway.
        pass
+
+
+atexit.unregister(futures_atexit)
+atexit.register(wrap_futures_atexit)
+
+
class LintRoller(object):
    """Registers and runs linters.

    :param root: Path to which relative paths will be joined. If
                 unspecified, root will either be determined from
                 version control or cwd.
    :param exclude: Global list of paths to exclude from every linter.
    :param setupargs: Extra arguments forwarded to linter setup functions.
    :param lintargs: Arguments to pass to the underlying linter(s).
    """

    MAX_PATHS_PER_JOB = (
        50  # set a max size to prevent command lines that are too long on Windows
    )

    def __init__(self, root, exclude=None, setupargs=None, **lintargs):
        self.parse = Parser(root)
        try:
            self.vcs = get_repository_object(root)
        except InvalidRepoPath:
            # No repository detected; --workdir/--outgoing won't be usable.
            self.vcs = None

        self.linters = []
        self.lintargs = lintargs
        self.lintargs["root"] = root
        self._setupargs = setupargs or {}

        # result state
        self.result = ResultSummary(
            root,
            # Prevent failing on warnings when the --warnings parameter is set to "soft"
            fail_on_warnings=lintargs.get("show_warnings") != "soft",
        )

        self.root = root
        self.exclude = exclude or []

        if lintargs.get("show_verbose"):
            logger.setLevel(logging.DEBUG)
        else:
            logger.setLevel(logging.WARNING)

        self.log = logging.LoggerAdapter(
            logger, {"lintname": "mozlint", "pid": os.getpid()}
        )

    def read(self, paths):
        """Parse one or more linters and add them to the registry.

        :param paths: A path or iterable of paths to linter definitions.
        """
        if isinstance(paths, str):
            paths = (paths,)

        for linter in chain(*[self.parse(p) for p in paths]):
            # Add only the excludes present in paths
            linter["local_exclude"] = linter.get("exclude", [])[:]
            # Add in our global excludes
            linter.setdefault("exclude", []).extend(self.exclude)
            self.linters.append(linter)

    def setup(self, virtualenv_manager=None):
        """Run setup for applicable linters.

        Linters whose setup fails are dropped from the registry.

        :returns: 1 if any linter's setup failed, else 0.
        :raises NoValidLinter: if no linters have been read yet.
        """
        if not self.linters:
            raise NoValidLinter

        for linter in self.linters:
            if "setup" not in linter:
                continue

            try:
                # Deep-copy so a linter's setup can't mutate shared lintargs.
                setupargs = copy.deepcopy(self.lintargs)
                setupargs.update(self._setupargs)
                setupargs["name"] = linter["name"]
                setupargs["log"] = logging.LoggerAdapter(
                    self.log, {"lintname": linter["name"]}
                )
                if virtualenv_manager is not None:
                    setupargs["virtualenv_manager"] = virtualenv_manager
                start_time = time.monotonic()
                res = findobject(linter["setup"])(
                    **setupargs,
                )
                self.log.debug(
                    f"setup for {linter['name']} finished in "
                    f"{round(time.monotonic() - start_time, 2)} seconds"
                )
            except Exception:
                traceback.print_exc()
                res = 1

            if res:
                self.result.failed_setup.add(linter["name"])

        if self.result.failed_setup:
            print(
                "error: problem with lint setup, skipping {}".format(
                    ", ".join(sorted(self.result.failed_setup))
                )
            )
            self.linters = [
                l for l in self.linters if l["name"] not in self.result.failed_setup
            ]
            return 1
        return 0

    def should_lint_entire_tree(self, vcs_paths: Set[str], linter: Dict) -> bool:
        """Return `True` if the linter should be run on the entire tree."""
        # Don't lint the entire tree when `--fix` is passed to linters.
        if "fix" in self.lintargs and self.lintargs["fix"]:
            return False

        # Lint the whole tree when a `support-file` is modified.
        return any(
            os.path.isfile(p) and mozpath.match(p, pattern)
            for pattern in linter.get("support-files", [])
            for p in vcs_paths
        )

    def _generate_jobs(self, paths, vcs_paths, num_procs):
        """A job is of the form (<linter:dict>, <paths:list>)."""

        def __get_current_paths(path=self.root):
            return [os.path.join(path, p) for p in os.listdir(path)]

        for linter in self.linters:
            if self.should_lint_entire_tree(vcs_paths, linter):
                lpaths = __get_current_paths()
                print(
                    "warning: {} support-file modified, linting entire tree "
                    "(press ctrl-c to cancel)".format(linter["name"])
                )
            elif paths == {self.root}:
                # If the command line is ".", the path will match with the root
                # directory. In this case, get all the paths, so that we can
                # benefit from chunking below.
                lpaths = __get_current_paths()
            else:
                lpaths = paths.union(vcs_paths)

            lpaths = list(lpaths) or __get_current_paths(os.getcwd())
            # Spread paths evenly over the workers, but cap each chunk so
            # Windows command lines stay short enough.
            chunk_size = (
                min(self.MAX_PATHS_PER_JOB, int(ceil(len(lpaths) / num_procs))) or 1
            )
            if linter["type"] == "global":
                # Global linters lint the entire tree in one job.
                chunk_size = len(lpaths) or 1
            assert chunk_size > 0

            while lpaths:
                yield linter, lpaths[:chunk_size]
                lpaths = lpaths[chunk_size:]

    def _collect_results(self, future):
        # Done-callback: fold a finished job's summary into the global one.
        if future.cancelled():
            return

        # Merge this job's results with our global ones.
        self.result.update(future.result())

    def roll(self, paths=None, outgoing=None, workdir=None, rev=None, num_procs=None):
        """Run all of the registered linters against the specified file paths.

        :param paths: An iterable of files and/or directories to lint.
        :param outgoing: Lint files touched by commits that are not on the remote repository.
        :param workdir: Lint all files touched in the working directory.
        :param rev: Lint files touched by the given revision.
        :param num_procs: The number of processes to use. Default: cpu count
        :return: A :class:`~result.ResultSummary` instance.
        :raises LintersNotConfigured: if no linters have been read yet.
        """
        if not self.linters:
            raise LintersNotConfigured

        self.result.reset()

        # Need to use a set in case vcs operations specify the same file
        # more than once.
        paths = paths or set()
        if isinstance(paths, str):
            paths = set([paths])
        elif isinstance(paths, (list, tuple)):
            paths = set(paths)

        if not self.vcs and (workdir or outgoing):
            print(
                "error: '{}' is not a known repository, can't use "
                "--workdir or --outgoing".format(self.lintargs["root"])
            )

        # Calculate files from VCS
        vcs_paths = set()
        try:
            if workdir:
                vcs_paths.update(self.vcs.get_changed_files("AM", mode=workdir))
            if rev:
                vcs_paths.update(self.vcs.get_changed_files("AM", rev=rev))
            if outgoing:
                upstream = outgoing if isinstance(outgoing, str) else None
                try:
                    vcs_paths.update(
                        self.vcs.get_outgoing_files("AM", upstream=upstream)
                    )
                except MissingUpstreamRepo:
                    print(
                        "warning: could not find default push, specify a remote for --outgoing"
                    )
        except CalledProcessError as e:
            print("error running: {}".format(" ".join(e.cmd)))
            if e.output:
                print(e.output)

        if not (paths or vcs_paths) and (workdir or outgoing):
            if os.environ.get("MOZ_AUTOMATION") and not os.environ.get(
                "PYTEST_CURRENT_TEST"
            ):
                raise Exception(
                    "Despite being a CI lint job, no files were linted. Is the task "
                    "missing explicit paths?"
                )

            print("warning: no files linted")
            return self.result

        # Make sure all paths are absolute. Join `paths` to cwd and `vcs_paths` to root.
        paths = set(map(os.path.abspath, paths))
        vcs_paths = set(
            [
                os.path.join(self.root, p) if not os.path.isabs(p) else p
                for p in vcs_paths
            ]
        )

        num_procs = num_procs or cpu_count()
        jobs = list(self._generate_jobs(paths, vcs_paths, num_procs))

        # Make sure we never spawn more processes than we have jobs.
        num_procs = min(len(jobs), num_procs) or 1
        if sys.platform == "win32":
            # https://github.com/python/cpython/pull/13132
            num_procs = min(num_procs, 61)

        signal.signal(signal.SIGINT, _worker_sigint_handler)
        executor = ProcessPoolExecutor(num_procs)
        # Swap in a queue that survives KeyboardInterrupt while workers block.
        executor._call_queue = InterruptableQueue(executor._call_queue._maxsize)

        # Submit jobs to the worker pool. The _collect_results method will be
        # called when a job is finished. We store the futures so that they can
        # be canceled in the event of a KeyboardInterrupt.
        futures = []
        for job in jobs:
            future = executor.submit(_run_worker, *job, **self.lintargs)
            future.add_done_callback(self._collect_results)
            futures.append(future)

        def _parent_sigint_handler(signum, frame):
            """Sigint handler for the parent process.

            Cancels all jobs that have not yet been placed on the call queue.
            The parent process won't exit until all workers have terminated.
            Assuming the linters are implemented properly, this shouldn't take
            more than a couple seconds.
            """
            [f.cancel() for f in futures]
            executor.shutdown(wait=True)
            print("\nwarning: not all files were linted")
            signal.signal(signal.SIGINT, signal.SIG_IGN)

        signal.signal(signal.SIGINT, _parent_sigint_handler)
        executor.shutdown()
        # Restore whatever SIGINT handler was installed before the run.
        signal.signal(signal.SIGINT, orig_sigint)
        return self.result
diff --git a/python/mozlint/mozlint/types.py b/python/mozlint/mozlint/types.py
new file mode 100644
index 0000000000..1a9a0bd473
--- /dev/null
+++ b/python/mozlint/mozlint/types.py
@@ -0,0 +1,214 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import re
+import sys
+from abc import ABCMeta, abstractmethod
+
+from mozlog import commandline, get_default_logger, structuredlog
+from mozlog.reader import LogHandler
+from mozpack.files import FileFinder
+
+from . import result
+from .pathutils import expand_exclusions, filterpaths, findobject
+
+
class BaseType(object, metaclass=ABCMeta):
    """Abstract base class for all types of linters.

    Subclasses implement :meth:`_lint` and set ``batch`` to True to receive
    all paths in a single call instead of one call per path.

    Fix: the original used the Python 2 idiom ``__metaclass__ = ABCMeta``,
    which is an inert class attribute in Python 3 and left ``@abstractmethod``
    unenforced; the metaclass is now declared in the class header.
    """

    batch = False

    def __call__(self, paths, config, **lintargs):
        """Run linter defined by `config` against `paths` with `lintargs`.

        :param paths: Paths to lint. Can be a file or directory.
        :param config: Linter config the paths are being linted against.
        :param lintargs: External arguments to the linter not defined in
                         the definition, but passed in by a consumer.
        :returns: A list of :class:`~result.Issue` objects.
        """
        log = lintargs["log"]

        if lintargs.get("use_filters", True):
            paths, exclude = filterpaths(
                lintargs["root"],
                paths,
                config["include"],
                config.get("exclude", []),
                config.get("extensions", []),
            )
            # Pass the excludes that couldn't be applied here down to the
            # underlying linter.
            config["exclude"] = exclude
        elif config.get("exclude"):
            del config["exclude"]

        if not paths:
            return {"results": [], "fixed": 0}

        log.debug(
            "Passing the following paths:\n{paths}".format(
                paths=" \n".join(paths),
            )
        )

        if self.batch:
            return self._lint(paths, config, **lintargs)

        errors = []

        try:
            for p in paths:
                # Renamed from `result` to avoid shadowing the module-level
                # `result` import used elsewhere in this module.
                path_issues = self._lint(p, config, **lintargs)
                if path_issues:
                    errors.extend(path_issues)
        except KeyboardInterrupt:
            pass
        return errors

    def _lint_dir(self, path, config, **lintargs):
        # Expand a directory into individual files, honoring the configured
        # extensions and excludes (rewritten relative to the directory).
        if not config.get("extensions"):
            patterns = ["**"]
        else:
            patterns = ["**/*.{}".format(e) for e in config["extensions"]]

        exclude = [os.path.relpath(e, path) for e in config.get("exclude", [])]
        finder = FileFinder(path, ignore=exclude)

        errors = []
        for pattern in patterns:
            for p, f in finder.find(pattern):
                errors.extend(self._lint(os.path.join(path, p), config, **lintargs))
        return errors

    @abstractmethod
    def _lint(self, path, config, **lintargs):
        """Lint `path` (a single path, or the full list when ``batch``)."""
+
+
class LineType(BaseType, metaclass=ABCMeta):
    """Abstract base class for linter types that check each line individually.

    Subclasses of this linter type will read each file and check the provided
    payload against each line one by one.

    Fix: the original used the inert Python 2 idiom ``__metaclass__ = ABCMeta``
    (no effect in Python 3), and the abstract ``condition`` was missing the
    ``self`` parameter that every subclass implementation takes.
    """

    @abstractmethod
    def condition(self, payload, line, config):
        """Return a truthy value when `line` should be flagged as an issue."""

    def _lint(self, path, config, **lintargs):
        if os.path.isdir(path):
            return self._lint_dir(path, config, **lintargs)

        payload = config["payload"]
        # errors="replace" keeps linting resilient to badly-encoded files.
        with open(path, "r", errors="replace") as fh:
            lines = fh.readlines()

        errors = []
        for i, line in enumerate(lines):
            if self.condition(payload, line, config):
                # Issue line numbers are 1-based.
                errors.append(result.from_config(config, path=path, lineno=i + 1))

        return errors
+
+
class StringType(LineType):
    """Linter type that checks whether a substring is found."""

    def condition(self, payload, line, config):
        # Flag any line containing the configured substring.
        return line.find(payload) != -1
+
+
class RegexType(LineType):
    """Linter type that checks whether a regex match is found."""

    def condition(self, payload, line, config):
        # Returns the match object (truthy) rather than a bool.
        flags = re.IGNORECASE if config.get("ignore-case") else 0
        return re.search(payload, line, flags)
+
+
class ExternalType(BaseType):
    """Linter type that runs an external function.

    The function is responsible for properly formatting the results
    into a list of :class:`~result.Issue` objects.
    """

    batch = True

    def _lint(self, files, config, **lintargs):
        # Resolve the "module:object" payload to a callable and delegate.
        linter_func = findobject(config["payload"])
        return linter_func(files, config, **lintargs)
+
+
class ExternalFileType(ExternalType):
    # Same payload mechanism as ExternalType, but batch=False means
    # BaseType.__call__ invokes it once per file rather than once with the
    # full path list.
    batch = False
+
+
class GlobalType(ExternalType):
    """Linter type that runs an external global linting function just once.

    The function is responsible for properly formatting the results
    into a list of :class:`~result.Issue` objects.
    """

    batch = True

    def _lint(self, files, config, **lintargs):
        # Global lints are expensive to invoke. Try to avoid running
        # them based on extensions and exclusions.
        matches = expand_exclusions(files, config, lintargs["root"])
        if next(matches, None) is None:
            # Nothing survives the exclusion filter; skip the linter.
            return []

        return findobject(config["payload"])(config, **lintargs)
+
+
class LintHandler(LogHandler):
    """mozlog ``LogHandler`` that collects ``lint`` actions as
    :class:`~result.Issue` objects in ``self.results``."""

    def __init__(self, config):
        # NOTE(review): LogHandler.__init__ is not called here; presumably it
        # requires no state of its own -- confirm against mozlog.
        self.config = config
        self.results = []

    def lint(self, data):
        # Invoked by mozlog for each "lint" action emitted by the payload.
        self.results.append(result.from_config(self.config, **data))
+
+
class StructuredLogType(BaseType):
    """Linter type whose payload emits mozlog structured messages; ``lint``
    actions are collected into :class:`~result.Issue` objects."""

    batch = True

    def _lint(self, files, config, **lintargs):
        handler = LintHandler(config)
        # Prefer a logger supplied via config, then the process-wide default,
        # and finally create a fresh structured logger wired to stdout.
        logger = config.get("logger")
        if logger is None:
            logger = get_default_logger()
        if logger is None:
            logger = structuredlog.StructuredLogger(config["name"])
            commandline.setup_logging(logger, {}, {"mach": sys.stdout})
        logger.add_handler(handler)

        func = findobject(config["payload"])
        try:
            func(files, config, logger, **lintargs)
        except KeyboardInterrupt:
            # Return whatever was collected before the interrupt.
            pass
        return handler.results
+
+
# Singleton linter-type instances, keyed by the `type` field of a linter's
# .yml definition.
supported_types = {
    "string": StringType(),
    "regex": RegexType(),
    "external": ExternalType(),
    "external-file": ExternalFileType(),
    "global": GlobalType(),
    "structured_log": StructuredLogType(),
}
"""Mapping of type string to an associated instance."""
diff --git a/python/mozlint/mozlint/util/__init__.py b/python/mozlint/mozlint/util/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozlint/mozlint/util/__init__.py
diff --git a/python/mozlint/mozlint/util/implementation.py b/python/mozlint/mozlint/util/implementation.py
new file mode 100644
index 0000000000..9c72c0ea0f
--- /dev/null
+++ b/python/mozlint/mozlint/util/implementation.py
@@ -0,0 +1,35 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import signal
+from abc import ABC, abstractmethod
+
+from mozprocess import ProcessHandlerMixin
+
+
class LintProcess(ProcessHandlerMixin, ABC):
    """Base process wrapper for external linters.

    Subclasses implement :meth:`process_line` to turn each line of the
    subprocess' output into :class:`~mozlint.result.Issue` objects.
    """

    def __init__(self, config, *args, **kwargs):
        self.config = config
        self.results = []

        kwargs["universal_newlines"] = True
        kwargs["processOutputLine"] = [self.process_line]
        ProcessHandlerMixin.__init__(self, *args, **kwargs)

    @abstractmethod
    def process_line(self, line):
        """Process a single line of output.

        The implementation is responsible for creating one or more :class:`~mozlint.result.Issue`
        and storing them somewhere accessible.

        Args:
            line (str): The line of output to process.
        """
        pass

    def run(self, *args, **kwargs):
        # Ignore SIGINT while the subprocess runs so Ctrl+C is handled by
        # the orchestrating parent rather than killing the linter midway.
        orig = signal.signal(signal.SIGINT, signal.SIG_IGN)
        try:
            ProcessHandlerMixin.run(self, *args, **kwargs)
        finally:
            # Bug fix: previously the original handler was only restored on
            # the success path, so an exception left SIGINT ignored forever.
            signal.signal(signal.SIGINT, orig)
diff --git a/python/mozlint/mozlint/util/string.py b/python/mozlint/mozlint/util/string.py
new file mode 100644
index 0000000000..9c1c7c99c2
--- /dev/null
+++ b/python/mozlint/mozlint/util/string.py
@@ -0,0 +1,9 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
def pluralize(s, num):
    """Return ``num`` followed by ``s``, naively pluralized when num != 1.

    Only appends an "s"; irregular plurals are not handled.
    """
    suffix = "" if num == 1 else "s"
    return f"{num} {s}{suffix}"
diff --git a/python/mozlint/setup.py b/python/mozlint/setup.py
new file mode 100644
index 0000000000..c16e15fd7f
--- /dev/null
+++ b/python/mozlint/setup.py
@@ -0,0 +1,26 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from setuptools import setup
+
# Bug fix: the version must be a string; modern setuptools rejects
# non-string versions (0.1 as a float previously relied on implicit
# coercion).
VERSION = "0.1"
# Runtime dependencies.
DEPS = ["mozlog >= 6.0"]

setup(
    name="mozlint",
    description="Framework for registering and running micro lints",
    license="MPL 2.0",
    author="Andrew Halberstadt",
    author_email="ahalberstadt@mozilla.com",
    url="",
    packages=["mozlint"],
    version=VERSION,
    classifiers=[
        "Environment :: Console",
        "Development Status :: 3 - Alpha",
        "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
        "Natural Language :: English",
    ],
    install_requires=DEPS,
)
diff --git a/python/mozlint/test/__init__.py b/python/mozlint/test/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozlint/test/__init__.py
diff --git a/python/mozlint/test/conftest.py b/python/mozlint/test/conftest.py
new file mode 100644
index 0000000000..9683c23b13
--- /dev/null
+++ b/python/mozlint/test/conftest.py
@@ -0,0 +1,66 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+from argparse import Namespace
+
+import pytest
+
+from mozlint import LintRoller
+
+here = os.path.abspath(os.path.dirname(__file__))
+
+
@pytest.fixture
def lint(request):
    """Return a LintRoller with a stubbed-out VCS helper.

    The requesting module may define ``lintargs`` to customize construction.
    Tests call ``lint.mock_vcs(files)`` to make both VCS file queries
    return ``files``.
    """
    lintargs = getattr(request.module, "lintargs", {})
    lint = LintRoller(root=here, **lintargs)

    # Add a few super powers to our lint instance
    def mock_vcs(files):
        def _fake_vcs_files(*args, **kwargs):
            return files

        setattr(lint.vcs, "get_changed_files", _fake_vcs_files)
        setattr(lint.vcs, "get_outgoing_files", _fake_vcs_files)

    # NOTE: lint.vcs is replaced *before* mock_vcs ever runs (late binding),
    # so the fake methods land on this Namespace, not the real VCS object.
    setattr(lint, "vcs", Namespace())
    setattr(lint, "mock_vcs", mock_vcs)
    return lint
+
+
@pytest.fixture(scope="session")
def filedir():
    """Absolute path to the directory of lintable test files."""
    files_path = os.path.join(here, "files")
    return files_path
+
+
@pytest.fixture(scope="module")
def files(filedir, request):
    """Paths under ``filedir`` matching the module's ``files`` suffix filter."""
    suffixes = getattr(request.module, "files", [""])
    selected = []
    for name in os.listdir(filedir):
        if any(name.endswith(suffix) for suffix in suffixes):
            selected.append(os.path.join(filedir, name))
    return selected
+
+
@pytest.fixture(scope="session")
def lintdir():
    """Path to the linter definitions, also made importable via sys.path."""
    path = os.path.join(here, "linters")
    sys.path.insert(0, path)
    return path
+
+
@pytest.fixture(scope="module")
def linters(lintdir):
    """Return a factory resolving linter names to their .yml definition paths."""

    def inner(*names):
        found = []
        for entry in os.listdir(lintdir):
            stem, ext = os.path.splitext(entry)
            if ext == ".yml" and any(stem == name for name in names):
                found.append(os.path.join(lintdir, entry))
        return found

    return inner
diff --git a/python/mozlint/test/files/foobar.js b/python/mozlint/test/files/foobar.js
new file mode 100644
index 0000000000..d9754d0a2f
--- /dev/null
+++ b/python/mozlint/test/files/foobar.js
@@ -0,0 +1,2 @@
+// Oh no.. we called this variable foobar, bad!
+var foobar = "a string";
diff --git a/python/mozlint/test/files/foobar.py b/python/mozlint/test/files/foobar.py
new file mode 100644
index 0000000000..3b6416d211
--- /dev/null
+++ b/python/mozlint/test/files/foobar.py
@@ -0,0 +1,3 @@
+# Oh no.. we called this variable foobar, bad!
+
+foobar = "a string"
diff --git a/python/mozlint/test/files/irrelevant/file.txt b/python/mozlint/test/files/irrelevant/file.txt
new file mode 100644
index 0000000000..323fae03f4
--- /dev/null
+++ b/python/mozlint/test/files/irrelevant/file.txt
@@ -0,0 +1 @@
+foobar
diff --git a/python/mozlint/test/files/no_foobar.js b/python/mozlint/test/files/no_foobar.js
new file mode 100644
index 0000000000..6b95d646c0
--- /dev/null
+++ b/python/mozlint/test/files/no_foobar.js
@@ -0,0 +1,2 @@
+// What a relief
+var properlyNamed = "a string";
diff --git a/python/mozlint/test/filter/a.js b/python/mozlint/test/filter/a.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozlint/test/filter/a.js
diff --git a/python/mozlint/test/filter/a.py b/python/mozlint/test/filter/a.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozlint/test/filter/a.py
diff --git a/python/mozlint/test/filter/foo/empty.txt b/python/mozlint/test/filter/foo/empty.txt
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozlint/test/filter/foo/empty.txt
diff --git a/python/mozlint/test/filter/foobar/empty.txt b/python/mozlint/test/filter/foobar/empty.txt
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozlint/test/filter/foobar/empty.txt
diff --git a/python/mozlint/test/filter/subdir1/b.js b/python/mozlint/test/filter/subdir1/b.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozlint/test/filter/subdir1/b.js
diff --git a/python/mozlint/test/filter/subdir1/b.py b/python/mozlint/test/filter/subdir1/b.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozlint/test/filter/subdir1/b.py
diff --git a/python/mozlint/test/filter/subdir1/subdir3/d.js b/python/mozlint/test/filter/subdir1/subdir3/d.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozlint/test/filter/subdir1/subdir3/d.js
diff --git a/python/mozlint/test/filter/subdir1/subdir3/d.py b/python/mozlint/test/filter/subdir1/subdir3/d.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozlint/test/filter/subdir1/subdir3/d.py
diff --git a/python/mozlint/test/filter/subdir2/c.js b/python/mozlint/test/filter/subdir2/c.js
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozlint/test/filter/subdir2/c.js
diff --git a/python/mozlint/test/filter/subdir2/c.py b/python/mozlint/test/filter/subdir2/c.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozlint/test/filter/subdir2/c.py
diff --git a/python/mozlint/test/linters/badreturncode.yml b/python/mozlint/test/linters/badreturncode.yml
new file mode 100644
index 0000000000..72abf83cc7
--- /dev/null
+++ b/python/mozlint/test/linters/badreturncode.yml
@@ -0,0 +1,8 @@
+---
+BadReturnCodeLinter:
+ description: Returns an error code no matter what
+ include:
+ - files
+ type: external
+ extensions: ['.js', '.jsm']
+ payload: external:badreturncode
diff --git a/python/mozlint/test/linters/excludes.yml b/python/mozlint/test/linters/excludes.yml
new file mode 100644
index 0000000000..1fc1068735
--- /dev/null
+++ b/python/mozlint/test/linters/excludes.yml
@@ -0,0 +1,10 @@
+---
+ExcludesLinter:
+ description: >-
+ Make sure the string foobar never appears in browser js
+ files because it is bad
+ rule: no-foobar
+ exclude: ['**/foobar.js']
+ extensions: ['.js', 'jsm']
+ type: string
+ payload: foobar
diff --git a/python/mozlint/test/linters/excludes_empty.yml b/python/mozlint/test/linters/excludes_empty.yml
new file mode 100644
index 0000000000..03cd1aecab
--- /dev/null
+++ b/python/mozlint/test/linters/excludes_empty.yml
@@ -0,0 +1,8 @@
+---
+ExcludesEmptyLinter:
+ description: It's bad to have the string foobar in js files.
+ include:
+ - files
+ type: external
+ extensions: ['.js', '.jsm']
+ payload: foobar
diff --git a/python/mozlint/test/linters/explicit_path.yml b/python/mozlint/test/linters/explicit_path.yml
new file mode 100644
index 0000000000..1e7e8f4bf1
--- /dev/null
+++ b/python/mozlint/test/linters/explicit_path.yml
@@ -0,0 +1,8 @@
+---
+ExplicitPathLinter:
+ description: Only lint a specific file name
+ rule: no-foobar
+ include:
+ - files/no_foobar.js
+ type: string
+ payload: foobar
diff --git a/python/mozlint/test/linters/external.py b/python/mozlint/test/linters/external.py
new file mode 100644
index 0000000000..9c2e58909d
--- /dev/null
+++ b/python/mozlint/test/linters/external.py
@@ -0,0 +1,74 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import time
+
+from mozlint import result
+from mozlint.errors import LintException
+
+
def badreturncode(files, config, **lintargs):
    """Simulate a linter that always signals failure with a return code."""
    failure_code = 1
    return failure_code
+
+
def external(files, config, **lintargs):
    """Flag every line containing 'foobar' in each given file."""
    if lintargs.get("fix"):
        # mimics no results because they got fixed
        return []

    issues = []
    for path in files:
        if os.path.isdir(path):
            continue

        with open(path, "r") as fh:
            for lineno, line in enumerate(fh, start=1):
                if "foobar" not in line:
                    continue
                issues.append(
                    result.from_config(
                        config, path=path, lineno=lineno, column=1, rule="no-foobar"
                    )
                )
    return issues
+
+
def raises(files, config, **lintargs):
    """Simulate a linter implementation that blows up with an exception."""
    raise LintException("Oh no something bad happened!")
+
+
def slow(files, config, **lintargs):
    """Simulate a linter that takes a while but finds nothing."""
    time.sleep(2)
    return []
+
+
def structured(files, config, logger, **kwargs):
    """Emit a structured lint_error for each 'foobar' line in each file."""
    for path in files:
        # Directories cannot be read line-by-line; skip them.
        if os.path.isdir(path):
            continue

        with open(path, "r") as fh:
            for lineno, line in enumerate(fh, start=1):
                if "foobar" in line:
                    logger.lint_error(
                        path=path, lineno=lineno, column=1, rule="no-foobar"
                    )
+
+
def passes(files, config, **lintargs):
    """Simulate a linter that finds no issues."""
    return []
+
+
def setup(**lintargs):
    """Setup hook that succeeds (implicitly returns None)."""
    message = "setup passed"
    print(message)
+
+
def setupfailed(**lintargs):
    """Setup hook that signals failure via a non-zero return code."""
    status = 1
    print("setup failed")
    return status
+
+
def setupraised(**lintargs):
    """Setup hook that fails by raising an exception."""
    print("setup raised")
    raise LintException("oh no setup failed")
diff --git a/python/mozlint/test/linters/external.yml b/python/mozlint/test/linters/external.yml
new file mode 100644
index 0000000000..574b8df4cb
--- /dev/null
+++ b/python/mozlint/test/linters/external.yml
@@ -0,0 +1,8 @@
+---
+ExternalLinter:
+ description: It's bad to have the string foobar in js files.
+ include:
+ - files
+ type: external
+ extensions: ['.js', '.jsm']
+ payload: external:external
diff --git a/python/mozlint/test/linters/global.yml b/python/mozlint/test/linters/global.yml
new file mode 100644
index 0000000000..47d5ce81e4
--- /dev/null
+++ b/python/mozlint/test/linters/global.yml
@@ -0,0 +1,8 @@
+---
+GlobalLinter:
+ description: It's bad to have the string foobar in js files.
+ include:
+ - files
+ type: global
+ extensions: ['.js', '.jsm']
+ payload: global_payload:global_payload
diff --git a/python/mozlint/test/linters/global_payload.py b/python/mozlint/test/linters/global_payload.py
new file mode 100644
index 0000000000..ec620b6af1
--- /dev/null
+++ b/python/mozlint/test/linters/global_payload.py
@@ -0,0 +1,38 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mozpack.path as mozpath
+from external import external
+from mozpack.files import FileFinder
+
+from mozlint import result
+
+
def global_payload(config, **lintargs):
    """Global linter that delegates to the external 'foobar' linter."""
    root = lintargs["root"]
    finder = FileFinder(root)
    files = [mozpath.join(root, path) for path, _ in finder.find("files/**")]

    issues = external(files, config, **lintargs)
    for issue in issues:
        # Re-attribute issues so they appear to come from this linter.
        issue.linter = "global_payload"
    return issues
+
+
def global_skipped(config, **lintargs):
    """Global linter that always produces one issue when invoked.

    The paired linter config uses a non-matching ``extensions`` filter, so
    the absence of this issue in test results shows that path exclusion can
    prevent global lint payloads from being invoked at all.

    Returns:
        list: A single Issue flagged with the "not-skipped" rule.
    """
    finder = FileFinder(lintargs["root"])
    files = [mozpath.join(lintargs["root"], p) for p, _ in finder.find("files/**")]

    issues = []
    issues.append(
        result.from_config(
            config, path=files[0], lineno=1, column=1, rule="not-skipped"
        )
    )
    # Bug fix: the issues list was built but never returned, so even when
    # this payload did run it reported nothing.
    return issues
diff --git a/python/mozlint/test/linters/global_skipped.yml b/python/mozlint/test/linters/global_skipped.yml
new file mode 100644
index 0000000000..99b784e8be
--- /dev/null
+++ b/python/mozlint/test/linters/global_skipped.yml
@@ -0,0 +1,8 @@
+---
+GlobalSkippedLinter:
+ description: It's bad to run global linters when nothing matches.
+ include:
+ - files
+ type: global
+ extensions: ['.non.existent.extension']
+ payload: global_payload:global_skipped
diff --git a/python/mozlint/test/linters/invalid_exclude.yml b/python/mozlint/test/linters/invalid_exclude.yml
new file mode 100644
index 0000000000..7231d2c146
--- /dev/null
+++ b/python/mozlint/test/linters/invalid_exclude.yml
@@ -0,0 +1,6 @@
+---
+BadExcludeLinter:
+ description: Has an invalid exclude directive.
+ exclude: [0, 1] # should be a list of strings
+ type: string
+ payload: foobar
diff --git a/python/mozlint/test/linters/invalid_extension.ym b/python/mozlint/test/linters/invalid_extension.ym
new file mode 100644
index 0000000000..435fa10320
--- /dev/null
+++ b/python/mozlint/test/linters/invalid_extension.ym
@@ -0,0 +1,5 @@
+---
+BadExtensionLinter:
+ description: Has an invalid file extension.
+ type: string
+ payload: foobar
diff --git a/python/mozlint/test/linters/invalid_include.yml b/python/mozlint/test/linters/invalid_include.yml
new file mode 100644
index 0000000000..b76b3e6a61
--- /dev/null
+++ b/python/mozlint/test/linters/invalid_include.yml
@@ -0,0 +1,6 @@
+---
+BadIncludeLinter:
+ description: Has an invalid include directive.
+ include: should be a list
+ type: string
+ payload: foobar
diff --git a/python/mozlint/test/linters/invalid_include_with_glob.yml b/python/mozlint/test/linters/invalid_include_with_glob.yml
new file mode 100644
index 0000000000..857bb1376b
--- /dev/null
+++ b/python/mozlint/test/linters/invalid_include_with_glob.yml
@@ -0,0 +1,6 @@
+---
+BadIncludeLinterWithGlob:
+ description: Has an invalid include directive.
+ include: ['**/*.js']
+ type: string
+ payload: foobar
diff --git a/python/mozlint/test/linters/invalid_support_files.yml b/python/mozlint/test/linters/invalid_support_files.yml
new file mode 100644
index 0000000000..db39597d68
--- /dev/null
+++ b/python/mozlint/test/linters/invalid_support_files.yml
@@ -0,0 +1,6 @@
+---
+BadSupportFilesLinter:
+ description: Has an invalid support files directive.
+ support-files: should be a list
+ type: string
+ payload: foobar
diff --git a/python/mozlint/test/linters/invalid_type.yml b/python/mozlint/test/linters/invalid_type.yml
new file mode 100644
index 0000000000..29d82e541e
--- /dev/null
+++ b/python/mozlint/test/linters/invalid_type.yml
@@ -0,0 +1,5 @@
+---
+BadTypeLinter:
+ description: Has an invalid type.
+ type: invalid
+ payload: foobar
diff --git a/python/mozlint/test/linters/missing_attrs.yml b/python/mozlint/test/linters/missing_attrs.yml
new file mode 100644
index 0000000000..5abe15fcfc
--- /dev/null
+++ b/python/mozlint/test/linters/missing_attrs.yml
@@ -0,0 +1,3 @@
+---
+MissingAttrsLinter:
+ description: Missing type and payload
diff --git a/python/mozlint/test/linters/missing_definition.yml b/python/mozlint/test/linters/missing_definition.yml
new file mode 100644
index 0000000000..d66b2cb781
--- /dev/null
+++ b/python/mozlint/test/linters/missing_definition.yml
@@ -0,0 +1 @@
+# No definition
diff --git a/python/mozlint/test/linters/multiple.yml b/python/mozlint/test/linters/multiple.yml
new file mode 100644
index 0000000000..5b880b3691
--- /dev/null
+++ b/python/mozlint/test/linters/multiple.yml
@@ -0,0 +1,19 @@
+---
+StringLinter:
+ description: >-
+ Make sure the string foobar never appears in browser js
+ files because it is bad
+ rule: no-foobar
+ extensions: ['.js', 'jsm']
+ type: string
+ payload: foobar
+
+---
+RegexLinter:
+ description: >-
+ Make sure the string foobar never appears in browser js
+ files because it is bad
+ rule: no-foobar
+ extensions: ['.js', 'jsm']
+ type: regex
+ payload: foobar
diff --git a/python/mozlint/test/linters/non_existing_exclude.yml b/python/mozlint/test/linters/non_existing_exclude.yml
new file mode 100644
index 0000000000..8190123027
--- /dev/null
+++ b/python/mozlint/test/linters/non_existing_exclude.yml
@@ -0,0 +1,7 @@
+---
+BadExcludeLinter:
+ description: Has an invalid exclude directive.
+ exclude:
+ - files/does_not_exist
+ type: string
+ payload: foobar
diff --git a/python/mozlint/test/linters/non_existing_include.yml b/python/mozlint/test/linters/non_existing_include.yml
new file mode 100644
index 0000000000..5443d751ed
--- /dev/null
+++ b/python/mozlint/test/linters/non_existing_include.yml
@@ -0,0 +1,7 @@
+---
+BadIncludeLinter:
+ description: Has an invalid include directive.
+ include:
+ - files/does_not_exist
+ type: string
+ payload: foobar
diff --git a/python/mozlint/test/linters/non_existing_support_files.yml b/python/mozlint/test/linters/non_existing_support_files.yml
new file mode 100644
index 0000000000..e636fadf93
--- /dev/null
+++ b/python/mozlint/test/linters/non_existing_support_files.yml
@@ -0,0 +1,7 @@
+---
+BadSupportFilesLinter:
+ description: Has an invalid support-files directive.
+ support-files:
+ - files/does_not_exist
+ type: string
+ payload: foobar
diff --git a/python/mozlint/test/linters/raises.yml b/python/mozlint/test/linters/raises.yml
new file mode 100644
index 0000000000..9c0b234779
--- /dev/null
+++ b/python/mozlint/test/linters/raises.yml
@@ -0,0 +1,6 @@
+---
+RaisesLinter:
+ description: Raises an exception
+ include: ['.']
+ type: external
+ payload: external:raises
diff --git a/python/mozlint/test/linters/regex.yml b/python/mozlint/test/linters/regex.yml
new file mode 100644
index 0000000000..2c9c812428
--- /dev/null
+++ b/python/mozlint/test/linters/regex.yml
@@ -0,0 +1,10 @@
+---
+RegexLinter:
+ description: >-
+ Make sure the string foobar never appears in a js variable
+ file because it is bad.
+ rule: no-foobar
+ include: ['.']
+ extensions: ['js', '.jsm']
+ type: regex
+ payload: foobar
diff --git a/python/mozlint/test/linters/setup.yml b/python/mozlint/test/linters/setup.yml
new file mode 100644
index 0000000000..ac75d72c70
--- /dev/null
+++ b/python/mozlint/test/linters/setup.yml
@@ -0,0 +1,9 @@
+---
+SetupLinter:
+ description: It's bad to have the string foobar in js files.
+ include:
+ - files
+ type: external
+ extensions: ['.js', '.jsm']
+ payload: external:external
+ setup: external:setup
diff --git a/python/mozlint/test/linters/setupfailed.yml b/python/mozlint/test/linters/setupfailed.yml
new file mode 100644
index 0000000000..1e3543286f
--- /dev/null
+++ b/python/mozlint/test/linters/setupfailed.yml
@@ -0,0 +1,9 @@
+---
+SetupFailedLinter:
+ description: It's bad to have the string foobar in js files.
+ include:
+ - files
+ type: external
+ extensions: ['.js', '.jsm']
+ payload: external:external
+ setup: external:setupfailed
diff --git a/python/mozlint/test/linters/setupraised.yml b/python/mozlint/test/linters/setupraised.yml
new file mode 100644
index 0000000000..8c987f2d3c
--- /dev/null
+++ b/python/mozlint/test/linters/setupraised.yml
@@ -0,0 +1,9 @@
+---
+SetupRaisedLinter:
+ description: It's bad to have the string foobar in js files.
+ include:
+ - files
+ type: external
+ extensions: ['.js', '.jsm']
+ payload: external:external
+ setup: external:setupraised
diff --git a/python/mozlint/test/linters/slow.yml b/python/mozlint/test/linters/slow.yml
new file mode 100644
index 0000000000..2c47679010
--- /dev/null
+++ b/python/mozlint/test/linters/slow.yml
@@ -0,0 +1,8 @@
+---
+SlowLinter:
+ description: A linter that takes awhile to run
+ include:
+ - files
+ type: external
+ extensions: ['.js', '.jsm']
+ payload: external:slow
diff --git a/python/mozlint/test/linters/string.yml b/python/mozlint/test/linters/string.yml
new file mode 100644
index 0000000000..836d866ae2
--- /dev/null
+++ b/python/mozlint/test/linters/string.yml
@@ -0,0 +1,9 @@
+---
+StringLinter:
+ description: >-
+ Make sure the string foobar never appears in browser js
+ files because it is bad
+ rule: no-foobar
+ extensions: ['.js', 'jsm']
+ type: string
+ payload: foobar
diff --git a/python/mozlint/test/linters/structured.yml b/python/mozlint/test/linters/structured.yml
new file mode 100644
index 0000000000..01ef447ee3
--- /dev/null
+++ b/python/mozlint/test/linters/structured.yml
@@ -0,0 +1,8 @@
+---
+StructuredLinter:
+ description: "It's bad to have the string foobar in js files."
+ include:
+ - files
+ type: structured_log
+ extensions: ['.js', '.jsm']
+ payload: external:structured
diff --git a/python/mozlint/test/linters/support_files.yml b/python/mozlint/test/linters/support_files.yml
new file mode 100644
index 0000000000..0c278d51fa
--- /dev/null
+++ b/python/mozlint/test/linters/support_files.yml
@@ -0,0 +1,10 @@
+---
+SupportFilesLinter:
+ description: Linter that has a few support files
+ include:
+ - files
+ support-files:
+ - '**/*.py'
+ type: external
+ extensions: ['.js', '.jsm']
+ payload: external:passes
diff --git a/python/mozlint/test/linters/warning.yml b/python/mozlint/test/linters/warning.yml
new file mode 100644
index 0000000000..b86bfd07c7
--- /dev/null
+++ b/python/mozlint/test/linters/warning.yml
@@ -0,0 +1,11 @@
+---
+WarningLinter:
+ description: >-
+ Make sure the string foobar never appears in browser js
+ files because it is bad, but not too bad (just a warning)
+ rule: no-foobar
+ level: warning
+ include: ['.']
+ type: string
+ extensions: ['.js', 'jsm']
+ payload: foobar
diff --git a/python/mozlint/test/linters/warning_no_code_review.yml b/python/mozlint/test/linters/warning_no_code_review.yml
new file mode 100644
index 0000000000..20bfc0503b
--- /dev/null
+++ b/python/mozlint/test/linters/warning_no_code_review.yml
@@ -0,0 +1,12 @@
+---
+WarningNoCodeReviewLinter:
+ description: >-
+ Make sure the string foobar never appears in browser js
+ files because it is bad, but not too bad (just a warning)
+ rule: no-foobar-no-code-review
+ level: warning
+ include: ['.']
+ type: string
+ extensions: ['.js', 'jsm']
+ payload: foobar
+ code_review_warnings: false
diff --git a/python/mozlint/test/python.ini b/python/mozlint/test/python.ini
new file mode 100644
index 0000000000..5c2c11d73f
--- /dev/null
+++ b/python/mozlint/test/python.ini
@@ -0,0 +1,11 @@
+[DEFAULT]
+subsuite = mozlint
+
+[test_cli.py]
+[test_editor.py]
+[test_formatters.py]
+[test_parser.py]
+[test_pathutils.py]
+[test_result.py]
+[test_roller.py]
+[test_types.py]
diff --git a/python/mozlint/test/runcli.py b/python/mozlint/test/runcli.py
new file mode 100644
index 0000000000..be60a1da19
--- /dev/null
+++ b/python/mozlint/test/runcli.py
@@ -0,0 +1,17 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+
+from mozlint import cli
+
here = os.path.abspath(os.path.dirname(__file__))

if __name__ == "__main__":
    # Minimal CLI entry point used by test_cli.py to exercise mozlint in a
    # subprocess, rooted at this test directory with its bundled linters.
    parser = cli.MozlintParser()
    args = vars(parser.parse_args(sys.argv[1:]))
    args["root"] = here
    args["config_paths"] = [os.path.join(here, "linters")]
    sys.exit(cli.run(**args))
diff --git a/python/mozlint/test/test_cli.py b/python/mozlint/test/test_cli.py
new file mode 100644
index 0000000000..01aeaa74b4
--- /dev/null
+++ b/python/mozlint/test/test_cli.py
@@ -0,0 +1,127 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
import os
import shutil
import subprocess
import sys
from distutils.spawn import find_executable

import mozunit
import pytest

from mozlint import cli
+
+here = os.path.abspath(os.path.dirname(__file__))
+
+
@pytest.fixture
def parser():
    """A fresh mozlint argument parser."""
    return cli.MozlintParser()
+
+
@pytest.fixture
def run(parser, lintdir, files):
    """Factory that runs mozlint's CLI against the bundled test files."""

    def inner(args=None):
        args = args or []
        args.extend(files)
        lintargs = vars(parser.parse_args(args))
        # Anchor the run to this test directory and its linter definitions.
        lintargs.update(
            {"root": here, "config_paths": [os.path.join(here, "linters")]}
        )
        return cli.run(**lintargs)

    return inner
+
+
def test_cli_with_ascii_encoding(run, monkeypatch, capfd):
    """Lint output must not crash when stdout only accepts ASCII.

    Runs the CLI in a subprocess with PYTHONIOENCODING=ascii and asserts
    that no traceback leaks into the combined output.
    """
    cmd = [sys.executable, "runcli.py", "-l=string", "-f=stylish", "files/foobar.js"]
    env = os.environ.copy()
    # Make mozlint importable from the subprocess.
    env["PYTHONPATH"] = os.pathsep.join(sys.path)
    env["PYTHONIOENCODING"] = "ascii"
    proc = subprocess.Popen(
        cmd,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        cwd=here,
        env=env,
        universal_newlines=True,
    )
    out = proc.communicate()[0]
    assert "Traceback" not in out
+
+
def test_cli_run_with_fix(run, capfd):
    """--fix with the external linter reports zero issues as '{}'."""
    exit_code = run(["-f", "json", "--fix", "--linter", "external"])
    stdout, _ = capfd.readouterr()
    assert exit_code == 0
    assert stdout.endswith("{}\n")
+
+
@pytest.mark.skipif(not shutil.which("echo"), reason="No `echo` executable found.")
def test_cli_run_with_edit(run, parser, capfd):
    """--edit invokes $EDITOR per file and still reports the issues.

    Uses `echo` as a stand-in editor so its output is observable, then
    checks that --edit without $EDITOR is rejected at parse time.
    """
    # shutil.which replaces distutils.spawn.find_executable; distutils was
    # removed from the standard library in Python 3.12.
    os.environ["EDITOR"] = "echo"
    try:
        ret = run(["-f", "compact", "--edit", "--linter", "external"])
        out, err = capfd.readouterr()
        out = out.splitlines()
        assert ret == 1
        assert out[0].endswith("foobar.js")  # from the `echo` editor
        assert "foobar.js: line 1, col 1, Error" in out[1]
        assert "foobar.js: line 2, col 1, Error" in out[2]
        assert "2 problems" in out[-1]
        assert len(out) == 5
    finally:
        # Bug fix: restore the environment even when an assertion fails,
        # so a leaked EDITOR can't affect unrelated tests.
        del os.environ["EDITOR"]

    with pytest.raises(SystemExit):
        parser.parse_args(["--edit"])
+
+
def test_cli_run_with_setup(run, capfd):
    """Setup hooks run whether triggered implicitly or via --setup."""
    base_args = ["-l", "setup", "-l", "setupfailed", "-l", "setupraised"]

    # First pass calls setup implicitly; second pass requests it explicitly.
    for extra in ([], ["--setup"]):
        ret = run(base_args + extra)
        out, err = capfd.readouterr()
        assert "setup passed" in out
        assert "setup failed" in out
        assert "setup raised" in out
        assert ret == 1
+
+
def test_cli_for_exclude_list(run, monkeypatch, capfd):
    """--check-exclude-list reports which excluded paths are now clean."""
    run(["-l", "excludes", "--check-exclude-list"])
    out, err = capfd.readouterr()

    assert "**/foobar.js" in out
    green_msg = (
        "The following list of paths are now green "
        "and can be removed from the exclude list:"
    )
    assert green_msg in out

    ret = run(["-l", "excludes_empty", "--check-exclude-list"])
    out, err = capfd.readouterr()

    assert "No path in the exclude list is green." in out
    assert ret == 1
+
+
def test_cli_run_with_wrong_linters(run, capfd):
    """Unknown linter names produce a failure and a helpful message."""
    run(["-l", "external", "-l", "foobar"])
    out, err = capfd.readouterr()

    # foobar should be identified as an invalid linter.
    assert "A failure occurred in the foobar linter." in out

    # The exception message tells the user how to recover.
    assert "Invalid linters given, run again using valid linters or no linters" in out
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozlint/test/test_editor.py b/python/mozlint/test/test_editor.py
new file mode 100644
index 0000000000..7a15a613a6
--- /dev/null
+++ b/python/mozlint/test/test_editor.py
@@ -0,0 +1,92 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import subprocess
+
+import mozunit
+import pytest
+
+from mozlint import editor
+from mozlint.result import Issue, ResultSummary
+
+here = os.path.abspath(os.path.dirname(__file__))
+
+
@pytest.fixture
def capture_commands(monkeypatch):
    """Patch subprocess.call to record commands instead of running them."""

    def inner(commands):
        def record_call(*args, **kwargs):
            # Only the command (first positional argument) matters here.
            commands.append(args[0])

        monkeypatch.setattr(subprocess, "call", record_call)

    return inner
+
+
@pytest.fixture
def result():
    """A ResultSummary pre-populated with two issues against foo.py."""
    result = ResultSummary("/fake/root")
    result.issues["foo.py"].extend(
        [
            Issue(
                linter="no-foobar",
                path="foo.py",
                lineno=1,
                message="Oh no!",
            ),
            Issue(
                linter="no-foobar",
                path="foo.py",
                lineno=3,
                column=10,
                message="To Yuma!",
            ),
        ]
    )
    return result
+
+
def test_no_editor(monkeypatch, capture_commands, result):
    """With no $EDITOR set, edit_issues launches nothing."""
    recorded = []
    capture_commands(recorded)

    monkeypatch.delenv("EDITOR", raising=False)
    editor.edit_issues(result)
    assert recorded == []
+
+
def test_no_issues(monkeypatch, capture_commands, result):
    """With no issues to show, the editor is never launched."""
    recorded = []
    capture_commands(recorded)

    monkeypatch.setenv("EDITOR", "generic")
    result.issues = {}
    editor.edit_issues(result)
    assert recorded == []
+
+
def test_vim(monkeypatch, capture_commands, result):
    """vim is invoked exactly once for all issues."""
    recorded = []
    capture_commands(recorded)

    monkeypatch.setenv("EDITOR", "vim")
    editor.edit_issues(result)
    assert len(recorded) == 1
    assert recorded[0][0] == "vim"
+
+
def test_generic(monkeypatch, capture_commands, result):
    """An unrecognized editor is launched once per file with issues."""
    recorded = []
    capture_commands(recorded)

    monkeypatch.setenv("EDITOR", "generic")
    editor.edit_issues(result)
    assert len(recorded) == len(result.issues)
    assert all(cmd[0] == "generic" for cmd in recorded)
    assert all("foo.py" in cmd for cmd in recorded)
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozlint/test/test_formatters.py b/python/mozlint/test/test_formatters.py
new file mode 100644
index 0000000000..5a276a1c23
--- /dev/null
+++ b/python/mozlint/test/test_formatters.py
@@ -0,0 +1,141 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+
+import attr
+import mozpack.path as mozpath
+import mozunit
+import pytest
+
+from mozlint import formatters
+from mozlint.result import Issue, ResultSummary
+
+NORMALISED_PATHS = {
+ "abc": mozpath.normpath("a/b/c.txt"),
+ "def": mozpath.normpath("d/e/f.txt"),
+ "root": mozpath.abspath("/fake/root"),
+}
+
+EXPECTED = {
+ "compact": {
+ "kwargs": {},
+ "format": """
+/fake/root/a/b/c.txt: line 1, Error - oh no foo (foo)
+/fake/root/a/b/c.txt: line 4, col 10, Error - oh no baz (baz)
+/fake/root/a/b/c.txt: line 5, Error - oh no foo-diff (foo-diff)
+/fake/root/d/e/f.txt: line 4, col 2, Warning - oh no bar (bar-not-allowed)
+
+4 problems
+""".strip(),
+ },
+ "stylish": {
+ "kwargs": {"disable_colors": True},
+ "format": """
+/fake/root/a/b/c.txt
+ 1 error oh no foo (foo)
+ 4:10 error oh no baz (baz)
+ 5 error oh no foo-diff (foo-diff)
+ diff 1
+ - hello
+ + hello2
+
+/fake/root/d/e/f.txt
+ 4:2 warning oh no bar bar-not-allowed (bar)
+
+\u2716 4 problems (3 errors, 1 warning, 0 fixed)
+""".strip(),
+ },
+ "treeherder": {
+ "kwargs": {},
+ "format": """
+TEST-UNEXPECTED-ERROR | /fake/root/a/b/c.txt:1 | oh no foo (foo)
+TEST-UNEXPECTED-ERROR | /fake/root/a/b/c.txt:4:10 | oh no baz (baz)
+TEST-UNEXPECTED-ERROR | /fake/root/a/b/c.txt:5 | oh no foo-diff (foo-diff)
+TEST-UNEXPECTED-WARNING | /fake/root/d/e/f.txt:4:2 | oh no bar (bar-not-allowed)
+""".strip(),
+ },
+ "unix": {
+ "kwargs": {},
+ "format": """
+{abc}:1: foo error: oh no foo
+{abc}:4:10: baz error: oh no baz
+{abc}:5: foo-diff error: oh no foo-diff
+{def}:4:2: bar-not-allowed warning: oh no bar
+""".format(
+ **NORMALISED_PATHS
+ ).strip(),
+ },
+ "summary": {
+ "kwargs": {},
+ "format": """
+{root}/a: 3 errors
+{root}/d: 0 errors, 1 warning
+""".format(
+ **NORMALISED_PATHS
+ ).strip(),
+ },
+}
+
+
+@pytest.fixture(scope="module")
+def result():
+ result = ResultSummary("/fake/root")
+ containers = (
+ Issue(linter="foo", path="a/b/c.txt", message="oh no foo", lineno=1),
+ Issue(
+ linter="bar",
+ path="d/e/f.txt",
+ message="oh no bar",
+ hint="try baz instead",
+ level="warning",
+ lineno="4",
+ column="2",
+ rule="bar-not-allowed",
+ ),
+ Issue(
+ linter="baz",
+ path="a/b/c.txt",
+ message="oh no baz",
+ lineno=4,
+ column=10,
+ source="if baz:",
+ ),
+ Issue(
+ linter="foo-diff",
+ path="a/b/c.txt",
+ message="oh no foo-diff",
+ lineno=5,
+ source="if baz:",
+ diff="diff 1\n- hello\n+ hello2",
+ ),
+ )
+ result = ResultSummary("/fake/root")
+ for c in containers:
+ result.issues[c.path].append(c)
+ return result
+
+
+@pytest.mark.parametrize("name", EXPECTED.keys())
+def test_formatters(result, name):
+ opts = EXPECTED[name]
+ fmt = formatters.get(name, **opts["kwargs"])
+ # encoding to str bypasses a UnicodeEncodeError in pytest
+ assert fmt(result) == opts["format"]
+
+
+def test_json_formatter(result):
+ fmt = formatters.get("json")
+ formatted = json.loads(fmt(result))
+
+ assert set(formatted.keys()) == set(result.issues.keys())
+
+ attrs = attr.fields(Issue)
+ for errors in formatted.values():
+ for err in errors:
+ assert all(a.name in err for a in attrs)
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozlint/test/test_parser.py b/python/mozlint/test/test_parser.py
new file mode 100644
index 0000000000..2fbf26c8e5
--- /dev/null
+++ b/python/mozlint/test/test_parser.py
@@ -0,0 +1,80 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+
+import mozunit
+import pytest
+
+from mozlint.errors import LinterNotFound, LinterParseError
+from mozlint.parser import Parser
+
+here = os.path.abspath(os.path.dirname(__file__))
+
+
+@pytest.fixture(scope="module")
+def parse(lintdir):
+ parser = Parser(here)
+
+ def _parse(name):
+ path = os.path.join(lintdir, name)
+ return parser(path)
+
+ return _parse
+
+
+def test_parse_valid_linter(parse):
+ lintobj = parse("string.yml")
+ assert isinstance(lintobj, list)
+ assert len(lintobj) == 1
+
+ lintobj = lintobj[0]
+ assert isinstance(lintobj, dict)
+ assert "name" in lintobj
+ assert "description" in lintobj
+ assert "type" in lintobj
+ assert "payload" in lintobj
+ assert "extensions" in lintobj
+ assert "include" in lintobj
+ assert lintobj["include"] == ["."]
+ assert set(lintobj["extensions"]) == set(["js", "jsm"])
+
+
+def test_parser_valid_multiple(parse):
+ lintobj = parse("multiple.yml")
+ assert isinstance(lintobj, list)
+ assert len(lintobj) == 2
+
+ assert lintobj[0]["name"] == "StringLinter"
+ assert lintobj[1]["name"] == "RegexLinter"
+
+
+@pytest.mark.parametrize(
+ "linter",
+ [
+ "invalid_type.yml",
+ "invalid_extension.ym",
+ "invalid_include.yml",
+ "invalid_include_with_glob.yml",
+ "invalid_exclude.yml",
+ "invalid_support_files.yml",
+ "missing_attrs.yml",
+ "missing_definition.yml",
+ "non_existing_include.yml",
+ "non_existing_exclude.yml",
+ "non_existing_support_files.yml",
+ ],
+)
+def test_parse_invalid_linter(parse, linter):
+ with pytest.raises(LinterParseError):
+ parse(linter)
+
+
+def test_parse_non_existent_linter(parse):
+ with pytest.raises(LinterNotFound):
+ parse("missing_file.lint")
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozlint/test/test_pathutils.py b/python/mozlint/test/test_pathutils.py
new file mode 100644
index 0000000000..78f7883e88
--- /dev/null
+++ b/python/mozlint/test/test_pathutils.py
@@ -0,0 +1,166 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+from fnmatch import fnmatch
+
+import mozunit
+import pytest
+
+from mozlint import pathutils
+
+here = os.path.abspath(os.path.dirname(__file__))
+root = os.path.join(here, "filter")
+
+
+def assert_paths(a, b):
+ def normalize(p):
+ if not os.path.isabs(p):
+ p = os.path.join(root, p)
+ return os.path.normpath(p)
+
+ assert set(map(normalize, a)) == set(map(normalize, b))
+
+
+@pytest.mark.parametrize(
+ "test",
+ (
+ {
+ "paths": ["a.js", "subdir1/subdir3/d.js"],
+ "include": ["."],
+ "exclude": ["subdir1"],
+ "expected": ["a.js"],
+ },
+ {
+ "paths": ["a.js", "subdir1/subdir3/d.js"],
+ "include": ["subdir1/subdir3"],
+ "exclude": ["subdir1"],
+ "expected": ["subdir1/subdir3/d.js"],
+ },
+ {
+ "paths": ["."],
+ "include": ["."],
+ "exclude": ["**/c.py", "subdir1/subdir3"],
+ "extensions": ["py"],
+ "expected": ["."],
+ "expected_exclude": ["subdir2/c.py", "subdir1/subdir3"],
+ },
+ {
+ "paths": [
+ "a.py",
+ "a.js",
+ "subdir1/b.py",
+ "subdir2/c.py",
+ "subdir1/subdir3/d.py",
+ ],
+ "include": ["."],
+ "exclude": ["**/c.py", "subdir1/subdir3"],
+ "extensions": ["py"],
+ "expected": ["a.py", "subdir1/b.py"],
+ },
+ {
+ "paths": ["a.py", "a.js", "subdir2"],
+ "include": ["."],
+ "exclude": [],
+ "extensions": ["py"],
+ "expected": ["a.py", "subdir2"],
+ },
+ {
+ "paths": ["subdir1"],
+ "include": ["."],
+ "exclude": ["subdir1/subdir3"],
+ "extensions": ["py"],
+ "expected": ["subdir1"],
+ "expected_exclude": ["subdir1/subdir3"],
+ },
+ {
+ "paths": ["docshell"],
+ "include": ["docs"],
+ "exclude": [],
+ "expected": [],
+ },
+ {
+ "paths": ["does/not/exist"],
+ "include": ["."],
+ "exclude": [],
+ "expected": [],
+ },
+ ),
+)
+def test_filterpaths(test):
+ expected = test.pop("expected")
+ expected_exclude = test.pop("expected_exclude", [])
+
+ paths, exclude = pathutils.filterpaths(root, **test)
+ assert_paths(paths, expected)
+ assert_paths(exclude, expected_exclude)
+
+
+@pytest.mark.parametrize(
+ "test",
+ (
+ {
+ "paths": ["subdir1/b.js"],
+ "config": {
+ "exclude": ["subdir1"],
+ "extensions": "js",
+ },
+ "expected": [],
+ },
+ {
+ "paths": ["subdir1/subdir3"],
+ "config": {
+ "exclude": ["subdir1"],
+ "extensions": "js",
+ },
+ "expected": [],
+ },
+ ),
+)
+def test_expand_exclusions(test):
+ expected = test.pop("expected", [])
+
+ paths = list(pathutils.expand_exclusions(test["paths"], test["config"], root))
+ assert_paths(paths, expected)
+
+
+@pytest.mark.parametrize(
+ "paths,expected",
+ [
+ (["subdir1/*"], ["subdir1"]),
+ (["subdir2/*"], ["subdir2"]),
+ (["subdir1/*.*", "subdir1/subdir3/*", "subdir2/*"], ["subdir1", "subdir2"]),
+ ([root + "/*", "subdir1/*.*", "subdir1/subdir3/*", "subdir2/*"], [root]),
+ (["subdir1/b.py", "subdir1/subdir3"], ["subdir1/b.py", "subdir1/subdir3"]),
+ (["subdir1/b.py", "subdir1/b.js"], ["subdir1/b.py", "subdir1/b.js"]),
+ (["subdir1/subdir3"], ["subdir1/subdir3"]),
+ (
+ [
+ "foo",
+ "foobar",
+ ],
+ ["foo", "foobar"],
+ ),
+ ],
+)
+def test_collapse(paths, expected):
+ os.chdir(root)
+
+ inputs = []
+ for path in paths:
+ base, name = os.path.split(path)
+ if "*" in name:
+ for n in os.listdir(base):
+ if not fnmatch(n, name):
+ continue
+ inputs.append(os.path.join(base, n))
+ else:
+ inputs.append(path)
+
+ print("inputs: {}".format(inputs))
+ assert_paths(pathutils.collapse(inputs), expected)
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozlint/test/test_result.py b/python/mozlint/test/test_result.py
new file mode 100644
index 0000000000..02e8156b3c
--- /dev/null
+++ b/python/mozlint/test/test_result.py
@@ -0,0 +1,26 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mozunit
+
+from mozlint.result import Issue, ResultSummary
+
+
+def test_issue_defaults():
+ ResultSummary.root = "/fake/root"
+
+ issue = Issue(linter="linter", path="path", message="message", lineno=None)
+ assert issue.lineno == 0
+ assert issue.column is None
+ assert issue.level == "error"
+
+ issue = Issue(
+ linter="linter", path="path", message="message", lineno="1", column="2"
+ )
+ assert issue.lineno == 1
+ assert issue.column == 2
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozlint/test/test_roller.py b/python/mozlint/test/test_roller.py
new file mode 100644
index 0000000000..2918047cd2
--- /dev/null
+++ b/python/mozlint/test/test_roller.py
@@ -0,0 +1,396 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import platform
+import signal
+import subprocess
+import sys
+import time
+from itertools import chain
+
+import mozunit
+import pytest
+
+from mozlint.errors import LintersNotConfigured, NoValidLinter
+from mozlint.result import Issue, ResultSummary
+from mozlint.roller import LintRoller
+
+here = os.path.abspath(os.path.dirname(__file__))
+
+
+def test_roll_no_linters_configured(lint, files):
+ with pytest.raises(LintersNotConfigured):
+ lint.roll(files)
+
+
+def test_roll_successful(lint, linters, files):
+ lint.read(linters("string", "regex", "external"))
+
+ result = lint.roll(files)
+ assert len(result.issues) == 1
+ assert result.failed == set([])
+
+ path = list(result.issues.keys())[0]
+ assert os.path.basename(path) == "foobar.js"
+
+ errors = result.issues[path]
+ assert isinstance(errors, list)
+ assert len(errors) == 6
+
+ container = errors[0]
+ assert isinstance(container, Issue)
+ assert container.rule == "no-foobar"
+
+
+def test_roll_from_subdir(lint, linters):
+ lint.read(linters("string", "regex", "external"))
+
+ oldcwd = os.getcwd()
+ try:
+ os.chdir(os.path.join(lint.root, "files"))
+
+ # Path relative to cwd works
+ result = lint.roll("foobar.js")
+ assert len(result.issues) == 1
+ assert len(result.failed) == 0
+ assert result.returncode == 1
+
+ # Path relative to root doesn't work
+ result = lint.roll(os.path.join("files", "foobar.js"))
+ assert len(result.issues) == 0
+ assert len(result.failed) == 0
+ assert result.returncode == 0
+
+ # Paths from vcs are always joined to root instead of cwd
+ lint.mock_vcs([os.path.join("files", "foobar.js")])
+ result = lint.roll(outgoing=True)
+ assert len(result.issues) == 1
+ assert len(result.failed) == 0
+ assert result.returncode == 1
+
+ result = lint.roll(workdir=True)
+ assert len(result.issues) == 1
+ assert len(result.failed) == 0
+ assert result.returncode == 1
+
+ result = lint.roll(rev='not public() and keyword("dummy revset expression")')
+ assert len(result.issues) == 1
+ assert len(result.failed) == 0
+ assert result.returncode == 1
+ finally:
+ os.chdir(oldcwd)
+
+
+def test_roll_catch_exception(lint, linters, files, capfd):
+ lint.read(linters("raises"))
+
+ lint.roll(files) # assert not raises
+ out, err = capfd.readouterr()
+ assert "LintException" in err
+
+
+def test_roll_with_global_excluded_path(lint, linters, files):
+ lint.exclude = ["**/foobar.js"]
+ lint.read(linters("string", "regex", "external"))
+ result = lint.roll(files)
+
+ assert len(result.issues) == 0
+ assert result.failed == set([])
+
+
+def test_roll_with_local_excluded_path(lint, linters, files):
+ lint.read(linters("excludes"))
+ result = lint.roll(files)
+
+ assert "**/foobar.js" in lint.linters[0]["local_exclude"]
+ assert len(result.issues) == 0
+ assert result.failed == set([])
+
+
+def test_roll_with_no_files_to_lint(lint, linters, capfd):
+ lint.read(linters("string", "regex", "external"))
+ lint.mock_vcs([])
+ result = lint.roll([], workdir=True)
+ assert isinstance(result, ResultSummary)
+ assert len(result.issues) == 0
+ assert len(result.failed) == 0
+
+ out, err = capfd.readouterr()
+ assert "warning: no files linted" in out
+
+
+def test_roll_with_invalid_extension(lint, linters, filedir):
+ lint.read(linters("external"))
+ result = lint.roll(os.path.join(filedir, "foobar.py"))
+ assert len(result.issues) == 0
+ assert result.failed == set([])
+
+
+def test_roll_with_failure_code(lint, linters, files):
+ lint.read(linters("badreturncode"))
+
+ result = lint.roll(files, num_procs=1)
+ assert len(result.issues) == 0
+ assert result.failed == set(["BadReturnCodeLinter"])
+
+
+def test_roll_warnings(lint, linters, files):
+ lint.read(linters("warning"))
+ result = lint.roll(files)
+ assert len(result.issues) == 0
+ assert result.total_issues == 0
+ assert len(result.suppressed_warnings) == 1
+ assert result.total_suppressed_warnings == 2
+
+ lint.lintargs["show_warnings"] = True
+ result = lint.roll(files)
+ assert len(result.issues) == 1
+ assert result.total_issues == 2
+ assert len(result.suppressed_warnings) == 0
+ assert result.total_suppressed_warnings == 0
+
+
+def test_roll_code_review(monkeypatch, linters, files):
+ monkeypatch.setenv("CODE_REVIEW", "1")
+ lint = LintRoller(root=here, show_warnings=False)
+ lint.read(linters("warning"))
+ result = lint.roll(files)
+ assert len(result.issues) == 1
+ assert result.total_issues == 2
+ assert len(result.suppressed_warnings) == 0
+ assert result.total_suppressed_warnings == 0
+ assert result.returncode == 1
+
+
+def test_roll_code_review_warnings_disabled(monkeypatch, linters, files):
+ monkeypatch.setenv("CODE_REVIEW", "1")
+ lint = LintRoller(root=here, show_warnings=False)
+ lint.read(linters("warning_no_code_review"))
+ result = lint.roll(files)
+ assert len(result.issues) == 0
+ assert result.total_issues == 0
+ assert lint.result.fail_on_warnings is True
+ assert len(result.suppressed_warnings) == 1
+ assert result.total_suppressed_warnings == 2
+ assert result.returncode == 0
+
+
+def test_roll_code_review_warnings_soft(linters, files):
+ lint = LintRoller(root=here, show_warnings="soft")
+ lint.read(linters("warning_no_code_review"))
+ result = lint.roll(files)
+ assert len(result.issues) == 1
+ assert result.total_issues == 2
+ assert lint.result.fail_on_warnings is False
+ assert len(result.suppressed_warnings) == 0
+ assert result.total_suppressed_warnings == 0
+ assert result.returncode == 0
+
+
+def fake_run_worker(config, paths, **lintargs):
+ result = ResultSummary(lintargs["root"])
+ result.issues["count"].append(1)
+ return result
+
+
+@pytest.mark.skipif(
+ platform.system() == "Windows",
+ reason="monkeypatch issues with multiprocessing on Windows",
+)
+@pytest.mark.parametrize("num_procs", [1, 4, 8, 16])
+def test_number_of_jobs(monkeypatch, lint, linters, files, num_procs):
+ monkeypatch.setattr(sys.modules[lint.__module__], "_run_worker", fake_run_worker)
+
+ linters = linters("string", "regex", "external")
+ lint.read(linters)
+ num_jobs = len(lint.roll(files, num_procs=num_procs).issues["count"])
+
+ if len(files) >= num_procs:
+ assert num_jobs == num_procs * len(linters)
+ else:
+ assert num_jobs == len(files) * len(linters)
+
+
+@pytest.mark.skipif(
+ platform.system() == "Windows",
+ reason="monkeypatch issues with multiprocessing on Windows",
+)
+@pytest.mark.parametrize("max_paths,expected_jobs", [(1, 12), (4, 6), (16, 6)])
+def test_max_paths_per_job(monkeypatch, lint, linters, files, max_paths, expected_jobs):
+ monkeypatch.setattr(sys.modules[lint.__module__], "_run_worker", fake_run_worker)
+
+ files = files[:4]
+ assert len(files) == 4
+
+ linters = linters("string", "regex", "external")[:3]
+ assert len(linters) == 3
+
+ lint.MAX_PATHS_PER_JOB = max_paths
+ lint.read(linters)
+ num_jobs = len(lint.roll(files, num_procs=2).issues["count"])
+ assert num_jobs == expected_jobs
+
+
+@pytest.mark.skipif(
+ platform.system() == "Windows",
+ reason="monkeypatch issues with multiprocessing on Windows",
+)
+@pytest.mark.parametrize("num_procs", [1, 4, 8, 16])
+def test_number_of_jobs_global(monkeypatch, lint, linters, files, num_procs):
+ monkeypatch.setattr(sys.modules[lint.__module__], "_run_worker", fake_run_worker)
+
+ linters = linters("global")
+ lint.read(linters)
+ num_jobs = len(lint.roll(files, num_procs=num_procs).issues["count"])
+
+ assert num_jobs == 1
+
+
+@pytest.mark.skipif(
+ platform.system() == "Windows",
+ reason="monkeypatch issues with multiprocessing on Windows",
+)
+@pytest.mark.parametrize("max_paths", [1, 4, 16])
+def test_max_paths_per_job_global(monkeypatch, lint, linters, files, max_paths):
+ monkeypatch.setattr(sys.modules[lint.__module__], "_run_worker", fake_run_worker)
+
+ files = files[:4]
+ assert len(files) == 4
+
+ linters = linters("global")[:1]
+ assert len(linters) == 1
+
+ lint.MAX_PATHS_PER_JOB = max_paths
+ lint.read(linters)
+ num_jobs = len(lint.roll(files, num_procs=2).issues["count"])
+ assert num_jobs == 1
+
+
+@pytest.mark.skipif(
+ platform.system() == "Windows",
+ reason="signal.CTRL_C_EVENT isn't causing a KeyboardInterrupt on Windows",
+)
+def test_keyboard_interrupt():
+ # We use two linters so we'll have two jobs. One (string.yml) will complete
+ # quickly. The other (slow.yml) will run slowly. This way the first worker
+ # will be be stuck blocking on the ProcessPoolExecutor._call_queue when the
+ # signal arrives and the other still be doing work.
+ cmd = [sys.executable, "runcli.py", "-l=string", "-l=slow", "files/foobar.js"]
+ env = os.environ.copy()
+ env["PYTHONPATH"] = os.pathsep.join(sys.path)
+ proc = subprocess.Popen(
+ cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ cwd=here,
+ env=env,
+ universal_newlines=True,
+ )
+ time.sleep(1)
+ proc.send_signal(signal.SIGINT)
+
+ out = proc.communicate()[0]
+ print(out)
+ assert "warning: not all files were linted" in out
+ assert "2 problems" in out
+ assert "Traceback" not in out
+
+
+def test_support_files(lint, linters, filedir, monkeypatch, files):
+ jobs = []
+
+ # Replace the original _generate_jobs with a new one that simply
+ # adds jobs to a list (and then doesn't return anything).
+ orig_generate_jobs = lint._generate_jobs
+
+ def fake_generate_jobs(*args, **kwargs):
+ jobs.extend([job[1] for job in orig_generate_jobs(*args, **kwargs)])
+ return []
+
+ monkeypatch.setattr(lint, "_generate_jobs", fake_generate_jobs)
+
+ linter_path = linters("support_files")[0]
+ lint.read(linter_path)
+ lint.root = filedir
+
+ # Modified support files only lint entire root if --outgoing or --workdir
+ # are used.
+ path = os.path.join(filedir, "foobar.js")
+ vcs_path = os.path.join(filedir, "foobar.py")
+
+ lint.mock_vcs([vcs_path])
+ lint.roll(path)
+ actual_files = sorted(chain(*jobs))
+ assert actual_files == [path]
+
+ expected_files = sorted(files)
+
+ jobs = []
+ lint.roll(path, workdir=True)
+ actual_files = sorted(chain(*jobs))
+ assert actual_files == expected_files
+
+ jobs = []
+ lint.roll(path, outgoing=True)
+ actual_files = sorted(chain(*jobs))
+ assert actual_files == expected_files
+
+ jobs = []
+ lint.roll(path, rev='draft() and keyword("dummy revset expression")')
+ actual_files = sorted(chain(*jobs))
+ assert actual_files == expected_files
+
+ # Lint config file is implicitly added as a support file
+ lint.mock_vcs([linter_path])
+ jobs = []
+ lint.roll(path, outgoing=True, workdir=True)
+ actual_files = sorted(chain(*jobs))
+ assert actual_files == expected_files
+
+ # Avoid linting the entire root when `--fix` is passed.
+ lint.mock_vcs([vcs_path])
+ lint.lintargs["fix"] = True
+
+ jobs = []
+ lint.roll(path, outgoing=True)
+ actual_files = sorted(chain(*jobs))
+ assert actual_files == sorted([path, vcs_path]), (
+ "`--fix` with `--outgoing` on a `support-files` change should "
+ "avoid linting the entire root."
+ )
+
+ jobs = []
+ lint.roll(path, workdir=True)
+ actual_files = sorted(chain(*jobs))
+ assert actual_files == sorted([path, vcs_path]), (
+ "`--fix` with `--workdir` on a `support-files` change should "
+ "avoid linting the entire root."
+ )
+
+ jobs = []
+ lint.roll(path, rev='draft() and keyword("dummy revset expression")')
+ actual_files = sorted(chain(*jobs))
+ assert actual_files == sorted([path, vcs_path]), (
+ "`--fix` with `--rev` on a `support-files` change should "
+ "avoid linting the entire root."
+ )
+
+
+def test_setup(lint, linters, filedir, capfd):
+ with pytest.raises(NoValidLinter):
+ lint.setup()
+
+ lint.read(linters("setup", "setupfailed", "setupraised"))
+ lint.setup()
+ out, err = capfd.readouterr()
+ assert "setup passed" in out
+ assert "setup failed" in out
+ assert "setup raised" in out
+ assert "error: problem with lint setup, skipping" in out
+ assert lint.result.failed_setup == set(["SetupFailedLinter", "SetupRaisedLinter"])
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozlint/test/test_types.py b/python/mozlint/test/test_types.py
new file mode 100644
index 0000000000..6ed78747b7
--- /dev/null
+++ b/python/mozlint/test/test_types.py
@@ -0,0 +1,84 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+
+import mozpack.path as mozpath
+import mozunit
+import pytest
+
+from mozlint.result import Issue, ResultSummary
+
+
+@pytest.fixture
+def path(filedir):
+ def _path(name):
+ return mozpath.join(filedir, name)
+
+ return _path
+
+
+@pytest.fixture(
+ params=[
+ "external.yml",
+ "global.yml",
+ "regex.yml",
+ "string.yml",
+ "structured.yml",
+ ]
+)
+def linter(lintdir, request):
+ return os.path.join(lintdir, request.param)
+
+
+def test_linter_types(lint, linter, files, path):
+ lint.read(linter)
+ result = lint.roll(files)
+ assert isinstance(result, ResultSummary)
+ assert isinstance(result.issues, dict)
+ assert path("foobar.js") in result.issues
+ assert path("no_foobar.js") not in result.issues
+
+ issue = result.issues[path("foobar.js")][0]
+ assert isinstance(issue, Issue)
+
+ name = os.path.basename(linter).split(".")[0]
+ assert issue.linter.lower().startswith(name)
+
+
+def test_linter_missing_files(lint, linter, filedir):
+ # Missing files should be caught by `mozlint.cli`, so the only way they
+ # could theoretically happen is if they show up from versioncontrol. So
+ # let's just make sure they get ignored.
+ lint.read(linter)
+ files = [
+ os.path.join(filedir, "missing.js"),
+ os.path.join(filedir, "missing.py"),
+ ]
+ result = lint.roll(files)
+ assert result.returncode == 0
+
+ lint.mock_vcs(files)
+ result = lint.roll(outgoing=True)
+ assert result.returncode == 0
+
+
+def test_no_filter(lint, lintdir, files):
+ lint.read(os.path.join(lintdir, "explicit_path.yml"))
+ result = lint.roll(files)
+ assert len(result.issues) == 0
+
+ lint.lintargs["use_filters"] = False
+ result = lint.roll(files)
+ assert len(result.issues) == 3
+
+
+def test_global_skipped(lint, lintdir, files):
+ lint.read(os.path.join(lintdir, "global_skipped.yml"))
+ result = lint.roll(files)
+ assert len(result.issues) == 0
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozperftest/.ruff.toml b/python/mozperftest/.ruff.toml
new file mode 100644
index 0000000000..10338e3dbc
--- /dev/null
+++ b/python/mozperftest/.ruff.toml
@@ -0,0 +1,4 @@
+extend = "../../pyproject.toml"
+
+[isort]
+known-first-party = ["mozperftest"]
diff --git a/python/mozperftest/README.rst b/python/mozperftest/README.rst
new file mode 100644
index 0000000000..383530146b
--- /dev/null
+++ b/python/mozperftest/README.rst
@@ -0,0 +1,6 @@
+===========
+mozperftest
+===========
+
+**mozperftest** can be used to run performance tests against browsers.
+See the docs directory.
diff --git a/python/mozperftest/mozperftest/.coveragerc b/python/mozperftest/mozperftest/.coveragerc
new file mode 100644
index 0000000000..a3cdafa52c
--- /dev/null
+++ b/python/mozperftest/mozperftest/.coveragerc
@@ -0,0 +1,10 @@
+[run]
+omit =
+ python/mozperftest/mozperftest/tests/*
+include =
+ python/mozperftest/*
+
+[report]
+sort = Cover
+show_missing = True
+fail_under = 91
diff --git a/python/mozperftest/mozperftest/__init__.py b/python/mozperftest/mozperftest/__init__.py
new file mode 100644
index 0000000000..3d12258de5
--- /dev/null
+++ b/python/mozperftest/mozperftest/__init__.py
@@ -0,0 +1,13 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import mozlog
+
+from mozperftest.argparser import ( # noqa
+ PerftestArgumentParser,
+ PerftestToolsArgumentParser,
+)
+from mozperftest.environment import MachEnvironment # noqa
+from mozperftest.metadata import Metadata # noqa
+
+logger = mozlog.commandline.setup_logging("mozperftest", {})
diff --git a/python/mozperftest/mozperftest/argparser.py b/python/mozperftest/mozperftest/argparser.py
new file mode 100644
index 0000000000..3ed5aab647
--- /dev/null
+++ b/python/mozperftest/mozperftest/argparser.py
@@ -0,0 +1,475 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import copy
+import os
+from argparse import ArgumentParser, Namespace
+
+import mozlog
+
+here = os.path.abspath(os.path.dirname(__file__))
+try:
+ from mozbuild.base import MachCommandConditions as conditions
+ from mozbuild.base import MozbuildObject
+
+ build_obj = MozbuildObject.from_environment(cwd=here)
+except Exception:
+ build_obj = None
+ conditions = None
+
+from mozperftest.metrics import get_layers as metrics_layers # noqa
+from mozperftest.system import get_layers as system_layers # noqa
+from mozperftest.test import get_layers as test_layers # noqa
+from mozperftest.utils import convert_day # noqa
+
+FLAVORS = "desktop-browser", "mobile-browser", "doc", "xpcshell", "webpagetest"
+
+
+class Options:
+
+ general_args = {
+ "--flavor": {
+ "choices": FLAVORS,
+ "metavar": "{{{}}}".format(", ".join(FLAVORS)),
+ "default": "desktop-browser",
+ "help": "Only run tests of this flavor.",
+ },
+ "tests": {
+ "nargs": "*",
+ "metavar": "TEST",
+ "default": [],
+ "help": "Test to run. Can be a single test file or URL or a directory"
+ " of tests (to run recursively). If omitted, the entire suite is run.",
+ },
+ "--test-iterations": {
+ "type": int,
+ "default": 1,
+ "help": "Number of times the whole test is executed",
+ },
+ "--output": {
+ "type": str,
+ "default": "artifacts",
+ "help": "Path to where data will be stored, defaults to a top-level "
+ "`artifacts` folder.",
+ },
+ "--hooks": {
+ "type": str,
+ "default": None,
+ "help": "Script containing hooks. Can be a path or a URL.",
+ },
+ "--verbose": {"action": "store_true", "default": False, "help": "Verbose mode"},
+ "--push-to-try": {
+ "action": "store_true",
+ "default": False,
+        "help": "Pushing the test to try",
+ },
+ "--try-platform": {
+ "nargs": "*",
+ "type": str,
+ "default": "linux",
+ "help": "Platform to use on try",
+ "choices": ["g5", "pixel2", "linux", "mac", "win"],
+ },
+ "--on-try": {
+ "action": "store_true",
+ "default": False,
+ "help": "Running the test on try",
+ },
+ "--test-date": {
+ "type": convert_day,
+ "default": "today",
+ "help": "Used in multi-commit testing, it specifies the day to get test builds from. "
+ "Must follow the format `YYYY.MM.DD` or be `today` or `yesterday`.",
+ },
+ }
+
+ args = copy.deepcopy(general_args)
+
+
+for layer in system_layers() + test_layers() + metrics_layers():
+ if layer.activated:
+ # add an option to deactivate it
+ option_name = "--no-%s" % layer.name
+ option_help = "Deactivates the %s layer" % layer.name
+ else:
+ option_name = "--%s" % layer.name
+ option_help = "Activates the %s layer" % layer.name
+
+ Options.args[option_name] = {
+ "action": "store_true",
+ "default": False,
+ "help": option_help,
+ }
+
+ for option, value in layer.arguments.items():
+ option = "--%s-%s" % (layer.name, option.replace("_", "-"))
+ if option in Options.args:
+ raise KeyError("%s option already defined!" % option)
+ Options.args[option] = value
+
+
+class PerftestArgumentParser(ArgumentParser):
+ """%(prog)s [options] [test paths]"""
+
+ def __init__(self, app=None, **kwargs):
+ ArgumentParser.__init__(
+ self, usage=self.__doc__, conflict_handler="resolve", **kwargs
+ )
+ # XXX see if this list will vary depending on the flavor & app
+ self.oldcwd = os.getcwd()
+ self.app = app
+ if not self.app and build_obj:
+ if conditions.is_android(build_obj):
+ self.app = "android"
+ if not self.app:
+ self.app = "generic"
+ for name, options in Options.args.items():
+ self.add_argument(name, **options)
+
+ mozlog.commandline.add_logging_group(self)
+ self.set_by_user = []
+
+ def parse_helper(self, args):
+ for arg in args:
+ arg_part = arg.partition("--")[-1].partition("-")
+ layer_name = f"--{arg_part[0]}"
+ layer_exists = arg_part[1] and layer_name in Options.args
+ if layer_exists:
+ args.append(layer_name)
+
+ def get_user_args(self, args):
+ # suppress args that were not provided by the user.
+ res = {}
+ for key, value in args.items():
+ if key not in self.set_by_user:
+ continue
+ res[key] = value
+ return res
+
+ def _parse_known_args(self, arg_strings, namespace):
+ # at this point, the namespace is filled with default values
+ # defined in the args
+
+ # let's parse what the user really gave us in the CLI
+ # in a new namespace
+ user_namespace, extras = super()._parse_known_args(arg_strings, Namespace())
+
+ self.set_by_user = list([name for name, value in user_namespace._get_kwargs()])
+
+ # we can now merge both
+ for key, value in user_namespace._get_kwargs():
+ setattr(namespace, key, value)
+
+ return namespace, extras
+
+ def parse_args(self, args=None, namespace=None):
+ self.parse_helper(args)
+ return super().parse_args(args, namespace)
+
+ def parse_known_args(self, args=None, namespace=None):
+ self.parse_helper(args)
+ return super().parse_known_args(args, namespace)
+
+
class SideBySideOptions:
    """Argument definitions for the `perftest-tools side-by-side` command.

    Each entry is a ``[flags, add_argument_kwargs]`` pair consumed by
    PerftestToolsArgumentParser.
    """

    args = [
        [
            ["-t", "--test-name"],
            {
                "type": str,
                "required": True,
                "dest": "test_name",
                "help": "The name of the test task to get videos from.",
            },
        ],
        [
            ["--new-test-name"],
            {
                "type": str,
                "default": None,
                "help": "The name of the test task to get videos from in the new revision.",
            },
        ],
        [
            ["--base-revision"],
            {
                "type": str,
                "required": True,
                "help": "The base revision to compare a new revision to.",
            },
        ],
        [
            ["--new-revision"],
            {
                "type": str,
                "required": True,
                # fixed copy/paste: this help previously duplicated the
                # --base-revision text
                "help": "The new revision to compare a base revision to.",
            },
        ],
        [
            ["--base-branch"],
            {
                "type": str,
                "default": "autoland",
                "help": "Branch to search for the base revision.",
            },
        ],
        [
            ["--new-branch"],
            {
                "type": str,
                "default": "autoland",
                "help": "Branch to search for the new revision.",
            },
        ],
        [
            ["--base-platform"],
            {
                "type": str,
                "required": True,
                "dest": "platform",
                "help": "Platform to return results for.",
            },
        ],
        [
            ["--new-platform"],
            {
                "type": str,
                "default": None,
                "help": "Platform to return results for in the new revision.",
            },
        ],
        [
            ["-o", "--overwrite"],
            {
                "action": "store_true",
                "default": False,
                "help": "If set, the downloaded task group data will be deleted before "
                + "it gets re-downloaded.",
            },
        ],
        [
            ["--cold"],
            {
                "action": "store_true",
                "default": False,
                "help": "If set, we'll only look at cold pageload tests.",
            },
        ],
        [
            ["--warm"],
            {
                "action": "store_true",
                "default": False,
                "help": "If set, we'll only look at warm pageload tests.",
            },
        ],
        [
            ["--most-similar"],
            {
                "action": "store_true",
                "default": False,
                "help": "If set, we'll search for a video pairing that is the most similar.",
            },
        ],
        [
            ["--search-crons"],
            {
                "action": "store_true",
                "default": False,
                "help": "If set, we will search for the tasks within the cron jobs as well. ",
            },
        ],
        [
            ["--skip-download"],
            {
                "action": "store_true",
                "default": False,
                "help": "If set, we won't try to download artifacts again and we'll "
                + "try using what already exists in the output folder.",
            },
        ],
        [
            ["--output"],
            {
                "type": str,
                "default": None,
                "help": "This is where the data will be saved. Defaults to CWD. "
                + "You can include a name for the file here, otherwise it will "
                + "default to side-by-side.mp4.",
            },
        ],
        [
            ["--metric"],
            {
                "type": str,
                "default": "speedindex",
                "help": "Metric to use for side-by-side comparison.",
            },
        ],
        [
            ["--vismetPath"],
            {
                "type": str,
                "default": False,
                "help": "Paths to visualmetrics.py for step chart generation.",
            },
        ],
        [
            ["--original"],
            {
                "action": "store_true",
                "default": False,
                "help": "If set, use the original videos in the side-by-side instead "
                + "of the postprocessed videos.",
            },
        ],
        [
            ["--skip-slow-gif"],
            {
                "action": "store_true",
                "default": False,
                "help": "If set, the slow-motion GIFs won't be produced.",
            },
        ],
    ]
+
+
class ChangeDetectorOptions:
    """Argument definitions for the `perftest-tools change-detector` command.

    Each entry is a ``[flags, add_argument_kwargs]`` pair consumed by
    PerftestToolsArgumentParser.
    """

    args = [
        # TODO: Move the common tool arguments to a common
        # argument class.
        [
            ["--task-name"],
            {
                "type": str,
                "nargs": "*",
                "default": [],
                "dest": "task_names",
                "help": "The full name of the test task to get data from e.g. "
                "test-android-hw-a51-11-0-aarch64-shippable-qr/opt-"
                "browsertime-tp6m-geckoview-sina-nofis.",
            },
        ],
        [
            ["-t", "--test-name"],
            {
                "type": str,
                "default": None,
                "dest": "test_name",
                "help": "The name of the test task to get data from e.g. "
                "browsertime-tp6m-geckoview-sina-nofis.",
            },
        ],
        [
            ["--platform"],
            {
                "type": str,
                "default": None,
                "help": "Platform to analyze e.g. "
                "test-android-hw-a51-11-0-aarch64-shippable-qr/opt.",
            },
        ],
        [
            ["--new-test-name"],
            {
                "type": str,
                "help": "The name of the test task to get data from in the "
                "base revision e.g. browsertime-tp6m-geckoview-sina-nofis.",
            },
        ],
        [
            ["--new-platform"],
            {
                "type": str,
                "help": "Platform to analyze in base revision e.g. "
                "test-android-hw-a51-11-0-aarch64-shippable-qr/opt.",
            },
        ],
        [
            ["--depth"],
            {
                "type": int,
                "default": None,
                "help": "This sets how the change detector should run. "
                "Default is None, which is a direct comparison between the "
                "revisions. -1 will autocompute the number of revisions to "
                "look at between the base, and new. Any other positive integer "
                "acts as a maximum number to look at.",
            },
        ],
        [
            ["--base-revision"],
            {
                "type": str,
                "required": True,
                "help": "The base revision to compare a new revision to.",
            },
        ],
        [
            ["--new-revision"],
            {
                "type": str,
                "required": True,
                "help": "The new revision to compare a base revision to.",
            },
        ],
        [
            ["--base-branch"],
            {
                "type": str,
                "default": "try",
                "help": "Branch to search for the base revision.",
            },
        ],
        [
            ["--new-branch"],
            {
                "type": str,
                "default": "try",
                "help": "Branch to search for the new revision.",
            },
        ],
        [
            ["--skip-download"],
            {
                "action": "store_true",
                "default": False,
                "help": "If set, we won't try to download artifacts again and we'll "
                + "try using what already exists in the output folder.",
            },
        ],
        [
            ["-o", "--overwrite"],
            {
                "action": "store_true",
                "default": False,
                "help": "If set, the downloaded task group data will be deleted before "
                + "it gets re-downloaded.",
            },
        ],
    ]
+
+
class ToolingOptions:
    """Maps each `perftest-tools` subcommand name to its argument definitions."""

    args = {
        "side-by-side": SideBySideOptions.args,
        "change-detector": ChangeDetectorOptions.args,
    }
+
+
class PerftestToolsArgumentParser(ArgumentParser):
    """%(prog)s [options] [test paths]"""

    # Which tool's options to expose; the mach command sets this class
    # attribute before instantiating the parser.
    tool = None

    def __init__(self, *args, **kwargs):
        super().__init__(usage=self.__doc__, conflict_handler="resolve", **kwargs)

        selected_tool = PerftestToolsArgumentParser.tool
        if selected_tool is None:
            raise SystemExit("No tool specified, cannot continue parsing")
        # register every [flags, kwargs] pair declared for the tool
        for flags, options in ToolingOptions.args[selected_tool]:
            self.add_argument(*flags, **options)
diff --git a/python/mozperftest/mozperftest/environment.py b/python/mozperftest/mozperftest/environment.py
new file mode 100644
index 0000000000..7be0f1f639
--- /dev/null
+++ b/python/mozperftest/mozperftest/environment.py
@@ -0,0 +1,106 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import contextlib
+import copy
+
+from mozperftest.argparser import FLAVORS
+from mozperftest.hooks import Hooks
+from mozperftest.layers import Layers, StopRunError
+from mozperftest.metrics import pick_metrics
+from mozperftest.system import pick_system
+from mozperftest.test import pick_test
+from mozperftest.utils import MachLogger
+
+SYSTEM, TEST, METRICS = 0, 1, 2
+
+
class MachEnvironment(MachLogger):
    """Holds the mach arguments and the layer groups for a perftest run.

    The environment owns the system, test and metrics layer groups
    (indexed by the module-level SYSTEM/TEST/METRICS constants) plus the
    user hooks, and mediates all argument reads/writes for the layers.
    """

    def __init__(self, mach_cmd, flavor="desktop-browser", hooks=None, **kwargs):
        MachLogger.__init__(self, mach_cmd)
        self._mach_cmd = mach_cmd
        # mach arguments keyed by their normalized (underscore) names
        self._mach_args = dict(
            [(self._normalize(key), value) for key, value in kwargs.items()]
        )
        self.layers = []
        if flavor not in FLAVORS:
            raise NotImplementedError(flavor)
        for layer in (pick_system, pick_test, pick_metrics):
            self.add_layer(layer(self, flavor, mach_cmd))
        if hooks is None:
            # we just load an empty Hooks instance
            hooks = Hooks(mach_cmd)
        self.hooks = hooks

    @contextlib.contextmanager
    def frozen(self):
        """Context manager: snapshot the args and restore them on exit."""
        self.freeze()
        try:
            # used to trigger __enter__/__exit__
            with self:
                yield self
        finally:
            self.unfreeze()

    def _normalize(self, name):
        # "--foo-bar" -> "foo_bar", matching argparse dest names
        if name.startswith("--"):
            name = name[2:]
        return name.replace("-", "_")

    def set_arg(self, name, value):
        """Sets the argument"""
        # see if we want to restrict to existing keys
        self._mach_args[self._normalize(name)] = value

    def get_arg(self, name, default=None, layer=None):
        """Return the value of argument *name*.

        When the plain name is missing and *layer* is given, retries with
        the layer-prefixed name ("<layer>_<name>") before falling back to
        *default*.
        """
        name = self._normalize(name)
        marker = object()
        res = self._mach_args.get(name, marker)
        if res is marker:
            # trying with the name prefixed with the layer name
            if layer is not None and not name.startswith(layer.name):
                name = "%s_%s" % (layer.name, name)
                return self._mach_args.get(name, default)
            return default
        return res

    def get_layer(self, name):
        """Return the (sub-)layer called *name*, searching inside groups."""
        for layer in self.layers:
            if isinstance(layer, Layers):
                found = layer.get_layer(name)
                if found is not None:
                    return found
            elif layer.name == name:
                return layer
        return None

    def add_layer(self, layer):
        """Append a layer (or layer group) to the run sequence."""
        self.layers.append(layer)

    def freeze(self):
        # freeze args (XXX do we need to freeze more?)
        self._saved_mach_args = copy.deepcopy(self._mach_args)

    def unfreeze(self):
        # restore the snapshot taken by freeze()
        self._mach_args = self._saved_mach_args
        self._saved_mach_args = None

    def run(self, metadata):
        """Run system+test layers, then metrics; StopRunError ends quietly."""
        # run the system and test layers
        try:
            with self.layers[SYSTEM] as syslayer, self.layers[TEST] as testlayer:
                metadata = testlayer(syslayer(metadata))

            # then run the metrics layers
            with self.layers[METRICS] as metrics:
                metadata = metrics(metadata)
        except StopRunError:
            # ends the cycle but without bubbling up the error
            pass
        return metadata

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        return
diff --git a/python/mozperftest/mozperftest/fzf/__init__.py b/python/mozperftest/mozperftest/fzf/__init__.py
new file mode 100644
index 0000000000..6fbe8159b2
--- /dev/null
+++ b/python/mozperftest/mozperftest/fzf/__init__.py
@@ -0,0 +1,3 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
diff --git a/python/mozperftest/mozperftest/fzf/fzf.py b/python/mozperftest/mozperftest/fzf/fzf.py
new file mode 100644
index 0000000000..af9594db3f
--- /dev/null
+++ b/python/mozperftest/mozperftest/fzf/fzf.py
@@ -0,0 +1,116 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import json
+import os
+import subprocess
+import sys
+from distutils.spawn import find_executable
+from pathlib import Path
+
+from mach.util import get_state_dir
+from mozterm import Terminal
+
+HERE = Path(__file__).parent.resolve()
+SRC_ROOT = (HERE / ".." / ".." / ".." / "..").resolve()
+PREVIEW_SCRIPT = HERE / "preview.py"
+FZF_HEADER = """
+Please select a performance test to execute.
+{shortcuts}
+""".strip()
+
+fzf_shortcuts = {
+ "ctrl-t": "toggle-all",
+ "alt-bspace": "beginning-of-line+kill-line",
+ "?": "toggle-preview",
+}
+
+fzf_header_shortcuts = [
+ ("select", "tab"),
+ ("accept", "enter"),
+ ("cancel", "ctrl-c"),
+ ("cursor-up", "up"),
+ ("cursor-down", "down"),
+]
+
+
def run_fzf(cmd, tasks):
    """Feed *tasks* (one per line) to the fzf command *cmd*.

    Returns a ``(query, selected)`` tuple: the query string typed by the
    user (first output line, thanks to --print-query) and the list of
    selected lines. Both are empty/None when nothing was produced.
    """
    environment = os.environ.copy()
    # restrict PYTHONPATH so the preview subprocess can import requests
    environment["PYTHONPATH"] = os.pathsep.join(
        path for path in sys.path if "requests" in path
    )
    process = subprocess.Popen(
        cmd,
        stdout=subprocess.PIPE,
        stdin=subprocess.PIPE,
        env=environment,
        universal_newlines=True,
    )
    stdout, _ = process.communicate("\n".join(tasks))
    lines = stdout.splitlines()
    if not lines:
        return None, []
    return lines[0], lines[1:]
+
+
def format_header():
    """Build the colored fzf header listing the keyboard shortcuts."""
    term = Terminal()
    rendered = [
        f"{term.white}{action}{term.normal}: {term.yellow}<{key}>{term.normal}"
        for action, key in fzf_header_shortcuts
    ]
    return FZF_HEADER.format(shortcuts=", ".join(rendered), t=term)
+
+
def select(test_objects):
    """Let the user pick perftest scripts through the fzf fuzzy finder.

    Writes *test_objects* to ~/.mozbuild/.perftestfuzzy so the preview
    subprocess can read them back, then runs fzf over a one-line summary
    of each test. Returns the list of selected display lines.

    Raises AssertionError when no fzf binary can be located.
    """
    # local import: the file-level imports predate this fix
    import shutil

    mozbuild_dir = Path(Path.home(), ".mozbuild")
    os.makedirs(str(mozbuild_dir), exist_ok=True)
    cache_file = Path(mozbuild_dir, ".perftestfuzzy")

    with cache_file.open("w") as f:
        f.write(json.dumps(test_objects))

    def _display(task):
        # one display line per test: "[flavor][tags] name in location"
        from mozperftest.script import ScriptInfo

        path = Path(task["path"])
        script_info = ScriptInfo(str(path))
        flavor = script_info.script_type.name
        if flavor == "browsertime":
            flavor = "bt"
        tags = script_info.get("tags", [])

        location = str(path.parent).replace(str(SRC_ROOT), "").strip("/")
        if len(tags) > 0:
            return f"[{flavor}][{','.join(tags)}] {path.name} in {location}"
        return f"[{flavor}] {path.name} in {location}"

    candidate_tasks = [_display(t) for t in test_objects]

    # shutil.which replaces distutils.spawn.find_executable, which was
    # deprecated and removed along with distutils in Python 3.12.
    fzf_bin = shutil.which(
        "fzf", path=str(Path(get_state_dir(), "fzf", "bin"))
    ) or shutil.which("fzf")
    if not fzf_bin:
        raise AssertionError("Unable to find fzf")

    key_shortcuts = [k + ":" + v for k, v in fzf_shortcuts.items()]

    base_cmd = [
        fzf_bin,
        "-m",
        "--bind",
        ",".join(key_shortcuts),
        "--header",
        format_header(),
        "--preview-window=right:50%",
        "--print-query",
        "--preview",
        sys.executable + ' {} -t "{{+f}}"'.format(str(PREVIEW_SCRIPT)),
    ]
    query_str, tasks = run_fzf(base_cmd, sorted(candidate_tasks))
    return tasks
diff --git a/python/mozperftest/mozperftest/fzf/preview.py b/python/mozperftest/mozperftest/fzf/preview.py
new file mode 100644
index 0000000000..d6441becbd
--- /dev/null
+++ b/python/mozperftest/mozperftest/fzf/preview.py
@@ -0,0 +1,90 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+This file is executed by fzf through the command line and needs to
+work in a standalone way on any Python 3 environment.
+
+This is why it alters PATH,making the assumption it's executed
+from within a source tree. Do not add dependencies unless they
+are in the source tree and added in SEARCH_PATHS.
+"""
+import argparse
+import importlib.util
+import json
+import sys
+from pathlib import Path
+
+HERE = Path(__file__).parent.resolve()
+SRC_ROOT = (HERE / ".." / ".." / ".." / "..").resolve()
+# make sure esprima is in the path
+SEARCH_PATHS = [
+ ("third_party", "python", "esprima"),
+]
+
+for path in SEARCH_PATHS:
+ path = Path(SRC_ROOT, *path)
+ if path.exists():
+ sys.path.insert(0, str(path))
+
+
def get_test_objects():
    """Return the test objects cached in ~/.mozbuild/.perftestfuzzy.

    The cache file is produced by the main fzf script; it lets this
    standalone preview script access the test_objects computed by the
    mach command.
    """
    cache_path = Path.home() / ".mozbuild" / ".perftestfuzzy"
    return json.loads(cache_path.read_text())
+
+
def plain_display(taskfile):
    """Preview-window display: print the reST summary of the selected test.

    *taskfile* is a temp file written by fzf holding the highlighted line(s).
    """
    # Load ScriptInfo straight from script.py so this standalone preview
    # process does not need the whole mozperftest package importable.
    script_py = HERE / ".." / "script.py"
    spec = importlib.util.spec_from_file_location(
        name="script.py", location=str(script_py)
    )
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    script_info_cls = module.ScriptInfo

    with open(taskfile) as f:
        selected_lines = [line.strip() for line in f]

    # display line shape: "[tags] script.py in relative/location"
    _tags, script_name, _in, location = selected_lines[0].split(" ")
    script_path = Path(SRC_ROOT, location, script_name).resolve()

    for entry in get_test_objects():
        if entry["path"] == str(script_path):
            print(script_info_cls(entry["path"]))
            return
+
+
def process_args(args):
    """Parse the preview script's command line; return the Namespace."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-t",
        "--tasklist",
        type=str,
        default=None,
        help="Path to temporary file containing the selected tasks",
    )
    return parser.parse_args(args=args)
+
+
def main(args=None):
    """Entry point: parse the CLI and render the preview."""
    parsed = process_args(sys.argv[1:] if args is None else args)
    plain_display(parsed.tasklist)


if __name__ == "__main__":
    main()
diff --git a/python/mozperftest/mozperftest/hooks.py b/python/mozperftest/mozperftest/hooks.py
new file mode 100644
index 0000000000..b3491bc915
--- /dev/null
+++ b/python/mozperftest/mozperftest/hooks.py
@@ -0,0 +1,63 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import importlib
+import shutil
+import tempfile
+from pathlib import Path
+
+from mozperftest.utils import MachLogger, download_file
+
+_LOADED_MODULES = {}
+
+
class Hooks(MachLogger):
    """Loads and runs user-provided hook functions from a Python module.

    The hook module may be given as a local path or an http(s) URL (in
    which case it is downloaded into a private temp dir). Hooks are
    plain module-level callables looked up by name; a Hooks instance
    without a module degrades gracefully on every lookup.
    """

    def __init__(self, mach_cmd, hook_module=None):
        MachLogger.__init__(self, mach_cmd)
        self.tmp_dir = tempfile.mkdtemp()

        if hook_module is None:
            # no hooks provided: exists()/get()/run() all no-op
            self._hooks = None
            return

        if not isinstance(hook_module, Path):
            if hook_module.startswith("http"):
                # fetch remote hook files into the temp dir
                target = Path(self.tmp_dir, hook_module.split("/")[-1])
                hook_module = download_file(hook_module, target)
            else:
                hook_module = Path(hook_module)

        if hook_module.exists():
            # `import importlib` alone does not guarantee the
            # `importlib.util` submodule is loaded; import it explicitly
            # to avoid a latent AttributeError.
            import importlib.util

            path = str(hook_module)
            if path not in _LOADED_MODULES:
                spec = importlib.util.spec_from_file_location("hooks", path)
                hook_module = importlib.util.module_from_spec(spec)
                spec.loader.exec_module(hook_module)
                _LOADED_MODULES[path] = hook_module
            self._hooks = _LOADED_MODULES[path]
        else:
            raise IOError("Could not find hook module. %s" % str(hook_module))

    def cleanup(self):
        """Remove the temporary download directory (safe to call twice)."""
        if self.tmp_dir is None:
            return
        shutil.rmtree(self.tmp_dir)
        self.tmp_dir = None

    def exists(self, name):
        """Return True if the hook module defines *name*."""
        if self._hooks is None:
            return False
        return hasattr(self._hooks, name)

    def get(self, name):
        """Return the hook attribute *name* (False when no module is loaded)."""
        if self._hooks is None:
            return False
        return getattr(self._hooks, name)

    def run(self, name, *args, **kw):
        """Run hook *name* with the given args; returns None when missing."""
        if self._hooks is None:
            return
        if not hasattr(self._hooks, name):
            return
        self.debug("Running hook %s" % name)
        return getattr(self._hooks, name)(*args, **kw)
diff --git a/python/mozperftest/mozperftest/layers.py b/python/mozperftest/mozperftest/layers.py
new file mode 100644
index 0000000000..8cec547bf5
--- /dev/null
+++ b/python/mozperftest/mozperftest/layers.py
@@ -0,0 +1,177 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import traceback
+
+from mozperftest.utils import MachLogger
+
+
class StopRunError(Exception):
    """Raised by a layer to end the run cleanly without surfacing an error."""
+
+
class Layer(MachLogger):
    """Base class for a perftest execution layer.

    A layer is a context manager plus a callable: setup() runs on
    __enter__, run(metadata) on call, teardown() on __exit__.
    Subclasses override `name`, `activated`, `arguments` and the three
    lifecycle methods.
    """

    # layer name
    name = "unset"

    # activated by default ?
    activated = False

    # list of arguments grabbed by PerftestArgumentParser
    arguments = {}

    # If true, calls on_exception() on errors
    user_exception = False

    def __init__(self, env, mach_command):
        MachLogger.__init__(self, mach_command)
        self.return_code = 0
        self.mach_cmd = mach_command
        self.run_process = mach_command.run_process
        self.env = env

    def _normalize_arg(self, name):
        # strip "--", prefix with the layer name, and convert dashes so
        # the result matches argparse dest names ("<layer>_<arg>")
        if name.startswith("--"):
            name = name[2:]
        if not name.startswith(self.name):
            name = "%s-%s" % (self.name, name)
        return name.replace("-", "_")

    def get_arg_names(self):
        """Return the normalized names of every argument this layer owns."""
        return [self._normalize_arg(arg) for arg in self.arguments]

    def set_arg(self, name, value):
        """Sets the argument"""
        name = self._normalize_arg(name)
        # a layer may only write arguments it declared
        if name not in self.get_arg_names():
            raise KeyError(
                "%r tried to set %r, but does not own it" % (self.name, name)
            )
        return self.env.set_arg(name, value)

    def get_arg(self, name, default=None):
        """Look up an argument value, scoped to this layer."""
        return self.env.get_arg(name, default, self)

    def __enter__(self):
        self.debug("Running %s:setup" % self.name)
        self.setup()
        return self

    def __exit__(self, type, value, traceback):
        # XXX deal with errors here
        self.debug("Running %s:teardown" % self.name)
        self.teardown()

    def __call__(self, metadata):
        """Run the layer, routing errors through the user's on_exception hook.

        If the hook returns a truthy value the run resumes with the
        current metadata; otherwise StopRunError aborts the remaining
        layers without re-raising the original error.
        """
        has_exc_handler = self.env.hooks.exists("on_exception")
        self.debug("Running %s:run" % self.name)
        try:
            metadata = self.run(metadata)
        except Exception as e:
            if self.user_exception and has_exc_handler:
                self.error("User handled error")
                # log the original traceback before delegating
                for line in traceback.format_exc().splitlines():
                    self.error(line)
                resume_run = self.env.hooks.run("on_exception", self.env, self, e)
                if resume_run:
                    return metadata
                raise StopRunError()
            else:
                raise
        return metadata

    def setup(self):
        pass

    def teardown(self):
        pass

    def run(self, metadata):
        return metadata
+
+
class Layers(Layer):
    """A composite Layer that runs a sequence of sub-layers in order."""

    def __init__(self, env, mach_command, factories):
        super(Layers, self).__init__(env, mach_command)

        def _active(layer):
            # if it's activated by default, see if we need to deactivate
            # it by looking for the --no-layername option
            if layer.activated:
                return not env.get_arg("no-" + layer.name, False)
            # if it's deactivated by default, we look for --layername
            return env.get_arg(layer.name, False)

        self.layers = [
            factory(env, mach_command) for factory in factories if _active(factory)
        ]
        self.env = env
        self._counter = -1

    def _normalize_arg(self, name):
        # unlike Layer._normalize_arg, no layer-name prefix is added:
        # the composite does not own arguments itself
        if name.startswith("--"):
            name = name[2:]
        return name.replace("-", "_")

    def get_layer(self, name):
        """Return the sub-layer called *name*, or None."""
        for layer in self.layers:
            if layer.name == name:
                return layer
        return None

    @property
    def name(self):
        # composite display name, e.g. "proxy + browsertime"
        return " + ".join([l.name for l in self.layers])

    def __iter__(self):
        self._counter = -1
        return self

    def __next__(self):
        self._counter += 1
        try:
            return self.layers[self._counter]
        except IndexError:
            raise StopIteration

    def __enter__(self):
        self.setup()
        return self

    def __exit__(self, type, value, traceback):
        # XXX deal with errors here
        self.teardown()

    def setup(self):
        # run setup for every sub-layer, in declaration order
        for layer in self.layers:
            self.debug("Running %s:setup" % layer.name)
            layer.setup()

    def teardown(self):
        # run teardown for every sub-layer, in declaration order
        for layer in self.layers:
            self.debug("Running %s:teardown" % layer.name)
            layer.teardown()

    def __call__(self, metadata):
        # chain the sub-layers, feeding each one's output to the next
        for layer in self.layers:
            metadata = layer(metadata)
        return metadata

    def set_arg(self, name, value):
        """Sets the argument"""
        name = self._normalize_arg(name)
        found = False
        # accept the write only if one of the sub-layers owns the name
        for layer in self.layers:
            if name in layer.get_arg_names():
                found = True
                break

        if not found:
            raise KeyError(
                "%r tried to set %r, but does not own it" % (self.name, name)
            )

        return self.env.set_arg(name, value)

    def get_arg(self, name, default=None):
        return self.env.get_arg(name, default)
diff --git a/python/mozperftest/mozperftest/mach_commands.py b/python/mozperftest/mozperftest/mach_commands.py
new file mode 100644
index 0000000000..97cef13f43
--- /dev/null
+++ b/python/mozperftest/mozperftest/mach_commands.py
@@ -0,0 +1,305 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import json
+import os
+import sys
+from functools import partial
+
+from mach.decorators import Command, CommandArgument, SubCommand
+from mozbuild.base import MachCommandConditions as conditions
+
+_TRY_PLATFORMS = {
+ "g5-browsertime": "perftest-android-hw-g5-browsertime",
+ "linux-xpcshell": "perftest-linux-try-xpcshell",
+ "mac-xpcshell": "perftest-macosx-try-xpcshell",
+ "linux-browsertime": "perftest-linux-try-browsertime",
+ "mac-browsertime": "perftest-macosx-try-browsertime",
+ "win-browsertimee": "perftest-windows-try-browsertime",
+}
+
+
+HERE = os.path.dirname(__file__)
+
+
def get_perftest_parser():
    """Lazily import and return the perftest argument parser class."""
    from mozperftest import PerftestArgumentParser

    return PerftestArgumentParser
+
+
def get_perftest_tools_parser(tool):
    """Return a parser factory bound to *tool* (e.g. "side-by-side").

    The factory sets the class-level `tool` attribute before returning
    the parser class, so instantiation picks up the right options.
    """

    def tools_parser_func():
        from mozperftest import PerftestToolsArgumentParser

        PerftestToolsArgumentParser.tool = tool
        return PerftestToolsArgumentParser

    return tools_parser_func
+
+
def get_parser():
    """Return the argparse parser mach built for the `perftest` command."""
    return run_perftest._mach_command._parser
+
+
@Command(
    "perftest",
    category="testing",
    conditions=[partial(conditions.is_buildapp_in, apps=["firefox", "android"])],
    description="Run any flavor of perftest",
    parser=get_perftest_parser,
)
def run_perftest(command_context, **kwargs):
    """Entry point for `mach perftest`.

    Resolves the test selection (interactively via fzf when none was
    given), infers the flavor from the script, then either pushes the
    run to try or executes it locally.
    """
    # original parser that brought us there
    original_parser = get_parser()

    from pathlib import Path

    from mozperftest.script import ParseError, ScriptInfo, ScriptType

    # user selection with fuzzy UI
    from mozperftest.utils import ON_TRY

    if not ON_TRY and kwargs.get("tests", []) == []:
        from moztest.resolve import TestResolver

        from mozperftest.fzf.fzf import select

        resolver = command_context._spawn(TestResolver)
        test_objects = list(resolver.resolve_tests(paths=None, flavor="perftest"))
        selected = select(test_objects)

        def full_path(selection):
            # fzf line shape: "[tags] script.py in relative/location"
            __, script_name, __, location = selection.split(" ")
            return str(
                Path(
                    command_context.topsrcdir.rstrip(os.sep),
                    location.strip(os.sep),
                    script_name,
                )
            )

        kwargs["tests"] = [full_path(s) for s in selected]

    if kwargs["tests"] == []:
        print("\nNo selection. Bye!")
        return

    if len(kwargs["tests"]) > 1:
        print("\nSorry no support yet for multiple local perftest")
        return

    sel = "\n".join(kwargs["tests"])
    print("\nGood job! Best selection.\n%s" % sel)
    # if the script is xpcshell, we can force the flavor here
    # XXX on multi-selection, what happens if we have several flavors?
    try:
        script_info = ScriptInfo(kwargs["tests"][0])
    except ParseError as e:
        if e.exception is IsADirectoryError:
            script_info = None
        else:
            raise
    else:
        if script_info.script_type == ScriptType.xpcshell:
            kwargs["flavor"] = script_info.script_type.name
        else:
            # we set the value only if not provided (so "mobile-browser"
            # can be picked)
            if "flavor" not in kwargs:
                kwargs["flavor"] = "desktop-browser"

    push_to_try = kwargs.pop("push_to_try", False)
    if push_to_try:
        sys.path.append(str(Path(command_context.topsrcdir, "tools", "tryselect")))

        from tryselect.push import push_to_try

        perftest_parameters = {}
        # NOTE(review): if ScriptInfo() failed with IsADirectoryError above,
        # script_info is None here and the next line would raise — presumably
        # directories never reach the try path; confirm.
        args = script_info.update_args(**original_parser.get_user_args(kwargs))
        platform = args.pop("try_platform", "linux")
        if isinstance(platform, str):
            platform = [platform]

        platform = ["%s-%s" % (plat, script_info.script_type.name) for plat in platform]

        for plat in platform:
            if plat not in _TRY_PLATFORMS:
                # we can extend platform support here: linux, win, macOs, pixel2
                # by adding more jobs in taskcluster/ci/perftest/kind.yml
                # then picking up the right one here
                raise NotImplementedError(
                    "%r doesn't exist or is not yet supported" % plat
                )

        def relative(path):
            # make in-tree paths relative to topsrcdir for the try config
            if path.startswith(command_context.topsrcdir):
                return path[len(command_context.topsrcdir) :].lstrip(os.sep)
            return path

        for name, value in args.items():
            # ignore values that are set to default
            if original_parser.get_default(name) == value:
                continue
            if name == "tests":
                value = [relative(path) for path in value]
            perftest_parameters[name] = value

        parameters = {
            "try_task_config": {
                "tasks": [_TRY_PLATFORMS[plat] for plat in platform],
                "perftest-options": perftest_parameters,
            },
            "try_mode": "try_task_config",
        }

        task_config = {"parameters": parameters, "version": 2}
        if args.get("verbose"):
            print("Pushing run to try...")
            print(json.dumps(task_config, indent=4, sort_keys=True))

        push_to_try("perftest", "perftest", try_task_config=task_config)
        return

    from mozperftest.runner import run_tests

    run_tests(command_context, kwargs, original_parser.get_user_args(kwargs))

    print("\nFirefox. Fast For Good.\n")
+
+
@Command(
    "perftest-test",
    category="testing",
    description="Run perftest tests",
    virtualenv_name="perftest-test",
)
@CommandArgument(
    "tests", default=None, nargs="*", help="Tests to run. By default will run all"
)
@CommandArgument(
    "-s",
    "--skip-linters",
    action="store_true",
    default=False,
    help="Skip flake8 and black",
)
@CommandArgument(
    "-v", "--verbose", action="store_true", default=False, help="Verbose mode"
)
def run_tests(command_context, **kwargs):
    """Entry point for `mach perftest-test`: run mozperftest's own tests.

    Sets up the coverage config and a RUNNING_TESTS marker for the
    duration of the run, then delegates to _run_tests().
    """
    from pathlib import Path

    from mozperftest.utils import temporary_env

    with temporary_env(
        COVERAGE_RCFILE=str(Path(HERE, ".coveragerc")), RUNNING_TESTS="YES"
    ):
        _run_tests(command_context, **kwargs)
+
+
def _run_tests(command_context, **kwargs):
    """Run the mozperftest unit tests: linters, then pytest under coverage."""
    from pathlib import Path

    from mozperftest.utils import ON_TRY, checkout_python_script, checkout_script

    venv = command_context.virtualenv_manager
    skip_linters = kwargs.get("skip_linters", False)
    verbose = kwargs.get("verbose", False)

    # lint locally unless skipped; automation runs linters separately
    if not ON_TRY and not skip_linters:
        cmd = "./mach lint "
        if verbose:
            cmd += " -v"
        cmd += " " + str(HERE)
        if not checkout_script(cmd, label="linters", display=verbose, verbose=verbose):
            raise AssertionError("Please fix your code.")

    # running pytest with coverage
    # coverage is done in three steps:
    # 1/ coverage erase => erase any previous coverage data
    # 2/ coverage run pytest ... => run the tests and collect info
    # 3/ coverage report => generate the report
    tests_dir = Path(HERE, "tests").resolve()
    tests = kwargs.get("tests", [])
    if tests == []:
        tests = str(tests_dir)
        run_coverage_check = not skip_linters
    else:
        # a partial test selection can't produce a meaningful coverage number
        run_coverage_check = False

    def _get_test(test):
        # accept existing paths as-is, otherwise resolve against tests/
        if Path(test).exists():
            return str(test)
        return str(tests_dir / test)

    tests = " ".join([_get_test(test) for test in tests])

    # on macOS + try we skip the coverage
    # because macOS workers prevent us from installing
    # packages from PyPI
    if sys.platform == "darwin" and ON_TRY:
        run_coverage_check = False

    options = "-xs"
    if kwargs.get("verbose"):
        options += "v"

    if run_coverage_check:
        assert checkout_python_script(
            venv, "coverage", ["erase"], label="remove old coverage data"
        )
    args = ["run", "-m", "pytest", options, "--durations", "10", tests]
    assert checkout_python_script(
        venv, "coverage", args, label="running tests", verbose=verbose
    )
    if run_coverage_check and not checkout_python_script(
        venv, "coverage", ["report"], display=True
    ):
        raise ValueError("Coverage is too low!")
+
+
@Command(
    "perftest-tools",
    category="testing",
    description="Run perftest tools",
)
def run_tools(command_context, **kwargs):
    """
    Runs various perftest tools such as the side-by-side generator.
    """
    # bare `mach perftest-tools` just explains itself; the real work is
    # done by the subcommands below
    print("Runs various perftest tools such as the side-by-side generator.")
+
+
@SubCommand(
    "perftest-tools",
    "side-by-side",
    description="This tool can be used to generate a side-by-side visualization of two videos. "
    "When using this tool, make sure that the `--test-name` is an exact match, i.e. if you are "
    "comparing the task `test-linux64-shippable-qr/opt-browsertime-tp6-firefox-linkedin-e10s` "
    "between two revisions, then use `browsertime-tp6-firefox-linkedin-e10s` as the suite name "
    "and `test-linux64-shippable-qr/opt` as the platform.",
    virtualenv_name="perftest-side-by-side",
    parser=get_perftest_tools_parser("side-by-side"),
)
def run_side_by_side(command_context, **kwargs):
    """Entry point for `mach perftest-tools side-by-side`."""
    from mozperftest.runner import run_tools

    # tell the shared runner which tool to dispatch to
    kwargs["tool"] = "side-by-side"
    run_tools(command_context, kwargs)
+
+
@SubCommand(
    "perftest-tools",
    "change-detector",
    description="This tool can be used to determine if there are differences between two "
    "revisions. It can do either direct comparisons, or searching for regressions in between "
    "two revisions (with a maximum or autocomputed depth).",
    virtualenv_name="perftest-side-by-side",
    parser=get_perftest_tools_parser("change-detector"),
)
def run_change_detector(command_context, **kwargs):
    """Entry point for `mach perftest-tools change-detector`."""
    from mozperftest.runner import run_tools

    # tell the shared runner which tool to dispatch to
    kwargs["tool"] = "change-detector"
    run_tools(command_context, kwargs)
diff --git a/python/mozperftest/mozperftest/metadata.py b/python/mozperftest/mozperftest/metadata.py
new file mode 100644
index 0000000000..95864e14e2
--- /dev/null
+++ b/python/mozperftest/mozperftest/metadata.py
@@ -0,0 +1,44 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from collections import defaultdict
+
+from mozperftest.utils import MachLogger
+
+
+class Metadata(MachLogger):
+    """Mutable state shared across perftest layers for one test run.
+
+    Carries the mach command, environment, flavor and test script, and
+    accumulates intermediate results that the metrics layers consume.
+    """
+
+    def __init__(self, mach_cmd, env, flavor, script):
+        MachLogger.__init__(self, mach_cmd)
+        self._mach_cmd = mach_cmd
+        self.flavor = flavor
+        # Per-layer options: layer name -> dict of option values.
+        self.options = defaultdict(dict)
+        # Intermediate results appended by layers via add_result().
+        self._results = []
+        self._output = None
+        self._env = env
+        self.script = script
+
+    def run_hook(self, name, *args, **kw):
+        # this bypasses layer restrictions on args,
+        # which is fine since it's a user script
+        return self._env.hooks.run(name, *args, **kw)
+
+    def set_output(self, output):
+        # Records the output location/value produced by a layer.
+        self._output = output
+
+    def get_output(self):
+        return self._output
+
+    def add_result(self, result):
+        # Appends one intermediate result (validated later by the
+        # metrics layers, not here).
+        self._results.append(result)
+
+    def get_results(self):
+        return self._results
+
+    def clear_results(self):
+        self._results = []
+
+    def update_options(self, name, options):
+        # Merge (not replace) options for the given layer name.
+        self.options[name].update(options)
+
+    def get_options(self, name):
+        return self.options[name]
diff --git a/python/mozperftest/mozperftest/metrics/__init__.py b/python/mozperftest/mozperftest/metrics/__init__.py
new file mode 100644
index 0000000000..1ca5f7e408
--- /dev/null
+++ b/python/mozperftest/mozperftest/metrics/__init__.py
@@ -0,0 +1,23 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from mozperftest.layers import Layers
+from mozperftest.metrics.consoleoutput import ConsoleOutput
+from mozperftest.metrics.notebookupload import Notebook
+from mozperftest.metrics.perfboard.influx import Influx
+from mozperftest.metrics.perfherder import Perfherder
+from mozperftest.metrics.visualmetrics import VisualMetrics
+
+
+def get_layers():
+    # Full set of metrics layers, in the order they should run.
+    return VisualMetrics, Perfherder, ConsoleOutput, Notebook, Influx
+
+
+def pick_metrics(env, flavor, mach_cmd):
+    """Build the Layers pipeline of metrics appropriate for *flavor*.
+
+    Browser flavors get the full set (including VisualMetrics, which
+    needs browser video recordings); everything else gets the rest.
+    """
+    if flavor in ("desktop-browser", "mobile-browser"):
+        layers = get_layers()
+    else:
+        # we don't need VisualMetrics for xpcshell
+        layers = Perfherder, ConsoleOutput, Notebook, Influx
+
+    return Layers(env, mach_cmd, layers)
diff --git a/python/mozperftest/mozperftest/metrics/common.py b/python/mozperftest/mozperftest/metrics/common.py
new file mode 100644
index 0000000000..3598cd378a
--- /dev/null
+++ b/python/mozperftest/mozperftest/metrics/common.py
@@ -0,0 +1,356 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from collections import defaultdict
+from pathlib import Path
+
+from mozperftest.metrics.exceptions import (
+ MetricsMissingResultsError,
+ MetricsMultipleTransformsError,
+)
+from mozperftest.metrics.notebook import PerftestETL
+from mozperftest.metrics.utils import metric_fields, validate_intermediate_results
+
+# Argparse-style option definitions shared by every metrics layer
+# (each layer exposes them through its `arguments` class attribute,
+# so all layers accept the same filtering/transform flags).
+COMMON_ARGS = {
+    "metrics": {
+        "type": metric_fields,
+        "nargs": "*",
+        "default": [],
+        "help": "The metrics that should be retrieved from the data.",
+    },
+    "prefix": {"type": str, "default": "", "help": "Prefix used by the output files."},
+    "split-by": {
+        "type": str,
+        "default": None,
+        "help": "A metric name to use for splitting the data. For instance, "
+        "using browserScripts.pageinfo.url will split the data by the unique "
+        "URLs that are found.",
+    },
+    "simplify-names": {
+        "action": "store_true",
+        "default": False,
+        "help": "If set, metric names will be simplified to a single word. The PerftestETL "
+        "combines dictionary keys by `.`, and the final key contains that value of the data. "
+        "That final key becomes the new name of the metric.",
+    },
+    "simplify-exclude": {
+        "nargs": "*",
+        "default": ["statistics"],
+        "help": "When renaming/simplifying metric names, entries with these strings "
+        "will be ignored and won't get simplified. These options are only used when "
+        "--simplify-names is set.",
+    },
+    "transformer": {
+        "type": str,
+        "default": None,
+        "help": "The path to the file containing the custom transformer, "
+        "or the module to import along with the class name, "
+        "e.g. mozperftest.test.xpcshell:XpcShellTransformer",
+    },
+}
+
+
+class MetricsStorage(object):
+    """Holds data that is commonly used across all metrics layers.
+
+    An instance of this class represents data for a given output
+    path and prefix.
+    """
+
+    def __init__(self, output_path, prefix, logger):
+        self.prefix = prefix
+        self.output_path = output_path
+        # Cache of standardized (post-ETL) data, filled lazily by
+        # get_standardized_data().
+        self.stddata = {}
+        # Per-data-type PerftestETL configuration, kept so the transformer
+        # name can be looked up again later (see filtered_metrics()).
+        self.ptnb_config = {}
+        self.results = []
+        self.logger = logger
+
+        # Make sure the output directory exists up front.
+        p = Path(output_path)
+        p.mkdir(parents=True, exist_ok=True)
+
+    def _parse_results(self, results):
+        """Recursively flatten *results* into a list of JSON file paths.
+
+        Accepts a dict (returned as-is in a list), a path to a file or
+        directory (directories are globbed for **/*.json), or a list of any
+        of these.
+        """
+        if isinstance(results, dict):
+            return [results]
+        res = []
+        # XXX we need to embrace pathlib everywhere.
+        if isinstance(results, (str, Path)):
+            # Expecting a single path or a directory
+            p = Path(results)
+            if not p.exists():
+                self.logger.warning("Given path does not exist: {}".format(results))
+            elif p.is_dir():
+                files = [f for f in p.glob("**/*.json") if not f.is_dir()]
+                res.extend(self._parse_results(files))
+            else:
+                res.append(p.as_posix())
+        if isinstance(results, list):
+            # Expecting a list of paths
+            for path in results:
+                res.extend(self._parse_results(path))
+        return res
+
+    def set_results(self, results):
+        """Processes and sets results provided by the metadata.
+
+        `results` can be a path to a file or a directory. Every
+        file is scanned and we build a list. Alternatively, it
+        can be a mapping containing the results, in that case
+        we just use it directly, but keep it in a list.
+
+        :param results list/dict/str: Path, or list of paths to the data
+           (or the data itself in a dict) of the data to be processed.
+        """
+        # Parse the results into files (for now) and the settings
+        self.results = defaultdict(lambda: defaultdict(list))
+        self.settings = defaultdict(dict)
+        for res in results:
+            # Ensure that the results are valid before continuing
+            validate_intermediate_results(res)
+
+            name = res["name"]
+            if isinstance(res["results"], dict):
+                # XXX Implement subtest based parsing
+                raise NotImplementedError(
+                    "Subtest-based processing is not implemented yet"
+                )
+
+            # Merge all entries with the same name into one
+            # result, if separation is needed use unique names
+            self.results[name]["files"].extend(self._parse_results(res["results"]))
+
+            # Everything except the raw results is kept as per-suite settings.
+            suite_settings = self.settings[name]
+            for key, val in res.items():
+                if key == "results":
+                    continue
+                suite_settings[key] = val
+
+            # Check the transform definitions
+            currtrfm = self.results[name]["transformer"]
+            if not currtrfm:
+                self.results[name]["transformer"] = res.get(
+                    "transformer", "SingleJsonRetriever"
+                )
+            elif currtrfm != res.get("transformer", "SingleJsonRetriever"):
+                raise MetricsMultipleTransformsError(
+                    f"Only one transformer allowed per data name! Found multiple for {name}: "
+                    f"{[currtrfm, res['transformer']]}"
+                )
+
+            # Get the transform options if available
+            self.results[name]["options"] = res.get("transformer-options", {})
+
+        if not self.results:
+            # NOTE(review): self.return_code is set but never read in this
+            # class — presumably inspected by a caller; confirm.
+            self.return_code = 1
+            raise MetricsMissingResultsError("Could not find any results to process.")
+
+    def get_standardized_data(self, group_name="firefox", transformer=None):
+        """Returns a parsed, standardized results data set.
+
+        The dataset is computed once then cached unless overwrite is used.
+        The transformer dictates how the data will be parsed, by default it uses
+        a JSON transformer that flattens the dictionary while merging all the
+        common metrics together.
+
+        :param group_name str: The name for this results group.
+        :param transformer str: The name of the transformer to use
+            when parsing the data. Currently, only SingleJsonRetriever
+            is available.
+        :param overwrite str: if True, we recompute the results
+        :return dict: Standardized notebook data with containing the
+            requested metrics.
+        """
+        # Cached: the ETL only ever runs once per storage instance.
+        if self.stddata:
+            return self.stddata
+
+        for data_type, data_info in self.results.items():
+            # An explicit transformer argument wins over the per-suite one.
+            tfm = transformer if transformer is not None else data_info["transformer"]
+            prefix = data_type
+            if self.prefix:
+                prefix = "{}-{}".format(self.prefix, data_type)
+
+            # Primarily used to store the transformer used on the data
+            # so that it can also be used for generating things
+            # like summary values for suites, and subtests.
+            self.ptnb_config[data_type] = {
+                "output": self.output_path,
+                "prefix": prefix,
+                "custom_transformer": tfm,
+                "file_groups": {data_type: data_info["files"]},
+            }
+
+            ptnb = PerftestETL(
+                file_groups=self.ptnb_config[data_type]["file_groups"],
+                config=self.ptnb_config[data_type],
+                prefix=self.prefix,
+                logger=self.logger,
+                custom_transform=tfm,
+            )
+            r = ptnb.process(**data_info["options"])
+            self.stddata[data_type] = r["data"]
+
+        return self.stddata
+
+    def filtered_metrics(
+        self,
+        group_name="firefox",
+        transformer=None,
+        metrics=None,
+        exclude=None,
+        split_by=None,
+        simplify_names=False,
+        # NOTE(review): mutable default argument — never mutated below, so
+        # currently harmless, but a tuple/None default would be safer.
+        simplify_exclude=["statistics"],
+    ):
+        """Filters the metrics to only those that were requested by `metrics`.
+
+        If metrics is Falsey (None, empty list, etc.) then no metrics
+        will be filtered. The entries in metrics are pattern matched with
+        the subtests in the standardized data (not a regular expression).
+        For example, if "firstPaint" is in metrics, then all subtests which
+        contain this string in their name will be kept.
+
+        :param metrics list: List of metrics to keep.
+        :param exclude list: List of string matchers to exclude from the metrics
+            gathered/reported.
+        :param split_by str: The name of a metric to use to split up data by.
+        :param simplify_exclude list: List of string matchers to exclude
+            from the naming simplification process.
+        :return dict: Standardized notebook data containing the
+            requested metrics.
+        """
+        results = self.get_standardized_data(
+            group_name=group_name, transformer=transformer
+        )
+        if not metrics:
+            return results
+        if not exclude:
+            exclude = []
+        if not simplify_exclude:
+            simplify_exclude = []
+
+        # Get the field to split the results by (if any)
+        if split_by is not None:
+            splitting_entry = None
+            for data_type, data_info in results.items():
+                for res in data_info:
+                    if split_by in res["subtest"]:
+                        splitting_entry = res
+                        break
+            if splitting_entry is not None:
+                # Rebind split_by from a metric name to a mapping of
+                # split value -> list of data indices carrying that value.
+                split_by = defaultdict(list)
+                for c, entry in enumerate(splitting_entry["data"]):
+                    split_by[entry["value"]].append(c)
+
+        # Filter metrics
+        filtered = {}
+        for data_type, data_info in results.items():
+            newresults = []
+            for res in data_info:
+                # Keep entries matching any requested metric name, unless
+                # they also match an exclude pattern (plain substring match).
+                if any([met["name"] in res["subtest"] for met in metrics]) and not any(
+                    [met in res["subtest"] for met in exclude]
+                ):
+                    res["transformer"] = self.ptnb_config[data_type][
+                        "custom_transformer"
+                    ]
+                    newresults.append(res)
+            filtered[data_type] = newresults
+
+        # Simplify the filtered metric names
+        if simplify_names:
+
+            def _simplify(name):
+                # Keep excluded names untouched; otherwise keep only the
+                # last dot-separated component.
+                if any([met in name for met in simplify_exclude]):
+                    return None
+                return name.split(".")[-1]
+
+            # NOTE(review): `res` here is the leftover variable from the loop
+            # above; _alter_name immediately rebinds it in its own loop, so
+            # the argument is effectively unused — confirm and clean up.
+            self._alter_name(filtered, res, filter=_simplify)
+
+        # Split the filtered results
+        if split_by is not None:
+            newfilt = {}
+            total_iterations = sum([len(inds) for _, inds in split_by.items()])
+            for data_type in filtered:
+                if not filtered[data_type]:
+                    # Ignore empty data types
+                    continue
+
+                newresults = []
+                newfilt[data_type] = newresults
+                for split, indices in split_by.items():
+                    for res in filtered[data_type]:
+                        if len(res["data"]) != total_iterations:
+                            # Skip data that cannot be split
+                            continue
+                        # Shallow copy so the split entries don't share the
+                        # mutated subtest/data fields with the original.
+                        splitres = {key: val for key, val in res.items()}
+                        splitres["subtest"] += " " + split
+                        splitres["data"] = [res["data"][i] for i in indices]
+                        splitres["transformer"] = self.ptnb_config[data_type][
+                            "custom_transformer"
+                        ]
+
+                        newresults.append(splitres)
+
+            filtered = newfilt
+
+        return filtered
+
+    def _alter_name(self, filtered, res, filter):
+        # Rename each entry's subtest via `filter`, skipping entries where
+        # the filter returns None and avoiding duplicate simplified names.
+        previous = []
+        for data_type, data_info in filtered.items():
+            for res in data_info:
+                new = filter(res["subtest"])
+                if new is None:
+                    continue
+                if new in previous:
+                    self.logger.warning(
+                        f"Another metric which ends with `{new}` was already found. "
+                        f"{res['subtest']} will not be simplified."
+                    )
+                    continue
+                res["subtest"] = new
+                previous.append(new)
+
+
+# Module-level cache of MetricsStorage instances, keyed by (path, prefix).
+_metrics = {}
+
+
+def filtered_metrics(
+    metadata,
+    path,
+    prefix,
+    group_name="firefox",
+    transformer=None,
+    metrics=None,
+    settings=False,
+    exclude=None,
+    split_by=None,
+    simplify_names=False,
+    simplify_exclude=["statistics"],
+):
+    """Returns standardized data extracted from the metadata instance.
+
+    We're caching an instance of MetricsStorage per metrics/storage
+    combination and compute the data only once when this function is called.
+    """
+    key = path, prefix
+    if key not in _metrics:
+        # First request for this (path, prefix): build the storage and
+        # ingest the metadata's intermediate results once.
+        storage = _metrics[key] = MetricsStorage(path, prefix, metadata)
+        storage.set_results(metadata.get_results())
+    else:
+        storage = _metrics[key]
+
+    results = storage.filtered_metrics(
+        group_name=group_name,
+        transformer=transformer,
+        metrics=metrics,
+        exclude=exclude,
+        split_by=split_by,
+        simplify_names=simplify_names,
+        simplify_exclude=simplify_exclude,
+    )
+
+    # XXX returning two different types is a problem
+    # in case settings is false, we should return None for it
+    # and always return a 2-tuple
+    if settings:
+        return results, storage.settings
+    return results
diff --git a/python/mozperftest/mozperftest/metrics/consoleoutput.py b/python/mozperftest/mozperftest/metrics/consoleoutput.py
new file mode 100644
index 0000000000..a4d544f3ef
--- /dev/null
+++ b/python/mozperftest/mozperftest/metrics/consoleoutput.py
@@ -0,0 +1,59 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import os
+
+from mozperftest.layers import Layer
+from mozperftest.metrics.common import COMMON_ARGS, filtered_metrics
+
+# NOTE(review): this template appears unused in this module —
+# ConsoleOutput.run() builds its banner inline instead; confirm before
+# removing.
+RESULTS_TEMPLATE = """\
+
+==========================================================
+ Results ({})
+==========================================================
+
+{}
+
+"""
+
+
+class ConsoleOutput(Layer):
+    """Output metrics in the console."""
+
+    name = "console"
+    # By default activate the console layer when running locally.
+    activated = "MOZ_AUTOMATION" not in os.environ
+    arguments = COMMON_ARGS
+
+    def run(self, metadata):
+        """Print the filtered metrics to the console and pass metadata on."""
+        # Get filtered metrics
+        results = filtered_metrics(
+            metadata,
+            self.get_arg("output"),
+            self.get_arg("prefix"),
+            metrics=self.get_arg("metrics"),
+            transformer=self.get_arg("transformer"),
+            split_by=self.get_arg("split-by"),
+            simplify_names=self.get_arg("simplify-names"),
+            simplify_exclude=self.get_arg("simplify-exclude"),
+        )
+
+        if not results:
+            self.warning("No results left after filtering")
+            return metadata
+
+        # NOTE(review): the data-type key `name` is not included in the
+        # printed banner, so all groups print under the same heading.
+        for name, res in results.items():
+            # Make a nicer view of the data
+            subtests = [
+                "{}: {}".format(r["subtest"], [v["value"] for v in r["data"]])
+                for r in res
+            ]
+
+            # Output the data to console
+            self.info(
+                "\n==========================================================\n"
+                "= Results =\n"
+                "=========================================================="
+                "\n" + "\n".join(subtests) + "\n"
+            )
+        return metadata
diff --git a/python/mozperftest/mozperftest/metrics/exceptions.py b/python/mozperftest/mozperftest/metrics/exceptions.py
new file mode 100644
index 0000000000..dcac64ded9
--- /dev/null
+++ b/python/mozperftest/mozperftest/metrics/exceptions.py
@@ -0,0 +1,53 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+class MetricsMultipleTransformsError(Exception):
+    """Raised when more than one transformer was specified.
+
+    This is because intermediate results with the same data
+    name are merged when being processed.
+    """
+
+    pass
+
+
+class MetricsMissingResultsError(Exception):
+    """Raised when no results could be found after parsing the intermediate results."""
+
+    pass
+
+
+class PerfherderValidDataError(Exception):
+    """Raised when no valid data (int/float) can be found to build perfherder blob."""
+
+    pass
+
+
+class NotebookInvalidTransformError(Exception):
+    """Raised when an invalid custom transformer is set."""
+
+    pass
+
+
+class NotebookTransformOptionsError(Exception):
+    """Raised when an invalid option is given to a transformer."""
+
+    pass
+
+
+class NotebookTransformError(Exception):
+    """Raised on generic errors within the transformers."""
+
+    # No `pass` needed: the docstring is a sufficient class body.
+
+
+class NotebookDuplicateTransformsError(Exception):
+    """Raised when a directory contains more than one transformer with the same class name."""
+
+    pass
+
+
+class NotebookInvalidPathError(Exception):
+    """Raised when an invalid path is given."""
+
+    pass
diff --git a/python/mozperftest/mozperftest/metrics/notebook/__init__.py b/python/mozperftest/mozperftest/metrics/notebook/__init__.py
new file mode 100644
index 0000000000..8d69182664
--- /dev/null
+++ b/python/mozperftest/mozperftest/metrics/notebook/__init__.py
@@ -0,0 +1,7 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from .perftestetl import PerftestETL
+from .perftestnotebook import PerftestNotebook
+
+__all__ = ["PerftestETL", "PerftestNotebook"]
diff --git a/python/mozperftest/mozperftest/metrics/notebook/constant.py b/python/mozperftest/mozperftest/metrics/notebook/constant.py
new file mode 100644
index 0000000000..ca40d289d4
--- /dev/null
+++ b/python/mozperftest/mozperftest/metrics/notebook/constant.py
@@ -0,0 +1,31 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import os
+import pathlib
+from types import MappingProxyType
+
+from .transformer import get_transformers
+
+
+class Constant(object):
+    """A singleton class to store all constants."""
+
+    # Shared instance; __new__ always hands back this object once created.
+    __instance = None
+
+    def __new__(cls, *args, **kw):
+        if cls.__instance is None:
+            cls.__instance = object.__new__(cls, *args, **kw)
+        return cls.__instance
+
+    # NOTE(review): Python still calls __init__ on every Constant() call,
+    # so the path and transformer registry are recomputed each time even
+    # though the instance is shared — harmless here, but not a true
+    # init-once singleton; confirm this is acceptable.
+    def __init__(self):
+        self.__here = pathlib.Path(os.path.dirname(os.path.abspath(__file__)))
+        self.__predefined_transformers = get_transformers(self.__here / "transforms")
+
+    @property
+    def predefined_transformers(self):
+        # .copy() on the proxy returns a plain mutable dict, so callers
+        # get a snapshot rather than a live read-only view.
+        return MappingProxyType(self.__predefined_transformers).copy()
+
+    @property
+    def here(self):
+        # Directory containing this module (used to locate notebook assets).
+        return self.__here
diff --git a/python/mozperftest/mozperftest/metrics/notebook/notebook-sections/compare b/python/mozperftest/mozperftest/metrics/notebook/notebook-sections/compare
new file mode 100644
index 0000000000..f6870f0246
--- /dev/null
+++ b/python/mozperftest/mozperftest/metrics/notebook/notebook-sections/compare
@@ -0,0 +1,85 @@
+%% md
+<div id="table-wrapper">
+ <table id="compareTable" border="1"></table>
+</div>
+
+%% py
+from js import document, data_object
+import json
+import numpy as np
+
+split_data = {}
+dir_names = set()
+subtests = set()
+newest_run_name = ""
+for element in data_object:
+ name = element["name"]
+ if "- newest run" in name:
+ newest_run_name = name
+ subtest = element["subtest"]
+ dir_names.add(name)
+ subtests.add(subtest)
+
+ data = [p["value"] for p in element["data"]]
+ split_data.setdefault(name, {}).update({
+ subtest:{
+ "data":data,
+ "stats":{
+ "Mean": np.round(np.mean(data),2),
+ "Median": np.median(data),
+ "Std. Dev.": np.round(np.std(data),2)
+ }
+ }
+ })
+
+table = document.getElementById("compareTable")
+table.innerHTML=''
+
+# build table head
+thead = table.createTHead()
+throw = thead.insertRow()
+for name in ["Metrics", "Statistics"] + list(dir_names):
+ th = document.createElement("th")
+ th.appendChild(document.createTextNode(name))
+ throw.appendChild(th)
+
+def fillRow(row, subtest, stat):
+ row.insertCell().appendChild(document.createTextNode(stat))
+ newest_run_val = split_data[newest_run_name][subtest]["stats"][stat]
+ for name in dir_names:
+ cell_val = split_data[name][subtest]["stats"][stat]
+ diff = np.round((cell_val - newest_run_val * 1.0)/newest_run_val * 100, 2)
+ color = "red" if diff>0 else "green"
+ row.insertCell().innerHTML = f"{cell_val}\n(<span style=\"color:{color}\">{diff}</span>%)"
+
+# build table body
+tbody = document.createElement("tbody")
+for subtest in subtests:
+ row1 = tbody.insertRow()
+ cell0 = row1.insertCell()
+ cell0.appendChild(document.createTextNode(subtest))
+ cell0.rowSpan = 3;
+ a = split_data
+ fillRow(row1, subtest, "Mean")
+
+ row2 = tbody.insertRow()
+ fillRow(row2, subtest, "Median")
+
+ row3 = tbody.insertRow()
+ fillRow(row3, subtest, "Std. Dev.")
+
+table.appendChild(tbody)
+
+%% css
+#table-wrapper {
+ height: 600px;
+ overflow: auto;
+}
+
+#table {
+ display: table;
+}
+
+td {
+ white-space:pre-line;
+}
diff --git a/python/mozperftest/mozperftest/metrics/notebook/notebook-sections/header b/python/mozperftest/mozperftest/metrics/notebook/notebook-sections/header
new file mode 100644
index 0000000000..1a0f659e54
--- /dev/null
+++ b/python/mozperftest/mozperftest/metrics/notebook/notebook-sections/header
@@ -0,0 +1,12 @@
+%% md
+# Welcome to PerftestNotebook
+
+Press the :fast_forward: button in the top left corner to run the whole notebook.
+
+%% fetch
+
+text: data_string = http://127.0.0.1:5000/data
+
+%% js
+
+var data_object = JSON.parse(data_string);
diff --git a/python/mozperftest/mozperftest/metrics/notebook/notebook-sections/scatterplot b/python/mozperftest/mozperftest/metrics/notebook/notebook-sections/scatterplot
new file mode 100644
index 0000000000..f68b540236
--- /dev/null
+++ b/python/mozperftest/mozperftest/metrics/notebook/notebook-sections/scatterplot
@@ -0,0 +1,15 @@
+%% py
+from js import data_object
+import matplotlib.pyplot as plt
+
+plt.figure()
+
+for element in data_object:
+ data_array = element["data"]
+ x = [x["xaxis"] for x in data_array]
+ y = [x["value"] for x in data_array]
+ label = element["name"]+"\n"+element["subtest"]
+ plt.scatter(x,y,label=label)
+
+plt.legend()
+plt.show()
diff --git a/python/mozperftest/mozperftest/metrics/notebook/perftestetl.py b/python/mozperftest/mozperftest/metrics/notebook/perftestetl.py
new file mode 100644
index 0000000000..bd28d9be6d
--- /dev/null
+++ b/python/mozperftest/mozperftest/metrics/notebook/perftestetl.py
@@ -0,0 +1,167 @@
+#!/usr/bin/env python3
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import json
+import os
+import pathlib
+from collections import OrderedDict
+
+from .constant import Constant
+from .transformer import SimplePerfherderTransformer, Transformer, get_transformer
+
+
+class PerftestETL(object):
+    """Controller class for the PerftestETL."""
+
+    def __init__(
+        self,
+        file_groups,
+        config,
+        prefix,
+        logger,
+        custom_transform=None,
+        sort_files=False,
+    ):
+        """Initializes PerftestETL.
+
+        :param dict file_groups: A dict of file groupings. The value
+            of each of the dict entries is the name of the data that
+            will be produced.
+        :param str custom_transform: The class name of a custom transformer.
+        """
+        self.fmt_data = {}
+        self.file_groups = file_groups
+        self.config = config
+        self.sort_files = sort_files
+        self.const = Constant()
+        self.prefix = prefix
+        self.logger = logger
+
+        # Gather the available transformers
+        tfms_dict = self.const.predefined_transformers
+
+        # XXX NOTEBOOK_PLUGIN functionality is broken at the moment.
+        # This code block will raise an exception if it detects it in
+        # the environment.
+        plugin_path = os.getenv("NOTEBOOK_PLUGIN")
+        if plugin_path:
+            raise Exception("NOTEBOOK_PLUGIN is currently broken.")
+
+        # Initialize the requested transformer
+        if custom_transform:
+            # try to load it directly, and fallback to registry
+            try:
+                tfm_cls = get_transformer(custom_transform)
+            except ImportError:
+                tfm_cls = tfms_dict.get(custom_transform)
+
+            if tfm_cls:
+                self.transformer = Transformer(
+                    files=[],
+                    custom_transformer=tfm_cls(),
+                    logger=self.logger,
+                    prefix=self.prefix,
+                )
+                self.logger.info(f"Found {custom_transform} transformer", self.prefix)
+            else:
+                raise Exception(f"Could not get a {custom_transform} transformer.")
+        else:
+            # Default transformer when none was requested.
+            self.transformer = Transformer(
+                files=[],
+                custom_transformer=SimplePerfherderTransformer(),
+                logger=self.logger,
+                prefix=self.prefix,
+            )
+
+    def parse_file_grouping(self, file_grouping):
+        """Handles differences in the file_grouping definitions.
+
+        It can either be a path to a folder containing the files, a list of files,
+        or it can contain settings from an artifact_downloader instance.
+
+        :param file_grouping: A file grouping entry.
+        :return: A list of files to process.
+        """
+        files = []
+        if isinstance(file_grouping, list):
+            # A list of files was provided
+            files = file_grouping
+        elif isinstance(file_grouping, dict):
+            # A dictionary of settings from an artifact_downloader instance
+            # was provided here
+            raise Exception(
+                "Artifact downloader tooling is disabled for the time being."
+            )
+        elif isinstance(file_grouping, str):
+            # Assume a path to files was given
+            filepath = file_grouping
+            newf = [f.resolve().as_posix() for f in pathlib.Path(filepath).rglob("*")]
+            files = newf
+        else:
+            raise Exception(
+                "Unknown file grouping type provided here: %s" % file_grouping
+            )
+
+        if self.sort_files:
+            if isinstance(files, list):
+                files.sort()
+            else:
+                # NOTE(review): this branch looks unreachable — the dict case
+                # above always raises, so `files` can only be a list here;
+                # confirm before relying on the OrderedDict path.
+                for _, file_list in files.items():
+                    file_list.sort()
+                files = OrderedDict(sorted(files.items(), key=lambda entry: entry[0]))
+
+        if not files:
+            raise Exception(
+                "Could not find any files in this configuration: %s" % file_grouping
+            )
+
+        return files
+
+    def parse_output(self):
+        # XXX Fix up this function, it should only return a directory for output
+        # not a directory or a file. Or remove it completely, it's not very useful.
+        prefix = "" if "prefix" not in self.config else self.config["prefix"]
+        filepath = f"{prefix}std-output.json"
+
+        if "output" in self.config:
+            filepath = self.config["output"]
+            if os.path.isdir(filepath):
+                filepath = os.path.join(filepath, f"{prefix}std-output.json")
+
+        return filepath
+
+    def process(self, **kwargs):
+        """Process the file groups and return the results of the requested analyses.
+
+        :return: All the results in a dictionary. The field names are the Analyzer
+            functions that were called.
+        """
+        fmt_data = []
+
+        for name, files in self.file_groups.items():
+            files = self.parse_file_grouping(files)
+            if isinstance(files, dict):
+                raise Exception(
+                    "Artifact downloader tooling is disabled for the time being."
+                )
+            else:
+                # Transform the data
+                self.transformer.files = files
+                trfm_data = self.transformer.process(name, **kwargs)
+
+                if isinstance(trfm_data, list):
+                    fmt_data.extend(trfm_data)
+                else:
+                    fmt_data.append(trfm_data)
+
+        self.fmt_data = fmt_data
+
+        # Write formatted data output to filepath
+        output_data_filepath = self.parse_output()
+
+        # NOTE(review): uses print() rather than self.logger — presumably
+        # intentional for notebook use, but inconsistent with the rest of
+        # the class; confirm.
+        print("Writing results to %s" % output_data_filepath)
+        with open(output_data_filepath, "w") as f:
+            json.dump(self.fmt_data, f, indent=4, sort_keys=True)
+
+        return {"data": self.fmt_data, "file-output": output_data_filepath}
diff --git a/python/mozperftest/mozperftest/metrics/notebook/perftestnotebook.py b/python/mozperftest/mozperftest/metrics/notebook/perftestnotebook.py
new file mode 100644
index 0000000000..99c3766b42
--- /dev/null
+++ b/python/mozperftest/mozperftest/metrics/notebook/perftestnotebook.py
@@ -0,0 +1,79 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import json
+import webbrowser
+from http.server import BaseHTTPRequestHandler, HTTPServer
+
+from .constant import Constant
+
+
+class PerftestNotebook(object):
+    """Controller class for PerftestNotebook."""
+
+    def __init__(self, data, logger, prefix):
+        """Initialize the PerftestNotebook.
+
+        :param dict data: Standardized data, post-transformation.
+        """
+        self.data = data
+        self.logger = logger
+        self.prefix = prefix
+        self.const = Constant()
+
+    def get_notebook_section(self, func):
+        """Fetch notebook content based on analysis name.
+
+        :param str func: analysis or notebook section name
+        """
+        template_path = self.const.here / "notebook-sections" / func
+        if not template_path.exists():
+            # Unknown sections are skipped with a warning rather than
+            # failing the whole notebook build.
+            self.logger.warning(
+                f"Could not find the notebook-section called {func}", self.prefix
+            )
+            return ""
+        with template_path.open() as f:
+            return f.read()
+
+    def post_to_iodide(self, analysis=None, start_local_server=True):
+        """Build notebook and post it to iodide.
+
+        :param list analysis: notebook section names, analysis to perform in iodide
+        """
+        data = self.data
+        notebook_sections = ""
+
+        # The header section (markdown intro + data fetch cell) always
+        # comes first; requested analysis sections are appended after it.
+        template_header_path = self.const.here / "notebook-sections" / "header"
+        with template_header_path.open() as f:
+            notebook_sections += f.read()
+
+        if analysis:
+            for func in analysis:
+                notebook_sections += self.get_notebook_section(func)
+
+        # repr() quotes the notebook text so it lands as a JS string
+        # literal in place of the `replace_me` placeholder.
+        template_upload_file_path = self.const.here / "template_upload_file.html"
+        with template_upload_file_path.open() as f:
+            html = f.read().replace("replace_me", repr(notebook_sections))
+
+        upload_file_path = self.const.here / "upload_file.html"
+        with upload_file_path.open("w") as f:
+            f.write(html)
+
+        # set up local server. Iodide will fetch data from localhost:5000/data
+        class DataRequestHandler(BaseHTTPRequestHandler):
+            def do_GET(self):
+                if self.path == "/data":
+                    self.send_response(200)
+                    self.send_header("Content-type", "application/json")
+                    self.send_header("Access-Control-Allow-Origin", "*")
+                    self.end_headers()
+                    # NOTE(review): bytes(...) around .encode() is redundant
+                    # — .encode() already returns bytes.
+                    self.wfile.write(bytes(json.dumps(data).encode("utf-8")))
+
+        PORT_NUMBER = 5000
+        server = HTTPServer(("", PORT_NUMBER), DataRequestHandler)
+        # NOTE(review): serve_forever() blocks regardless of
+        # start_local_server — the flag only controls opening the browser;
+        # confirm this is the intended behavior.
+        if start_local_server:
+            webbrowser.open_new_tab(str(upload_file_path))
+        try:
+            server.serve_forever()
+        finally:
+            server.server_close()
diff --git a/python/mozperftest/mozperftest/metrics/notebook/template_upload_file.html b/python/mozperftest/mozperftest/metrics/notebook/template_upload_file.html
new file mode 100644
index 0000000000..2400be4e87
--- /dev/null
+++ b/python/mozperftest/mozperftest/metrics/notebook/template_upload_file.html
@@ -0,0 +1,39 @@
+<!DOCTYPE html>
+<!-- This Source Code Form is subject to the terms of the Mozilla Public
+ - License, v. 2.0. If a copy of the MPL was not distributed with this
+ - file, You can obtain one at http://mozilla.org/MPL/2.0/. -->
+<html>
+ <body>
+ Redirecting to Iodide...
+ <script>
+ function post(path, params, method='post') {
+ const form = document.createElement('form');
+ form.method = method;
+ form.action = path;
+ form.id = 'uploadform';
+
+ for (const key in params) {
+ if (params.hasOwnProperty(key)) {
+ const textarea = document.createElement('textarea');
+ textarea.name = key;
+ textarea.value = params[key];
+ textarea.style.display = "none";
+ form.appendChild(textarea);
+ }
+ }
+
+
+ document.body.appendChild(form);
+ form.submit();
+ }
+
+      // TODO: need to escape all `'` characters,
+      // otherwise this will result in JavaScript failures.
+ var template = replace_me
+
+ // Create a form object, and send it
+ // after release, change back to https://alpha.iodide.io/from-template/
+ post("https://alpha.iodide.io/from-template/", {"iomd": template})
+ </script>
+ </body>
+</html>
diff --git a/python/mozperftest/mozperftest/metrics/notebook/transformer.py b/python/mozperftest/mozperftest/metrics/notebook/transformer.py
new file mode 100644
index 0000000000..7ecbc40d89
--- /dev/null
+++ b/python/mozperftest/mozperftest/metrics/notebook/transformer.py
@@ -0,0 +1,228 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import importlib.util
+import inspect
+import json
+import pathlib
+
+from jsonschema import validate
+
+from mozperftest.metrics.exceptions import (
+ NotebookDuplicateTransformsError,
+ NotebookInvalidPathError,
+ NotebookInvalidTransformError,
+)
+from mozperftest.runner import HERE
+from mozperftest.utils import load_class
+
+
+class Transformer(object):
+    """Abstract class for data transformers."""
+
+    def __init__(self, files=None, custom_transformer=None, logger=None, prefix=None):
+        """Initialize the transformer with files.
+
+        :param list files: A list of files containing data to transform.
+        :param object custom_transformer: A custom transformer instance.
+            Must implement `transform` and `merge` methods.
+        :param logger: Object used for logging; must expose a
+            `warning(msg, prefix)` method (used by `process` and the
+            `files` setter).
+        :param str prefix: Prefix passed along with every log message.
+        """
+        self._files = files
+        self.logger = logger
+        self.prefix = prefix
+
+        if custom_transformer:
+            # A usable transformer must expose callable `transform` and
+            # `merge` attributes; anything else is rejected up front.
+            valid = (
+                hasattr(custom_transformer, "transform")
+                and hasattr(custom_transformer, "merge")
+                and callable(custom_transformer.transform)
+                and callable(custom_transformer.merge)
+            )
+
+            if not valid:
+                raise NotebookInvalidTransformError(
+                    "The custom transformer must contain `transform` and `merge` methods."
+                )
+
+        self._custom_transformer = custom_transformer
+
+        # Schema used by `process` to validate the merged output.
+        with pathlib.Path(HERE, "schemas", "transformer_schema.json").open() as f:
+            self.schema = json.load(f)
+
+    @property
+    def files(self):
+        return self._files
+
+    @files.setter
+    def files(self, val):
+        # Reject non-list values with a warning instead of raising, so a
+        # bad assignment leaves the previous file list untouched.
+        if not isinstance(val, list):
+            self.logger.warning(
+                "`files` must be a list, got %s" % type(val), self.prefix
+            )
+            return
+        self._files = val
+
+    @property
+    def custom_transformer(self):
+        return self._custom_transformer
+
+    def open_data(self, file):
+        """Opens a file of data.
+
+        If it's not a JSON file, then the data
+        will be opened as a text file.
+
+        :param str file: Path to the data file.
+        :return: Data contained in the file (parsed JSON, or a list of
+            text lines).
+        """
+        with open(file) as f:
+            if file.endswith(".json"):
+                return json.load(f)
+            return f.readlines()
+
+    def process(self, name, **kwargs):
+        """Process all the known data into a merged, and standardized data format.
+
+        Each file is opened (through the custom transformer's `open_data`
+        when it provides one), transformed, and tagged with its source
+        file; the transformed entries are then merged and validated
+        against the JSON schema.  Files that fail to open or transform
+        are logged and skipped rather than aborting the run.
+
+        :param str name: Name of the merged data.
+        :return dict: Merged data.
+        """
+        trfmdata = []
+
+        for file in self.files:
+            data = {}
+
+            # Open data
+            try:
+                if hasattr(self._custom_transformer, "open_data"):
+                    data = self._custom_transformer.open_data(file)
+                else:
+                    data = self.open_data(file)
+            except Exception as e:
+                self.logger.warning(
+                    "Failed to open file %s, skipping" % file, self.prefix
+                )
+                self.logger.warning("%s %s" % (e.__class__.__name__, e), self.prefix)
+
+            # Transform data
+            try:
+                data = self._custom_transformer.transform(data, **kwargs)
+                if not isinstance(data, list):
+                    data = [data]
+                # Record which file each data point came from, unless the
+                # transformer already set one.
+                for entry in data:
+                    for ele in entry["data"]:
+                        if "file" not in ele:
+                            ele.update({"file": file})
+                trfmdata.extend(data)
+            except Exception as e:
+                self.logger.warning(
+                    "Failed to transform file %s, skipping" % file, self.prefix
+                )
+                self.logger.warning("%s %s" % (e.__class__.__name__, e), self.prefix)
+
+        merged = self._custom_transformer.merge(trfmdata)
+
+        # `merge` may return either a single dict or a list of dicts;
+        # stamp the requested name on every entry either way.
+        if isinstance(merged, dict):
+            merged["name"] = name
+        else:
+            for e in merged:
+                e["name"] = name
+
+        validate(instance=merged, schema=self.schema)
+        return merged
+
+
+class SimplePerfherderTransformer:
+    """Transforms perfherder data into the standardized data format."""
+
+    # Class-level default for the x-axis counter; `+=` in transform()
+    # creates an instance attribute on first use, and merge() resets it.
+    entry_number = 0
+
+    def transform(self, data):
+        # Assumes perfherder-shaped input with a summary value at
+        # data["suites"][0]["value"]; raises KeyError/IndexError otherwise.
+        self.entry_number += 1
+        return {
+            "data": [{"value": data["suites"][0]["value"], "xaxis": self.entry_number}]
+        }
+
+    def merge(self, sde):
+        # Flatten the per-file entries into a single "data" list.
+        merged = {"data": []}
+        for entry in sde:
+            if isinstance(entry["data"], list):
+                merged["data"].extend(entry["data"])
+            else:
+                merged["data"].append(entry["data"])
+
+        # Reset the counter so the transformer can be reused.
+        self.entry_number = 0
+        return merged
+
+
+def get_transformer(path, ret_members=False):
+    """This function returns a Transformer class with the given path.
+
+    :param str path: The path points to the custom transformer.
+    :param bool ret_members: If true then return inspect.getmembers().
+    :return Transformer if not ret_members else inspect.getmembers().
+    :raises NotebookInvalidPathError: If a .py path does not exist.
+    :raises NotebookInvalidTransformError: If no valid transformer class
+        is found in the file (and ret_members is False).
+    """
+    file = pathlib.Path(path)
+
+    # Paths that are not .py files are delegated to load_class (no
+    # existence check is performed in that case).
+    if file.suffix != ".py":
+        return load_class(path)
+
+    if not file.exists():
+        raise NotebookInvalidPathError(f"The path {path} does not exist.")
+
+    # Importing a source file directly
+    spec = importlib.util.spec_from_file_location(name=file.name, location=path)
+    module = importlib.util.module_from_spec(spec)
+    spec.loader.exec_module(module)
+
+    # Keep only classes that look like transformers, i.e. that expose
+    # callable `transform` and `merge` attributes.
+    members = inspect.getmembers(
+        module,
+        lambda c: inspect.isclass(c)
+        and hasattr(c, "transform")
+        and hasattr(c, "merge")
+        and callable(c.transform)
+        and callable(c.merge),
+    )
+
+    if not members and not ret_members:
+        raise NotebookInvalidTransformError(
+            f"The path {path} was found but it was not a valid transformer."
+        )
+
+    # getmembers() returns (name, class) pairs; when a single transformer
+    # was requested, return the class from the first pair.
+    return members if ret_members else members[0][-1]
+
+
+def get_transformers(dirpath=None):
+ """This function returns a dict of transformers under the given path.
+
+ If more than one transformers have the same class name, an exception will be raised.
+
+ :param pathlib.Path dirpath: Path to a directory containing the transformers.
+ :return dict: {"Transformer class name": Transformer class}.
+ """
+
+ ret = {}
+
+ if not dirpath.exists():
+ raise NotebookInvalidPathError(f"The path {dirpath.as_posix()} does not exist.")
+
+ if not dirpath.is_dir():
+ raise NotebookInvalidPathError(
+ f"Path given is not a directory: {dirpath.as_posix()}"
+ )
+
+ tfm_files = list(dirpath.glob("*.py"))
+ importlib.machinery.SOURCE_SUFFIXES.append("")
+
+ for file in tfm_files:
+ members = get_transformer(file.resolve().as_posix(), True)
+
+ for (name, tfm_class) in members:
+ if name in ret:
+ raise NotebookDuplicateTransformsError(
+ f"Duplicated transformer {name} "
+ + f"is found in the directory {dirpath.as_posix()}."
+ + "Please define each transformer class with a unique class name.",
+ )
+ ret.update({name: tfm_class})
+
+ return ret
diff --git a/python/mozperftest/mozperftest/metrics/notebook/transforms/__init__.py b/python/mozperftest/mozperftest/metrics/notebook/transforms/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozperftest/mozperftest/metrics/notebook/transforms/__init__.py
diff --git a/python/mozperftest/mozperftest/metrics/notebook/transforms/logcattime.py b/python/mozperftest/mozperftest/metrics/notebook/transforms/logcattime.py
new file mode 100644
index 0000000000..184b327540
--- /dev/null
+++ b/python/mozperftest/mozperftest/metrics/notebook/transforms/logcattime.py
@@ -0,0 +1,121 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import re
+from datetime import datetime, timedelta
+
+from mozperftest.metrics.exceptions import (
+ NotebookTransformError,
+ NotebookTransformOptionsError,
+)
+
+TIME_MATCHER = re.compile(r"(\s+[\d.:]+\s+)")
+
+
+class LogCatTimeTransformer:
+    """Used for parsing times/durations from logcat logs."""
+
+    def open_data(self, file):
+        # Read the whole log as one string (not line-by-line) so the
+        # multi-line regexes in _parse_logcat can match across lines.
+        with open(file) as f:
+            return f.read()
+
+    def _get_duration(self, startline, endline):
+        """Parse duration between two logcat lines.
+
+        Expecting lines with a prefix like:
+        05-26 11:45:41.226 ...
+
+        We only parse the hours, minutes, seconds, and milliseconds here
+        because we have no use for the days and other times.
+
+        :return: Duration in milliseconds, or None when either line has
+            no parsable timestamp.
+        """
+        match = TIME_MATCHER.search(startline)
+        if not match:
+            return None
+        start = match.group(1).strip()
+
+        match = TIME_MATCHER.search(endline)
+        if not match:
+            return None
+        end = match.group(1).strip()
+
+        sdt = datetime.strptime(start, "%H:%M:%S.%f")
+        edt = datetime.strptime(end, "%H:%M:%S.%f")
+
+        # If the ending is less than the start, we rolled into a new
+        # day, so we add 1 day to the end time to handle this
+        if sdt > edt:
+            edt += timedelta(1)
+
+        return (edt - sdt).total_seconds() * 1000
+
+    def _parse_logcat(self, logcat, first_ts, second_ts=None, processor=None):
+        """Parse data from logcat lines.
+
+        If two regexes are provided (first_ts, and second_ts), then the elapsed
+        time between those lines will be measured. Otherwise, if only `first_ts`
+        is defined then, we expect a number as the first group from the
+        match. Optionally, a `processor` function can be provided to process
+        all the groups that were obtained from the match, allowing users to
+        customize what the result is.
+
+        :param str logcat: The logcat text to parse (one string).
+        :param str first_ts: Regular expression for the first matching line.
+        :param str second_ts: Regular expression for the second matching line.
+        :param func processor: Function to process the groups from the first_ts
+            regular expression.
+        :return list: Returns a list of durations/times parsed.
+        """
+        # Wrap first_ts so the full matching line becomes group 1; any
+        # groups inside first_ts itself follow it.
+        full_re = r"(" + first_ts + r"\n)"
+        if second_ts:
+            full_re += r".+(?:\n.+)+?(\n" + second_ts + r"\n)"
+
+        durations = []
+        for match in re.findall(full_re, logcat, re.MULTILINE):
+            # re.findall returns plain strings (not tuples) when the
+            # pattern has exactly one group, i.e. first_ts contained no
+            # groups of its own.
+            if isinstance(match, str):
+                raise NotebookTransformOptionsError(
+                    "Only one regex was provided, and it has no groups to process."
+                )
+
+            if second_ts is not None:
+                if len(match) != 2:
+                    raise NotebookTransformError(
+                        "More than 2 groups found. It's unclear which "
+                        "to use for calculating the durations."
+                    )
+                val = self._get_duration(match[0], match[1])
+            elif processor is not None:
+                # Ignore the first match (that is the full line)
+                val = processor(match[1:])
+            else:
+                val = match[1]
+
+            if val is not None:
+                durations.append(float(val))
+
+        return durations
+
+    def transform(self, data, **kwargs):
+        # NOTE(review): "first-timestamp" appears to be required — when it
+        # is missing, get() yields None and the regex concatenation in
+        # _parse_logcat raises TypeError.  Confirm callers always set it.
+        alltimes = self._parse_logcat(
+            data,
+            kwargs.get("first-timestamp"),
+            second_ts=kwargs.get("second-timestamp"),
+            processor=kwargs.get("processor"),
+        )
+        subtest = kwargs.get("transform-subtest-name")
+        return [
+            {
+                "data": [{"value": val, "xaxis": c} for c, val in enumerate(alltimes)],
+                "subtest": subtest if subtest else "logcat-metric",
+            }
+        ]
+
+    def merge(self, sde):
+        # Group data points by subtest name, preserving insertion order.
+        grouped_data = {}
+
+        for entry in sde:
+            subtest = entry["subtest"]
+            data = grouped_data.get(subtest, [])
+            data.extend(entry["data"])
+            grouped_data.update({subtest: data})
+
+        return [{"data": v, "subtest": k} for k, v in grouped_data.items()]
diff --git a/python/mozperftest/mozperftest/metrics/notebook/transforms/single_json.py b/python/mozperftest/mozperftest/metrics/notebook/transforms/single_json.py
new file mode 100644
index 0000000000..375615fb23
--- /dev/null
+++ b/python/mozperftest/mozperftest/metrics/notebook/transforms/single_json.py
@@ -0,0 +1,56 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from mozperftest.metrics.notebook.utilities import flat
+
+
+class SingleJsonRetriever:
+    """Transforms perfherder data into the standardized data format."""
+
+    # Class-level default for the x-axis counter; `+=` in transform()
+    # creates an instance attribute on first use, and merge() resets it.
+    entry_number = 0
+
+    def transform(self, data):
+        self.entry_number += 1
+
+        # flat(data, ()) returns a dict that have one key per dictionary path
+        # in the original data.
+        return [
+            {
+                "data": [{"value": i, "xaxis": self.entry_number} for i in v],
+                "subtest": k,
+            }
+            for k, v in flat(data, ()).items()
+        ]
+
+    def merge(self, sde):
+        # Group data points from all files by subtest name.
+        grouped_data = {}
+        for entry in sde:
+            subtest = entry["subtest"]
+            data = grouped_data.get(subtest, [])
+            data.extend(entry["data"])
+            grouped_data.update({subtest: data})
+
+        merged_data = [{"data": v, "subtest": k} for k, v in grouped_data.items()]
+
+        # Reset the counter so the transformer can be reused.
+        self.entry_number = 0
+        return merged_data
+
+    def summary(self, suite):
+        """Summarize a suite of perfherder data into a single value.
+
+        Returning None means that there's no summary. Otherwise, an integer
+        or float must be returned.
+
+        Only available in the Perfherder layer.
+        """
+        return None
+
+    def subtest_summary(self, subtest):
+        """Summarize a set of replicates for a given subtest.
+
+        By default, it returns a None so we fall back to using the
+        average of the replicates which is the default.
+
+        Only available in the Perfherder layer.
+        """
+        return None
diff --git a/python/mozperftest/mozperftest/metrics/notebook/utilities.py b/python/mozperftest/mozperftest/metrics/notebook/utilities.py
new file mode 100644
index 0000000000..7fd97fa3fa
--- /dev/null
+++ b/python/mozperftest/mozperftest/metrics/notebook/utilities.py
@@ -0,0 +1,63 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from collections.abc import Iterable
+
+
+def flat(data, parent_dir):
+    """
+    Converts a dictionary with nested entries like this
+    {
+        "dict1": {
+            "dict2": {
+                "key1": value1,
+                "key2": value2,
+                ...
+            },
+            ...
+        },
+        ...
+        "dict3": {
+            "key3": value3,
+            "key4": value4,
+            ...
+        }
+        ...
+    }
+
+    to a "flattened" dictionary like this that has no nested entries:
+    {
+        "dict1.dict2.key1": value1,
+        "dict1.dict2.key2": value2,
+        ...
+        "dict3.key3": value3,
+        "dict3.key4": value4,
+        ...
+    }
+
+    :param Iterable data : json data.
+    :param tuple parent_dir: json fields.
+
+    :return dict: {subtest: value}
+    """
+    result = {}
+
+    if not data:
+        return result
+
+    # Lists: flatten each item under the same parent path and merge the
+    # per-item results.
+    if isinstance(data, list):
+        for item in data:
+            for k, v in flat(item, parent_dir).items():
+                result.setdefault(k, []).extend(v)
+
+    if isinstance(data, dict):
+        for k, v in data.items():
+            current_dir = parent_dir + (k,)
+            subtest = ".".join(current_dir)
+            # Strings are iterable, but are treated as scalar leaves.
+            if isinstance(v, Iterable) and not isinstance(v, str):
+                for x, y in flat(v, current_dir).items():
+                    result.setdefault(x, []).extend(y)
+            elif v or v == 0:
+                # Keeps truthy scalars plus 0 (and False, since
+                # False == 0); other falsy leaves like None or "" are
+                # dropped.
+                result.setdefault(subtest, []).append(v)
+
+    return result
diff --git a/python/mozperftest/mozperftest/metrics/notebookupload.py b/python/mozperftest/mozperftest/metrics/notebookupload.py
new file mode 100644
index 0000000000..ec53af2b7f
--- /dev/null
+++ b/python/mozperftest/mozperftest/metrics/notebookupload.py
@@ -0,0 +1,115 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import pathlib
+
+from mozperftest.layers import Layer
+from mozperftest.metrics.common import COMMON_ARGS, filtered_metrics
+from mozperftest.metrics.notebook import PerftestNotebook
+from mozperftest.metrics.utils import is_number
+
+
+class Notebook(Layer):
+    """Post standardized data to iodide and run analysis."""
+
+    name = "notebook"
+    activated = False
+
+    # NOTE(review): COMMON_ARGS is updated in place here (no copy is
+    # made), so these options are also visible to every other layer that
+    # assigns `arguments = COMMON_ARGS` — confirm this sharing is intended.
+    arguments = COMMON_ARGS
+    arguments.update(
+        {
+            "analysis": {
+                "nargs": "*",
+                "default": [],
+                "help": "List of analyses to run in Iodide.",
+            },
+            "analyze-strings": {
+                "action": "store_true",
+                "default": False,
+                "help": (
+                    "If set, strings won't be filtered out of the results to analyze in Iodide."
+                ),
+            },
+            "no-server": {
+                "action": "store_true",
+                "default": False,
+                "help": "If set, the data won't be opened in Iodide.",
+            },
+            "compare-to": {
+                "nargs": "*",
+                "default": [],
+                "help": (
+                    "Compare the results from this test to the historical data in the folder(s) "
+                    "specified through this option. Only JSON data can be processed for the "
+                    "moment. Each folder containing those JSONs is considered as a distinct "
+                    "data point to compare with the newest run."
+                ),
+            },
+            "stats": {
+                "action": "store_true",
+                "default": False,
+                "help": "If set, browsertime statistics will be reported.",
+            },
+        }
+    )
+
+    def run(self, metadata):
+        """Filter the results, optionally pull in historical data to
+        compare against, and post everything to Iodide.
+
+        :param metadata: Metadata holding the results; returned unchanged.
+        """
+        exclusions = None
+        if not self.get_arg("stats"):
+            # Browsertime statistics are excluded unless --stats is set.
+            exclusions = ["statistics."]
+
+        for result in metadata.get_results():
+            # NOTE(review): this yields e.g. "name- newest run" — a
+            # leading space (" - newest run") may have been intended.
+            result["name"] += "- newest run"
+
+        analysis = self.get_arg("analysis")
+        dir_list = self.get_arg("compare-to")
+        if dir_list:
+            # Each folder given through --compare-to becomes a distinct
+            # data point compared against the newest run.
+            analysis.append("compare")
+            for directory in dir_list:
+                dirpath = pathlib.Path(directory)
+                if not dirpath.exists():
+                    raise Exception(f"{dirpath} does not exist.")
+                if not dirpath.is_dir():
+                    raise Exception(f"{dirpath} is not a directory")
+                # TODO: Handle more than just JSON data.
+                for jsonfile in dirpath.rglob("*.json"):
+                    metadata.add_result(
+                        {
+                            "results": str(jsonfile.resolve()),
+                            "name": jsonfile.parent.name,
+                        }
+                    )
+
+        results = filtered_metrics(
+            metadata,
+            self.get_arg("output"),
+            self.get_arg("prefix"),
+            metrics=self.get_arg("metrics"),
+            transformer=self.get_arg("transformer"),
+            exclude=exclusions,
+            split_by=self.get_arg("split-by"),
+            simplify_names=self.get_arg("simplify-names"),
+            simplify_exclude=self.get_arg("simplify-exclude"),
+        )
+
+        if not results:
+            self.warning("No results left after filtering")
+            return metadata
+
+        # Keep only numeric values, unless --analyze-strings was given.
+        data_to_post = []
+        for name, res in results.items():
+            for r in res:
+                val = r["data"][0]["value"]
+                if is_number(val):
+                    data_to_post.append(r)
+                elif self.get_arg("analyze-strings"):
+                    data_to_post.append(r)
+
+        self.ptnb = PerftestNotebook(
+            data=data_to_post, logger=metadata, prefix=self.get_arg("prefix")
+        )
+        self.ptnb.post_to_iodide(
+            analysis, start_local_server=not self.get_arg("no-server")
+        )
+
+        return metadata
diff --git a/python/mozperftest/mozperftest/metrics/perfboard/__init__.py b/python/mozperftest/mozperftest/metrics/perfboard/__init__.py
new file mode 100644
index 0000000000..6fbe8159b2
--- /dev/null
+++ b/python/mozperftest/mozperftest/metrics/perfboard/__init__.py
@@ -0,0 +1,3 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
diff --git a/python/mozperftest/mozperftest/metrics/perfboard/dashboard.json b/python/mozperftest/mozperftest/metrics/perfboard/dashboard.json
new file mode 100644
index 0000000000..804b880b55
--- /dev/null
+++ b/python/mozperftest/mozperftest/metrics/perfboard/dashboard.json
@@ -0,0 +1,56 @@
+{
+ "annotations": {
+ "list": [
+ {
+ "builtIn": 1,
+ "datasource": "-- Grafana --",
+ "enable": true,
+ "hide": true,
+ "iconColor": "rgba(0, 211, 255, 1)",
+ "name": "Annotations & Alerts",
+ "type": "dashboard"
+ }
+ ]
+ },
+ "editable": true,
+ "gnetId": null,
+ "graphTooltip": 0,
+ "id": 1,
+ "links": [],
+ "panels": [],
+ "refresh": false,
+ "schemaVersion": 22,
+ "style": "dark",
+ "tags": [
+ "component"
+ ],
+ "templating": {
+ "list": []
+ },
+ "time": {
+ "from": "now-30d",
+ "to": "now"
+ },
+ "timepicker": {
+ "refresh_intervals": [
+ "5s",
+ "10s",
+ "30s",
+ "1m",
+ "5m",
+ "15m",
+ "30m",
+ "1h",
+ "2h",
+ "1d"
+ ]
+ },
+ "timezone": "",
+ "title": "?",
+ "uid": null,
+ "id": null,
+ "variables": {
+ "list": []
+ },
+ "version": 13
+}
diff --git a/python/mozperftest/mozperftest/metrics/perfboard/grafana.py b/python/mozperftest/mozperftest/metrics/perfboard/grafana.py
new file mode 100644
index 0000000000..1fa76ea991
--- /dev/null
+++ b/python/mozperftest/mozperftest/metrics/perfboard/grafana.py
@@ -0,0 +1,87 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import json
+import os
+
+from grafana_api.grafana_face import GrafanaFace
+
+HERE = os.path.dirname(__file__)
+
+
+with open(os.path.join(HERE, "dashboard.json")) as f:
+ template = json.loads(f.read())
+
+with open(os.path.join(HERE, "panel.json")) as f:
+ panel_template = json.loads(f.read())
+
+with open(os.path.join(HERE, "target.json")) as f:
+ target_template = json.loads(f.read())
+
+
+class Grafana:
+ def __init__(self, layer, key, host="perfboard.dev.mozaws.net", port=3000):
+ self.client = GrafanaFace(host=host, port=port, auth=key)
+ self.layer = layer
+
+ def get_dashboard(self, title):
+ existing = self.client.search.search_dashboards(tag="component")
+ existing = dict(
+ [(dashboard["title"].lower(), dashboard["uid"]) for dashboard in existing]
+ )
+ if title in existing:
+ return self.client.dashboard.get_dashboard(existing[title])
+ self.layer.debug(f"Creating dashboard {title}")
+ d = dict(template)
+ d["title"] = title.capitalize()
+ res = self.client.dashboard.update_dashboard(
+ dashboard={"dashboard": d, "folderId": 0, "overwrite": False}
+ )
+
+ return self.client.dashboard.get_dashboard(res["uid"])
+
+ def _add_panel(self, dashboard, panel_title, metrics):
+ found = None
+ ids = []
+ for panel in dashboard["dashboard"]["panels"]:
+ ids.append(panel["id"])
+
+ if panel["title"] == panel_title:
+ found = panel
+
+ ids.sort()
+
+ need_update = False
+ if found is None:
+ # create the panel
+ panel = panel_template
+ panel["title"] = panel_title
+ if ids != []:
+ panel["id"] = ids[-1] + 1
+ else:
+ panel["id"] = 1
+ self.layer.debug("Creating panel")
+ dashboard["dashboard"]["panels"].append(panel)
+ need_update = True
+ else:
+ self.layer.debug("Panel exists")
+ panel = found
+
+ # check the metrics
+ existing = [target["measurement"] for target in panel["targets"]]
+
+ for metric in metrics:
+ if metric in existing:
+ continue
+ m = dict(target_template)
+ m["measurement"] = metric
+ panel["targets"].append(m)
+ need_update = True
+
+ if need_update:
+ self.layer.debug("Updating dashboard")
+ self.client.dashboard.update_dashboard(dashboard=dashboard)
+
+ def add_panel(self, dashboard, panel, metrics):
+ dashboard = self.get_dashboard(dashboard)
+ self._add_panel(dashboard, panel, metrics)
diff --git a/python/mozperftest/mozperftest/metrics/perfboard/influx.py b/python/mozperftest/mozperftest/metrics/perfboard/influx.py
new file mode 100644
index 0000000000..4f7e27072c
--- /dev/null
+++ b/python/mozperftest/mozperftest/metrics/perfboard/influx.py
@@ -0,0 +1,188 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import datetime
+import statistics
+from collections import defaultdict
+
+from mozperftest import utils
+from mozperftest.layers import Layer
+from mozperftest.metrics.common import COMMON_ARGS, filtered_metrics
+from mozperftest.utils import get_tc_secret, install_package
+
+
+class Influx(Layer):
+ """Sends the metrics to an InfluxDB server"""
+
+ name = "perfboard"
+ activated = False
+ arguments = COMMON_ARGS
+ arguments.update(
+ {
+ "dashboard": {
+ "type": str,
+ "default": None,
+ "help": "Name of the dashboard - defaults to the script"
+ " `component` metadata. When not set, falls back to"
+ " `perftest`",
+ },
+ "influx-host": {
+ "type": str,
+ "default": "perfboard.dev.mozaws.net",
+ },
+ "influx-user": {
+ "type": str,
+ "default": "admin",
+ },
+ "influx-port": {
+ "type": int,
+ "default": 8086,
+ },
+ "influx-password": {
+ "type": str,
+ "default": None,
+ },
+ "influx-db": {
+ "type": str,
+ "default": "perf",
+ },
+ "grafana-host": {
+ "type": str,
+ "default": "perfboard.dev.mozaws.net",
+ },
+ "grafana-key": {
+ "type": str,
+ "default": None,
+ },
+ "grafana-port": {
+ "type": int,
+ "default": 3000,
+ },
+ }
+ )
+
+ def _setup(self):
+ venv = self.mach_cmd.virtualenv_manager
+ try:
+ from influxdb import InfluxDBClient
+ except ImportError:
+ install_package(venv, "influxdb", ignore_failure=False)
+ from influxdb import InfluxDBClient
+
+ try:
+ from mozperftest.metrics.perfboard.grafana import Grafana
+ except ImportError:
+ install_package(venv, "grafana_api", ignore_failure=False)
+ from mozperftest.metrics.perfboard.grafana import Grafana
+
+ if utils.ON_TRY:
+ secret = get_tc_secret()
+ i_host = secret["influx_host"]
+ i_port = secret["influx_port"]
+ i_user = secret["influx_user"]
+ i_password = secret["influx_password"]
+ i_dbname = secret["influx_db"]
+ g_key = secret["grafana_key"]
+ g_host = secret["grafana_host"]
+ g_port = secret["grafana_port"]
+ else:
+ i_host = self.get_arg("influx-host")
+ i_port = self.get_arg("influx-port")
+ i_user = self.get_arg("influx-user")
+ i_password = self.get_arg("influx-password")
+ if i_password is None:
+ raise Exception("You need to set --perfboard-influx-password")
+ i_dbname = self.get_arg("influx-db")
+ g_key = self.get_arg("grafana-key")
+ if g_key is None:
+ raise Exception("You need to set --perfboard-grafana-key")
+ g_host = self.get_arg("grafana-host")
+ g_port = self.get_arg("grafana-port")
+
+ self.client = InfluxDBClient(i_host, i_port, i_user, i_password, i_dbname)
+ # this will error out if the server is unreachable
+ self.client.ping()
+ self.grafana = Grafana(self, g_key, g_host, g_port)
+
+ def _build_point(self, name, component, values, date):
+ value = statistics.mean(values)
+ return {
+ "measurement": name,
+ "tags": {
+ "component": component,
+ },
+ "time": date,
+ "fields": {"Float_value": float(value)},
+ }
+
+ def run(self, metadata):
+ when = datetime.datetime.utcnow()
+ date = when.isoformat()
+ metrics = self.get_arg("metrics")
+
+ # Get filtered metrics
+ results = filtered_metrics(
+ metadata,
+ self.get_arg("output"),
+ self.get_arg("prefix"),
+ metrics=metrics,
+ transformer=self.get_arg("transformer"),
+ split_by=self.get_arg("split-by"),
+ simplify_names=self.get_arg("simplify-names"),
+ simplify_exclude=self.get_arg("simplify-exclude"),
+ )
+
+ if not results:
+ self.warning("No results left after filtering")
+ return metadata
+
+ # there's one thing we don't do yet is getting a timestamp
+ # for each measure that is happening in browsertime or xpcshell
+ # if we had it, we could send all 13/25 samples, each one with
+ # their timestamp, to InfluxDB, and let Grafana handle the
+ # mean() or median() part.
+ #
+ # Until we have this, here we convert the series to
+ # a single value and timestamp
+ self._setup()
+ component = self.get_arg("dashboard")
+ if component is None:
+ component = metadata.script.get("component", "perftest")
+
+ data = defaultdict(list)
+ for name, res in results.items():
+ for line in res:
+ if "subtest" not in line:
+ continue
+ metric_name = line["subtest"]
+ short_name = metric_name.split(".")[-1]
+ short_name = short_name.lower()
+ if metrics and not any(
+ [m.lower().startswith(short_name.lower()) for m in metrics]
+ ):
+ continue
+ values = [v["value"] for v in line["data"]]
+ data[short_name].extend(values)
+
+ if not data:
+ self.warning("No results left after filtering")
+ return data
+
+ points = []
+ for metric_name, values in data.items():
+ try:
+ point = self._build_point(metric_name, component, values, date)
+ except TypeError:
+ continue
+ points.append(point)
+
+ self.info("Sending data to InfluxDB")
+ self.client.write_points(points)
+
+ # making sure we expose it in Grafana
+ test_name = self.get_arg("tests")[0]
+ test_name = test_name.split("/")[-1]
+ for metric_name in data:
+ self.grafana.add_panel(component, test_name, metric_name)
+
+ return metadata
diff --git a/python/mozperftest/mozperftest/metrics/perfboard/panel.json b/python/mozperftest/mozperftest/metrics/perfboard/panel.json
new file mode 100644
index 0000000000..61deeaad8f
--- /dev/null
+++ b/python/mozperftest/mozperftest/metrics/perfboard/panel.json
@@ -0,0 +1,81 @@
+ {
+ "aliasColors": {},
+ "bars": false,
+ "dashLength": 10,
+ "dashes": false,
+ "datasource": null,
+ "fill": 1,
+ "fillGradient": 0,
+ "gridPos": {
+ "h": 10,
+ "w": 11,
+ "x": 0,
+ "y": 0
+ },
+ "hiddenSeries": false,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 1,
+ "nullPointMode": "null",
+ "options": {
+ "dataLinks": []
+ },
+ "percentage": false,
+ "pointradius": 2,
+ "points": false,
+ "renderer": "flot",
+ "seriesOverrides": [],
+ "spaceLength": 10,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ ],
+ "thresholds": [],
+ "timeFrom": null,
+ "timeRegions": [],
+ "timeShift": null,
+ "title": "BBC Link perftest",
+ "tooltip": {
+ "shared": true,
+ "sort": 0,
+ "value_type": "individual"
+ },
+ "type": "graph",
+ "xaxis": {
+ "buckets": null,
+ "mode": "time",
+ "name": null,
+ "show": true,
+ "values": []
+ },
+ "yaxes": [
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ },
+ {
+ "format": "short",
+ "label": null,
+ "logBase": 1,
+ "max": null,
+ "min": null,
+ "show": true
+ }
+ ],
+ "yaxis": {
+ "align": false,
+ "alignLevel": null
+ }
+ }
diff --git a/python/mozperftest/mozperftest/metrics/perfboard/target.json b/python/mozperftest/mozperftest/metrics/perfboard/target.json
new file mode 100644
index 0000000000..ad96488840
--- /dev/null
+++ b/python/mozperftest/mozperftest/metrics/perfboard/target.json
@@ -0,0 +1,20 @@
+
+{
+ "groupBy": [],
+ "measurement": "rumSpeedIndex",
+ "orderByTime": "ASC",
+ "policy": "default",
+ "refId": "A",
+ "resultFormat": "time_series",
+ "select": [
+ [
+ {
+ "params": [
+ "Float_value"
+ ],
+ "type": "field"
+ }
+ ]
+ ],
+ "tags": []
+ }
diff --git a/python/mozperftest/mozperftest/metrics/perfherder.py b/python/mozperftest/mozperftest/metrics/perfherder.py
new file mode 100644
index 0000000000..0521e2a205
--- /dev/null
+++ b/python/mozperftest/mozperftest/metrics/perfherder.py
@@ -0,0 +1,374 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import json
+import os
+import pathlib
+import statistics
+import sys
+
+import jsonschema
+
+from mozperftest.layers import Layer
+from mozperftest.metrics.common import COMMON_ARGS, filtered_metrics
+from mozperftest.metrics.exceptions import PerfherderValidDataError
+from mozperftest.metrics.notebook.constant import Constant
+from mozperftest.metrics.notebook.transformer import get_transformer
+from mozperftest.metrics.utils import has_callable_method, is_number, write_json
+from mozperftest.utils import strtobool
+
+PERFHERDER_SCHEMA = pathlib.Path(
+ "testing", "mozharness", "external_tools", "performance-artifact-schema.json"
+)
+
+
+class Perfherder(Layer):
+ """Output data in the perfherder format."""
+
+ name = "perfherder"
+ activated = False
+
+ arguments = COMMON_ARGS
+ arguments.update(
+ {
+ "app": {
+ "type": str,
+ "default": "firefox",
+ "choices": [
+ "firefox",
+ "chrome-m",
+ "chrome",
+ "chromium",
+ "fennec",
+ "geckoview",
+ "fenix",
+ "refbrow",
+ ],
+ "help": (
+ "Shorthand name of application that is "
+ "being tested (used in perfherder data)."
+ ),
+ },
+ "stats": {
+ "action": "store_true",
+ "default": False,
+ "help": "If set, browsertime statistics will be reported.",
+ },
+ "timestamp": {
+ "type": float,
+ "default": None,
+ "help": (
+ "Timestamp to use for the perfherder data. Can be the "
+ "current date or a past date if needed."
+ ),
+ },
+ }
+ )
+
+ def run(self, metadata):
+ """Processes the given results into a perfherder-formatted data blob.
+
+ If the `--perfherder` flag isn't provided, then the
+ results won't be processed into a perfherder-data blob. If the
+ flavor is unknown to us, then we assume that it comes from
+ browsertime.
+
+ XXX If needed, make a way to do flavor-specific processing
+
+ :param results list/dict/str: Results to process.
+ :param perfherder bool: True if results should be processed
+ into a perfherder-data blob.
+ :param flavor str: The flavor that is being processed.
+ """
+ prefix = self.get_arg("prefix")
+ output = self.get_arg("output")
+
+ # XXX Make an argument for exclusions from metrics
+ # (or go directly to regex's for metrics)
+ exclusions = None
+ if not self.get_arg("stats"):
+ exclusions = ["statistics."]
+
+ # Get filtered metrics
+ metrics = self.get_arg("metrics")
+ results, fullsettings = filtered_metrics(
+ metadata,
+ output,
+ prefix,
+ metrics=metrics,
+ transformer=self.get_arg("transformer"),
+ settings=True,
+ exclude=exclusions,
+ split_by=self.get_arg("split-by"),
+ simplify_names=self.get_arg("simplify-names"),
+ simplify_exclude=self.get_arg("simplify-exclude"),
+ )
+
+ if not any([results[name] for name in results]):
+ self.warning("No results left after filtering")
+ return metadata
+
+ # XXX Add version info into this data
+ app_info = {"name": self.get_arg("app", default="firefox")}
+
+ # converting the metrics list into a mapping where
+ # keys are the metrics name
+ if metrics is not None:
+ metrics = dict([(m["name"], m) for m in metrics])
+ else:
+ metrics = {}
+
+ all_perfherder_data = None
+ for name, res in results.items():
+ settings = dict(fullsettings[name])
+ # updating the settings with values provided in metrics, if any
+ if name in metrics:
+ settings.update(metrics[name])
+
+ # XXX Instead of just passing replicates here, we should build
+ # up a partial perfherder data blob (with options) and subtest
+ # overall values.
+ subtests = {}
+ for r in res:
+ vals = [v["value"] for v in r["data"] if is_number(v["value"])]
+ if vals:
+ subtests[r["subtest"]] = vals
+
+ perfherder_data = self._build_blob(
+ subtests,
+ name=name,
+ extra_options=settings.get("extraOptions"),
+ should_alert=strtobool(settings.get("shouldAlert", False)),
+ application=app_info,
+ alert_threshold=float(settings.get("alertThreshold", 2.0)),
+ lower_is_better=strtobool(settings.get("lowerIsBetter", True)),
+ unit=settings.get("unit", "ms"),
+ summary=settings.get("value"),
+ framework=settings.get("framework"),
+ metrics_info=metrics,
+ transformer=res[0].get("transformer", None),
+ )
+
+ if all_perfherder_data is None:
+ all_perfherder_data = perfherder_data
+ else:
+ all_perfherder_data["suites"].extend(perfherder_data["suites"])
+
+ if prefix:
+ # If a prefix was given, store it in the perfherder data as well
+ all_perfherder_data["prefix"] = prefix
+
+ timestamp = self.get_arg("timestamp")
+ if timestamp is not None:
+ all_perfherder_data["pushTimestamp"] = timestamp
+
+ # Validate the final perfherder data blob
+ with pathlib.Path(metadata._mach_cmd.topsrcdir, PERFHERDER_SCHEMA).open() as f:
+ schema = json.load(f)
+ jsonschema.validate(all_perfherder_data, schema)
+
+ file = "perfherder-data.json"
+ if prefix:
+ file = "{}-{}".format(prefix, file)
+ self.info("Writing perfherder results to {}".format(os.path.join(output, file)))
+
+ # XXX "suites" key error occurs when using self.info so a print
+ # is being done for now.
+
+ # print() will produce a BlockingIOError on large outputs, so we use
+ # sys.stdout
+ sys.stdout.write("PERFHERDER_DATA: ")
+ json.dump(all_perfherder_data, sys.stdout)
+ sys.stdout.write("\n")
+ sys.stdout.flush()
+
+ metadata.set_output(write_json(all_perfherder_data, output, file))
+ return metadata
+
+ def _build_blob(
+ self,
+ subtests,
+ name="browsertime",
+ test_type="pageload",
+ extra_options=None,
+ should_alert=False,
+ subtest_should_alert=None,
+ suiteshould_alert=False,
+ framework=None,
+ application=None,
+ alert_threshold=2.0,
+ lower_is_better=True,
+ unit="ms",
+ summary=None,
+ metrics_info=None,
+ transformer=None,
+ ):
+ """Build a PerfHerder data blob from the given subtests.
+
+ NOTE: This is a WIP, see the many TODOs across this file.
+
+ Given a dictionary of subtests, and the values. Build up a
+ perfherder data blob. Note that the naming convention for
+ these arguments is different from the rest of the scripts
+ to make it easier to see where they are going to in the perfherder
+ data.
+
+ For the `should_alert` field, if should_alert is True but `subtest_should_alert`
+ is empty, then all subtests along with the suite will generate alerts.
+ Otherwise, if the subtest_should_alert contains subtests to alert on, then
+ only those will alert and nothing else (including the suite). If the
+ suite value should alert, then set `suiteshould_alert` to True.
+
+ :param subtests dict: A dictionary of subtests and the values.
+ XXX TODO items for subtests:
+ (1) Allow it to contain replicates and individual settings
+ for each of the subtests.
+ (2) The geomean of the replicates will be taken for now,
+ but it should be made more flexible in some way.
+ (3) We need some way to handle making multiple suites.
+ :param name str: Name to give to the suite.
+ :param test_type str: The type of test that was run.
+ :param extra_options list: A list of extra options to store.
+ :param should_alert bool: Whether all values in the suite should
+ generate alerts or not.
+ :param subtest_should_alert list: A list of subtests to alert on. If this
+ is not empty, then it will disable the suite-level alerts.
+ :param suiteshould_alert bool: Used if `subtest_should_alert` is not
+ empty, and if True, then the suite-level value will generate
+ alerts.
+ :param framework dict: Information about the framework that
+ is being tested.
+ :param application dict: Information about the application that
+ is being tested. Must include name, and optionally a version.
+ :param alert_threshold float: The change in percentage this
+ metric must undergo to generate an alert.
+ :param lower_is_better bool: If True, then lower values are better
+ than higher ones.
+ :param unit str: The unit of the data.
+ :param summary float: The summary value to use in the perfherder
+ data blob. By default, the mean of all the subtests will be
+ used.
+ :param metrics_info dict: Contains a mapping of metric names to the
+ options that are used on the metric.
+ :param transformer str: The name of a predefined transformer, a module
+ path to a transform, or a path to the file containing the transformer.
+
+ :return dict: The PerfHerder data blob.
+ """
+ if extra_options is None:
+ extra_options = []
+ if subtest_should_alert is None:
+ subtest_should_alert = []
+ if framework is None:
+ framework = {"name": "mozperftest"}
+ if application is None:
+ application = {"name": "firefox", "version": "9000"}
+ if metrics_info is None:
+ metrics_info = {}
+
+ # Use the transform to produce a suite value
+ const = Constant()
+ tfm_cls = None
+ transformer_obj = None
+ if transformer and transformer in const.predefined_transformers:
+ # A pre-built transformer name was given
+ tfm_cls = const.predefined_transformers[transformer]
+ transformer_obj = tfm_cls()
+ elif transformer is not None:
+ tfm_cls = get_transformer(transformer)
+ transformer_obj = tfm_cls()
+ else:
+ self.warning(
+ "No transformer found for this suite. Cannot produce a summary value."
+ )
+
+ perf_subtests = []
+ suite = {
+ "name": name,
+ "type": test_type,
+ "unit": unit,
+ "extraOptions": extra_options,
+ "lowerIsBetter": lower_is_better,
+ "alertThreshold": alert_threshold,
+ "shouldAlert": (should_alert and not subtest_should_alert)
+ or suiteshould_alert,
+ "subtests": perf_subtests,
+ }
+
+ perfherder = {
+ "suites": [suite],
+ "framework": framework,
+ "application": application,
+ }
+
+ allvals = []
+ alert_thresholds = []
+ for measurement in subtests:
+ reps = subtests[measurement]
+ allvals.extend(reps)
+
+ if len(reps) == 0:
+ self.warning("No replicates found for {}, skipping".format(measurement))
+ continue
+
+ # Gather extra settings specified from within a metric specification
+ subtest_lower_is_better = lower_is_better
+ subtest_unit = unit
+ for met in metrics_info:
+ if met not in measurement:
+ continue
+
+ extra_options.extend(metrics_info[met].get("extraOptions", []))
+ alert_thresholds.append(
+ metrics_info[met].get("alertThreshold", alert_threshold)
+ )
+
+ subtest_unit = metrics_info[met].get("unit", unit)
+ subtest_lower_is_better = metrics_info[met].get(
+ "lowerIsBetter", lower_is_better
+ )
+
+ if metrics_info[met].get("shouldAlert", should_alert):
+ subtest_should_alert.append(measurement)
+
+ break
+
+ subtest = {
+ "name": measurement,
+ "replicates": reps,
+ "lowerIsBetter": subtest_lower_is_better,
+ "value": None,
+ "unit": subtest_unit,
+ "shouldAlert": should_alert or measurement in subtest_should_alert,
+ }
+
+ if has_callable_method(transformer_obj, "subtest_summary"):
+ subtest["value"] = transformer_obj.subtest_summary(subtest)
+ if subtest["value"] is None:
+ subtest["value"] = statistics.mean(reps)
+
+ perf_subtests.append(subtest)
+
+ if len(allvals) == 0:
+ raise PerfherderValidDataError(
+ "Could not build perfherder data blob because no valid data was provided, "
+ + "only int/float data is accepted."
+ )
+
+ alert_thresholds = list(set(alert_thresholds))
+ if len(alert_thresholds) > 1:
+ raise PerfherderValidDataError(
+ "Too many alertThreshold's were specified, expecting 1 but found "
+ + f"{len(alert_thresholds)}"
+ )
+ elif len(alert_thresholds) == 1:
+ suite["alertThreshold"] = alert_thresholds[0]
+
+ suite["extraOptions"] = list(set(suite["extraOptions"]))
+
+ if has_callable_method(transformer_obj, "summary"):
+ val = transformer_obj.summary(suite)
+ if val is not None:
+ suite["value"] = val
+
+ return perfherder
diff --git a/python/mozperftest/mozperftest/metrics/utils.py b/python/mozperftest/mozperftest/metrics/utils.py
new file mode 100644
index 0000000000..a947434684
--- /dev/null
+++ b/python/mozperftest/mozperftest/metrics/utils.py
@@ -0,0 +1,149 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import ast
+import json
+import os
+import pathlib
+import re
+
+from jsonschema import validate
+from jsonschema.exceptions import ValidationError
+
+# Get the jsonschema for intermediate results
+PARENT = pathlib.Path(__file__).parent.parent
+with pathlib.Path(PARENT, "schemas", "intermediate-results-schema.json").open() as f:
+ IR_SCHEMA = json.load(f)
+
+
+# These are the properties we know about in the schema.
+# If anything other than these is present, then we will
+# fail validation.
+KNOWN_PERFHERDER_PROPS = set(
+ ["name", "value", "unit", "lowerIsBetter", "shouldAlert", "alertThreshold"]
+)
+KNOWN_SUITE_PROPS = set(
+ set(["results", "transformer", "transformer-options", "extraOptions", "framework"])
+ | KNOWN_PERFHERDER_PROPS
+)
+KNOWN_SINGLE_MEASURE_PROPS = set(set(["values"]) | KNOWN_PERFHERDER_PROPS)
+
+
+# Regex splitter for the metric fields - used to handle
+# the case when `,` is found within the options values.
+METRIC_SPLITTER = re.compile(r",\s*(?![^\[\]]*\])")
+
+
+def is_number(value):
+ """Determines if the value is an int/float."""
+ return isinstance(value, (int, float)) and not isinstance(value, bool)
+
+
+def has_callable_method(obj, method_name):
+ """Determines if an object/class has a callable method."""
+ if obj and hasattr(obj, method_name) and callable(getattr(obj, method_name)):
+ return True
+ return False
+
+
+def open_file(path):
+ """Opens a file and returns its contents.
+
+ :param path str: Path to the file, if it's a
+ JSON, then a dict will be returned, otherwise,
+ the raw contents (not split by line) will be
+ returned.
+ :return dict/str: Returns a dict for JSON data, and
+ a str for any other type.
+ """
+ print("Reading %s" % path)
+ with open(path) as f:
+ if os.path.splitext(path)[-1] == ".json":
+ return json.load(f)
+ return f.read()
+
+
+def write_json(data, path, file):
+ """Writes data to a JSON file.
+
+ :param data dict: Data to write.
+ :param path str: Directory of where the data will be stored.
+ :param file str: Name of the JSON file.
+ :return str: Path to the output.
+ """
+ path = os.path.join(path, file)
+ with open(path, "w+") as f:
+ json.dump(data, f)
+ return path
+
+
+def validate_intermediate_results(results):
+ """Validates intermediate results coming from the browser layer.
+
+ This method exists because there is no reasonable method to implement
+ inheritance with `jsonschema` until the `unevaluatedProperties` field
+ is implemented in the validation module. Until then, this method
+ checks to make sure that only known properties are available in the
+ results. If any property found is unknown, then we raise a
+ jsonschema.ValidationError.
+
+ :param results dict: The intermediate results to validate.
+ :raises ValidationError: Raised when validation fails.
+ """
+ # Start with the standard validation
+ validate(results, IR_SCHEMA)
+
+ # Now ensure that we have no extra keys
+ suite_keys = set(list(results.keys()))
+ unknown_keys = suite_keys - KNOWN_SUITE_PROPS
+ if unknown_keys:
+ raise ValidationError(f"Found unknown suite-level keys: {list(unknown_keys)}")
+ if isinstance(results["results"], str):
+ # Nothing left to verify
+ return
+
+ # The results are split by measurement so we need to
+ # check that each of those entries have no extra keys
+ for entry in results["results"]:
+ measurement_keys = set(list(entry.keys()))
+ unknown_keys = measurement_keys - KNOWN_SINGLE_MEASURE_PROPS
+ if unknown_keys:
+ raise ValidationError(
+ "Found unknown single-measure-level keys for "
+ f"{entry['name']}: {list(unknown_keys)}"
+ )
+
+
+def metric_fields(value):
+ # old form: just the name
+ if "," not in value and ":" not in value:
+ return {"name": value}
+
+ def _check(field):
+ sfield = field.strip().partition(":")
+ if len(sfield) != 3 or not (sfield[1] and sfield[2]):
+ raise ValueError(f"Unexpected metrics definition {field}")
+ if sfield[0] not in KNOWN_SUITE_PROPS:
+ raise ValueError(
+ f"Unknown field '{sfield[0]}', should be in " f"{KNOWN_SUITE_PROPS}"
+ )
+
+ sfield = [sfield[0], sfield[2]]
+
+ try:
+ # This handles dealing with parsing lists
+ # from a string
+ sfield[1] = ast.literal_eval(sfield[1])
+ except (ValueError, SyntaxError):
+ # Ignore failures, those are from instances
+ # which don't need to be converted from a python
+ # representation
+ pass
+
+ return sfield
+
+ fields = [field.strip() for field in METRIC_SPLITTER.split(value)]
+ res = dict([_check(field) for field in fields])
+ if "name" not in res:
+ raise ValueError(f"{value} misses the 'name' field")
+ return res
diff --git a/python/mozperftest/mozperftest/metrics/visualmetrics.py b/python/mozperftest/mozperftest/metrics/visualmetrics.py
new file mode 100644
index 0000000000..068440d6f2
--- /dev/null
+++ b/python/mozperftest/mozperftest/metrics/visualmetrics.py
@@ -0,0 +1,221 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import errno
+import json
+import os
+import sys
+from pathlib import Path
+
+from mozfile import which
+
+from mozperftest.layers import Layer
+from mozperftest.utils import run_script, silence
+
+METRICS_FIELDS = (
+ "SpeedIndex",
+ "FirstVisualChange",
+ "LastVisualChange",
+ "VisualProgress",
+ "videoRecordingStart",
+)
+
+
+class VisualData:
+ def open_data(self, data):
+ res = {
+ "name": "visualmetrics",
+ "subtest": data["name"],
+ "data": [
+ {"file": "visualmetrics", "value": value, "xaxis": xaxis}
+ for xaxis, value in enumerate(data["values"])
+ ],
+ }
+ return res
+
+ def transform(self, data):
+ return data
+
+ def merge(self, data):
+ return data
+
+
+class VisualMetrics(Layer):
+ """Wrapper around Browsertime's visualmetrics.py script"""
+
+ name = "visualmetrics"
+ activated = False
+ arguments = {}
+
+ def setup(self):
+ self.metrics = {}
+ self.metrics_fields = []
+
+ # making sure we have ffmpeg and imagemagick available
+ for tool in ("ffmpeg", "convert"):
+ if sys.platform in ("win32", "msys"):
+ tool += ".exe"
+ path = which(tool)
+ if not path:
+ raise OSError(errno.ENOENT, f"Could not find {tool}")
+
+ def run(self, metadata):
+ if "VISUALMETRICS_PY" not in os.environ:
+ raise OSError(
+ "The VISUALMETRICS_PY environment variable is not set."
+ "Make sure you run the browsertime layer"
+ )
+ path = Path(os.environ["VISUALMETRICS_PY"])
+ if not path.exists():
+ raise FileNotFoundError(str(path))
+
+ self.visualmetrics = path
+ treated = 0
+
+ for result in metadata.get_results():
+ result_dir = result.get("results")
+ if result_dir is None:
+ continue
+ result_dir = Path(result_dir)
+ if not result_dir.is_dir():
+ continue
+ browsertime_json = Path(result_dir, "browsertime.json")
+ if not browsertime_json.exists():
+ continue
+ treated += self.run_visual_metrics(browsertime_json)
+
+ self.info(f"Treated {treated} videos.")
+
+ if len(self.metrics) > 0:
+ metadata.add_result(
+ {
+ "name": metadata.script["name"] + "-vm",
+ "framework": {"name": "mozperftest"},
+ "transformer": "mozperftest.metrics.visualmetrics:VisualData",
+ "results": list(self.metrics.values()),
+ }
+ )
+
+ # we also extend --perfherder-metrics and --console-metrics if they
+ # are activated
+ def add_to_option(name):
+ existing = self.get_arg(name, [])
+ for field in self.metrics_fields:
+ existing.append({"name": field, "unit": "ms"})
+ self.env.set_arg(name, existing)
+
+ if self.get_arg("perfherder"):
+ add_to_option("perfherder-metrics")
+
+ if self.get_arg("console"):
+ add_to_option("console-metrics")
+
+ else:
+ self.warning("No video was treated.")
+ return metadata
+
+ def run_visual_metrics(self, browsertime_json):
+ verbose = self.get_arg("verbose")
+ self.info(f"Looking at {browsertime_json}")
+ venv = self.mach_cmd.virtualenv_manager
+
+ class _display:
+ def __enter__(self, *args, **kw):
+ return self
+
+ __exit__ = __enter__
+
+ may_silence = not verbose and silence or _display
+
+ with browsertime_json.open() as f:
+ browsertime_json_data = json.loads(f.read())
+
+ videos = 0
+ global_options = [
+ str(self.visualmetrics),
+ "--orange",
+ "--perceptual",
+ "--contentful",
+ "--force",
+ "--renderignore",
+ "5",
+ "--viewport",
+ ]
+ if verbose:
+ global_options += ["-vvv"]
+
+ for site in browsertime_json_data:
+ # collecting metrics from browserScripts
+ # because it can be used in splitting
+ for index, bs in enumerate(site["browserScripts"]):
+ for name, val in bs.items():
+ if not isinstance(val, (str, int)):
+ continue
+ self.append_metrics(index, name, val)
+
+ extra = {"lowerIsBetter": True, "unit": "ms"}
+
+ for index, video in enumerate(site["files"]["video"]):
+ videos += 1
+ video_path = browsertime_json.parent / video
+ output = "[]"
+ with may_silence():
+ res, output = run_script(
+ venv.python_path,
+ global_options + ["--video", str(video_path), "--json"],
+ verbose=verbose,
+ label="visual metrics",
+ display=False,
+ )
+ if not res:
+ self.error(f"Failed {res}")
+ continue
+
+ output = output.strip()
+ if verbose:
+ self.info(str(output))
+ try:
+ output = json.loads(output)
+ except json.JSONDecodeError:
+ self.error("Could not read the json output from visualmetrics.py")
+ continue
+
+ for name, value in output.items():
+ if name.endswith(
+ "Progress",
+ ):
+ self._expand_visual_progress(index, name, value, **extra)
+ else:
+ self.append_metrics(index, name, value, **extra)
+
+ return videos
+
+ def _expand_visual_progress(self, index, name, value, **fields):
+ def _split_percent(val):
+ # value is of the form "567=94%"
+ val = val.split("=")
+ value, percent = val[0].strip(), val[1].strip()
+ if percent.endswith("%"):
+ percent = percent[:-1]
+ return int(percent), int(value)
+
+ percents = [_split_percent(elmt) for elmt in value.split(",")]
+
+ # we want to keep the first added value for each percent
+ # so the trick here is to create a dict() with the reversed list
+ percents = dict(reversed(percents))
+
+ # we are keeping the last 5 percents
+ percents = list(percents.items())
+ percents.sort()
+ for percent, value in percents[:5]:
+ self.append_metrics(index, f"{name}{percent}", value, **fields)
+
+ def append_metrics(self, index, name, value, **fields):
+ if name not in self.metrics_fields:
+ self.metrics_fields.append(name)
+ if name not in self.metrics:
+ self.metrics[name] = {"name": name, "values": []}
+
+ self.metrics[name]["values"].append(value)
+ self.metrics[name].update(**fields)
diff --git a/python/mozperftest/mozperftest/runner.py b/python/mozperftest/mozperftest/runner.py
new file mode 100644
index 0000000000..a4ca65eb53
--- /dev/null
+++ b/python/mozperftest/mozperftest/runner.py
@@ -0,0 +1,280 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Pure Python runner so we can execute perftest in the CI without
+depending on a full mach toolchain, that is not fully available in
+all worker environments.
+
+This runner can be executed in two different ways:
+
+- by calling run_tests() from the mach command
+- by executing this module directly
+
+When the module is executed directly, if the --on-try option is used,
+it will fetch arguments from Taskcluster's parameters, which were
+populated via a local --push-to-try call.
+
+The --push-to-try flow is:
+
+- a user calls ./mach perftest --push-to-try --option1 --option2
+- a new push to try commit is made and includes all options in its parameters
+- a generic TC job triggers the perftest by calling this module with --on-try
+- run_test() grabs the parameters artifact and converts them into args for
+ perftest
+"""
+import json
+import logging
+import os
+import shutil
+import sys
+from pathlib import Path
+
+TASKCLUSTER = "TASK_ID" in os.environ.keys()
+RUNNING_TESTS = "RUNNING_TESTS" in os.environ.keys()
+HERE = Path(__file__).parent
+SRC_ROOT = Path(HERE, "..", "..", "..").resolve()
+
+
+# XXX need to make that for all systems flavors
+if "SHELL" not in os.environ:
+ os.environ["SHELL"] = "/bin/bash"
+
+
+def _activate_mach_virtualenv():
+ """Adds all available dependencies in the path.
+
+ This is done so the runner can be used with no prior
+ install in all execution environments.
+ """
+
+ # We need the "mach" module to access the logic to parse virtualenv
+ # requirements. Since that depends on "packaging" (and, transitively,
+ # "pyparsing"), we add those to the path too.
+ sys.path[0:0] = [
+ os.path.join(SRC_ROOT, module)
+ for module in (
+ os.path.join("python", "mach"),
+ os.path.join("third_party", "python", "packaging"),
+ os.path.join("third_party", "python", "pyparsing"),
+ )
+ ]
+
+ from mach.site import (
+ ExternalPythonSite,
+ MachSiteManager,
+ SitePackagesSource,
+ resolve_requirements,
+ )
+
+ mach_site = MachSiteManager(
+ str(SRC_ROOT),
+ None,
+ resolve_requirements(str(SRC_ROOT), "mach"),
+ ExternalPythonSite(sys.executable),
+ SitePackagesSource.NONE,
+ )
+ mach_site.activate()
+
+ if TASKCLUSTER:
+ # In CI, the directory structure is different: xpcshell code is in
+ # "$topsrcdir/xpcshell/" rather than "$topsrcdir/testing/xpcshell".
+ sys.path.append("xpcshell")
+
+
+def _create_artifacts_dir(kwargs, artifacts):
+ from mozperftest.utils import create_path
+
+ results_dir = kwargs.get("test_name")
+ if results_dir is None:
+ results_dir = "results"
+
+ return create_path(artifacts / "artifacts" / kwargs["tool"] / results_dir)
+
+
+def _save_params(kwargs, artifacts):
+ with open(os.path.join(str(artifacts), "side-by-side-params.json"), "w") as file:
+ json.dump(kwargs, file, indent=4)
+
+
+def run_tests(mach_cmd, kwargs, client_args):
+ """This tests runner can be used directly via main or via Mach.
+
+ When the --on-try option is used, the test runner looks at the
+ `PERFTEST_OPTIONS` environment variable that contains all options passed by
+ the user via a ./mach perftest --push-to-try call.
+ """
+ on_try = kwargs.pop("on_try", False)
+
+ # trying to get the arguments from the task params
+ if on_try:
+ try_options = json.loads(os.environ["PERFTEST_OPTIONS"])
+ print("Loading options from $PERFTEST_OPTIONS")
+ print(json.dumps(try_options, indent=4, sort_keys=True))
+ kwargs.update(try_options)
+
+ from mozperftest import MachEnvironment, Metadata
+ from mozperftest.hooks import Hooks
+ from mozperftest.script import ScriptInfo
+ from mozperftest.utils import build_test_list
+
+ hooks_file = kwargs.pop("hooks", None)
+ hooks = Hooks(mach_cmd, hooks_file)
+ verbose = kwargs.get("verbose", False)
+ log_level = logging.DEBUG if verbose else logging.INFO
+
+ # If we run through mach, we just want to set the level
+ # of the existing terminal handler.
+ # Otherwise, we're adding it.
+ if mach_cmd.log_manager.terminal_handler is not None:
+ mach_cmd.log_manager.terminal_handler.level = log_level
+ else:
+ mach_cmd.log_manager.add_terminal_logging(level=log_level)
+ mach_cmd.log_manager.enable_all_structured_loggers()
+ mach_cmd.log_manager.enable_unstructured()
+
+ try:
+ # Only pass the virtualenv to the before_iterations hook
+ # so that users can install test-specific packages if needed.
+ mach_cmd.activate_virtualenv()
+ kwargs["virtualenv"] = mach_cmd.virtualenv_manager
+ hooks.run("before_iterations", kwargs)
+ del kwargs["virtualenv"]
+
+ tests, tmp_dir = build_test_list(kwargs["tests"])
+
+ for test in tests:
+ script = ScriptInfo(test)
+
+ # update the arguments with options found in the script, if any
+ args = script.update_args(**client_args)
+ # XXX this should be the default pool for update_args
+ for key, value in kwargs.items():
+ if key not in args:
+ args[key] = value
+
+ # update the hooks, or use a copy of the general one
+ script_hooks = Hooks(mach_cmd, args.pop("hooks", hooks_file))
+
+ flavor = args["flavor"]
+ if flavor == "doc":
+ print(script)
+ continue
+
+ for iteration in range(args.get("test_iterations", 1)):
+ try:
+ env = MachEnvironment(mach_cmd, hooks=script_hooks, **args)
+ metadata = Metadata(mach_cmd, env, flavor, script)
+ script_hooks.run("before_runs", env)
+ try:
+ with env.frozen() as e:
+ e.run(metadata)
+ finally:
+ script_hooks.run("after_runs", env)
+ finally:
+ if tmp_dir is not None:
+ shutil.rmtree(tmp_dir)
+ finally:
+ hooks.cleanup()
+
+
+def run_tools(mach_cmd, kwargs):
+ """This tools runner can be used directly via main or via Mach.
+
+ **TODO**: Before adding any more tools, we need to split this logic out
+ into a separate file that runs the tools and sets them up dynamically
+ in a similar way to how we use layers.
+ """
+ from mozperftest.utils import ON_TRY, install_package
+
+ mach_cmd.activate_virtualenv()
+ install_package(mach_cmd.virtualenv_manager, "opencv-python==4.5.4.60")
+ install_package(
+ mach_cmd.virtualenv_manager,
+ "mozperftest-tools==0.2.6",
+ )
+
+ log_level = logging.INFO
+ if mach_cmd.log_manager.terminal_handler is not None:
+ mach_cmd.log_manager.terminal_handler.level = log_level
+ else:
+ mach_cmd.log_manager.add_terminal_logging(level=log_level)
+ mach_cmd.log_manager.enable_all_structured_loggers()
+ mach_cmd.log_manager.enable_unstructured()
+
+ if ON_TRY:
+ artifacts = Path(os.environ.get("MOZ_FETCHES_DIR"), "..").resolve()
+ artifacts = _create_artifacts_dir(kwargs, artifacts)
+ else:
+ artifacts = _create_artifacts_dir(kwargs, SRC_ROOT)
+
+ _save_params(kwargs, artifacts)
+
+ # Run the requested tool
+ from mozperftest.tools import TOOL_RUNNERS
+
+ tool = kwargs.pop("tool")
+ print(f"Running {tool} tool")
+
+ TOOL_RUNNERS[tool](artifacts, kwargs)
+
+
+def main(argv=sys.argv[1:]):
+ """Used when the runner is directly called from the shell"""
+ _activate_mach_virtualenv()
+
+ from mach.logging import LoggingManager
+ from mach.util import get_state_dir
+ from mozbuild.base import MachCommandBase, MozbuildObject
+ from mozbuild.mozconfig import MozconfigLoader
+
+ from mozperftest import PerftestArgumentParser, PerftestToolsArgumentParser
+
+ mozconfig = SRC_ROOT / "browser" / "config" / "mozconfig"
+ if mozconfig.exists():
+ os.environ["MOZCONFIG"] = str(mozconfig)
+
+ if "--xpcshell-mozinfo" in argv:
+ mozinfo = argv[argv.index("--xpcshell-mozinfo") + 1]
+ topobjdir = Path(mozinfo).parent
+ else:
+ topobjdir = None
+
+ config = MozbuildObject(
+ str(SRC_ROOT),
+ None,
+ LoggingManager(),
+ topobjdir=topobjdir,
+ mozconfig=MozconfigLoader.AUTODETECT,
+ )
+ config.topdir = config.topsrcdir
+ config.cwd = os.getcwd()
+ config.state_dir = get_state_dir()
+
+ # This monkey patch forces mozbuild to reuse
+ # our configuration when it tries to re-create
+ # it from the environment.
+ def _here(*args, **kw):
+ return config
+
+ MozbuildObject.from_environment = _here
+
+ mach_cmd = MachCommandBase(config)
+
+ if "tools" in argv[0]:
+ if len(argv) == 1:
+ raise SystemExit("No tool specified, cannot continue parsing")
+ PerftestToolsArgumentParser.tool = argv[1]
+ perftools_parser = PerftestToolsArgumentParser()
+ args = dict(vars(perftools_parser.parse_args(args=argv[2:])))
+ args["tool"] = argv[1]
+ run_tools(mach_cmd, args)
+ else:
+ perftest_parser = PerftestArgumentParser(description="vanilla perftest")
+ args = dict(vars(perftest_parser.parse_args(args=argv)))
+ user_args = perftest_parser.get_user_args(args)
+ run_tests(mach_cmd, args, user_args)
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/python/mozperftest/mozperftest/schemas/intermediate-results-schema.json b/python/mozperftest/mozperftest/schemas/intermediate-results-schema.json
new file mode 100644
index 0000000000..70c7468b31
--- /dev/null
+++ b/python/mozperftest/mozperftest/schemas/intermediate-results-schema.json
@@ -0,0 +1,113 @@
+{
+ "definitions": {
+ "perfherder-options-schema": {
+ "title": "Perfherder-specific Options",
+ "description": "Set these to have more control over the perfherder blob that will be created",
+ "properties": {
+ "name": {
+ "title": "Name of the metric or suite",
+ "type": "string"
+ },
+ "value": {
+ "title": "Summary value",
+ "type": "number",
+ "minimum": -1000000000000.0,
+ "maximum": 1000000000000.0
+ },
+ "unit": {
+ "title": "Measurement unit",
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 20
+ },
+ "lowerIsBetter": {
+ "description": "Whether lower values are better",
+ "title": "Lower is better",
+ "type": "boolean"
+ },
+ "shouldAlert": {
+ "description": "Whether we should alert",
+ "title": "Should alert",
+ "type": "boolean"
+ },
+ "alertThreshold": {
+ "description": "% change threshold before alerting",
+ "title": "Alert threshold",
+ "type": "number",
+ "minimum": 0.0,
+ "maximum": 1000.0
+ }
+ }
+ },
+ "single-metric-schema": {
+ "allOf": [
+ {"$ref": "#/definitions/perfherder-options-schema"},
+ {
+ "properties": {
+ "values": {
+ "description": "Contains all the measurements taken",
+ "title": "Measured values",
+ "type": "array",
+ "items": {
+ "type": "number"
+ }
+ }
+ }
+ }
+ ],
+ "required": [
+ "name",
+ "values"
+ ],
+ "type": "object"
+ },
+ "results-schema": {
+ "anyOf": [
+ {"type": "string"},
+ {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/single-metric-schema"
+ }
+ }
+ ],
+ "title": "Holds the data to be processed by the metrics modules",
+ "description": "The data can be defined within an object, or through a path to where the data can be found"
+ }
+ },
+ "id": "https://searchfox.org/mozilla-central/source/python/mozperftest/mozperftest/schemas/intermediate-results-schema.json",
+ "allOf": [
+ {"$ref": "#/definitions/perfherder-options-schema"},
+ {
+ "properties": {
+ "results": {"$ref": "#/definitions/results-schema"},
+ "transformer": {
+ "title": "Transformer to use on the data",
+ "type": "string"
+ },
+ "transformer-options": {
+ "type": "object",
+ "title": "Options used in the transformer"
+ },
+ "framework": {
+ "title": "Framework that produced the data",
+ "type": "object"
+ },
+ "extraOptions": {
+ "type": "array",
+ "title": "Extra options used in the running suite",
+ "items": {
+ "type": "string",
+ "maxLength": 100
+ },
+ "uniqueItems": true,
+ "maxItems": 8
+ }
+ }
+ }
+ ],
+ "required": ["results", "name"],
+ "description": "Intermediate results for a single type of metric or suite (i.e. browsertime, and adb results shouldn't be mixed in the same entry)",
+ "title": "MozPerftest Intermediate Results Schema",
+ "type": "object"
+}
diff --git a/python/mozperftest/mozperftest/schemas/transformer_schema.json b/python/mozperftest/mozperftest/schemas/transformer_schema.json
new file mode 100644
index 0000000000..ab156f5386
--- /dev/null
+++ b/python/mozperftest/mozperftest/schemas/transformer_schema.json
@@ -0,0 +1,55 @@
+{
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "definitions": {
+ "data": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "file": {
+ "type": "string"
+ },
+ "value": {},
+ "xaxis": {
+ "type": "number"
+ }
+ },
+ "required": [
+ "file",
+ "value",
+ "xaxis"
+ ]
+ }
+ },
+ "dict": {
+ "type": "object",
+ "properties": {
+ "data": {
+ "$ref": "#/definitions/data"
+ },
+ "name": {
+ "type": "string"
+ },
+ "subtest": {
+ "type": "string"
+ }
+ },
+ "required": [
+ "data",
+ "name",
+ "subtest"
+ ]
+ }
+ },
+ "oneOf": [
+ {
+ "$ref": "#/definitions/dict"
+ },
+ {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/dict"
+ }
+ }
+ ]
+}
diff --git a/python/mozperftest/mozperftest/script.py b/python/mozperftest/mozperftest/script.py
new file mode 100644
index 0000000000..e9c29c5d98
--- /dev/null
+++ b/python/mozperftest/mozperftest/script.py
@@ -0,0 +1,269 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import io
+import re
+import textwrap
+import traceback
+from collections import defaultdict
+from enum import Enum
+from pathlib import Path
+
+import esprima
+
+# list of metadata, each item is the name and if the field is mandatory
+METADATA = [
+ ("setUp", False),
+ ("tearDown", False),
+ ("test", True),
+ ("owner", True),
+ ("author", False),
+ ("name", True),
+ ("description", True),
+ ("longDescription", False),
+ ("options", False),
+ ("supportedBrowsers", False),
+ ("supportedPlatforms", False),
+ ("filename", True),
+ ("tags", False),
+]
+
+
+_INFO = """\
+%(filename)s
+%(filename_underline)s
+
+:owner: %(owner)s
+:name: %(name)s
+"""
+
+
+XPCSHELL_FUNCS = "add_task", "run_test", "run_next_test"
+
+
class BadOptionTypeError(Exception):
    """Raised when an option defined in a test has an incorrect type."""
+
+
class MissingFieldError(Exception):
    """Raised when a test script lacks a mandatory metadata field.

    The offending script path and the missing field name are kept as
    attributes for callers that want to report them separately.
    """

    def __init__(self, script, field):
        message = f"Missing metadata {field}"
        super().__init__(message)
        self.field = field
        self.script = script
+
+
class ParseError(Exception):
    """Raised when a test script cannot be parsed.

    Wraps the original exception; str() renders the message followed by
    the wrapped exception's full traceback.
    """

    def __init__(self, script, exception):
        super().__init__(f"Cannot parse {script}")
        self.script = script
        self.exception = exception

    def __str__(self):
        wrapped = self.exception
        # format_exception yields the same lines print_exception would
        # write; join them into a single traceback string.
        details = "".join(
            traceback.format_exception(
                type(wrapped), wrapped, wrapped.__traceback__
            )
        )
        return f"{self.args[0]}\n{details}"
+
+
class ScriptType(Enum):
    """Kind of perftest script, detected while parsing (see ScriptInfo)."""

    xpcshell = 1
    browsertime = 2
+
+
class ScriptInfo(defaultdict):
    """Loads and parses a Browsertime or xpcshell test script.

    Behaves like a mapping of the metadata fields found in the script
    (missing keys read as "N/A") and also exposes:

    - ``script``: resolved :class:`pathlib.Path` of the file
    - ``script_type``: a :class:`ScriptType` value
    - ``parsed``: the esprima AST of the script

    Raises :class:`ParseError` when the file cannot be parsed and
    :class:`MissingFieldError` when a mandatory METADATA field is absent.
    """

    def __init__(self, path):
        super(ScriptInfo, self).__init__()
        try:
            self._parse_file(path)
        except Exception as e:
            raise ParseError(path, e)

        # Every field marked mandatory in METADATA must have been found.
        for field, required in METADATA:
            if not required:
                continue
            if field not in self:
                raise MissingFieldError(path, field)

    def _parse_file(self, path):
        """Parse the JS file with esprima and collect its metadata."""
        self.script = Path(path).resolve()
        self["filename"] = str(self.script)
        self.script_type = ScriptType.browsertime
        with self.script.open() as f:
            self.parsed = esprima.parseScript(f.read())

        # looking for the exports statement
        for stmt in self.parsed.body:
            # detecting if the script has add_task()
            if (
                stmt.type == "ExpressionStatement"
                and stmt.expression is not None
                and stmt.expression.callee is not None
                and stmt.expression.callee.type == "Identifier"
                and stmt.expression.callee.name in XPCSHELL_FUNCS
            ):
                self["test"] = "xpcshell"
                self.script_type = ScriptType.xpcshell
                continue

            # plain xpcshell tests functions markers
            if stmt.type == "FunctionDeclaration" and stmt.id.name in XPCSHELL_FUNCS:
                self["test"] = "xpcshell"
                self.script_type = ScriptType.xpcshell
                continue

            # is this the perfMetadata plain var ?
            if stmt.type == "VariableDeclaration":
                for decl in stmt.declarations:
                    if (
                        decl.type != "VariableDeclarator"
                        or decl.id.type != "Identifier"
                        or decl.id.name != "perfMetadata"
                        or decl.init is None
                    ):
                        continue
                    self.scan_properties(decl.init.properties)
                continue

            # or the module.exports map ?
            if (
                stmt.type != "ExpressionStatement"
                or stmt.expression.left is None
                or stmt.expression.left.property is None
                or stmt.expression.left.property.name != "exports"
                or stmt.expression.right is None
                or stmt.expression.right.properties is None
            ):
                continue

            # now scanning the properties
            self.scan_properties(stmt.expression.right.properties)

    def parse_value(self, value):
        """Convert an esprima AST value node into a plain Python value.

        Raises ValueError for unsupported node types.
        """
        if value.type == "Identifier":
            return value.name

        if value.type == "Literal":
            return value.value

        if value.type == "TemplateLiteral":
            # Only the first quasi is used; collapse all whitespace runs.
            value = value.quasis[0].value.cooked.replace("\n", " ")
            return re.sub(r"\s+", " ", value).strip()

        if value.type == "ArrayExpression":
            return [self.parse_value(e) for e in value.elements]

        if value.type == "ObjectExpression":
            elements = {}
            for prop in value.properties:
                sub_name, sub_value = self.parse_property(prop)
                elements[sub_name] = sub_value
            return elements

        raise ValueError(value.type)

    def parse_property(self, property):
        """Return a (name, value) pair for an AST property node."""
        return property.key.name, self.parse_value(property.value)

    def scan_properties(self, properties):
        """Store each AST property as a metadata entry on self."""
        for prop in properties:
            name, value = self.parse_property(prop)
            self[name] = value

    def __str__(self):
        """Used to generate docs."""

        def _render(value, level=0):
            if not isinstance(value, (list, tuple, dict)):
                if not isinstance(value, str):
                    value = str(value)
                # line wrapping
                return "\n".join(textwrap.wrap(value, break_on_hyphens=False))

            # options
            if isinstance(value, dict):
                if level > 0:
                    return ",".join([f"{k}:{v}" for k, v in value.items()])

                res = []
                for key, val in value.items():
                    if isinstance(val, bool):
                        res.append(f"   --{key.replace('_', '-')}")
                    else:
                        val = _render(val, level + 1)
                        res.append(f"   --{key.replace('_', '-')} {val}")

                return "\n".join(res)

            # simple flat list
            return ", ".join([_render(v, level + 1) for v in value])

        options = ""
        d = defaultdict(lambda: "N/A")
        for field, value in self.items():
            if field == "longDescription":
                continue
            if field == "filename":
                d[field] = self.script.name
                continue
            if field == "options":
                for plat in "default", "linux", "mac", "win":
                    if plat not in value:
                        continue
                    options += f":{plat.capitalize()} options:\n\n::\n\n{_render(value[plat])}\n"
            else:
                d[field] = _render(value)

        d["filename_underline"] = "=" * len(d["filename"])
        info = _INFO % d
        if "tags" in self:
            info += f":tags: {','.join(self['tags'])}\n"
        info += options
        info += f"\n**{self['description']}**\n"
        if "longDescription" in self:
            info += f"\n{self['longDescription']}\n"

        return info

    def __missing__(self, key):
        # Metadata fields that were never set read as "N/A".
        return "N/A"

    @classmethod
    def detect_type(cls, path):
        """Parse *path* and return only its ScriptType."""
        return cls(path).script_type

    def update_args(self, **args):
        """Updates arguments with options from the script.

        Order of precedence: cli options > platform options > default options.
        """
        from mozperftest.utils import simple_platform

        options = self.get("options", {})
        # Copy the default-options dict before layering on top of it:
        # previously `options.get("default", {})` was updated in place,
        # leaking platform/CLI values into the stored script options and
        # corrupting subsequent update_args() calls.
        result = dict(options.get("default", {}))
        result.update(options.get(simple_platform(), {}))
        result.update(args)

        # XXX this is going away, see https://bugzilla.mozilla.org/show_bug.cgi?id=1675102
        for opt, val in result.items():
            if opt.startswith("visualmetrics") or "metrics" not in opt:
                continue
            if not isinstance(val, list):
                raise BadOptionTypeError("Metrics should be defined within a list")
            for metric in val:
                if not isinstance(metric, dict):
                    raise BadOptionTypeError(
                        "Each individual metrics must be defined within a JSON-like object"
                    )

        if self.script_type == ScriptType.xpcshell:
            result["flavor"] = "xpcshell"
        return result
diff --git a/python/mozperftest/mozperftest/system/__init__.py b/python/mozperftest/mozperftest/system/__init__.py
new file mode 100644
index 0000000000..55deb9094d
--- /dev/null
+++ b/python/mozperftest/mozperftest/system/__init__.py
@@ -0,0 +1,35 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from mozperftest.layers import Layers
+from mozperftest.system.android import AndroidDevice
+from mozperftest.system.android_startup import AndroidStartUp
+from mozperftest.system.macos import MacosDevice
+from mozperftest.system.pingserver import PingServer
+from mozperftest.system.profile import Profile
+from mozperftest.system.proxy import ProxyRunner
+
+
def get_layers():
    """Return every system layer class this package provides."""
    layers = (
        PingServer,
        Profile,
        ProxyRunner,
        AndroidDevice,
        MacosDevice,
        AndroidStartUp,
    )
    return layers
+
+
def pick_system(env, flavor, mach_cmd):
    """Build the Layers stack of system layers matching *flavor*.

    Raises NotImplementedError for an unknown flavor.
    """
    desktop_stack = (
        PingServer,  # needs to come before Profile
        MacosDevice,
        Profile,
        ProxyRunner,
    )
    stacks = {
        "desktop-browser": desktop_stack,
        "xpcshell": desktop_stack,
        "mobile-browser": (Profile, ProxyRunner, AndroidDevice, AndroidStartUp),
        "webpagetest": (Profile,),
    }
    if flavor not in stacks:
        raise NotImplementedError(flavor)
    return Layers(env, mach_cmd, stacks[flavor])
diff --git a/python/mozperftest/mozperftest/system/android.py b/python/mozperftest/mozperftest/system/android.py
new file mode 100644
index 0000000000..650b0fb29d
--- /dev/null
+++ b/python/mozperftest/mozperftest/system/android.py
@@ -0,0 +1,238 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import sys
+import tempfile
+from pathlib import Path
+
+import mozlog
+from mozdevice import ADBDevice, ADBError
+
+from mozperftest.layers import Layer
+from mozperftest.system.android_perf_tuner import tune_performance
+from mozperftest.utils import download_file
+
# Directory containing this module; used to look for a user-uploaded APK.
HERE = Path(__file__).parent

# Taskcluster index endpoint and route templates used to build the
# download URLs for the APK aliases in _PERMALINKS below.
_ROOT_URL = "https://firefox-ci-tc.services.mozilla.com/api/index/v1/task/"
_FENIX_NIGHTLY_BUILDS = (
    "mobile.v3.firefox-android.apks.fenix-nightly.latest.{architecture}"
    "/artifacts/public/build/fenix/{architecture}/target.apk"
)
_GV_BUILDS = "gecko.v2.mozilla-central.shippable.latest.mobile.android-"
_REFBROW_BUILDS = (
    "mobile.v2.reference-browser.nightly.latest.{architecture}"
    "/artifacts/public/target.{architecture}.apk"
)

# Alias -> download URL accepted by --android-install-apk. Aliases that
# map to None are placeholders (resolved outside this module, per the
# comment below).
_PERMALINKS = {
    "fenix_nightly_armeabi_v7a": _ROOT_URL
    + _FENIX_NIGHTLY_BUILDS.format(architecture="armeabi-v7a"),
    "fenix_nightly_arm64_v8a": _ROOT_URL
    + _FENIX_NIGHTLY_BUILDS.format(architecture="arm64-v8a"),
    # The two following aliases are used for Fenix multi-commit testing in CI
    "fenix_nightlysim_multicommit_arm64_v8a": None,
    "fenix_nightlysim_multicommit_armeabi_v7a": None,
    "gve_nightly_aarch64": _ROOT_URL
    + _GV_BUILDS
    + "aarch64-opt/artifacts/public/build/geckoview_example.apk",
    "gve_nightly_api16": _ROOT_URL
    + _GV_BUILDS
    + "arm-opt/artifacts/public/build/geckoview_example.apk",
    "refbrow_nightly_aarch64": _ROOT_URL
    + _REFBROW_BUILDS.format(architecture="arm64-v8a"),
    "refbrow_nightly_api16": _ROOT_URL
    + _REFBROW_BUILDS.format(architecture="armeabi-v7a"),
}
+
+
class DeviceError(Exception):
    """Raised when the Android device cannot be reached over ADB."""
+
+
class ADBLoggedDevice(ADBDevice):
    """ADBDevice variant that routes logging through an injected logger.

    The mandatory ``logger`` keyword argument is consumed here and never
    forwarded to ADBDevice.
    """

    def __init__(self, *args, **kw):
        provided = kw.pop("logger")
        self._provided_logger = provided
        super().__init__(*args, **kw)

    def _get_logger(self, logger_name, verbose):
        # Ignore the requested name/verbosity: always hand back the
        # logger supplied at construction time.
        return self._provided_logger
+
+
class AndroidDevice(Layer):
    """Use an android device via ADB.

    System layer that connects to the phone, optionally installs APKs,
    tunes performance, and records adb/logcat output.
    """

    name = "android"
    activated = False

    # Declarative CLI options; the Layer machinery exposes these with an
    # "android-" prefix (e.g. --android-app-name).
    arguments = {
        "app-name": {
            "type": str,
            "default": "org.mozilla.firefox",
            "help": "Android app name",
        },
        "timeout": {
            "type": int,
            "default": 60,
            "help": "Timeout in seconds for adb operations",
        },
        "clear-logcat": {
            "action": "store_true",
            "default": False,
            "help": "Clear the logcat when starting",
        },
        "capture-adb": {
            "type": str,
            "default": "stdout",
            "help": (
                "Captures adb calls to the provided path. "
                "To capture to stdout, use 'stdout'."
            ),
        },
        "capture-logcat": {
            "type": str,
            "default": None,
            "help": "Captures the logcat to the provided path.",
        },
        "perf-tuning": {
            "action": "store_true",
            "default": False,
            "help": (
                "If set, device will be tuned for performance. "
                "This helps with decreasing the noise."
            ),
        },
        "intent": {"type": str, "default": None, "help": "Intent to use"},
        "activity": {"type": str, "default": None, "help": "Activity to use"},
        "install-apk": {
            "nargs": "*",
            "default": [],
            "help": (
                "APK to install to the device "
                "Can be a file, an url or an alias url from "
                " %s" % ", ".join(_PERMALINKS.keys())
            ),
        },
    }

    def __init__(self, env, mach_cmd):
        super(AndroidDevice, self).__init__(env, mach_cmd)
        # All of these are populated later, in run().
        self.android_activity = self.app_name = self.device = None
        self.capture_logcat = self.capture_file = None
        self._custom_apk_path = None

    @property
    def custom_apk_path(self):
        # Lazily looks for a user-uploaded APK placed next to the
        # package; the result is cached once found.
        if self._custom_apk_path is None:
            custom_apk_path = Path(HERE, "..", "user_upload.apk")
            if custom_apk_path.exists():
                self._custom_apk_path = custom_apk_path
        return self._custom_apk_path

    def custom_apk_exists(self):
        """Return True when a user-uploaded APK was found."""
        return self.custom_apk_path is not None

    def setup(self):
        # A user-provided APK takes precedence over --android-install-apk.
        if self.custom_apk_exists():
            self.info(
                f"Replacing --android-install-apk with custom APK found at "
                f"{self.custom_apk_path}"
            )
            self.set_arg("android-install-apk", [self.custom_apk_path])

    def teardown(self):
        # Close the adb capture file and, if requested, dump the whole
        # logcat collected during the run.
        if self.capture_file is not None:
            self.capture_file.close()
        if self.capture_logcat is not None and self.device is not None:
            self.info("Dumping logcat into %r" % str(self.capture_logcat))
            with self.capture_logcat.open("wb") as f:
                for line in self.device.get_logcat():
                    f.write(line.encode("utf8", errors="replace") + b"\n")

    def _set_output_path(self, path):
        """Resolve a capture path.

        None and "stdout" pass through unchanged; relative paths are
        anchored under the --output directory.
        """
        if path in (None, "stdout"):
            return path
        # check if the path is absolute or relative to output
        path = Path(path)
        if not path.is_absolute():
            return Path(self.get_arg("output"), path)
        return path

    def run(self, metadata):
        """Connect to the device, install APKs and prepare the run.

        Raises DeviceError when the phone cannot be reached.
        """
        self.app_name = self.get_arg("android-app-name")
        self.android_activity = self.get_arg("android-activity")
        self.clear_logcat = self.get_arg("clear-logcat")
        self.metadata = metadata
        self.verbose = self.get_arg("verbose")
        self.capture_adb = self._set_output_path(self.get_arg("capture-adb"))
        self.capture_logcat = self._set_output_path(self.get_arg("capture-logcat"))

        # capture the logs produced by ADBDevice
        logger_name = "mozperftest-adb"
        logger = mozlog.structuredlog.StructuredLogger(logger_name)
        if self.capture_adb == "stdout":
            stream = sys.stdout
            disable_colors = False
        else:
            # Kept open for the whole run; closed in teardown().
            stream = self.capture_file = self.capture_adb.open("w")
            disable_colors = True

        handler = mozlog.handlers.StreamHandler(
            stream=stream,
            formatter=mozlog.formatters.MachFormatter(
                verbose=self.verbose, disable_colors=disable_colors
            ),
        )
        logger.add_handler(handler)
        try:
            self.device = ADBLoggedDevice(
                verbose=self.verbose, timeout=self.get_arg("timeout"), logger=logger
            )
        except (ADBError, AttributeError) as e:
            self.error("Could not connect to the phone. Is it connected?")
            raise DeviceError(str(e))

        if self.clear_logcat:
            self.device.clear_logcat()

        # Install APKs
        for apk in self.get_arg("android-install-apk"):
            self.info("Uninstalling old version")
            self.device.uninstall_app(self.get_arg("android-app-name"))
            self.info("Installing %s" % apk)
            # Aliases from _PERMALINKS resolve to download URLs.
            if str(apk) in _PERMALINKS:
                apk = _PERMALINKS[apk]
            if str(apk).startswith("http"):
                with tempfile.TemporaryDirectory() as tmpdirname:
                    target = Path(tmpdirname, "target.apk")
                    self.info("Downloading %s" % apk)
                    download_file(apk, target)
                    self.info("Installing downloaded APK")
                    self.device.install_app(str(target))
            else:
                self.device.install_app(apk, replace=True)
            self.info("Done.")

        # checking that the app is installed
        if not self.device.is_app_installed(self.app_name):
            raise Exception("%s is not installed" % self.app_name)

        if self.get_arg("android-perf-tuning", False):
            tune_performance(self.device)

        # set up default activity with the app name if none given
        if self.android_activity is None:
            # guess the activity, given the app
            if "fenix" in self.app_name:
                self.android_activity = "org.mozilla.fenix.IntentReceiverActivity"
            elif "geckoview_example" in self.app_name:
                self.android_activity = (
                    "org.mozilla.geckoview_example.GeckoViewActivity"
                )
            # NOTE(review): "android_activity"/"android_intent" use
            # underscores here while the reads above use dashed names
            # ("android-activity") — confirm the argument layer
            # normalizes both spellings.
            self.set_arg("android_activity", self.android_activity)

        self.info("Android environment:")
        self.info("- Application name: %s" % self.app_name)
        self.info("- Activity: %s" % self.android_activity)
        self.info("- Intent: %s" % self.get_arg("android_intent"))
        return metadata
diff --git a/python/mozperftest/mozperftest/system/android_perf_tuner.py b/python/mozperftest/mozperftest/system/android_perf_tuner.py
new file mode 100644
index 0000000000..924ddd0c9e
--- /dev/null
+++ b/python/mozperftest/mozperftest/system/android_perf_tuner.py
@@ -0,0 +1,193 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
def tune_performance(device, log=None, timeout=None):
    """Set various performance-oriented parameters, to reduce jitter.

    This includes some device-specific kernel tweaks.

    For more information, see https://bugzilla.mozilla.org/show_bug.cgi?id=1547135.
    """
    tuner = PerformanceTuner(device, log=log, timeout=timeout)
    tuner.tune_performance()
+
+
class PerformanceTuner:
    """Applies scheduler/VM/CPU/GPU/kernel tweaks to an Android device
    to reduce measurement noise.

    Most tweaks require a rooted device; on non-rooted devices only the
    stay-awake setting is applied.
    """

    def __init__(self, device, log=None, timeout=None):
        self.device = device
        # Fall back to the device's own logger when none is provided.
        # (Replaces the fragile `log is not None and log or ...` idiom.)
        self.log = log if log is not None else self.device._logger
        self.timeout = timeout

    def tune_performance(self):
        """Apply every tweak appropriate for this device."""
        self.log.info("tuning android device performance")
        self.set_svc_power_stayon()
        if self.device.is_rooted:
            device_name = self.device.shell_output(
                "getprop ro.product.model", timeout=self.timeout
            )
            # all commands require root shell from here on
            self.set_scheduler()
            self.set_virtual_memory_parameters()
            self.turn_off_services()
            self.set_cpu_performance_parameters(device_name)
            self.set_gpu_performance_parameters(device_name)
            self.set_kernel_performance_parameters()
        self.device.clear_logcat(timeout=self.timeout)
        self.log.info("android device performance tuning complete")

    def _set_value_and_check_exitcode(self, file_name, value):
        """Write *value* into a sysfs/procfs file and log the outcome."""
        self.log.info("setting {} to {}".format(file_name, value))
        if self.device.shell_bool(
            " ".join(["echo", str(value), ">", str(file_name)]),
            timeout=self.timeout,
        ):
            self.log.info("successfully set {} to {}".format(file_name, value))
        else:
            self.log.warning("command failed")

    def set_svc_power_stayon(self):
        """Keep the screen on while connected over USB (no root needed)."""
        self.log.info("set device to stay awake on usb")
        self.device.shell_bool("svc power stayon usb", timeout=self.timeout)

    def set_scheduler(self):
        """Switch the block I/O scheduler to noop."""
        self.log.info("setting scheduler to noop")
        scheduler_location = "/sys/block/sda/queue/scheduler"

        self._set_value_and_check_exitcode(scheduler_location, "noop")

    def turn_off_services(self):
        """Stop thermal/power services that cause CPU frequency jitter."""
        services = [
            "mpdecision",
            "thermal-engine",
            "thermald",
        ]
        for service in services:
            self.log.info(" ".join(["turning off service:", service]))
            self.device.shell_bool(" ".join(["stop", service]), timeout=self.timeout)

        # Verify the services actually went away.
        services_list_output = self.device.shell_output(
            "service list", timeout=self.timeout
        )
        for service in services:
            if service not in services_list_output:
                self.log.info(" ".join(["successfully terminated:", service]))
            else:
                self.log.warning(" ".join(["failed to terminate:", service]))

    def set_virtual_memory_parameters(self):
        """Reduce swap activity and delay dirty-page writeback."""
        self.log.info("setting virtual memory parameters")
        commands = {
            "/proc/sys/vm/swappiness": 0,
            "/proc/sys/vm/dirty_ratio": 85,
            "/proc/sys/vm/dirty_background_ratio": 70,
        }

        for key, value in commands.items():
            self._set_value_and_check_exitcode(key, value)

    def set_cpu_performance_parameters(self, device_name=None):
        """Pin CPU governors/frequencies for known device models."""
        self.log.info("setting cpu performance parameters")
        commands = {}

        # Query the model only when the caller did not supply one.
        # (Bug fix: the condition was previously inverted —
        # `if device_name is not None:` — which re-queried when a name
        # was already given and skipped CPU tuning entirely when called
        # with None; set_gpu_performance_parameters has the correct
        # logic.)
        if device_name is None:
            device_name = self.device.shell_output(
                "getprop ro.product.model", timeout=self.timeout
            )

        if device_name == "Pixel 2":
            # MSM8998 (4x 2.35GHz, 4x 1.9GHz)
            # values obtained from:
            # /sys/devices/system/cpu/cpufreq/policy0/scaling_available_frequencies
            # /sys/devices/system/cpu/cpufreq/policy4/scaling_available_frequencies
            commands.update(
                {
                    "/sys/devices/system/cpu/cpufreq/policy0/scaling_governor": "performance",
                    "/sys/devices/system/cpu/cpufreq/policy4/scaling_governor": "performance",
                    "/sys/devices/system/cpu/cpufreq/policy0/scaling_min_freq": "1900800",
                    "/sys/devices/system/cpu/cpufreq/policy4/scaling_min_freq": "2457600",
                }
            )
        elif device_name == "Moto G (5)":
            # MSM8937(8x 1.4GHz)
            # values obtained from:
            # /sys/devices/system/cpu/cpufreq/policy0/scaling_available_frequencies
            for x in range(8):
                commands.update(
                    {
                        "/sys/devices/system/cpu/cpu{}/"
                        "cpufreq/scaling_governor".format(x): "performance",
                        "/sys/devices/system/cpu/cpu{}/"
                        "cpufreq/scaling_min_freq".format(x): "1401000",
                    }
                )
        else:
            self.log.info(
                "CPU for device with ro.product.model '{}' unknown, not scaling_governor".format(
                    device_name
                )
            )

        for key, value in commands.items():
            self._set_value_and_check_exitcode(key, value)

    def set_gpu_performance_parameters(self, device_name=None):
        """Force the GPU clocks/bus to stay at full speed."""
        self.log.info("setting gpu performance parameters")
        commands = {
            "/sys/class/kgsl/kgsl-3d0/bus_split": "0",
            "/sys/class/kgsl/kgsl-3d0/force_bus_on": "1",
            "/sys/class/kgsl/kgsl-3d0/force_rail_on": "1",
            "/sys/class/kgsl/kgsl-3d0/force_clk_on": "1",
            "/sys/class/kgsl/kgsl-3d0/force_no_nap": "1",
            "/sys/class/kgsl/kgsl-3d0/idle_timer": "1000000",
        }

        if not device_name:
            device_name = self.device.shell_output(
                "getprop ro.product.model", timeout=self.timeout
            )

        if device_name == "Pixel 2":
            # Adreno 540 (710MHz)
            # values obtained from:
            # /sys/devices/soc/5000000.qcom,kgsl-3d0/kgsl/kgsl-3d0/max_clk_mhz
            commands.update(
                {
                    "/sys/devices/soc/5000000.qcom,kgsl-3d0/devfreq/"
                    "5000000.qcom,kgsl-3d0/governor": "performance",
                    "/sys/devices/soc/soc:qcom,kgsl-busmon/devfreq/"
                    "soc:qcom,kgsl-busmon/governor": "performance",
                    "/sys/devices/soc/5000000.qcom,kgsl-3d0/kgsl/kgsl-3d0/min_clock_mhz": "710",
                }
            )
        elif device_name == "Moto G (5)":
            # Adreno 505 (450MHz)
            # values obtained from:
            # /sys/devices/soc/1c00000.qcom,kgsl-3d0/kgsl/kgsl-3d0/max_clock_mhz
            commands.update(
                {
                    "/sys/devices/soc/1c00000.qcom,kgsl-3d0/devfreq/"
                    "1c00000.qcom,kgsl-3d0/governor": "performance",
                    "/sys/devices/soc/1c00000.qcom,kgsl-3d0/kgsl/kgsl-3d0/min_clock_mhz": "450",
                }
            )
        else:
            self.log.info(
                "GPU for device with ro.product.model '{}' unknown, not setting devfreq".format(
                    device_name
                )
            )

        for key, value in commands.items():
            self._set_value_and_check_exitcode(key, value)

    def set_kernel_performance_parameters(self):
        """Pin the msm bus bandwidth via its debugfs shell client."""
        self.log.info("setting kernel performance parameters")
        commands = {
            "/sys/kernel/debug/msm-bus-dbg/shell-client/update_request": "1",
            "/sys/kernel/debug/msm-bus-dbg/shell-client/mas": "1",
            "/sys/kernel/debug/msm-bus-dbg/shell-client/ab": "0",
            "/sys/kernel/debug/msm-bus-dbg/shell-client/slv": "512",
        }
        for key, value in commands.items():
            self._set_value_and_check_exitcode(key, value)
diff --git a/python/mozperftest/mozperftest/system/android_startup.py b/python/mozperftest/mozperftest/system/android_startup.py
new file mode 100644
index 0000000000..4717c638ae
--- /dev/null
+++ b/python/mozperftest/mozperftest/system/android_startup.py
@@ -0,0 +1,414 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import re
+import statistics
+import time
+from datetime import datetime, timedelta
+
+import mozdevice
+
+from .android import AndroidDevice
+
# strptime/strftime pattern for nightly build dates, e.g. "2023.01.31".
DATETIME_FORMAT = "%Y.%m.%d"
# Logcat line GeckoView emits when it starts loading a page.
PAGE_START = re.compile("GeckoSession: handleMessage GeckoView:PageStart uri=")

# Product identifiers; keys into BASE_URL_DICT and PROD_TO_CHANNEL_TO_PKGID.
PROD_FENIX = "fenix"
PROD_FOCUS = "focus"
PROC_GVEX = "geckoview_example"

# Keys expected in the APK metadata mapping handed to the layer.
KEY_NAME = "name"
KEY_PRODUCT = "product"
KEY_DATETIME = "date"
KEY_COMMIT = "commit"
KEY_ARCHITECTURE = "architecture"
KEY_TEST_NAME = "test_name"

# Suffixes of the per-test statistics that get reported.
MEASUREMENT_DATA = ["mean", "median", "standard_deviation"]
# Expected PageStart logcat line counts (see get_measurement_from_nav_start_logcat).
OLD_VERSION_FOCUS_PAGE_START_LINE_COUNT = 3
NEW_VERSION_FOCUS_PAGE_START_LINE_COUNT = 2
# `cmd package resolve-activity --brief` prints a header line plus the component.
STDOUT_LINE_COUNT = 2

# Supported start-up test names.
TEST_COLD_MAIN_FF = "cold_main_first_frame"
TEST_COLD_MAIN_RESTORE = "cold_main_session_restore"
TEST_COLD_VIEW_FF = "cold_view_first_frame"
TEST_COLD_VIEW_NAV_START = "cold_view_nav_start"
# URI loaded by the VIEW-intent tests.
TEST_URI = "https://example.com"

# Taskcluster index URLs for fetching nightly APKs, keyed by product
# ("<product>" for a dated build, "<product>-latest" for the latest build).
BASE_URL_DICT = {
    PROD_FENIX: (
        "https://firefox-ci-tc.services.mozilla.com/api/index/v1/task/"
        "mobile.v3.firefox-android.apks.fenix-nightly.{date}.latest.{architecture}/artifacts/"
        "public%2Fbuild%2Ffenix%2F{architecture}%2Ftarget.apk"
    ),
    PROD_FENIX
    + "-latest": (
        "https://firefox-ci-tc.services.mozilla.com/api/index/v1/task/"
        "mobile.v3.firefox-android.apks.fenix-nightly.latest.{architecture}/artifacts/"
        "public%2Fbuild%2Ffenix%2F{architecture}%2Ftarget.apk"
    ),
    PROD_FOCUS: (
        "https://firefox-ci-tc.services.mozilla.com/api/index/v1/task/"
        "mobile.v3.firefox-android.apks.focus-nightly.{date}.latest.{architecture}"
        "/artifacts/public%2Fbuild%2Ffocus%2F{architecture}%2Ftarget.apk"
    ),
    PROD_FOCUS
    + "-latest": (
        "https://firefox-ci-tc.services.mozilla.com/api/index/v1/task/"
        "mobile.v3.firefox-android.apks.focus-nightly.latest.{architecture}"
        "/artifacts/public%2Fbuild%2Ffocus%2F{architecture}%2Ftarget.apk"
    ),
    PROC_GVEX: (
        "https://firefox-ci-tc.services.mozilla.com/api/index/v1/task/"
        "gecko.v2.mozilla-central.pushdate.{date}.latest.mobile.android-"
        "{architecture}-debug/artifacts/public%2Fbuild%2Fgeckoview_example.apk"
    ),
    PROC_GVEX
    + "-latest": (
        "https://firefox-ci-tc.services.mozilla.com/api/index/v1/task/"
        "gecko.v2.mozilla-central.shippable.latest.mobile.android-"
        "{architecture}-opt/artifacts/public/build/geckoview_example.apk"
    ),
}
# Android package ids, keyed first by product then by release channel.
PROD_TO_CHANNEL_TO_PKGID = {
    PROD_FENIX: {
        "nightly": "org.mozilla.fenix",
        "beta": "org.mozilla.firefox.beta",
        "release": "org.mozilla.firefox",
        "debug": "org.mozilla.fenix.debug",
    },
    PROD_FOCUS: {
        "nightly": "org.mozilla.focus.nightly",
        "beta": "org.mozilla.focus.beta",  # only present since post-fenix update.
        "release": "org.mozilla.focus",
        "debug": "org.mozilla.focus.debug",
    },
    PROC_GVEX: {
        "nightly": "org.mozilla.geckoview_example",
    },
}
TEST_LIST = [
    "cold_main_first_frame",
    "cold_view_nav_start",
    "cold_view_first_frame",
    "cold_main_session_restore",
]
# "cold_view_first_frame", "cold_main_session_restore" are 2 disabled tests(broken)
+
+
class AndroidStartUpDownloadError(Exception):
    """Raised when the Firefox Nightly APK could not be downloaded."""
+
+
class AndroidStartUpInstallError(Exception):
    """Raised when installing Firefox on the Android device fails."""
+
+
class AndroidStartUpUnknownTestError(Exception):
    """Raised when the provided test name is not one available to test.

    Either the test is currently not enabled, or the name is misspelled.
    """
+
+
class AndroidStartUpMatchingError(Exception):
    """Raised when log output matched an unexpected number of lines."""
+
+
class AndroidStartUpData:
    """Transformer that shapes AndroidStartUp results for perfherder."""

    def open_data(self, data):
        """Convert a raw measurement entry into the perftest data layout.

        *data* carries a ``name`` and a list of ``values``; every value
        becomes one data point indexed by its position.
        """
        points = []
        for index, measurement in enumerate(data["values"]):
            points.append(
                {"file": "android_startup", "value": measurement, "xaxis": index}
            )
        return {
            "name": "AndroidStartUp",
            "subtest": data["name"],
            "data": points,
            "shouldAlert": True,
        }

    def transform(self, data):
        """Return *data* unchanged."""
        return data

    merge = transform
+
+
class AndroidStartUp(AndroidDevice):
    """Measures cold start-up timings of Firefox products on an Android device.

    Installs the APK described by ``apk_metadata``, launches the product
    ``test_cycles`` times via ``am start-activity``, and extracts timings
    either from the ``am`` output (first-frame tests) or from logcat
    (nav-start / session-restore tests).
    """

    name = "AndroidStartUp"
    activated = False
    # NOTE(review): the four help strings below are identical copy-paste;
    # only the first one accurately describes its option.
    arguments = {
        "test-name": {
            "type": str,
            "default": "",
            "help": "This is the startup android test that will be run on the a51",
        },
        "apk_metadata": {
            "type": str,
            "default": "",
            "help": "This is the startup android test that will be run on the a51",
        },
        "product": {
            "type": str,
            "default": "",
            "help": "This is the startup android test that will be run on the a51",
        },
        "release-channel": {
            "type": str,
            "default": "",
            "help": "This is the startup android test that will be run on the a51",
        },
    }

    def __init__(self, env, mach_cmd):
        super(AndroidStartUp, self).__init__(env, mach_cmd)
        self.android_activity = None
        self.capture_logcat = self.capture_file = self.app_name = None
        # Non-rooted ADB connection to the attached device.
        self.device = mozdevice.ADBDevice(use_root=False)

    def run(self, metadata):
        """Entry point: read args, run the measurements, then clean up.

        Returns *metadata* with the results added.
        """
        options = metadata.script["options"]
        self.test_name = self.get_arg("test-name")
        # NOTE(review): declared as "apk_metadata"/"release-channel" above but
        # fetched as "apk-metadata"/"release_channel" here — presumably
        # get_arg normalizes dashes/underscores; confirm.
        self.apk_metadata = self.get_arg("apk-metadata")
        self.product = self.get_arg("product")
        self.release_channel = self.get_arg("release_channel")
        self.single_date = options["test_parameters"]["single_date"]
        self.date_range = options["test_parameters"]["date_range"]
        self.startup_cache = options["test_parameters"]["startup_cache"]
        self.test_cycles = options["test_parameters"]["test_cycles"]
        self.package_id = PROD_TO_CHANNEL_TO_PKGID[self.product][self.release_channel]
        # Matches the ActivityManager line announcing our app's process start.
        self.proc_start = re.compile(
            rf"ActivityManager: Start proc \d+:{self.package_id}/"
        )

        # NOTE(review): apk_metadata is declared with "type": str but indexed
        # with KEY_NAME below, so callers apparently pass a mapping — verify.
        apk_metadata = self.apk_metadata
        self.get_measurements(apk_metadata, metadata)

        # Cleanup
        self.device.shell(f"rm {apk_metadata[KEY_NAME]}")

        return metadata

    def get_measurements(self, apk_metadata, metadata):
        """Run the perf analysis and record its measurements in *metadata*."""
        measurements = self.run_performance_analysis(apk_metadata)
        self.add_to_metadata(measurements, metadata)

    def get_date_array_for_range(self, start, end):
        """Return every date between *start* and *end* (inclusive) as strings.

        Both bounds use DATETIME_FORMAT ("%Y.%m.%d").
        """
        startdate = datetime.strptime(start, DATETIME_FORMAT)
        enddate = datetime.strptime(end, DATETIME_FORMAT)
        delta_dates = (enddate - startdate).days + 1
        return [
            (startdate + timedelta(days=i)).strftime("%Y.%m.%d")
            for i in range(delta_dates)
        ]

    def add_to_metadata(self, measurements, metadata):
        """Emit one perfherder-style result per measurement key/value pair."""
        if measurements is not None:
            for key, value in measurements.items():
                metadata.add_result(
                    {
                        "name": f"AndroidStartup:{self.product}",
                        "framework": {"name": "mozperftest"},
                        "transformer": "mozperftest.system.android_startup:AndroidStartUpData",
                        "shouldAlert": True,
                        "results": [
                            {
                                "values": [value],
                                "name": key,
                                "shouldAlert": True,
                            }
                        ],
                    }
                )

    def run_performance_analysis(self, apk_metadata):
        """(Re)install the APK on the device, then run the tests.

        Raises AndroidStartUpInstallError when the install did not stick.
        """
        # Installing the application on the device and getting ready to run the tests
        install_path = apk_metadata[KEY_NAME]
        if self.custom_apk_exists():
            install_path = self.custom_apk_path

        self.device.uninstall_app(self.package_id)
        self.info(f"Installing {install_path}...")
        app_name = self.device.install_app(install_path)
        if self.device.is_app_installed(app_name):
            self.info(f"Successfully installed {app_name}")
        else:
            raise AndroidStartUpInstallError("The android app was not installed")
        self.apk_name = apk_metadata[KEY_NAME].split(".")[0]

        return self.run_tests()

    def run_tests(self):
        """Run ``test_cycles`` iterations of the configured test.

        Returns a dict of "<test>.<statistic>" -> value; standard deviation
        is only computed when more than one cycle ran.
        """
        measurements = {}
        # Iterate through the tests in the test list
        self.info(f"Running {self.test_name} on {self.apk_name}...")
        self.skip_onboarding(self.test_name)
        time.sleep(self.get_warmup_delay_seconds())
        test_measurements = []

        for i in range(self.test_cycles):
            start_cmd_args = self.get_start_cmd(self.test_name)
            self.info(start_cmd_args)
            # Kill the app between cycles so every launch is a cold start.
            self.device.stop_application(self.package_id)
            time.sleep(1)
            self.info(f"iteration {i + 1}")
            # Clear logcat so nav-start parsing only sees this iteration.
            self.device.shell("logcat -c")
            process = self.device.shell_output(start_cmd_args).splitlines()
            test_measurements.append(self.get_measurement(self.test_name, process))

        self.info(f"{self.test_name}: {str(test_measurements)}")
        measurements[f"{self.test_name}.{MEASUREMENT_DATA[0]}"] = statistics.mean(
            test_measurements
        )
        self.info(f"Mean: {statistics.mean(test_measurements)}")
        measurements[f"{self.test_name}.{MEASUREMENT_DATA[1]}"] = statistics.median(
            test_measurements
        )
        self.info(f"Median: {statistics.median(test_measurements)}")
        if self.test_cycles > 1:
            measurements[f"{self.test_name}.{MEASUREMENT_DATA[2]}"] = statistics.stdev(
                test_measurements
            )
            self.info(f"Standard Deviation: {statistics.stdev(test_measurements)}")

        return measurements

    def get_measurement(self, test_name, stdout):
        """Extract one duration (ms) for *test_name* from the launch output.

        NOTE(review): implicitly returns None for any other test name, which
        would later break the statistics calls — confirm callers validate
        test_name first.
        """
        if test_name in [TEST_COLD_MAIN_FF, TEST_COLD_VIEW_FF]:
            return self.get_measurement_from_am_start_log(stdout)
        elif test_name in [TEST_COLD_VIEW_NAV_START, TEST_COLD_MAIN_RESTORE]:
            # We must sleep until the Navigation::Start event occurs. If we don't
            # the script will fail. This can take up to 14s on the G5
            time.sleep(17)
            proc = self.device.shell_output("logcat -d")
            return self.get_measurement_from_nav_start_logcat(proc)

    def get_measurement_from_am_start_log(self, stdout):
        """Parse the single "TotalTime: N" line out of `am start -W` output.

        Raises AndroidStartUpMatchingError unless exactly one line matches.
        """
        total_time_prefix = "TotalTime: "
        matching_lines = [line for line in stdout if line.startswith(total_time_prefix)]
        if len(matching_lines) != 1:
            raise AndroidStartUpMatchingError(
                f"Each run should only have 1 {total_time_prefix}."
                f"However, this run unexpectedly had {matching_lines} matching lines"
            )
        duration = int(matching_lines[0][len(total_time_prefix) :])
        return duration

    def get_measurement_from_nav_start_logcat(self, process_output):
        """Compute proc-start -> PageStart elapsed time (ms) from logcat."""

        def __line_to_datetime(line):
            date_str = " ".join(line.split(" ")[:2])  # e.g. "05-18 14:32:47.366"
            # strptime needs microseconds. logcat outputs millis so we append zeroes
            date_str_with_micros = date_str + "000"
            return datetime.strptime(date_str_with_micros, "%m-%d %H:%M:%S.%f")

        def __get_proc_start_datetime():
            # This regex may not work on older versions of Android: we don't care
            # yet because supporting older versions isn't in our requirements.
            proc_start_lines = [line for line in lines if self.proc_start.search(line)]
            if len(proc_start_lines) != 1:
                raise AndroidStartUpMatchingError(
                    f"Expected to match 1 process start string but matched {len(proc_start_lines)}"
                )
            return __line_to_datetime(proc_start_lines[0])

        def __get_page_start_datetime():
            page_start_lines = [line for line in lines if PAGE_START.search(line)]
            page_start_line_count = len(page_start_lines)
            page_start_assert_msg = "found len=" + str(page_start_line_count)

            # In focus versions <= v8.8.2, it logs 3 PageStart lines and these include actual uris.
            # We need to handle our assertion differently due to the different line count. In focus
            # versions >= v8.8.3, this measurement is broken because the logcat were removed.
            is_old_version_of_focus = (
                "about:blank" in page_start_lines[0] and self.product == PROD_FOCUS
            )
            if is_old_version_of_focus:
                assert (
                    page_start_line_count
                    == OLD_VERSION_FOCUS_PAGE_START_LINE_COUNT  # should be 3
                ), page_start_assert_msg  # Lines: about:blank, target URL, target URL.
            else:
                assert (
                    page_start_line_count
                    == NEW_VERSION_FOCUS_PAGE_START_LINE_COUNT  # Should be 2
                ), page_start_assert_msg  # Lines: about:blank, target URL.
            return __line_to_datetime(
                page_start_lines[1]
            )  # 2nd PageStart is for target URL.

        lines = process_output.split("\n")
        elapsed_seconds = (
            __get_page_start_datetime() - __get_proc_start_datetime()
        ).total_seconds()
        elapsed_millis = round(elapsed_seconds * 1000)
        return elapsed_millis

    def get_warmup_delay_seconds(self):
        """
        We've been told the start up cache is populated ~60s after first start up. As such,
        we should measure start up with the start up cache populated. If the
        args say we shouldn't wait, we only wait a short duration ~= visual completeness.
        """
        return 60 if self.startup_cache else 5

    def get_start_cmd(self, test_name):
        """Build the `am start-activity` command line for *test_name*.

        Raises AndroidStartUpUnknownTestError for unrecognized names.
        """
        intent_action_prefix = "android.intent.action.{}"
        if test_name in [TEST_COLD_MAIN_FF, TEST_COLD_MAIN_RESTORE]:
            intent = (
                f"-a {intent_action_prefix.format('MAIN')} "
                f"-c android.intent.category.LAUNCHER"
            )
        elif test_name in [TEST_COLD_VIEW_FF, TEST_COLD_VIEW_NAV_START]:
            intent = f"-a {intent_action_prefix.format('VIEW')} -d {TEST_URI}"
        else:
            raise AndroidStartUpUnknownTestError(
                "Unknown test provided please double check the test name and spelling"
            )

        # You can't launch an app without an pkg_id/activity pair
        component_name = self.get_component_name_for_intent(intent)
        cmd = f"am start-activity -W -n {component_name} {intent} "

        # If focus skip onboarding: it is not stateful so must be sent for every cold start intent
        if self.product == PROD_FOCUS:
            cmd += "--ez performancetest true"

        return cmd

    def get_component_name_for_intent(self, intent):
        """Resolve *intent* to a "package/activity" component on the device.

        Raises AndroidStartUpMatchingError if the resolver output does not
        have exactly STDOUT_LINE_COUNT lines.
        """
        resolve_component_args = (
            f"cmd package resolve-activity --brief {intent} {self.package_id}"
        )
        result_output = self.device.shell_output(resolve_component_args)
        stdout = result_output.splitlines()
        if len(stdout) != STDOUT_LINE_COUNT:  # Should be 2
            raise AndroidStartUpMatchingError(f"expected 2 lines. Got: {stdout}")
        return stdout[1]

    def skip_onboarding(self, test_name):
        """
        We skip onboarding for focus in measure_start_up.py because it's stateful
        and needs to be called for every cold start intent.
        Onboarding only visibly gets in the way of our MAIN test results.
        """
        if self.product == PROD_FOCUS or test_name not in {
            TEST_COLD_MAIN_FF,
            TEST_COLD_MAIN_RESTORE,
        }:
            return

        # This sets mutable state so we only need to pass this flag once, before we start the test
        self.device.shell(
            f"am start-activity -W -a android.intent.action.MAIN --ez "
            f"performancetest true -n{self.package_id}/org.mozilla.fenix.App"
        )
        time.sleep(4)  # ensure skip onboarding call has time to propagate.
diff --git a/python/mozperftest/mozperftest/system/example.zip b/python/mozperftest/mozperftest/system/example.zip
new file mode 100644
index 0000000000..8c724762d3
--- /dev/null
+++ b/python/mozperftest/mozperftest/system/example.zip
Binary files differ
diff --git a/python/mozperftest/mozperftest/system/macos.py b/python/mozperftest/mozperftest/system/macos.py
new file mode 100644
index 0000000000..493fd4fc1d
--- /dev/null
+++ b/python/mozperftest/mozperftest/system/macos.py
@@ -0,0 +1,120 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import os
+import platform
+import shutil
+import subprocess
+import tempfile
+from pathlib import Path
+
+from mozperftest.layers import Layer
+
# Add here any option that might point to a DMG file we want to extract. The
# key is the name of the option and the value is the file (or directory) in
# the DMG that the option should point at once extracted.
POTENTIAL_DMGS = {
    "browsertime-binary": "Contents/MacOS/firefox",
    "xpcshell-xre-path": "Contents/MacOS",
}
+
+
class MacosDevice(Layer):
    """Runs on macOS to mount DMGs if we see one.

    For each option in POTENTIAL_DMGS whose value ends with ".dmg", the
    DMG is mounted, its .app bundle is copied next to the DMG, and the
    option is rewritten to point inside the copied bundle. Every copied
    directory is removed at teardown.
    """

    name = "macos"
    activated = platform.system() == "Darwin"

    def __init__(self, env, mach_cmd):
        super(MacosDevice, self).__init__(env, mach_cmd)
        # Directories created by extract_app(); removed in teardown().
        self._tmp_dirs = []

    def _run_process(self, args):
        """Run *args* and return its stdout.

        Raises subprocess.CalledProcessError on a non-zero exit code.
        """
        p = subprocess.Popen(
            args,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True,
        )

        stdout, stderr = p.communicate(timeout=45)
        if p.returncode != 0:
            # Fix: CalledProcessError's constructor is
            # (returncode, cmd, output=None, stderr=None); the previous
            # stdout=/missing-cmd call raised TypeError instead.
            raise subprocess.CalledProcessError(
                p.returncode, args, output=stdout, stderr=stderr
            )

        return stdout

    def extract_app(self, dmg, target):
        """Mount *dmg* and copy the first .app bundle found into *target*.

        Raises FileNotFoundError when the DMG is missing, re-raises mount
        failures, and raises IOError when no .app is found inside.
        """
        mount = Path(tempfile.mkdtemp())

        if not Path(dmg).exists():
            raise FileNotFoundError(dmg)

        # mounting the DMG with hdiutil
        cmd = f"hdiutil attach -nobrowse -mountpoint {str(mount)} {dmg}"
        try:
            self._run_process(cmd.split())
        except subprocess.CalledProcessError:
            self.error(f"Can't mount {dmg}")
            if mount.exists():
                shutil.rmtree(str(mount))
            raise

        # browse the mounted volume, to look for the app.
        found = False
        try:
            for f in os.listdir(str(mount)):
                if not f.endswith(".app"):
                    continue
                app = mount / f
                shutil.copytree(str(app), str(target))
                found = True
                break
        finally:
            try:
                self._run_process(f"hdiutil detach {str(mount)}".split())
            except subprocess.CalledProcessError as e:
                # Best-effort detach: log and carry on with cleanup.
                # Fix: the message was missing the f-prefix and logged the
                # literal "{e.stdout}".
                self.warning(f"Detach failed {e.stdout}")
            finally:
                if mount.exists():
                    shutil.rmtree(str(mount))
                if not found:
                    self.error(f"No app file found in {dmg}")
                    raise IOError(dmg)

    def run(self, metadata):
        """Swap any DMG-valued option for a path inside the extracted app."""
        # Each DMG is mounted, then we look for the .app
        # directory in it, which is copied in a directory
        # alongside the .dmg file. That directory
        # is removed during teardown.
        for option, path_in_dmg in POTENTIAL_DMGS.items():
            value = self.get_arg(option)

            if value is None or not value.endswith(".dmg"):
                continue

            self.info(f"Mounting {value}")
            dmg_file = Path(value)
            if not dmg_file.exists():
                raise FileNotFoundError(str(dmg_file))

            # let's unpack the DMG in place...
            target = dmg_file.parent / dmg_file.name.split(".")[0]
            self._tmp_dirs.append(target)
            self.extract_app(dmg_file, target)

            # ... find a specific file or directory if needed ...
            path = target / path_in_dmg
            if not path.exists():
                raise FileNotFoundError(str(path))

            # ... and swap the browsertime argument
            self.info(f"Using {path} for {option}")
            self.env.set_arg(option, str(path))
        return metadata

    def teardown(self):
        """Remove every directory created by extract_app()."""
        for directory in self._tmp_dirs:  # renamed: `dir` shadowed the builtin
            if directory.exists():
                shutil.rmtree(str(directory))
diff --git a/python/mozperftest/mozperftest/system/pingserver.py b/python/mozperftest/mozperftest/system/pingserver.py
new file mode 100644
index 0000000000..4ae6b9a113
--- /dev/null
+++ b/python/mozperftest/mozperftest/system/pingserver.py
@@ -0,0 +1,94 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import json
+import os
+import socketserver
+import threading
+import time
+from pathlib import Path
+
+from mozlog import get_proxy_logger
+
+from mozperftest.layers import Layer
+from mozperftest.utils import install_package
+
# Module-level logger and directory of this module.
# NOTE(review): component is "proxy" — looks copy-pasted from proxy.py;
# confirm whether "pingserver" was intended.
LOG = get_proxy_logger(component="proxy")
HERE = os.path.dirname(__file__)
+
+
class PingServer(Layer):
    """Runs a local telemetry ping-collection server (edgeping).

    The browser is pointed at the server through telemetry prefs; the
    pings received during the run are dumped to telemetry.json at
    teardown.
    """

    name = "pingserver"
    activated = False

    arguments = {}

    def setup(self):
        """Install the pinned edgeping/requests packages into the venv."""
        # Install edgeping and requests
        deps = ["edgeping==0.1", "requests==2.9.1"]
        for dep in deps:
            install_package(self.mach_cmd.virtualenv_manager, dep)

    def _wait_for_server(self, endpoint):
        """Poll *endpoint* until it answers, re-raising after ~5 seconds."""
        import requests

        start = time.monotonic()
        while True:
            try:
                requests.get(endpoint, timeout=0.1)
                return
            except Exception:
                # we want to wait at most 5sec.
                if time.monotonic() - start > 5.0:
                    raise
                time.sleep(0.01)

    def run(self, metadata):
        """Start the edgeping server and point telemetry prefs at it."""
        from edgeping.server import PingHandling

        self.verbose = self.get_arg("verbose")
        self.metadata = metadata
        self.debug("Starting the Edgeping server")
        # Port 0 lets the OS pick a free port.
        self.httpd = socketserver.TCPServer(("localhost", 0), PingHandling)
        self.server_thread = threading.Thread(target=self.httpd.serve_forever)
        # the chosen socket gets picked in the constructor so we can grab it here
        address = self.httpd.server_address
        self.endpoint = f"http://{address[0]}:{address[1]}"
        self.server_thread.start()
        self._wait_for_server(self.endpoint + "/status")

        self.debug(f"Edgeping coserver running at {self.endpoint}")
        # Force telemetry on and direct every upload at our local server.
        prefs = {
            "toolkit.telemetry.server": self.endpoint,
            "telemetry.fog.test.localhost_port": address[1],
            "datareporting.healthreport.uploadEnabled": True,
            "datareporting.policy.dataSubmissionEnabled": True,
            "toolkit.telemetry.enabled": True,
            "toolkit.telemetry.unified": True,
            "toolkit.telemetry.shutdownPingSender.enabled": True,
            "datareporting.policy.dataSubmissionPolicyBypassNotification": True,
            "toolkit.telemetry.send.overrideOfficialCheck": True,
        }
        if self.verbose:
            prefs["toolkit.telemetry.log.level"] = "Trace"
            prefs["toolkit.telemetry.log.dump"] = True

        browser_prefs = metadata.get_options("browser_prefs")
        browser_prefs.update(prefs)
        return metadata

    def teardown(self):
        """Dump collected pings to telemetry.json, then stop the server."""
        import requests

        self.info("Grabbing the pings")
        pings = requests.get(f"{self.endpoint}/pings").json()
        output = Path(self.get_arg("output"), "telemetry.json")
        self.info(f"Writing in {output}")
        with output.open("w") as f:
            f.write(json.dumps(pings))

        self.debug("Stopping the Edgeping coserver")
        self.httpd.shutdown()
        self.server_thread.join()
diff --git a/python/mozperftest/mozperftest/system/profile.py b/python/mozperftest/mozperftest/system/profile.py
new file mode 100644
index 0000000000..d29744a818
--- /dev/null
+++ b/python/mozperftest/mozperftest/system/profile.py
@@ -0,0 +1,122 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import os
+import shutil
+import tempfile
+from pathlib import Path
+
+from condprof.client import ProfileNotFoundError, get_profile
+from condprof.util import get_current_platform
+from mozprofile import create_profile
+from mozprofile.prefs import Preferences
+
+from mozperftest.layers import Layer
+
+HERE = os.path.dirname(__file__)
+
+
class Profile(Layer):
    """Provides the browser profile for the test run.

    Three modes: a user-supplied directory (``--profile-directory``), a
    conditioned profile fetched via condprof (``--conditioned``), or a
    fresh profile created with mozprofile. Directories this layer
    creates are removed at teardown.
    """

    name = "profile"
    activated = True
    arguments = {
        "directory": {"type": str, "default": None, "help": "Profile to use"},
        "user-js": {"type": str, "default": None, "help": "Custom user.js"},
        "conditioned": {
            "action": "store_true",
            "default": False,
            "help": "Use a conditioned profile.",
        },
        "conditioned-scenario": {
            "type": str,
            "default": "settled",
            "help": "Conditioned scenario to use",
        },
        "conditioned-platform": {
            "type": str,
            "default": None,
            "help": "Conditioned platform to use (use local by default)",
        },
        "conditioned-project": {
            "type": str,
            "default": "mozilla-central",
            "help": "Conditioned project",
            "choices": ["try", "mozilla-central"],
        },
    }

    def __init__(self, env, mach_cmd):
        super(Profile, self).__init__(env, mach_cmd)
        # Directories we created and must delete in teardown().
        self._created_dirs = []

    def setup(self):
        pass

    def _cleanup(self):
        # Deliberate no-op, installed as profile.cleanup in run() to defuse
        # mozprofile's GC-time profile deletion.
        pass

    def _get_conditioned_profile(self):
        """Download a conditioned profile and return its Path.

        Falls back to the alternate project (try <-> mozilla-central) when
        the profile is not found under the requested one.
        """
        platform = self.get_arg("conditioned-platform")
        if platform is None:
            platform = get_current_platform()
        scenario = self.get_arg("conditioned-scenario")
        project = self.get_arg("conditioned-project")
        alternate_project = "mozilla-central" if project != "mozilla-central" else "try"

        temp_dir = tempfile.mkdtemp()
        try:
            condprof = get_profile(temp_dir, platform, scenario, repo=project)
        except ProfileNotFoundError:
            # Retry against the other project before giving up.
            # (A redundant `except Exception: raise` clause was removed here.)
            condprof = get_profile(temp_dir, platform, scenario, repo=alternate_project)

        # now get the full directory path to our fetched conditioned profile
        condprof = Path(temp_dir, condprof)
        if not condprof.exists():
            raise OSError(str(condprof))

        return condprof

    def run(self, metadata):
        """Ensure profile-directory is set, creating a profile if needed."""
        # using a conditioned profile
        if self.get_arg("conditioned"):
            profile_dir = self._get_conditioned_profile()
            self.set_arg("profile-directory", str(profile_dir))
            self._created_dirs.append(str(profile_dir))
            return metadata

        if self.get_arg("directory") is not None:
            # no need to create one or load a conditioned one
            return metadata

        # fresh profile
        profile = create_profile(app="firefox")

        # mozprofile.Profile.__del__ silently deletes the profile
        # it creates in a non-deterministic time (garbage collected) by
        # calling cleanup. We override this silly behavior here.
        profile.cleanup = self._cleanup

        prefs = metadata.get_options("browser_prefs")

        if prefs == {}:
            prefs["mozperftest"] = "true"

        # apply custom user prefs if any
        user_js = self.get_arg("user-js")
        if user_js is not None:
            # Fix: message said "use prefs".
            self.info("Applying user prefs from %s" % user_js)
            default_prefs = dict(Preferences.read_prefs(user_js))
            prefs.update(default_prefs)

        profile.set_preferences(prefs)
        self.info("Created profile at %s" % profile.profile)
        self._created_dirs.append(profile.profile)
        self.set_arg("profile-directory", profile.profile)
        return metadata

    def teardown(self):
        """Remove every profile directory created during run()."""
        for created in self._created_dirs:  # renamed: `dir` shadowed the builtin
            if os.path.exists(created):
                shutil.rmtree(created)
diff --git a/python/mozperftest/mozperftest/system/proxy.py b/python/mozperftest/mozperftest/system/proxy.py
new file mode 100644
index 0000000000..d43a65a0eb
--- /dev/null
+++ b/python/mozperftest/mozperftest/system/proxy.py
@@ -0,0 +1,232 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import json
+import os
+import pathlib
+import re
+import signal
+import tempfile
+import threading
+
+from mozdevice import ADBDevice
+from mozlog import get_proxy_logger
+from mozprocess import ProcessHandler
+
+from mozperftest.layers import Layer
+from mozperftest.utils import ON_TRY, download_file, get_output_dir, install_package
+
# Module-level logger and directory of this module.
LOG = get_proxy_logger(component="proxy")
HERE = os.path.dirname(__file__)
+
+
class OutputHandler(object):
    """Consumes mozproxy's output lines and extracts the proxy port.

    Lines that parse as JSON log records are forwarded to the structured
    logger; anything else goes through process_output(). wait_for_port()
    blocks until the port is announced or finished() is called.
    """

    def __init__(self):
        self.proc = None
        self.port = None
        self.port_event = threading.Event()

    def __call__(self, line):
        stripped = line.strip()
        if not stripped:
            return
        stripped = stripped.decode("utf-8", errors="replace")
        try:
            record = json.loads(stripped)
        except ValueError:
            # Not JSON — treat it as plain process output.
            self.process_output(stripped)
            return

        if not (isinstance(record, dict) and "action" in record):
            self.process_output(json.dumps(record))
            return

        # Structured log record: watch for the port announcement from our
        # subprocess so the browser can be configured.
        match = re.match(r"Proxy running on port (\d+)", record.get("message", ""))
        if match:
            self.port = int(match.group(1))
            self.port_event.set()
        LOG.log_raw(record)

    def finished(self):
        # Unblock wait_for_port() even if no port was ever announced.
        self.port_event.set()

    def process_output(self, line):
        if self.proc is None:
            LOG.process_output(line)
            return
        LOG.process_output(self.proc.pid, line)

    def wait_for_port(self):
        """Block until the port is known (or finished); return it (or None)."""
        self.port_event.wait()
        return self.port
+
+
class ProxyRunner(Layer):
    """Use a proxy"""

    name = "proxy"
    activated = False

    arguments = {
        "mode": {
            "type": str,
            "choices": ["record", "playback"],
            "help": "Proxy server mode. Use `playback` to replay from the provided file(s). "
            "Use `record` to generate a new recording at the path specified by `--file`. "
            "playback - replay from provided file. "
            "record - generate a new recording at the specified path.",
        },
        "file": {
            "type": str,
            "nargs": "+",
            "help": "The playback files to replay, or the file that a recording will be saved to. "
            "For playback, it can be any combination of the following: zip file, manifest file, "
            "or a URL to zip/manifest file. "
            "For recording, it's a zip fle.",
        },
        "perftest-page": {
            "type": str,
            "default": None,
            "help": "This option can be used to specify a single test to record rather than "
            "having to continuously modify the pageload_sites.json. This flag should only be "
            "used by the perftest team and selects items from "
            "`testing/performance/pageload_sites.json` based on the name field. Note that "
            "the login fields won't be checked with a request such as this (i.e. it overrides "
            "those settings).",
        },
    }

    def __init__(self, env, mach_cmd):
        super(ProxyRunner, self).__init__(env, mach_cmd)
        # mozproxy subprocess handle; None until run() starts it.
        self.proxy = None
        # Temporary directory holding a downloaded recording, if any.
        self.tmpdir = None

    def setup(self):
        """Make sure mozproxy is importable, installing it if needed."""
        try:
            import mozproxy  # noqa: F401
        except ImportError:
            # Install mozproxy and its vendored deps.
            mozbase = pathlib.Path(self.mach_cmd.topsrcdir, "testing", "mozbase")
            mozproxy_deps = ["mozinfo", "mozlog", "mozproxy"]
            for i in mozproxy_deps:
                install_package(
                    self.mach_cmd.virtualenv_manager, pathlib.Path(mozbase, i)
                )

        # set MOZ_HOST_BIN to find certutil. Required to set certificates on android
        os.environ["MOZ_HOST_BIN"] = self.mach_cmd.bindir

    def run(self, metadata):
        """Start mozproxy in record or playback mode and wire up the browser.

        Blocks until the proxy announces its port, then injects the proxy
        prefs into browser_prefs. Raises ValueError on a missing file/mode
        or when no port could be retrieved.
        """
        self.metadata = metadata
        replay_file = self.get_arg("file")

        # Check if we have a replay file
        if replay_file is None:
            raise ValueError("Proxy file not provided!!")

        # A URL is downloaded into a temp dir first.
        if replay_file is not None and replay_file.startswith("http"):
            self.tmpdir = tempfile.TemporaryDirectory()
            target = pathlib.Path(self.tmpdir.name, "recording.zip")
            self.info("Downloading %s" % replay_file)
            download_file(replay_file, target)
            replay_file = target

        self.info("Setting up the proxy")

        # mozproxy runs as a separate python subprocess.
        command = [
            self.mach_cmd.virtualenv_manager.python_path,
            "-m",
            "mozproxy.driver",
            "--topsrcdir=" + self.mach_cmd.topsrcdir,
            "--objdir=" + self.mach_cmd.topobjdir,
            "--profiledir=" + self.get_arg("profile-directory"),
        ]

        if not ON_TRY:
            command.extend(["--local"])

        if metadata.flavor == "mobile-browser":
            command.extend(["--tool=%s" % "mitmproxy-android"])
            command.extend(["--binary=android"])
        else:
            command.extend(["--tool=%s" % "mitmproxy"])
            # XXX See bug 1712337, we need a single point where we can get the binary used from
            # this is required to make it work locally
            binary = self.get_arg("browsertime-binary")
            if binary is None:
                binary = self.mach_cmd.get_binary_path()
            command.extend(["--binary=%s" % binary])

        if self.get_arg("mode") == "record":
            output = self.get_arg("output")
            if output is None:
                output = pathlib.Path(self.mach_cmd.topsrcdir, "artifacts")
            results_dir = get_output_dir(output)

            command.extend(["--mode", "record"])
            command.append(str(pathlib.Path(results_dir, replay_file)))
        elif self.get_arg("mode") == "playback":
            command.extend(["--mode", "playback"])
            command.append(str(replay_file))
        else:
            raise ValueError("Proxy mode not provided please provide proxy mode")

        inject_deterministic = self.get_arg("deterministic")
        if inject_deterministic:
            command.extend(["--deterministic"])

        # NOTE(review): bare print of the command line — looks like leftover
        # debugging; consider routing through self.info instead.
        print(" ".join(command))
        self.output_handler = OutputHandler()
        self.proxy = ProcessHandler(
            command,
            processOutputLine=self.output_handler,
            onFinish=self.output_handler.finished,
        )
        self.output_handler.proc = self.proxy
        self.proxy.run()

        # Wait until we've retrieved the proxy server's port number so we can
        # configure the browser properly.
        port = self.output_handler.wait_for_port()
        if port is None:
            raise ValueError("Unable to retrieve the port number from mozproxy")
        self.info("Received port number %s from mozproxy" % port)

        # Route all browser traffic through the local proxy.
        prefs = {
            "network.proxy.type": 1,
            "network.proxy.http": "127.0.0.1",
            "network.proxy.http_port": port,
            "network.proxy.ssl": "127.0.0.1",
            "network.proxy.ssl_port": port,
            "network.proxy.no_proxies_on": "127.0.0.1",
        }
        browser_prefs = metadata.get_options("browser_prefs")
        browser_prefs.update(prefs)

        if metadata.flavor == "mobile-browser":
            self.info("Setting reverse port fw for android device")
            device = ADBDevice()
            device.create_socket_connection("reverse", "tcp:%s" % port, "tcp:%s" % port)

        return metadata

    def teardown(self):
        """Stop the proxy subprocess and clean up the download directory.

        If mozproxy already exited on its own that is treated as an error
        and re-raised after cleanup completes.
        """
        err = None
        if self.proxy is not None:
            returncode = self.proxy.wait(0)
            if returncode is not None:
                err = ValueError(
                    "mozproxy terminated early with return code %d" % returncode
                )
            else:
                # Windows has no SIGINT; CTRL_BREAK_EVENT is the equivalent.
                kill_signal = getattr(signal, "CTRL_BREAK_EVENT", signal.SIGINT)
                os.kill(self.proxy.pid, kill_signal)
                self.proxy.wait()
            self.proxy = None
        if self.tmpdir is not None:
            self.tmpdir.cleanup()
            self.tmpdir = None

        if err:
            raise err
diff --git a/python/mozperftest/mozperftest/test/__init__.py b/python/mozperftest/mozperftest/test/__init__.py
new file mode 100644
index 0000000000..c7d7d6e049
--- /dev/null
+++ b/python/mozperftest/mozperftest/test/__init__.py
@@ -0,0 +1,25 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from mozperftest.layers import Layers
+from mozperftest.test.androidlog import AndroidLog
+from mozperftest.test.browsertime import BrowsertimeRunner
+from mozperftest.test.webpagetest import WebPageTest
+from mozperftest.test.xpcshell import XPCShell
+
+
+def get_layers():
+    """Return the tuple of all test layer classes provided by this package."""
+    return BrowsertimeRunner, AndroidLog, XPCShell, WebPageTest
+
+
+def pick_test(env, flavor, mach_cmd):
+    """Return the Layers stack of test layers matching *flavor*.
+
+    Known flavors are "xpcshell", "desktop-browser", "mobile-browser" and
+    "webpagetest"; any other value raises NotImplementedError.  The
+    mobile-browser flavor stacks AndroidLog on top of BrowsertimeRunner.
+    """
+    if flavor == "xpcshell":
+        return Layers(env, mach_cmd, (XPCShell,))
+    if flavor == "desktop-browser":
+        return Layers(env, mach_cmd, (BrowsertimeRunner,))
+    if flavor == "mobile-browser":
+        return Layers(env, mach_cmd, (BrowsertimeRunner, AndroidLog))
+    if flavor == "webpagetest":
+        return Layers(env, mach_cmd, (WebPageTest,))
+
+    raise NotImplementedError(flavor)
diff --git a/python/mozperftest/mozperftest/test/androidlog.py b/python/mozperftest/mozperftest/test/androidlog.py
new file mode 100644
index 0000000000..88bf01f2fe
--- /dev/null
+++ b/python/mozperftest/mozperftest/test/androidlog.py
@@ -0,0 +1,62 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from pathlib import Path
+
+from mozperftest.layers import Layer
+
+
+class AndroidLog(Layer):
+    """Runs an android log test.
+
+    Registers a logcat capture file as a result to be post-processed by the
+    LogCatTimeTransformer, which measures the time between two timestamped
+    log lines (matched by the first/second timestamp regexps below).
+    """
+
+    name = "androidlog"
+    activated = False
+    # Command-line arguments this layer contributes; defaults for the two
+    # timestamp regexps are computed in __call__ from the app name.
+    arguments = {
+        "first-timestamp": {
+            "type": str,
+            "default": None,
+            "help": "First timestamp regexp",
+        },
+        "second-timestamp": {
+            "type": str,
+            "default": None,
+            "help": "Second timestamp regexp",
+        },
+        "subtest-name": {
+            "type": str,
+            "default": "TimeToDisplayed",
+            "help": "Name of the metric that is produced",
+        },
+    }
+
+    def _get_logcat(self):
+        """Return the resolved Path of the captured logcat file.
+
+        Raises NotImplementedError when no --android-capture-logcat was
+        given (NOTE(review): a ValueError with a message might be clearer —
+        confirm whether callers rely on this exception type).
+        """
+        logcat = self.get_arg("android-capture-logcat")
+        if logcat is None:
+            raise NotImplementedError()
+        # check if the path is absolute or relative to output
+        path = Path(logcat)
+        if not path.is_absolute():
+            return Path(self.get_arg("output"), path).resolve()
+        return path.resolve()
+
+    def __call__(self, metadata):
+        """Add the logcat capture as a LogCatTimeTransformer result."""
+        app_name = self.get_arg("android-app-name")
+        # Default regexps bracket app startup: process creation through the
+        # activity's "Fully drawn" log line; dots in the app name are escaped.
+        first_ts = r".*Start proc.*" + app_name.replace(".", r"\.") + ".*"
+        second_ts = r".*Fully drawn.*" + app_name.replace(".", r"\.") + ".*"
+        options = {
+            "first-timestamp": self.get_arg("first-timestamp", first_ts),
+            "second-timestamp": self.get_arg("second-timestamp", second_ts),
+            # Optional user hook for custom logcat line processing.
+            "processor": self.env.hooks.get("logcat_processor"),
+            "transform-subtest-name": self.get_arg("subtest-name"),
+        }
+
+        metadata.add_result(
+            {
+                "results": str(self._get_logcat()),
+                "transformer": "LogCatTimeTransformer",
+                "transformer-options": options,
+                "name": "LogCat",
+            }
+        )
+
+        return metadata
diff --git a/python/mozperftest/mozperftest/test/browsertime/__init__.py b/python/mozperftest/mozperftest/test/browsertime/__init__.py
new file mode 100644
index 0000000000..f5e32101cc
--- /dev/null
+++ b/python/mozperftest/mozperftest/test/browsertime/__init__.py
@@ -0,0 +1,19 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from mozperftest.test.browsertime.runner import BrowsertimeRunner # noqa
+
+
+def add_option(env, name, value, overwrite=False):
+    """Append (or, with overwrite=True, replace) a browsertime extra option.
+
+    Options are stored as a comma-separated "name=value" list in the
+    "browsertime-extra-options" argument.
+    NOTE(review): when overwrite is False and no options exist yet, the
+    result starts with a leading comma (",name=value") — confirm downstream
+    parsing tolerates this before changing it.
+    """
+    if not overwrite:
+        options = env.get_arg("browsertime-extra-options", "")
+        options += f",{name}={value}"
+    else:
+        options = f"{name}={value}"
+    env.set_arg("browsertime-extra-options", options)
+
+
+def add_options(env, options, overwrite=False):
+    """Add several (name, value) browsertime options.
+
+    When overwrite is True only the first option replaces the existing
+    list; the rest are appended to it.
+    """
+    for i, (name, value) in enumerate(options):
+        add_option(env, name, value, overwrite=overwrite and i == 0)
diff --git a/python/mozperftest/mozperftest/test/browsertime/package-lock.json b/python/mozperftest/mozperftest/test/browsertime/package-lock.json
new file mode 100644
index 0000000000..af88126fcc
--- /dev/null
+++ b/python/mozperftest/mozperftest/test/browsertime/package-lock.json
@@ -0,0 +1,1874 @@
+{
+ "name": "mozilla-central-tools-browsertime",
+ "requires": true,
+ "lockfileVersion": 1,
+ "dependencies": {
+ "@babel/runtime": {
+ "version": "7.17.0",
+ "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.17.0.tgz",
+ "integrity": "sha512-etcO/ohMNaNA2UBdaXBBSX/3aEzFMRrVfaPv8Ptc0k+cWpWW0QFiGZ2XnVqQZI1Cf734LbPGmqBKWESfW4x/dQ==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "regenerator-runtime": "^0.13.4"
+ }
+ },
+ "@cypress/xvfb": {
+ "version": "1.2.4",
+ "resolved": "https://registry.npmjs.org/@cypress/xvfb/-/xvfb-1.2.4.tgz",
+ "integrity": "sha512-skbBzPggOVYCbnGgV+0dmBdW/s77ZkAOXIC1knS8NagwDjBrNC1LuXtQJeiN6l+m7lzmHtaoUw/ctJKdqkG57Q==",
+ "dev": true,
+ "requires": {
+ "debug": "^3.1.0",
+ "lodash.once": "^4.1.1"
+ }
+ },
+ "@devicefarmer/adbkit": {
+ "version": "2.11.3",
+ "resolved": "https://registry.npmjs.org/@devicefarmer/adbkit/-/adbkit-2.11.3.tgz",
+ "integrity": "sha512-rsgWREAvSRQjdP9/3GoAV6Tq+o97haywgbTfCgt5yUqiDpaaq3hlH9FTo9XsdG8x+Jd0VQ9nTC2IXsDu8JGRSA==",
+ "dev": true,
+ "requires": {
+ "@devicefarmer/adbkit-logcat": "^1.1.0",
+ "@devicefarmer/adbkit-monkey": "~1.0.1",
+ "bluebird": "~2.9.24",
+ "commander": "^2.3.0",
+ "debug": "~2.6.3",
+ "node-forge": "^0.10.0",
+ "split": "~0.3.3"
+ },
+ "dependencies": {
+ "debug": {
+ "version": "2.6.9",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
+ "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
+ "dev": true,
+ "requires": {
+ "ms": "2.0.0"
+ }
+ },
+ "ms": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
+ "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=",
+ "dev": true
+ }
+ }
+ },
+ "@devicefarmer/adbkit-logcat": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@devicefarmer/adbkit-logcat/-/adbkit-logcat-1.1.0.tgz",
+ "integrity": "sha512-K90P5gUXM/w+yzLvJIRQ+tJooNU6ipUPPQkljtPJ0laR66TGtpt4Gqsjm0n9dPHK1W5KGgU1R5wnCd6RTSlPNA==",
+ "dev": true
+ },
+ "@devicefarmer/adbkit-monkey": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/@devicefarmer/adbkit-monkey/-/adbkit-monkey-1.0.1.tgz",
+ "integrity": "sha512-HilPrVrCosYWqSyjfpDtaaN1kJwdlBpS+IAflP3z+e7nsEgk3JGJf1Vg0NgHJooTf5HDfXSyZqMVg+5jvXCK0g==",
+ "dev": true,
+ "requires": {
+ "async": "~0.2.9"
+ }
+ },
+ "@jimp/bmp": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@jimp/bmp/-/bmp-0.16.1.tgz",
+ "integrity": "sha512-iwyNYQeBawrdg/f24x3pQ5rEx+/GwjZcCXd3Kgc+ZUd+Ivia7sIqBsOnDaMZdKCBPlfW364ekexnlOqyVa0NWg==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "@jimp/utils": "^0.16.1",
+ "bmp-js": "^0.1.0"
+ }
+ },
+ "@jimp/core": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@jimp/core/-/core-0.16.1.tgz",
+ "integrity": "sha512-la7kQia31V6kQ4q1kI/uLimu8FXx7imWVajDGtwUG8fzePLWDFJyZl0fdIXVCL1JW2nBcRHidUot6jvlRDi2+g==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "@jimp/utils": "^0.16.1",
+ "any-base": "^1.1.0",
+ "buffer": "^5.2.0",
+ "exif-parser": "^0.1.12",
+ "file-type": "^9.0.0",
+ "load-bmfont": "^1.3.1",
+ "mkdirp": "^0.5.1",
+ "phin": "^2.9.1",
+ "pixelmatch": "^4.0.2",
+ "tinycolor2": "^1.4.1"
+ },
+ "dependencies": {
+ "mkdirp": {
+ "version": "0.5.5",
+ "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz",
+ "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "minimist": "^1.2.5"
+ }
+ }
+ }
+ },
+ "@jimp/custom": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@jimp/custom/-/custom-0.16.1.tgz",
+ "integrity": "sha512-DNUAHNSiUI/j9hmbatD6WN/EBIyeq4AO0frl5ETtt51VN1SvE4t4v83ZA/V6ikxEf3hxLju4tQ5Pc3zmZkN/3A==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "@jimp/core": "^0.16.1"
+ }
+ },
+ "@jimp/gif": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@jimp/gif/-/gif-0.16.1.tgz",
+ "integrity": "sha512-r/1+GzIW1D5zrP4tNrfW+3y4vqD935WBXSc8X/wm23QTY9aJO9Lw6PEdzpYCEY+SOklIFKaJYUAq/Nvgm/9ryw==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "@jimp/utils": "^0.16.1",
+ "gifwrap": "^0.9.2",
+ "omggif": "^1.0.9"
+ }
+ },
+ "@jimp/jpeg": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@jimp/jpeg/-/jpeg-0.16.1.tgz",
+ "integrity": "sha512-8352zrdlCCLFdZ/J+JjBslDvml+fS3Z8gttdml0We759PnnZGqrnPRhkOEOJbNUlE+dD4ckLeIe6NPxlS/7U+w==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "@jimp/utils": "^0.16.1",
+ "jpeg-js": "0.4.2"
+ }
+ },
+ "@jimp/plugin-blit": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@jimp/plugin-blit/-/plugin-blit-0.16.1.tgz",
+ "integrity": "sha512-fKFNARm32RoLSokJ8WZXHHH2CGzz6ire2n1Jh6u+XQLhk9TweT1DcLHIXwQMh8oR12KgjbgsMGvrMVlVknmOAg==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "@jimp/utils": "^0.16.1"
+ }
+ },
+ "@jimp/plugin-blur": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@jimp/plugin-blur/-/plugin-blur-0.16.1.tgz",
+ "integrity": "sha512-1WhuLGGj9MypFKRcPvmW45ht7nXkOKu+lg3n2VBzIB7r4kKNVchuI59bXaCYQumOLEqVK7JdB4glaDAbCQCLyw==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "@jimp/utils": "^0.16.1"
+ }
+ },
+ "@jimp/plugin-circle": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@jimp/plugin-circle/-/plugin-circle-0.16.1.tgz",
+ "integrity": "sha512-JK7yi1CIU7/XL8hdahjcbGA3V7c+F+Iw+mhMQhLEi7Q0tCnZ69YJBTamMiNg3fWPVfMuvWJJKOBRVpwNTuaZRg==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "@jimp/utils": "^0.16.1"
+ }
+ },
+ "@jimp/plugin-color": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@jimp/plugin-color/-/plugin-color-0.16.1.tgz",
+ "integrity": "sha512-9yQttBAO5SEFj7S6nJK54f+1BnuBG4c28q+iyzm1JjtnehjqMg6Ljw4gCSDCvoCQ3jBSYHN66pmwTV74SU1B7A==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "@jimp/utils": "^0.16.1",
+ "tinycolor2": "^1.4.1"
+ }
+ },
+ "@jimp/plugin-contain": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@jimp/plugin-contain/-/plugin-contain-0.16.1.tgz",
+ "integrity": "sha512-44F3dUIjBDHN+Ym/vEfg+jtjMjAqd2uw9nssN67/n4FdpuZUVs7E7wadKY1RRNuJO+WgcD5aDQcsvurXMETQTg==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "@jimp/utils": "^0.16.1"
+ }
+ },
+ "@jimp/plugin-cover": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@jimp/plugin-cover/-/plugin-cover-0.16.1.tgz",
+ "integrity": "sha512-YztWCIldBAVo0zxcQXR+a/uk3/TtYnpKU2CanOPJ7baIuDlWPsG+YE4xTsswZZc12H9Kl7CiziEbDtvF9kwA/Q==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "@jimp/utils": "^0.16.1"
+ }
+ },
+ "@jimp/plugin-crop": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@jimp/plugin-crop/-/plugin-crop-0.16.1.tgz",
+ "integrity": "sha512-UQdva9oQzCVadkyo3T5Tv2CUZbf0klm2cD4cWMlASuTOYgaGaFHhT9st+kmfvXjKL8q3STkBu/zUPV6PbuV3ew==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "@jimp/utils": "^0.16.1"
+ }
+ },
+ "@jimp/plugin-displace": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@jimp/plugin-displace/-/plugin-displace-0.16.1.tgz",
+ "integrity": "sha512-iVAWuz2+G6Heu8gVZksUz+4hQYpR4R0R/RtBzpWEl8ItBe7O6QjORAkhxzg+WdYLL2A/Yd4ekTpvK0/qW8hTVw==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "@jimp/utils": "^0.16.1"
+ }
+ },
+ "@jimp/plugin-dither": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@jimp/plugin-dither/-/plugin-dither-0.16.1.tgz",
+ "integrity": "sha512-tADKVd+HDC9EhJRUDwMvzBXPz4GLoU6s5P7xkVq46tskExYSptgj5713J5Thj3NMgH9Rsqu22jNg1H/7tr3V9Q==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "@jimp/utils": "^0.16.1"
+ }
+ },
+ "@jimp/plugin-fisheye": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@jimp/plugin-fisheye/-/plugin-fisheye-0.16.1.tgz",
+ "integrity": "sha512-BWHnc5hVobviTyIRHhIy9VxI1ACf4CeSuCfURB6JZm87YuyvgQh5aX5UDKtOz/3haMHXBLP61ZBxlNpMD8CG4A==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "@jimp/utils": "^0.16.1"
+ }
+ },
+ "@jimp/plugin-flip": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@jimp/plugin-flip/-/plugin-flip-0.16.1.tgz",
+ "integrity": "sha512-KdxTf0zErfZ8DyHkImDTnQBuHby+a5YFdoKI/G3GpBl3qxLBvC+PWkS2F/iN3H7wszP7/TKxTEvWL927pypT0w==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "@jimp/utils": "^0.16.1"
+ }
+ },
+ "@jimp/plugin-gaussian": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@jimp/plugin-gaussian/-/plugin-gaussian-0.16.1.tgz",
+ "integrity": "sha512-u9n4wjskh3N1mSqketbL6tVcLU2S5TEaFPR40K6TDv4phPLZALi1Of7reUmYpVm8mBDHt1I6kGhuCJiWvzfGyg==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "@jimp/utils": "^0.16.1"
+ }
+ },
+ "@jimp/plugin-invert": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@jimp/plugin-invert/-/plugin-invert-0.16.1.tgz",
+ "integrity": "sha512-2DKuyVXANH8WDpW9NG+PYFbehzJfweZszFYyxcaewaPLN0GxvxVLOGOPP1NuUTcHkOdMFbE0nHDuB7f+sYF/2w==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "@jimp/utils": "^0.16.1"
+ }
+ },
+ "@jimp/plugin-mask": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@jimp/plugin-mask/-/plugin-mask-0.16.1.tgz",
+ "integrity": "sha512-snfiqHlVuj4bSFS0v96vo2PpqCDMe4JB+O++sMo5jF5mvGcGL6AIeLo8cYqPNpdO6BZpBJ8MY5El0Veckhr39Q==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "@jimp/utils": "^0.16.1"
+ }
+ },
+ "@jimp/plugin-normalize": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@jimp/plugin-normalize/-/plugin-normalize-0.16.1.tgz",
+ "integrity": "sha512-dOQfIOvGLKDKXPU8xXWzaUeB0nvkosHw6Xg1WhS1Z5Q0PazByhaxOQkSKgUryNN/H+X7UdbDvlyh/yHf3ITRaw==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "@jimp/utils": "^0.16.1"
+ }
+ },
+ "@jimp/plugin-print": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@jimp/plugin-print/-/plugin-print-0.16.1.tgz",
+ "integrity": "sha512-ceWgYN40jbN4cWRxixym+csyVymvrryuKBQ+zoIvN5iE6OyS+2d7Mn4zlNgumSczb9GGyZZESIgVcBDA1ezq0Q==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "@jimp/utils": "^0.16.1",
+ "load-bmfont": "^1.4.0"
+ }
+ },
+ "@jimp/plugin-resize": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@jimp/plugin-resize/-/plugin-resize-0.16.1.tgz",
+ "integrity": "sha512-u4JBLdRI7dargC04p2Ha24kofQBk3vhaf0q8FwSYgnCRwxfvh2RxvhJZk9H7Q91JZp6wgjz/SjvEAYjGCEgAwQ==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "@jimp/utils": "^0.16.1"
+ }
+ },
+ "@jimp/plugin-rotate": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@jimp/plugin-rotate/-/plugin-rotate-0.16.1.tgz",
+ "integrity": "sha512-ZUU415gDQ0VjYutmVgAYYxC9Og9ixu2jAGMCU54mSMfuIlmohYfwARQmI7h4QB84M76c9hVLdONWjuo+rip/zg==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "@jimp/utils": "^0.16.1"
+ }
+ },
+ "@jimp/plugin-scale": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@jimp/plugin-scale/-/plugin-scale-0.16.1.tgz",
+ "integrity": "sha512-jM2QlgThIDIc4rcyughD5O7sOYezxdafg/2Xtd1csfK3z6fba3asxDwthqPZAgitrLgiKBDp6XfzC07Y/CefUw==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "@jimp/utils": "^0.16.1"
+ }
+ },
+ "@jimp/plugin-shadow": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@jimp/plugin-shadow/-/plugin-shadow-0.16.1.tgz",
+ "integrity": "sha512-MeD2Is17oKzXLnsphAa1sDstTu6nxscugxAEk3ji0GV1FohCvpHBcec0nAq6/czg4WzqfDts+fcPfC79qWmqrA==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "@jimp/utils": "^0.16.1"
+ }
+ },
+ "@jimp/plugin-threshold": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@jimp/plugin-threshold/-/plugin-threshold-0.16.1.tgz",
+ "integrity": "sha512-iGW8U/wiCSR0+6syrPioVGoSzQFt4Z91SsCRbgNKTAk7D+XQv6OI78jvvYg4o0c2FOlwGhqz147HZV5utoSLxA==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "@jimp/utils": "^0.16.1"
+ }
+ },
+ "@jimp/plugins": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@jimp/plugins/-/plugins-0.16.1.tgz",
+ "integrity": "sha512-c+lCqa25b+4q6mJZSetlxhMoYuiltyS+ValLzdwK/47+aYsq+kcJNl+TuxIEKf59yr9+5rkbpsPkZHLF/V7FFA==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "@jimp/plugin-blit": "^0.16.1",
+ "@jimp/plugin-blur": "^0.16.1",
+ "@jimp/plugin-circle": "^0.16.1",
+ "@jimp/plugin-color": "^0.16.1",
+ "@jimp/plugin-contain": "^0.16.1",
+ "@jimp/plugin-cover": "^0.16.1",
+ "@jimp/plugin-crop": "^0.16.1",
+ "@jimp/plugin-displace": "^0.16.1",
+ "@jimp/plugin-dither": "^0.16.1",
+ "@jimp/plugin-fisheye": "^0.16.1",
+ "@jimp/plugin-flip": "^0.16.1",
+ "@jimp/plugin-gaussian": "^0.16.1",
+ "@jimp/plugin-invert": "^0.16.1",
+ "@jimp/plugin-mask": "^0.16.1",
+ "@jimp/plugin-normalize": "^0.16.1",
+ "@jimp/plugin-print": "^0.16.1",
+ "@jimp/plugin-resize": "^0.16.1",
+ "@jimp/plugin-rotate": "^0.16.1",
+ "@jimp/plugin-scale": "^0.16.1",
+ "@jimp/plugin-shadow": "^0.16.1",
+ "@jimp/plugin-threshold": "^0.16.1",
+ "timm": "^1.6.1"
+ }
+ },
+ "@jimp/png": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@jimp/png/-/png-0.16.1.tgz",
+ "integrity": "sha512-iyWoCxEBTW0OUWWn6SveD4LePW89kO7ZOy5sCfYeDM/oTPLpR8iMIGvZpZUz1b8kvzFr27vPst4E5rJhGjwsdw==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "@jimp/utils": "^0.16.1",
+ "pngjs": "^3.3.3"
+ }
+ },
+ "@jimp/tiff": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@jimp/tiff/-/tiff-0.16.1.tgz",
+ "integrity": "sha512-3K3+xpJS79RmSkAvFMgqY5dhSB+/sxhwTFA9f4AVHUK0oKW+u6r52Z1L0tMXHnpbAdR9EJ+xaAl2D4x19XShkQ==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "utif": "^2.0.1"
+ }
+ },
+ "@jimp/types": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@jimp/types/-/types-0.16.1.tgz",
+ "integrity": "sha512-g1w/+NfWqiVW4CaXSJyD28JQqZtm2eyKMWPhBBDCJN9nLCN12/Az0WFF3JUAktzdsEC2KRN2AqB1a2oMZBNgSQ==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "@jimp/bmp": "^0.16.1",
+ "@jimp/gif": "^0.16.1",
+ "@jimp/jpeg": "^0.16.1",
+ "@jimp/png": "^0.16.1",
+ "@jimp/tiff": "^0.16.1",
+ "timm": "^1.6.1"
+ }
+ },
+ "@jimp/utils": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/@jimp/utils/-/utils-0.16.1.tgz",
+ "integrity": "sha512-8fULQjB0x4LzUSiSYG6ZtQl355sZjxbv8r9PPAuYHzS9sGiSHJQavNqK/nKnpDsVkU88/vRGcE7t3nMU0dEnVw==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "regenerator-runtime": "^0.13.3"
+ }
+ },
+ "@sitespeed.io/chromedriver": {
+ "version": "98.0.4758-48",
+ "resolved": "https://registry.npmjs.org/@sitespeed.io/chromedriver/-/chromedriver-98.0.4758-48.tgz",
+ "integrity": "sha512-kTFFaJD0K2j59+XG4o6olv28I1gaZ19qPlIRQLP7dfhaVZQDvxtzKyVIUHlU0q4m69XnCliOcO14008ZlxSW+g==",
+ "dev": true,
+ "requires": {
+ "node-downloader-helper": "1.0.19",
+ "node-stream-zip": "1.15.0"
+ }
+ },
+ "@sitespeed.io/edgedriver": {
+ "version": "95.0.1020-30",
+ "resolved": "https://registry.npmjs.org/@sitespeed.io/edgedriver/-/edgedriver-95.0.1020-30.tgz",
+ "integrity": "sha512-5hXxNCtbX/SeG6nsyXg4QWIEKacxBJTO5T43rUXlTrUlecFfvHNhTVY5PE2bwpKcdPQ168Vp0S/+g55QJi9s/Q==",
+ "dev": true,
+ "requires": {
+ "node-downloader-helper": "1.0.18",
+ "node-stream-zip": "1.15.0"
+ },
+ "dependencies": {
+ "node-downloader-helper": {
+ "version": "1.0.18",
+ "resolved": "https://registry.npmjs.org/node-downloader-helper/-/node-downloader-helper-1.0.18.tgz",
+ "integrity": "sha512-C7hxYz/yg4d8DFVC6c4fMIOI7jywbpQHOznkax/74F8NcC8wSOLO+UxNMcwds/5wEL8W+RPXT9C389w3bDOMxw==",
+ "dev": true
+ }
+ }
+ },
+ "@sitespeed.io/geckodriver": {
+ "version": "0.29.1-3",
+ "resolved": "https://registry.npmjs.org/@sitespeed.io/geckodriver/-/geckodriver-0.29.1-3.tgz",
+ "integrity": "sha512-qHYtvH/81lPcgzFQB2qObp9M8bMIrc7O8TWm05SVfiGUKKy4Kku0huoa/IB9e0ksrrRFYtm9GQT6JF+bANZPKA==",
+ "dev": true,
+ "requires": {
+ "node-downloader-helper": "1.0.18",
+ "node-stream-zip": "1.14.0",
+ "tar": "6.1.11"
+ },
+ "dependencies": {
+ "node-downloader-helper": {
+ "version": "1.0.18",
+ "resolved": "https://registry.npmjs.org/node-downloader-helper/-/node-downloader-helper-1.0.18.tgz",
+ "integrity": "sha512-C7hxYz/yg4d8DFVC6c4fMIOI7jywbpQHOznkax/74F8NcC8wSOLO+UxNMcwds/5wEL8W+RPXT9C389w3bDOMxw==",
+ "dev": true
+ },
+ "node-stream-zip": {
+ "version": "1.14.0",
+ "resolved": "https://registry.npmjs.org/node-stream-zip/-/node-stream-zip-1.14.0.tgz",
+ "integrity": "sha512-SKXyiBy9DBemsPHf/piHT00Y+iPK+zwru1G6+8UdOBzITnmmPMHYBMV6M1znyzyhDhUFQW0HEmbGiPqtp51M6Q==",
+ "dev": true
+ }
+ }
+ },
+ "@sitespeed.io/throttle": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/@sitespeed.io/throttle/-/throttle-3.0.0.tgz",
+ "integrity": "sha512-tTAnBaoMwtdECY6SYno/OSRnzZsazg63zesRNBxQXkpDG+1FU1FTXLJQx6/2SkKJo6WvrELp8XhoUIV9SQvlCg==",
+ "dev": true,
+ "requires": {
+ "minimist": "1.2.5"
+ }
+ },
+ "@sitespeed.io/tracium": {
+ "version": "0.3.3",
+ "resolved": "https://registry.npmjs.org/@sitespeed.io/tracium/-/tracium-0.3.3.tgz",
+ "integrity": "sha512-dNZafjM93Y+F+sfwTO5gTpsGXlnc/0Q+c2+62ViqP3gkMWvHEMSKkaEHgVJLcLg3i/g19GSIPziiKpgyne07Bw==",
+ "dev": true,
+ "requires": {
+ "debug": "^4.1.1"
+ },
+ "dependencies": {
+ "debug": {
+ "version": "4.3.3",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.3.tgz",
+ "integrity": "sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q==",
+ "dev": true,
+ "requires": {
+ "ms": "2.1.2"
+ }
+ },
+ "ms": {
+ "version": "2.1.2",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
+ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
+ "dev": true
+ }
+ }
+ },
+ "@types/node": {
+ "version": "17.0.14",
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.14.tgz",
+ "integrity": "sha512-SbjLmERksKOGzWzPNuW7fJM7fk3YXVTFiZWB/Hs99gwhk+/dnrQRPBQjPW9aO+fi1tAffi9PrwFvsmOKmDTyng==",
+ "dev": true
+ },
+ "ansi-regex": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz",
+ "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=",
+ "dev": true
+ },
+ "ansi-styles": {
+ "version": "2.2.1",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz",
+ "integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=",
+ "dev": true
+ },
+ "any-base": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/any-base/-/any-base-1.1.0.tgz",
+ "integrity": "sha512-uMgjozySS8adZZYePpaWs8cxB9/kdzmpX6SgJZ+wbz1K5eYk5QMYDVJaZKhxyIHUdnnJkfR7SVgStgH7LkGUyg==",
+ "dev": true,
+ "optional": true
+ },
+ "async": {
+ "version": "0.2.10",
+ "resolved": "https://registry.npmjs.org/async/-/async-0.2.10.tgz",
+ "integrity": "sha1-trvgsGdLnXGXCMo43owjfLUmw9E=",
+ "dev": true
+ },
+ "balanced-match": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
+ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
+ "dev": true
+ },
+ "base64-js": {
+ "version": "1.5.1",
+ "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz",
+ "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==",
+ "dev": true,
+ "optional": true
+ },
+ "bluebird": {
+ "version": "2.9.34",
+ "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-2.9.34.tgz",
+ "integrity": "sha1-L3tOyAIWMoqf3evfacjUlC/v99g=",
+ "dev": true
+ },
+ "bmp-js": {
+ "version": "0.1.0",
+ "resolved": "https://registry.npmjs.org/bmp-js/-/bmp-js-0.1.0.tgz",
+ "integrity": "sha1-4Fpj95amwf8l9Hcex62twUjAcjM=",
+ "dev": true,
+ "optional": true
+ },
+ "brace-expansion": {
+ "version": "1.1.11",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
+ "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
+ "dev": true,
+ "requires": {
+ "balanced-match": "^1.0.0",
+ "concat-map": "0.0.1"
+ }
+ },
+ "browsertime": {
+ "version": "https://github.com/sitespeedio/browsertime/tarball/eae18165d9d82b9a5ad38b0bd1507a2d86a70988",
+ "integrity": "sha512-UiQ2xHLHN9ISnVRfFXmWCncLn5+Huca3ykTBYPOmnLcOyx7U9+cfMwEYMioyTbgh1IdByZd2KZ1dKLs2CtoU/Q==",
+ "dev": true,
+ "requires": {
+ "@cypress/xvfb": "1.2.4",
+ "@devicefarmer/adbkit": "2.11.3",
+ "@sitespeed.io/chromedriver": "98.0.4758-48",
+ "@sitespeed.io/edgedriver": "95.0.1020-30",
+ "@sitespeed.io/geckodriver": "0.29.1-3",
+ "@sitespeed.io/throttle": "3.0.0",
+ "@sitespeed.io/tracium": "0.3.3",
+ "btoa": "1.2.1",
+ "chrome-har": "0.12.0",
+ "chrome-remote-interface": "0.31.0",
+ "dayjs": "1.10.7",
+ "execa": "5.1.1",
+ "fast-stats": "0.0.6",
+ "find-up": "5.0.0",
+ "get-port": "5.1.1",
+ "hasbin": "1.2.3",
+ "intel": "1.2.0",
+ "jimp": "0.16.1",
+ "lodash.get": "4.4.2",
+ "lodash.groupby": "4.6.0",
+ "lodash.isempty": "4.4.0",
+ "lodash.merge": "4.6.2",
+ "lodash.pick": "4.4.0",
+ "lodash.set": "4.3.2",
+ "selenium-webdriver": "4.1.0",
+ "speedline-core": "1.4.3",
+ "yargs": "17.2.1"
+ }
+ },
+ "btoa": {
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/btoa/-/btoa-1.2.1.tgz",
+ "integrity": "sha512-SB4/MIGlsiVkMcHmT+pSmIPoNDoHg+7cMzmt3Uxt628MTz2487DKSqK/fuhFBrkuqrYv5UCEnACpF4dTFNKc/g==",
+ "dev": true
+ },
+ "buffer": {
+ "version": "5.7.1",
+ "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz",
+ "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "base64-js": "^1.3.1",
+ "ieee754": "^1.1.13"
+ }
+ },
+ "buffer-equal": {
+ "version": "0.0.1",
+ "resolved": "https://registry.npmjs.org/buffer-equal/-/buffer-equal-0.0.1.tgz",
+ "integrity": "sha1-kbx0sR6kBbyRa8aqkI+q+ltKrEs=",
+ "dev": true,
+ "optional": true
+ },
+ "chalk": {
+ "version": "1.1.3",
+ "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz",
+ "integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=",
+ "dev": true,
+ "requires": {
+ "ansi-styles": "^2.2.1",
+ "escape-string-regexp": "^1.0.2",
+ "has-ansi": "^2.0.0",
+ "strip-ansi": "^3.0.0",
+ "supports-color": "^2.0.0"
+ }
+ },
+ "chownr": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz",
+ "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==",
+ "dev": true
+ },
+ "chrome-har": {
+ "version": "0.12.0",
+ "resolved": "https://registry.npmjs.org/chrome-har/-/chrome-har-0.12.0.tgz",
+ "integrity": "sha512-VRQOsN9omU6q5/8h6eU9tkHPV2VvOCAh1JL4Hpk8ZIyrTLFWdK0A7UOsKNplvr+9Ls/8Wr71G20cuX2OsRPbwA==",
+ "dev": true,
+ "requires": {
+ "dayjs": "1.8.31",
+ "debug": "4.1.1",
+ "tough-cookie": "4.0.0",
+ "uuid": "8.0.0"
+ },
+ "dependencies": {
+ "dayjs": {
+ "version": "1.8.31",
+ "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.8.31.tgz",
+ "integrity": "sha512-mPh1mslned+5PuIuiUfbw4CikHk6AEAf2Baxih+wP5fssv+wmlVhvgZ7mq+BhLt7Sr/Hc8leWDiwe6YnrpNt3g==",
+ "dev": true
+ },
+ "debug": {
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz",
+ "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==",
+ "dev": true,
+ "requires": {
+ "ms": "^2.1.1"
+ }
+ }
+ }
+ },
+ "chrome-remote-interface": {
+ "version": "0.31.0",
+ "resolved": "https://registry.npmjs.org/chrome-remote-interface/-/chrome-remote-interface-0.31.0.tgz",
+ "integrity": "sha512-DrD4ZACKAFT3lVldKVDRlYrI9bmZSk7kYcf+OKwFpBM9fZyCPvVKb+yGnmXBkHv7/BEkW8ouu+EHRugAOJ3pPg==",
+ "dev": true,
+ "requires": {
+ "commander": "2.11.x",
+ "ws": "^7.2.0"
+ },
+ "dependencies": {
+ "commander": {
+ "version": "2.11.0",
+ "resolved": "https://registry.npmjs.org/commander/-/commander-2.11.0.tgz",
+ "integrity": "sha512-b0553uYA5YAEGgyYIGYROzKQ7X5RAqedkfjiZxwi0kL1g3bOaBNNZfYkzt/CL0umgD5wc9Jec2FbB98CjkMRvQ==",
+ "dev": true
+ }
+ }
+ },
+ "cliui": {
+ "version": "7.0.4",
+ "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz",
+ "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==",
+ "dev": true,
+ "requires": {
+ "string-width": "^4.2.0",
+ "strip-ansi": "^6.0.0",
+ "wrap-ansi": "^7.0.0"
+ },
+ "dependencies": {
+ "ansi-regex": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
+ "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
+ "dev": true
+ },
+ "strip-ansi": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
+ "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
+ "dev": true,
+ "requires": {
+ "ansi-regex": "^5.0.1"
+ }
+ }
+ }
+ },
+ "color-convert": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
+ "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
+ "dev": true,
+ "requires": {
+ "color-name": "~1.1.4"
+ }
+ },
+ "color-name": {
+ "version": "1.1.4",
+ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
+ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
+ "dev": true
+ },
+ "commander": {
+ "version": "2.20.3",
+ "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz",
+ "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==",
+ "dev": true
+ },
+ "concat-map": {
+ "version": "0.0.1",
+ "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
+ "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=",
+ "dev": true
+ },
+ "core-util-is": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz",
+ "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==",
+ "dev": true
+ },
+ "cross-spawn": {
+ "version": "7.0.3",
+ "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
+ "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==",
+ "dev": true,
+ "requires": {
+ "path-key": "^3.1.0",
+ "shebang-command": "^2.0.0",
+ "which": "^2.0.1"
+ }
+ },
+ "dayjs": {
+ "version": "1.10.7",
+ "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.10.7.tgz",
+ "integrity": "sha512-P6twpd70BcPK34K26uJ1KT3wlhpuOAPoMwJzpsIWUxHZ7wpmbdZL/hQqBDfz7hGurYSa5PhzdhDHtt319hL3ig==",
+ "dev": true
+ },
+ "dbug": {
+ "version": "0.4.2",
+ "resolved": "https://registry.npmjs.org/dbug/-/dbug-0.4.2.tgz",
+ "integrity": "sha1-MrSzEF6IYQQ6b5rHVdgOVC02WzE=",
+ "dev": true
+ },
+ "debug": {
+ "version": "3.2.7",
+ "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz",
+ "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==",
+ "dev": true,
+ "requires": {
+ "ms": "^2.1.1"
+ }
+ },
+ "dom-walk": {
+ "version": "0.1.2",
+ "resolved": "https://registry.npmjs.org/dom-walk/-/dom-walk-0.1.2.tgz",
+ "integrity": "sha512-6QvTW9mrGeIegrFXdtQi9pk7O/nSK6lSdXW2eqUspN5LWD7UTji2Fqw5V2YLjBpHEoU9Xl/eUWNpDeZvoyOv2w==",
+ "dev": true,
+ "optional": true
+ },
+ "emoji-regex": {
+ "version": "8.0.0",
+ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
+ "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
+ "dev": true
+ },
+ "escalade": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz",
+ "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==",
+ "dev": true
+ },
+ "escape-string-regexp": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
+ "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=",
+ "dev": true
+ },
+ "execa": {
+ "version": "5.1.1",
+ "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz",
+ "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==",
+ "dev": true,
+ "requires": {
+ "cross-spawn": "^7.0.3",
+ "get-stream": "^6.0.0",
+ "human-signals": "^2.1.0",
+ "is-stream": "^2.0.0",
+ "merge-stream": "^2.0.0",
+ "npm-run-path": "^4.0.1",
+ "onetime": "^5.1.2",
+ "signal-exit": "^3.0.3",
+ "strip-final-newline": "^2.0.0"
+ }
+ },
+ "exif-parser": {
+ "version": "0.1.12",
+ "resolved": "https://registry.npmjs.org/exif-parser/-/exif-parser-0.1.12.tgz",
+ "integrity": "sha1-WKnS1ywCwfbwKg70qRZicrd2CSI=",
+ "dev": true,
+ "optional": true
+ },
+ "fast-stats": {
+ "version": "0.0.6",
+ "resolved": "https://registry.npmjs.org/fast-stats/-/fast-stats-0.0.6.tgz",
+ "integrity": "sha512-m0zkwa7Z07Wc4xm1YtcrCHmhzNxiYRrrfUyhkdhSZPzaAH/Ewbocdaq7EPVBFz19GWfIyyPcLfRHjHJYe83jlg==",
+ "dev": true
+ },
+ "file-type": {
+ "version": "9.0.0",
+ "resolved": "https://registry.npmjs.org/file-type/-/file-type-9.0.0.tgz",
+ "integrity": "sha512-Qe/5NJrgIOlwijpq3B7BEpzPFcgzggOTagZmkXQY4LA6bsXKTUstK7Wp12lEJ/mLKTpvIZxmIuRcLYWT6ov9lw==",
+ "dev": true,
+ "optional": true
+ },
+ "find-up": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz",
+ "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==",
+ "dev": true,
+ "requires": {
+ "locate-path": "^6.0.0",
+ "path-exists": "^4.0.0"
+ }
+ },
+ "fs-minipass": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz",
+ "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==",
+ "dev": true,
+ "requires": {
+ "minipass": "^3.0.0"
+ }
+ },
+ "fs.realpath": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
+ "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=",
+ "dev": true
+ },
+ "get-caller-file": {
+ "version": "2.0.5",
+ "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
+ "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==",
+ "dev": true
+ },
+ "get-port": {
+ "version": "5.1.1",
+ "resolved": "https://registry.npmjs.org/get-port/-/get-port-5.1.1.tgz",
+ "integrity": "sha512-g/Q1aTSDOxFpchXC4i8ZWvxA1lnPqx/JHqcpIw0/LX9T8x/GBbi6YnlN5nhaKIFkT8oFsscUKgDJYxfwfS6QsQ==",
+ "dev": true
+ },
+ "get-stream": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz",
+ "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==",
+ "dev": true
+ },
+ "gifwrap": {
+ "version": "0.9.2",
+ "resolved": "https://registry.npmjs.org/gifwrap/-/gifwrap-0.9.2.tgz",
+ "integrity": "sha512-fcIswrPaiCDAyO8xnWvHSZdWChjKXUanKKpAiWWJ/UTkEi/aYKn5+90e7DE820zbEaVR9CE2y4z9bzhQijZ0BA==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "image-q": "^1.1.1",
+ "omggif": "^1.0.10"
+ }
+ },
+ "glob": {
+ "version": "7.2.0",
+ "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz",
+ "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==",
+ "dev": true,
+ "requires": {
+ "fs.realpath": "^1.0.0",
+ "inflight": "^1.0.4",
+ "inherits": "2",
+ "minimatch": "^3.0.4",
+ "once": "^1.3.0",
+ "path-is-absolute": "^1.0.0"
+ }
+ },
+ "global": {
+ "version": "4.4.0",
+ "resolved": "https://registry.npmjs.org/global/-/global-4.4.0.tgz",
+ "integrity": "sha512-wv/LAoHdRE3BeTGz53FAamhGlPLhlssK45usmGFThIi4XqnBmjKQ16u+RNbP7WvigRZDxUsM0J3gcQ5yicaL0w==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "min-document": "^2.19.0",
+ "process": "^0.11.10"
+ }
+ },
+ "has-ansi": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz",
+ "integrity": "sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE=",
+ "dev": true,
+ "requires": {
+ "ansi-regex": "^2.0.0"
+ }
+ },
+ "hasbin": {
+ "version": "1.2.3",
+ "resolved": "https://registry.npmjs.org/hasbin/-/hasbin-1.2.3.tgz",
+ "integrity": "sha1-eMWSaJPIAhXCtWiuH9P8q3omlrA=",
+ "dev": true,
+ "requires": {
+ "async": "~1.5"
+ },
+ "dependencies": {
+ "async": {
+ "version": "1.5.2",
+ "resolved": "https://registry.npmjs.org/async/-/async-1.5.2.tgz",
+ "integrity": "sha1-7GphrlZIDAw8skHJVhjiCJL5Zyo=",
+ "dev": true
+ }
+ }
+ },
+ "human-signals": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz",
+ "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==",
+ "dev": true
+ },
+ "ieee754": {
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz",
+ "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==",
+ "dev": true,
+ "optional": true
+ },
+ "image-q": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/image-q/-/image-q-1.1.1.tgz",
+ "integrity": "sha1-/IQJlmRGC5DKhi2TALa/u7+/gFY=",
+ "dev": true,
+ "optional": true
+ },
+ "image-ssim": {
+ "version": "0.2.0",
+ "resolved": "https://registry.npmjs.org/image-ssim/-/image-ssim-0.2.0.tgz",
+ "integrity": "sha1-g7Qsei5uS4VQVHf+aRf128VkIOU=",
+ "dev": true
+ },
+ "immediate": {
+ "version": "3.0.6",
+ "resolved": "https://registry.npmjs.org/immediate/-/immediate-3.0.6.tgz",
+ "integrity": "sha1-nbHb0Pr43m++D13V5Wu2BigN5ps=",
+ "dev": true
+ },
+ "inflight": {
+ "version": "1.0.6",
+ "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
+ "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=",
+ "dev": true,
+ "requires": {
+ "once": "^1.3.0",
+ "wrappy": "1"
+ }
+ },
+ "inherits": {
+ "version": "2.0.4",
+ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
+ "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
+ "dev": true
+ },
+ "intel": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/intel/-/intel-1.2.0.tgz",
+ "integrity": "sha1-EdEUfraz9Fgr31M3s31UFYTp5B4=",
+ "dev": true,
+ "requires": {
+ "chalk": "^1.1.0",
+ "dbug": "~0.4.2",
+ "stack-trace": "~0.0.9",
+ "strftime": "~0.10.0",
+ "symbol": "~0.3.1",
+ "utcstring": "~0.1.0"
+ }
+ },
+ "is-fullwidth-code-point": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
+ "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
+ "dev": true
+ },
+ "is-function": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/is-function/-/is-function-1.0.2.tgz",
+ "integrity": "sha512-lw7DUp0aWXYg+CBCN+JKkcE0Q2RayZnSvnZBlwgxHBQhqt5pZNVy4Ri7H9GmmXkdu7LUthszM+Tor1u/2iBcpQ==",
+ "dev": true,
+ "optional": true
+ },
+ "is-stream": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
+ "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==",
+ "dev": true
+ },
+ "isarray": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
+ "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=",
+ "dev": true
+ },
+ "isexe": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
+ "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=",
+ "dev": true
+ },
+ "jimp": {
+ "version": "0.16.1",
+ "resolved": "https://registry.npmjs.org/jimp/-/jimp-0.16.1.tgz",
+ "integrity": "sha512-+EKVxbR36Td7Hfd23wKGIeEyHbxShZDX6L8uJkgVW3ESA9GiTEPK08tG1XI2r/0w5Ch0HyJF5kPqF9K7EmGjaw==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "@babel/runtime": "^7.7.2",
+ "@jimp/custom": "^0.16.1",
+ "@jimp/plugins": "^0.16.1",
+ "@jimp/types": "^0.16.1",
+ "regenerator-runtime": "^0.13.3"
+ }
+ },
+ "jpeg-js": {
+ "version": "0.4.2",
+ "resolved": "https://registry.npmjs.org/jpeg-js/-/jpeg-js-0.4.2.tgz",
+ "integrity": "sha512-+az2gi/hvex7eLTMTlbRLOhH6P6WFdk2ITI8HJsaH2VqYO0I594zXSYEP+tf4FW+8Cy68ScDXoAsQdyQanv3sw==",
+ "dev": true
+ },
+ "jszip": {
+ "version": "3.7.1",
+ "resolved": "https://registry.npmjs.org/jszip/-/jszip-3.7.1.tgz",
+ "integrity": "sha512-ghL0tz1XG9ZEmRMcEN2vt7xabrDdqHHeykgARpmZ0BiIctWxM47Vt63ZO2dnp4QYt/xJVLLy5Zv1l/xRdh2byg==",
+ "dev": true,
+ "requires": {
+ "lie": "~3.3.0",
+ "pako": "~1.0.2",
+ "readable-stream": "~2.3.6",
+ "set-immediate-shim": "~1.0.1"
+ }
+ },
+ "lie": {
+ "version": "3.3.0",
+ "resolved": "https://registry.npmjs.org/lie/-/lie-3.3.0.tgz",
+ "integrity": "sha512-UaiMJzeWRlEujzAuw5LokY1L5ecNQYZKfmyZ9L7wDHb/p5etKaxXhohBcrw0EYby+G/NA52vRSN4N39dxHAIwQ==",
+ "dev": true,
+ "requires": {
+ "immediate": "~3.0.5"
+ }
+ },
+ "load-bmfont": {
+ "version": "1.4.1",
+ "resolved": "https://registry.npmjs.org/load-bmfont/-/load-bmfont-1.4.1.tgz",
+ "integrity": "sha512-8UyQoYmdRDy81Brz6aLAUhfZLwr5zV0L3taTQ4hju7m6biuwiWiJXjPhBJxbUQJA8PrkvJ/7Enqmwk2sM14soA==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "buffer-equal": "0.0.1",
+ "mime": "^1.3.4",
+ "parse-bmfont-ascii": "^1.0.3",
+ "parse-bmfont-binary": "^1.0.5",
+ "parse-bmfont-xml": "^1.1.4",
+ "phin": "^2.9.1",
+ "xhr": "^2.0.1",
+ "xtend": "^4.0.0"
+ }
+ },
+ "locate-path": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz",
+ "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==",
+ "dev": true,
+ "requires": {
+ "p-locate": "^5.0.0"
+ }
+ },
+ "lodash.get": {
+ "version": "4.4.2",
+ "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz",
+ "integrity": "sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=",
+ "dev": true
+ },
+ "lodash.groupby": {
+ "version": "4.6.0",
+ "resolved": "https://registry.npmjs.org/lodash.groupby/-/lodash.groupby-4.6.0.tgz",
+ "integrity": "sha1-Cwih3PaDl8OXhVwyOXg4Mt90A9E=",
+ "dev": true
+ },
+ "lodash.isempty": {
+ "version": "4.4.0",
+ "resolved": "https://registry.npmjs.org/lodash.isempty/-/lodash.isempty-4.4.0.tgz",
+ "integrity": "sha1-b4bL7di+TsmHvpqvM8loTbGzHn4=",
+ "dev": true
+ },
+ "lodash.merge": {
+ "version": "4.6.2",
+ "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz",
+ "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==",
+ "dev": true
+ },
+ "lodash.once": {
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz",
+ "integrity": "sha1-DdOXEhPHxW34gJd9UEyI+0cal6w=",
+ "dev": true
+ },
+ "lodash.pick": {
+ "version": "4.4.0",
+ "resolved": "https://registry.npmjs.org/lodash.pick/-/lodash.pick-4.4.0.tgz",
+ "integrity": "sha1-UvBWEP/53tQiYRRB7R/BI6AwAbM=",
+ "dev": true
+ },
+ "lodash.set": {
+ "version": "4.3.2",
+ "resolved": "https://registry.npmjs.org/lodash.set/-/lodash.set-4.3.2.tgz",
+ "integrity": "sha1-2HV7HagH3eJIFrDWqEvqGnYjCyM=",
+ "dev": true
+ },
+ "merge-stream": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz",
+ "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==",
+ "dev": true
+ },
+ "mime": {
+ "version": "1.6.0",
+ "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz",
+ "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==",
+ "dev": true,
+ "optional": true
+ },
+ "mimic-fn": {
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz",
+ "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==",
+ "dev": true
+ },
+ "min-document": {
+ "version": "2.19.0",
+ "resolved": "https://registry.npmjs.org/min-document/-/min-document-2.19.0.tgz",
+ "integrity": "sha1-e9KC4/WELtKVu3SM3Z8f+iyCRoU=",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "dom-walk": "^0.1.0"
+ }
+ },
+ "minimatch": {
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
+ "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
+ "dev": true,
+ "requires": {
+ "brace-expansion": "^1.1.7"
+ }
+ },
+ "minimist": {
+ "version": "1.2.5",
+ "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz",
+ "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==",
+ "dev": true
+ },
+ "minipass": {
+ "version": "3.1.6",
+ "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.6.tgz",
+ "integrity": "sha512-rty5kpw9/z8SX9dmxblFA6edItUmwJgMeYDZRrwlIVN27i8gysGbznJwUggw2V/FVqFSDdWy040ZPS811DYAqQ==",
+ "dev": true,
+ "requires": {
+ "yallist": "^4.0.0"
+ }
+ },
+ "minizlib": {
+ "version": "2.1.2",
+ "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz",
+ "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==",
+ "dev": true,
+ "requires": {
+ "minipass": "^3.0.0",
+ "yallist": "^4.0.0"
+ }
+ },
+ "mkdirp": {
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz",
+ "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==",
+ "dev": true
+ },
+ "ms": {
+ "version": "2.1.3",
+ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
+ "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
+ "dev": true
+ },
+ "node-downloader-helper": {
+ "version": "1.0.19",
+ "resolved": "https://registry.npmjs.org/node-downloader-helper/-/node-downloader-helper-1.0.19.tgz",
+ "integrity": "sha512-Bwp8WWDDP5ftg+FmAKU08a9+oiUTPoYzMvXgUqZZPQ7VMo1qKBzW3XdTXHeYnqjGLfkTZ2GPibgAWpApfpeS2g==",
+ "dev": true
+ },
+ "node-forge": {
+ "version": "0.10.0",
+ "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz",
+ "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==",
+ "dev": true
+ },
+ "node-stream-zip": {
+ "version": "1.15.0",
+ "resolved": "https://registry.npmjs.org/node-stream-zip/-/node-stream-zip-1.15.0.tgz",
+ "integrity": "sha512-LN4fydt9TqhZhThkZIVQnF9cwjU3qmUH9h78Mx/K7d3VvfRqqwthLwJEUOEL0QPZ0XQmNN7be5Ggit5+4dq3Bw==",
+ "dev": true
+ },
+ "npm-run-path": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz",
+ "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==",
+ "dev": true,
+ "requires": {
+ "path-key": "^3.0.0"
+ }
+ },
+ "omggif": {
+ "version": "1.0.10",
+ "resolved": "https://registry.npmjs.org/omggif/-/omggif-1.0.10.tgz",
+ "integrity": "sha512-LMJTtvgc/nugXj0Vcrrs68Mn2D1r0zf630VNtqtpI1FEO7e+O9FP4gqs9AcnBaSEeoHIPm28u6qgPR0oyEpGSw==",
+ "dev": true,
+ "optional": true
+ },
+ "once": {
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
+ "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=",
+ "dev": true,
+ "requires": {
+ "wrappy": "1"
+ }
+ },
+ "onetime": {
+ "version": "5.1.2",
+ "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz",
+ "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==",
+ "dev": true,
+ "requires": {
+ "mimic-fn": "^2.1.0"
+ }
+ },
+ "p-limit": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz",
+ "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==",
+ "dev": true,
+ "requires": {
+ "yocto-queue": "^0.1.0"
+ }
+ },
+ "p-locate": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz",
+ "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==",
+ "dev": true,
+ "requires": {
+ "p-limit": "^3.0.2"
+ }
+ },
+ "pako": {
+ "version": "1.0.11",
+ "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz",
+ "integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==",
+ "dev": true
+ },
+ "parse-bmfont-ascii": {
+ "version": "1.0.6",
+ "resolved": "https://registry.npmjs.org/parse-bmfont-ascii/-/parse-bmfont-ascii-1.0.6.tgz",
+ "integrity": "sha1-Eaw8P/WPfCAgqyJ2kHkQjU36AoU=",
+ "dev": true,
+ "optional": true
+ },
+ "parse-bmfont-binary": {
+ "version": "1.0.6",
+ "resolved": "https://registry.npmjs.org/parse-bmfont-binary/-/parse-bmfont-binary-1.0.6.tgz",
+ "integrity": "sha1-0Di0dtPp3Z2x4RoLDlOiJ5K2kAY=",
+ "dev": true,
+ "optional": true
+ },
+ "parse-bmfont-xml": {
+ "version": "1.1.4",
+ "resolved": "https://registry.npmjs.org/parse-bmfont-xml/-/parse-bmfont-xml-1.1.4.tgz",
+ "integrity": "sha512-bjnliEOmGv3y1aMEfREMBJ9tfL3WR0i0CKPj61DnSLaoxWR3nLrsQrEbCId/8rF4NyRF0cCqisSVXyQYWM+mCQ==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "xml-parse-from-string": "^1.0.0",
+ "xml2js": "^0.4.5"
+ }
+ },
+ "parse-headers": {
+ "version": "2.0.4",
+ "resolved": "https://registry.npmjs.org/parse-headers/-/parse-headers-2.0.4.tgz",
+ "integrity": "sha512-psZ9iZoCNFLrgRjZ1d8mn0h9WRqJwFxM9q3x7iUjN/YT2OksthDJ5TiPCu2F38kS4zutqfW+YdVVkBZZx3/1aw==",
+ "dev": true,
+ "optional": true
+ },
+ "path-exists": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
+ "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
+ "dev": true
+ },
+ "path-is-absolute": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
+ "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=",
+ "dev": true
+ },
+ "path-key": {
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz",
+ "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
+ "dev": true
+ },
+ "phin": {
+ "version": "2.9.3",
+ "resolved": "https://registry.npmjs.org/phin/-/phin-2.9.3.tgz",
+ "integrity": "sha512-CzFr90qM24ju5f88quFC/6qohjC144rehe5n6DH900lgXmUe86+xCKc10ev56gRKC4/BkHUoG4uSiQgBiIXwDA==",
+ "dev": true,
+ "optional": true
+ },
+ "pixelmatch": {
+ "version": "4.0.2",
+ "resolved": "https://registry.npmjs.org/pixelmatch/-/pixelmatch-4.0.2.tgz",
+ "integrity": "sha1-j0fc7FARtHe2fbA8JDvB8wheiFQ=",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "pngjs": "^3.0.0"
+ }
+ },
+ "pngjs": {
+ "version": "3.4.0",
+ "resolved": "https://registry.npmjs.org/pngjs/-/pngjs-3.4.0.tgz",
+ "integrity": "sha512-NCrCHhWmnQklfH4MtJMRjZ2a8c80qXeMlQMv2uVp9ISJMTt562SbGd6n2oq0PaPgKm7Z6pL9E2UlLIhC+SHL3w==",
+ "dev": true,
+ "optional": true
+ },
+ "process": {
+ "version": "0.11.10",
+ "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz",
+ "integrity": "sha1-czIwDoQBYb2j5podHZGn1LwW8YI=",
+ "dev": true,
+ "optional": true
+ },
+ "process-nextick-args": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz",
+ "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==",
+ "dev": true
+ },
+ "psl": {
+ "version": "1.8.0",
+ "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz",
+ "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==",
+ "dev": true
+ },
+ "punycode": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz",
+ "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==",
+ "dev": true
+ },
+ "readable-stream": {
+ "version": "2.3.7",
+ "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz",
+ "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==",
+ "dev": true,
+ "requires": {
+ "core-util-is": "~1.0.0",
+ "inherits": "~2.0.3",
+ "isarray": "~1.0.0",
+ "process-nextick-args": "~2.0.0",
+ "safe-buffer": "~5.1.1",
+ "string_decoder": "~1.1.1",
+ "util-deprecate": "~1.0.1"
+ }
+ },
+ "regenerator-runtime": {
+ "version": "0.13.9",
+ "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz",
+ "integrity": "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==",
+ "dev": true,
+ "optional": true
+ },
+ "require-directory": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
+ "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=",
+ "dev": true
+ },
+ "rimraf": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
+ "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
+ "dev": true,
+ "requires": {
+ "glob": "^7.1.3"
+ }
+ },
+ "safe-buffer": {
+ "version": "5.1.2",
+ "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
+ "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==",
+ "dev": true
+ },
+ "sax": {
+ "version": "1.2.4",
+ "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz",
+ "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==",
+ "dev": true,
+ "optional": true
+ },
+ "selenium-webdriver": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/selenium-webdriver/-/selenium-webdriver-4.1.0.tgz",
+ "integrity": "sha512-kUDH4N8WruYprTzvug4Pl73Th+WKb5YiLz8z/anOpHyUNUdM3UzrdTOxmSNaf9AczzBeY+qXihzku8D1lMaKOg==",
+ "dev": true,
+ "requires": {
+ "jszip": "^3.6.0",
+ "tmp": "^0.2.1",
+ "ws": ">=7.4.6"
+ }
+ },
+ "set-immediate-shim": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/set-immediate-shim/-/set-immediate-shim-1.0.1.tgz",
+ "integrity": "sha1-SysbJ+uAip+NzEgaWOXlb1mfP2E=",
+ "dev": true
+ },
+ "shebang-command": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
+ "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
+ "dev": true,
+ "requires": {
+ "shebang-regex": "^3.0.0"
+ }
+ },
+ "shebang-regex": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz",
+ "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
+ "dev": true
+ },
+ "signal-exit": {
+ "version": "3.0.6",
+ "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.6.tgz",
+ "integrity": "sha512-sDl4qMFpijcGw22U5w63KmD3cZJfBuFlVNbVMKje2keoKML7X2UzWbc4XrmEbDwg0NXJc3yv4/ox7b+JWb57kQ==",
+ "dev": true
+ },
+ "speedline-core": {
+ "version": "1.4.3",
+ "resolved": "https://registry.npmjs.org/speedline-core/-/speedline-core-1.4.3.tgz",
+ "integrity": "sha512-DI7/OuAUD+GMpR6dmu8lliO2Wg5zfeh+/xsdyJZCzd8o5JgFUjCeLsBDuZjIQJdwXS3J0L/uZYrELKYqx+PXog==",
+ "dev": true,
+ "requires": {
+ "@types/node": "*",
+ "image-ssim": "^0.2.0",
+ "jpeg-js": "^0.4.1"
+ }
+ },
+ "split": {
+ "version": "0.3.3",
+ "resolved": "https://registry.npmjs.org/split/-/split-0.3.3.tgz",
+ "integrity": "sha1-zQ7qXmOiEd//frDwkcQTPi0N0o8=",
+ "dev": true,
+ "requires": {
+ "through": "2"
+ }
+ },
+ "stack-trace": {
+ "version": "0.0.10",
+ "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz",
+ "integrity": "sha1-VHxws0fo0ytOEI6hoqFZ5f3eGcA=",
+ "dev": true
+ },
+ "strftime": {
+ "version": "0.10.1",
+ "resolved": "https://registry.npmjs.org/strftime/-/strftime-0.10.1.tgz",
+ "integrity": "sha512-nVvH6JG8KlXFPC0f8lojLgEsPA18lRpLZ+RrJh/NkQV2tqOgZfbas8gcU8SFgnnqR3rWzZPYu6N2A3xzs/8rQg==",
+ "dev": true
+ },
+ "string-width": {
+ "version": "4.2.3",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
+ "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
+ "dev": true,
+ "requires": {
+ "emoji-regex": "^8.0.0",
+ "is-fullwidth-code-point": "^3.0.0",
+ "strip-ansi": "^6.0.1"
+ },
+ "dependencies": {
+ "ansi-regex": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
+ "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
+ "dev": true
+ },
+ "strip-ansi": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
+ "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
+ "dev": true,
+ "requires": {
+ "ansi-regex": "^5.0.1"
+ }
+ }
+ }
+ },
+ "string_decoder": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz",
+ "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==",
+ "dev": true,
+ "requires": {
+ "safe-buffer": "~5.1.0"
+ }
+ },
+ "strip-ansi": {
+ "version": "3.0.1",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz",
+ "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=",
+ "dev": true,
+ "requires": {
+ "ansi-regex": "^2.0.0"
+ }
+ },
+ "strip-final-newline": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz",
+ "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==",
+ "dev": true
+ },
+ "supports-color": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz",
+ "integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=",
+ "dev": true
+ },
+ "symbol": {
+ "version": "0.3.1",
+ "resolved": "https://registry.npmjs.org/symbol/-/symbol-0.3.1.tgz",
+ "integrity": "sha1-tvmpANSWpX8CQI8iGYwQndoGMEE=",
+ "dev": true
+ },
+ "tar": {
+ "version": "6.1.11",
+ "resolved": "https://registry.npmjs.org/tar/-/tar-6.1.11.tgz",
+ "integrity": "sha512-an/KZQzQUkZCkuoAA64hM92X0Urb6VpRhAFllDzz44U2mcD5scmT3zBc4VgVpkugF580+DQn8eAFSyoQt0tznA==",
+ "dev": true,
+ "requires": {
+ "chownr": "^2.0.0",
+ "fs-minipass": "^2.0.0",
+ "minipass": "^3.0.0",
+ "minizlib": "^2.1.1",
+ "mkdirp": "^1.0.3",
+ "yallist": "^4.0.0"
+ }
+ },
+ "through": {
+ "version": "2.3.8",
+ "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz",
+ "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=",
+ "dev": true
+ },
+ "timm": {
+ "version": "1.7.1",
+ "resolved": "https://registry.npmjs.org/timm/-/timm-1.7.1.tgz",
+ "integrity": "sha512-IjZc9KIotudix8bMaBW6QvMuq64BrJWFs1+4V0lXwWGQZwH+LnX87doAYhem4caOEusRP9/g6jVDQmZ8XOk1nw==",
+ "dev": true,
+ "optional": true
+ },
+ "tinycolor2": {
+ "version": "1.4.2",
+ "resolved": "https://registry.npmjs.org/tinycolor2/-/tinycolor2-1.4.2.tgz",
+ "integrity": "sha512-vJhccZPs965sV/L2sU4oRQVAos0pQXwsvTLkWYdqJ+a8Q5kPFzJTuOFwy7UniPli44NKQGAglksjvOcpo95aZA==",
+ "dev": true,
+ "optional": true
+ },
+ "tmp": {
+ "version": "0.2.1",
+ "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz",
+ "integrity": "sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==",
+ "dev": true,
+ "requires": {
+ "rimraf": "^3.0.0"
+ }
+ },
+ "tough-cookie": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz",
+ "integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==",
+ "dev": true,
+ "requires": {
+ "psl": "^1.1.33",
+ "punycode": "^2.1.1",
+ "universalify": "^0.1.2"
+ }
+ },
+ "universalify": {
+ "version": "0.1.2",
+ "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
+ "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==",
+ "dev": true
+ },
+ "utcstring": {
+ "version": "0.1.0",
+ "resolved": "https://registry.npmjs.org/utcstring/-/utcstring-0.1.0.tgz",
+ "integrity": "sha1-Qw/VEKt/yVtdWRDJAteYgMIIQ2s=",
+ "dev": true
+ },
+ "utif": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/utif/-/utif-2.0.1.tgz",
+ "integrity": "sha512-Z/S1fNKCicQTf375lIP9G8Sa1H/phcysstNrrSdZKj1f9g58J4NMgb5IgiEZN9/nLMPDwF0W7hdOe9Qq2IYoLg==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "pako": "^1.0.5"
+ }
+ },
+ "util-deprecate": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
+ "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=",
+ "dev": true
+ },
+ "uuid": {
+ "version": "8.0.0",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.0.0.tgz",
+ "integrity": "sha512-jOXGuXZAWdsTH7eZLtyXMqUb9EcWMGZNbL9YcGBJl4MH4nrxHmZJhEHvyLFrkxo+28uLb/NYRcStH48fnD0Vzw==",
+ "dev": true
+ },
+ "which": {
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
+ "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
+ "dev": true,
+ "requires": {
+ "isexe": "^2.0.0"
+ }
+ },
+ "wrap-ansi": {
+ "version": "7.0.0",
+ "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
+ "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
+ "dev": true,
+ "requires": {
+ "ansi-styles": "^4.0.0",
+ "string-width": "^4.1.0",
+ "strip-ansi": "^6.0.0"
+ },
+ "dependencies": {
+ "ansi-regex": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
+ "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
+ "dev": true
+ },
+ "ansi-styles": {
+ "version": "4.3.0",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
+ "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
+ "dev": true,
+ "requires": {
+ "color-convert": "^2.0.1"
+ }
+ },
+ "strip-ansi": {
+ "version": "6.0.1",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
+ "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
+ "dev": true,
+ "requires": {
+ "ansi-regex": "^5.0.1"
+ }
+ }
+ }
+ },
+ "wrappy": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
+ "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=",
+ "dev": true
+ },
+ "ws": {
+ "version": "7.5.6",
+ "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.6.tgz",
+ "integrity": "sha512-6GLgCqo2cy2A2rjCNFlxQS6ZljG/coZfZXclldI8FB/1G3CCI36Zd8xy2HrFVACi8tfk5XrgLQEk+P0Tnz9UcA==",
+ "dev": true
+ },
+ "xhr": {
+ "version": "2.6.0",
+ "resolved": "https://registry.npmjs.org/xhr/-/xhr-2.6.0.tgz",
+ "integrity": "sha512-/eCGLb5rxjx5e3mF1A7s+pLlR6CGyqWN91fv1JgER5mVWg1MZmlhBvy9kjcsOdRk8RrIujotWyJamfyrp+WIcA==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "global": "~4.4.0",
+ "is-function": "^1.0.1",
+ "parse-headers": "^2.0.0",
+ "xtend": "^4.0.0"
+ }
+ },
+ "xml-parse-from-string": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/xml-parse-from-string/-/xml-parse-from-string-1.0.1.tgz",
+ "integrity": "sha1-qQKekp09vN7RafPG4oI42VpdWig=",
+ "dev": true,
+ "optional": true
+ },
+ "xml2js": {
+ "version": "0.4.23",
+ "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.23.tgz",
+ "integrity": "sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==",
+ "dev": true,
+ "optional": true,
+ "requires": {
+ "sax": ">=0.6.0",
+ "xmlbuilder": "~11.0.0"
+ }
+ },
+ "xmlbuilder": {
+ "version": "11.0.1",
+ "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz",
+ "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==",
+ "dev": true,
+ "optional": true
+ },
+ "xtend": {
+ "version": "4.0.2",
+ "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz",
+ "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==",
+ "dev": true,
+ "optional": true
+ },
+ "y18n": {
+ "version": "5.0.8",
+ "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz",
+ "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==",
+ "dev": true
+ },
+ "yallist": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
+ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==",
+ "dev": true
+ },
+ "yargs": {
+ "version": "17.2.1",
+ "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.2.1.tgz",
+ "integrity": "sha512-XfR8du6ua4K6uLGm5S6fA+FIJom/MdJcFNVY8geLlp2v8GYbOXD4EB1tPNZsRn4vBzKGMgb5DRZMeWuFc2GO8Q==",
+ "dev": true,
+ "requires": {
+ "cliui": "^7.0.2",
+ "escalade": "^3.1.1",
+ "get-caller-file": "^2.0.5",
+ "require-directory": "^2.1.1",
+ "string-width": "^4.2.0",
+ "y18n": "^5.0.5",
+ "yargs-parser": "^20.2.2"
+ }
+ },
+ "yargs-parser": {
+ "version": "20.2.9",
+ "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz",
+ "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==",
+ "dev": true
+ },
+ "yocto-queue": {
+ "version": "0.1.0",
+ "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",
+ "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==",
+ "dev": true
+ }
+ }
+}
diff --git a/python/mozperftest/mozperftest/test/browsertime/package.json b/python/mozperftest/mozperftest/test/browsertime/package.json
new file mode 100644
index 0000000000..493651df61
--- /dev/null
+++ b/python/mozperftest/mozperftest/test/browsertime/package.json
@@ -0,0 +1,12 @@
+{
+ "name": "mozilla-central-tools-browsertime",
+ "description": "This package file is for node modules used in mozilla-central/tools/browsertime",
+ "repository": {},
+ "license": "MPL-2.0",
+ "dependencies": {},
+ "devDependencies": {
+ "browsertime": "https://github.com/sitespeedio/browsertime/tarball/eae18165d9d82b9a5ad38b0bd1507a2d86a70988"
+ },
+ "notes(private)": "We don't want to publish to npm, so this is marked as private",
+ "private": true
+}
diff --git a/python/mozperftest/mozperftest/test/browsertime/runner.py b/python/mozperftest/mozperftest/test/browsertime/runner.py
new file mode 100644
index 0000000000..54a9ace44a
--- /dev/null
+++ b/python/mozperftest/mozperftest/test/browsertime/runner.py
@@ -0,0 +1,473 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import collections
+import json
+import os
+import pathlib
+import re
+import shutil
+import sys
+from pathlib import Path
+
+from mozperftest.test.browsertime.visualtools import get_dependencies, xvfb
+from mozperftest.test.noderunner import NodeRunner
+from mozperftest.utils import ON_TRY, get_output_dir, install_package
+
+BROWSERTIME_SRC_ROOT = Path(__file__).parent
+
+
def matches(args, *flags):
    """Return True when any of *flags* appears in *args*.

    A flag counts as present either as an exact argument ("--foo") or in
    "--foo=value" form.
    """

    def flag_present(flag):
        if flag in args:
            return True
        prefix = flag + "="
        return any(arg.startswith(prefix) for arg in args)

    return any(flag_present(flag) for flag in flags)
+
+
def extract_browser_name(args):
    "Extracts the browser name if any"
    # These are browsertime arguments and it's browsertime's job to validate
    # them; here we only need the browser name, so take the first
    # `--browser <name>` / `-b <name>` / `--browser=<name>` occurrence.
    joined_args = " ".join(args)
    found = re.search(r"(--browser|-b)[= ]([\w]+)", joined_args)
    if found is None:
        return None
    return found.group(2)
+
+
class NodeException(Exception):
    """Raised when the node subprocess exits with a non-zero status."""
+
+
class BrowsertimeRunner(NodeRunner):
    """Runs a browsertime test."""

    name = "browsertime"
    activated = True
    user_exception = True

    # Command-line options exposed by this layer; elsewhere in this file they
    # are read back with get_arg("browsertime-<name>") or get_arg("<name>").
    arguments = {
        "cycles": {"type": int, "default": 1, "help": "Number of full cycles"},
        "iterations": {"type": int, "default": 1, "help": "Number of iterations"},
        "node": {"type": str, "default": None, "help": "Path to Node.js"},
        "geckodriver": {"type": str, "default": None, "help": "Path to geckodriver"},
        "binary": {
            "type": str,
            "default": None,
            "help": "Path to the desktop browser, or Android app name.",
        },
        "clobber": {
            "action": "store_true",
            "default": False,
            "help": "Force-update the installation.",
        },
        "install-url": {
            "type": str,
            "default": None,
            "help": "Use this URL as the install url.",
        },
        "extra-options": {
            "type": str,
            "default": "",
            "help": "Extra options passed to browsertime.js",
        },
        "xvfb": {"action": "store_true", "default": False, "help": "Use xvfb"},
        "no-window-recorder": {
            "action": "store_true",
            "default": False,
            "help": "Use the window recorder",
        },
        "viewport-size": {"type": str, "default": "1280x1024", "help": "Viewport size"},
        "existing-results": {
            "type": str,
            "default": None,
            "help": "Directory containing existing results to load.",
        },
    }
+
    def __init__(self, env, mach_cmd):
        super(BrowsertimeRunner, self).__init__(env, mach_cmd)
        self.topsrcdir = mach_cmd.topsrcdir
        self._mach_context = mach_cmd._mach_context
        self.virtualenv_manager = mach_cmd.virtualenv_manager
        self._created_dirs = []
        self._test_script = None
        # Lazily populated by the setup_helper property.
        self._setup_helper = None
        # Used as the default --firefox.binaryPath in extra_default_args().
        self.get_binary_path = mach_cmd.get_binary_path
+
    @property
    def setup_helper(self):
        """Lazily import and cache tools/lint/eslint/setup_helper.

        Note: appends to sys.path on first access so the in-tree module can
        be imported.
        """
        if self._setup_helper is not None:
            return self._setup_helper
        sys.path.append(str(Path(self.topsrcdir, "tools", "lint", "eslint")))
        import setup_helper

        self._setup_helper = setup_helper
        return self._setup_helper
+
    @property
    def artifact_cache_path(self):
        """Downloaded artifacts will be kept here."""
        # The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE.
        # Unlike state_path, the directory is not created here.
        return Path(self._mach_context.state_dir, "cache", "browsertime")
+
    @property
    def state_path(self):
        """Unpacked artifacts will be kept here."""
        # The convention is $MOZBUILD_STATE_PATH/$FEATURE.
        # Created on first access so callers can rely on the directory existing.
        res = Path(self._mach_context.state_dir, "browsertime")
        os.makedirs(str(res), exist_ok=True)
        return res
+
    @property
    def browsertime_js(self):
        """Path to the installed browsertime.js entry point.

        The BROWSERTIME environment variable can redirect to an alternative
        install root. Side effect: exports the resolved path via the
        BROWSERTIME_JS env var when the file exists.
        """
        root = os.environ.get("BROWSERTIME", self.state_path)
        path = Path(root, "node_modules", "browsertime", "bin", "browsertime.js")
        if path.exists():
            os.environ["BROWSERTIME_JS"] = str(path)
        return path
+
    @property
    def visualmetrics_py(self):
        """Path to browsertime's bundled visualmetrics.py script.

        Side effect: exports the resolved path via the VISUALMETRICS_PY env
        var when the file exists (consumer not visible here — presumably the
        visual-metrics layer; verify).
        """
        root = os.environ.get("BROWSERTIME", self.state_path)
        path = Path(
            root, "node_modules", "browsertime", "browsertime", "visualmetrics.py"
        )
        if path.exists():
            os.environ["VISUALMETRICS_PY"] = str(path)
        return path
+
    def _get_browsertime_package(self):
        """Return the parsed package.json of the installed browsertime module.

        Raises FileNotFoundError when browsertime has not been installed yet.
        """
        with Path(
            os.environ.get("BROWSERTIME", self.state_path),
            "node_modules",
            "browsertime",
            "package.json",
        ).open() as package:

            return json.load(package)
+
    def _get_browsertime_resolved(self):
        """Return the URL the installed browsertime package was resolved from.

        Reads npm's .package-lock.json; falls back to the "_from" field of
        package.json for older node/npm versions.
        """
        try:
            with Path(
                os.environ.get("BROWSERTIME", self.state_path),
                "node_modules",
                ".package-lock.json",
            ).open() as package_lock:
                return json.load(package_lock)["packages"]["node_modules/browsertime"][
                    "resolved"
                ]

        except FileNotFoundError:
            # Older versions of node/npm add this metadata to package.json
            return self._get_browsertime_package().get("_from")
+
    def _should_install(self):
        """Return True when browsertime is missing or its resolved URL does
        not match the pin in our in-tree package.json."""
        # If browsertime doesn't exist, install it
        if not self.visualmetrics_py.exists() or not self.browsertime_js.exists():
            return True

        # Browsertime exists, check if it's outdated
        with Path(BROWSERTIME_SRC_ROOT, "package.json").open() as new:
            new_pkg = json.load(new)

        return not self._get_browsertime_resolved().endswith(
            new_pkg["devDependencies"]["browsertime"]
        )
+
    def setup(self):
        """Install browsertime and visualmetrics.py prerequisites and the Node.js package."""

        node = self.get_arg("node")
        if node is not None:
            os.environ["NODEJS"] = node

        super(BrowsertimeRunner, self).setup()
        install_url = self.get_arg("install-url")

        visualmetrics = self.get_arg("visualmetrics", False)

        if visualmetrics:
            # installing Python deps on the fly
            for dep in get_dependencies():
                install_package(self.virtualenv_manager, dep, ignore_failure=True)

        # check if the browsertime package has been deployed correctly
        # for this we just check for the browsertime directory presence
        # we also make sure the visual metrics module is there *if*
        # we need it
        if not self._should_install() and not self.get_arg("clobber"):
            return

        # preparing ~/.mozbuild/browsertime
        for file in ("package.json", "package-lock.json"):
            src = BROWSERTIME_SRC_ROOT / file
            target = self.state_path / file
            # Overwrite the existing files
            shutil.copyfile(str(src), str(target))

        package_json_path = self.state_path / "package.json"

        if install_url is not None:
            self.info(
                "Updating browsertime node module version in {package_json_path} "
                "to {install_url}",
                install_url=install_url,
                package_json_path=str(package_json_path),
            )

            # The install URL must pin an exact upstream commit (tarball sha).
            expr = r"/tarball/[a-f0-9]{40}$"
            if not re.search(expr, install_url):
                raise ValueError(
                    "New upstream URL does not end with {}: '{}'".format(
                        expr[:-1], install_url
                    )
                )

            with package_json_path.open() as f:
                existing_body = json.loads(
                    f.read(), object_pairs_hook=collections.OrderedDict
                )

            existing_body["devDependencies"]["browsertime"] = install_url
            updated_body = json.dumps(existing_body)
            with package_json_path.open("w") as f:
                f.write(updated_body)

        self._setup_node_packages(package_json_path)
+
    def _setup_node_packages(self, package_json_path):
        """Install the browsertime Node.js requirements via setup_helper."""
        if not self.setup_helper.check_node_executables_valid():
            return

        should_clobber = self.get_arg("clobber")
        # To use a custom `geckodriver`, set
        # os.environ[b"GECKODRIVER_BASE_URL"] = bytes(url)
        # to an endpoint with binaries named like
        # https://github.com/sitespeedio/geckodriver/blob/master/install.js#L31.

        if ON_TRY:
            # CI provides its own driver binaries; skip npm postinstall downloads.
            os.environ["CHROMEDRIVER_SKIP_DOWNLOAD"] = "true"
            os.environ["GECKODRIVER_SKIP_DOWNLOAD"] = "true"

        self.info(
            "Installing browsertime node module from {package_json}",
            package_json=str(package_json_path),
        )
        install_url = self.get_arg("install-url")

        self.setup_helper.package_setup(
            str(self.state_path),
            "browsertime",
            should_update=install_url is not None,
            should_clobber=should_clobber,
            no_optional=install_url or ON_TRY,
        )
+
+ def extra_default_args(self, args=[]):
+ # Add Mozilla-specific default arguments. This is tricky because browsertime is quite
+ # loose about arguments; repeat arguments are generally accepted but then produce
+ # difficult to interpret type errors.
+ extra_args = []
+
+ # Default to Firefox. Override with `-b ...` or `--browser=...`.
+ if not matches(args, "-b", "--browser"):
+ extra_args.extend(("-b", "firefox"))
+
+ # Default to not collect HAR. Override with `--skipHar=false`.
+ if not matches(args, "--har", "--skipHar", "--gzipHar"):
+ extra_args.append("--skipHar")
+
+ extra_args.extend(["--viewPort", self.get_arg("viewport-size")])
+
+ if not matches(args, "--android"):
+ binary = self.get_arg("binary")
+ if binary is not None:
+ extra_args.extend(("--firefox.binaryPath", binary))
+ else:
+ # If --firefox.binaryPath is not specified, default to the objdir binary
+ # Note: --firefox.release is not a real browsertime option, but it will
+ # silently ignore it instead and default to a release installation.
+ if (
+ not matches(
+ args,
+ "--firefox.binaryPath",
+ "--firefox.release",
+ "--firefox.nightly",
+ "--firefox.beta",
+ "--firefox.developer",
+ )
+ and extract_browser_name(args) != "chrome"
+ ):
+ extra_args.extend(("--firefox.binaryPath", self.get_binary_path()))
+
+ geckodriver = self.get_arg("geckodriver")
+ if geckodriver is not None:
+ extra_args.extend(("--firefox.geckodriverPath", geckodriver))
+
+ if extra_args:
+ self.debug(
+ "Running browsertime with extra default arguments: {extra_args}",
+ extra_args=extra_args,
+ )
+
+ return extra_args
+
+ def _android_args(self, metadata):
+ app_name = self.get_arg("android-app-name")
+
+ args_list = [
+ "--android",
+ "--firefox.android.package",
+ app_name,
+ ]
+ activity = self.get_arg("android-activity")
+ if activity is not None:
+ args_list += ["--firefox.android.activity", activity]
+
+ return args_list
+
    def _line_handler(self, line):
        """Parse one browsertime log line and forward it at the matching level."""
        # Expected shape: "[YYYY-MM-DD ...] LEVEL: message".
        line_matcher = re.compile(r"(\[\d{4}-\d{2}-\d{2}.*\])\s+([a-zA-Z]+):\s+(.*)")
        match = line_matcher.match(line)
        if not match:
            return

        date, level, msg = match.groups()
        # Escape braces so the message survives downstream str.format-style calls.
        msg = msg.replace("{", "{{").replace("}", "}}")
        level = level.lower()
        if "error" in level:
            # NOTE(review): msg is passed twice — formatted into the message
            # and again as a positional argument; confirm Layer.error expects
            # the extra argument.
            self.error("Mozperftest failed to run: {}".format(msg), msg)
        elif "warning" in level:
            self.warning(msg)
        else:
            self.info(msg)
+
    def run(self, metadata):
        """Run the browsertime test described by *metadata* for N cycles."""
        self._test_script = metadata.script
        self.setup()

        # When pre-existing results are supplied, register them and skip running.
        existing = self.get_arg("browsertime-existing-results")
        if existing:
            metadata.add_result(
                {"results": existing, "name": self._test_script["name"]}
            )
            return metadata

        cycles = self.get_arg("cycles", 1)
        for cycle in range(1, cycles + 1):

            # Build an output directory
            output = self.get_arg("output")
            if output is None:
                output = pathlib.Path(self.topsrcdir, "artifacts")
            result_dir = get_output_dir(output, f"browsertime-results-{cycle}")

            # Run the test cycle
            metadata.run_hook(
                "before_cycle", metadata, self.env, cycle, self._test_script
            )
            try:
                metadata = self._one_cycle(metadata, result_dir)
            finally:
                # after_cycle always fires, even when the cycle raised.
                metadata.run_hook(
                    "after_cycle", metadata, self.env, cycle, self._test_script
                )
        return metadata
+
    def _one_cycle(self, metadata, result_dir):
        """Build the browsertime command line, run one cycle, record results.

        Raises NodeException when the node subprocess exits non-zero.
        """
        profile = self.get_arg("profile-directory")
        is_login_site = False

        args = [
            "--resultDir",
            str(result_dir),
            "--firefox.profileTemplate",
            profile,
            "--iterations",
            str(self.get_arg("iterations")),
            self._test_script["filename"],
        ]

        # Set *all* prefs found in browser_prefs because
        # browsertime will override the ones found in firefox.profileTemplate
        # with its own defaults at `firefoxPreferences.js`
        # Using `--firefox.preference` ensures we override them.
        # see https://github.com/sitespeedio/browsertime/issues/1427
        browser_prefs = metadata.get_options("browser_prefs")
        for key, value in browser_prefs.items():
            args += ["--firefox.preference", f"{key}:{value}"]

        if self.get_arg("verbose"):
            args += ["-vvv"]

        # if the visualmetrics layer is activated, we want to feed it
        visualmetrics = self.get_arg("visualmetrics", False)
        if visualmetrics:
            args += ["--video", "true"]
            if not self.get_arg("no-window-recorder"):
                args += ["--firefox.windowRecorder", "true"]

        # Parse "name=value,name=value" pairs from --browsertime-extra-options.
        extra_options = self.get_arg("extra-options")
        if extra_options:
            for option in extra_options.split(","):
                option = option.strip()
                if not option:
                    continue
                option = option.split("=", 1)
                if len(option) != 2:
                    self.warning(
                        f"Skipping browsertime option {option} as it "
                        "is missing a name/value pairing. We expect options "
                        "to be formatted as: --browsertime-extra-options "
                        "'browserRestartTries=1,timeouts.browserStart=10'"
                    )
                    continue
                name, value = option

                # Check if we have a login site
                if name == "browsertime.login" and value:
                    is_login_site = True

                self.info(f"Adding extra browsertime argument: --{name} {value}")
                args += ["--" + name, value]

        if self.get_arg("android"):
            args.extend(self._android_args(metadata))

        # Remove any possible verbose option if we are on Try and using logins
        if is_login_site and ON_TRY:
            self.info("Turning off verbose mode for login-logic")
            self.info(
                "Please contact the perftest team if you need verbose mode enabled."
            )
            for verbose_level in ("-v", "-vv", "-vvv", "-vvvv"):
                try:
                    args.remove(verbose_level)
                except ValueError:
                    pass

        extra = self.extra_default_args(args=args)
        command = [str(self.browsertime_js)] + extra + args
        self.info("Running browsertime with this command %s" % " ".join(command))

        # Wrap the run in a virtual display when visual metrics need video.
        if visualmetrics and self.get_arg("xvfb"):
            with xvfb():
                exit_code = self.node(command, self._line_handler)
        else:
            exit_code = self.node(command, self._line_handler)

        if exit_code != 0:
            raise NodeException(exit_code)

        metadata.add_result(
            {"results": str(result_dir), "name": self._test_script["name"]}
        )

        return metadata
diff --git a/python/mozperftest/mozperftest/test/browsertime/visualtools.py b/python/mozperftest/mozperftest/test/browsertime/visualtools.py
new file mode 100644
index 0000000000..d25cb131d3
--- /dev/null
+++ b/python/mozperftest/mozperftest/test/browsertime/visualtools.py
@@ -0,0 +1,196 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+""" Collects visualmetrics dependencies.
+"""
+import contextlib
+import os
+import subprocess
+import sys
+import time
+from distutils.spawn import find_executable
+
+from mozperftest.utils import host_platform
+
# Versions pinned for the visual-metrics Python toolchain (see get_dependencies).
_PILLOW_VERSION = "7.2.0"
_PYSSIM_VERSION = "0.4"
+
+
def _start_xvfb():
    """Start an Xvfb server on display :99 and point DISPLAY at it.

    Returns (proc, old_display) so the caller can restore the environment
    later with _stop_xvfb. Raises FileNotFoundError when the Xvfb binary is
    not installed.
    """
    # Local import to keep the module-level import block untouched.
    import shutil

    old_display = os.environ.get("DISPLAY")
    # shutil.which replaces distutils.spawn.find_executable, which is
    # deprecated and removed from the stdlib in Python 3.12.
    xvfb = shutil.which("Xvfb")
    if xvfb is None:
        raise FileNotFoundError("Xvfb")
    cmd = [xvfb, ":99"]
    proc = subprocess.Popen(
        cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, close_fds=True
    )
    os.environ["DISPLAY"] = ":99"
    # Give the server a moment to come up before anything connects to it.
    time.sleep(0.2)
    return proc, old_display
+
+
+def _stop_xvfb(proc, old_display):
+ proc, old_display
+ if old_display is None:
+ del os.environ["DISPLAY"]
+ else:
+ os.environ["DISPLAY"] = old_display
+ if proc is not None:
+ try:
+ proc.terminate()
+ proc.wait()
+ except OSError:
+ pass
+
+
@contextlib.contextmanager
def xvfb():
    # Run the enclosed block under a temporary Xvfb display; the previous
    # DISPLAY value is restored (and the server stopped) even on error.
    proc, old_display = _start_xvfb()
    try:
        yield
    finally:
        _stop_xvfb(proc, old_display)
+
+
def get_plat():
    # Key used to look up the pinned wheels below: (platform, "major.minor").
    return host_platform(), f"{sys.version_info.major}.{sys.version_info.minor}"
+
+
+NUMPY = {
+ ("linux64", "3.10",): (
+ "88/cc/92815174c345015a326e3fff8beddcb951b3ef0f7c8296fcc22c622add7c"
+ "/numpy-1.23.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl"
+ ),
+ ("linux64", "3.9",): (
+ "8d/d6/cc2330e512936a904a4db1629b71d697fb309115f6d2ede94d183cdfe185"
+ "/numpy-1.23.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl"
+ ),
+ ("linux64", "3.8",): (
+ "86/c9/9f9d6812fa8a031a568c2c1c49f207a0a4030ead438644c887410fc49c8a"
+ "/numpy-1.23.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl"
+ ),
+ ("linux64", "3.7",): (
+ "d6/2e/a2dbcff6f46bb65645d18538d67183a1cf56b006ba96a12575c282a976bc/"
+ "numpy-1.19.2-cp37-cp37m-manylinux1_x86_64.whl"
+ ),
+ ("linux64", "3.6",): (
+ "b8/e5/a64ef44a85397ba3c377f6be9c02f3cb3e18023f8c89850dd319e7945521/"
+ "numpy-1.19.2-cp36-cp36m-manylinux1_x86_64.whl"
+ ),
+ ("darwin", "3.10",): (
+ "c0/c2/8d58f3ccd1aa3b1eaa5c333a6748e225b45cf8748b13f052cbb3c811c996"
+ "/numpy-1.23.1-cp310-cp310-macosx_10_9_x86_64.whl"
+ ),
+ ("darwin", "3.9",): (
+ "e5/43/b1b80cbcea9f2d0e6adadd27a8da2c71b751d5670a846b444087fab408a1"
+ "/numpy-1.23.1-cp39-cp39-macosx_10_9_x86_64.whl"
+ ),
+ ("darwin", "3.8",): (
+ "71/08/bc1e4fb7392aa0721f299c444e8c99fa97c8cb41fe33791eca8e26364639"
+ "/numpy-1.23.1-cp38-cp38-macosx_10_9_x86_64.whl"
+ ),
+ ("darwin", "3.7",): (
+ "c1/a9/f04a5b7db30cc30b41fe516b8914c5049264490a34a49d977937606fbb23/"
+ "numpy-1.19.2-cp37-cp37m-macosx_10_9_x86_64.whl"
+ ),
+ ("darwin", "3.6",): (
+ "be/8e/800113bd3a0c9195b24574b8922ad92be96278028833c389b69a8b14f657/"
+ "numpy-1.19.2-cp36-cp36m-macosx_10_9_x86_64.whl"
+ ),
+ ("win64", "3.10",): (
+ "8b/11/75a93826457f94a4c857a38ea3f178915f27ff38ffee1753e36994be7810"
+ "/numpy-1.23.1-cp310-cp310-win_amd64.whl"
+ ),
+ ("win64", "3.9",): (
+ "bd/dd/0610fb49c433fe5987ae312fe672119080fd77be484b5698d6fa7554148b"
+ "/numpy-1.23.1-cp39-cp39-win_amd64.whl"
+ ),
+ ("win64", "3.8",): (
+ "d0/19/6e81ed6fe30271ebcf25e5e2a0bdf1fa06ddee03a8cb82625503826970db"
+ "/numpy-1.23.1-cp38-cp38-win_amd64.whl"
+ ),
+ ("win64", "3.7",): (
+ "82/4e/61764556b7ec13f5bd441b04530e2f9f11bb164308ef0e6951919bb846cb/"
+ "numpy-1.19.2-cp37-cp37m-win_amd64.whl"
+ ),
+ ("win64", "3.6",): (
+ "dc/8e/a78d4e4a28adadbf693a9c056a0d5955a906889fa0dc3768b88deb236e22/"
+ "numpy-1.19.2-cp36-cp36m-win_amd64.whl"
+ ),
+}
+
+
+SCIPY = {
+ ("linux64", "3.10",): (
+ "bc/fe/72b611ba221c3367b06163992af4807515d6e0e09b3b9beee8ec22162d6f"
+ "/scipy-1.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl"
+ ),
+ ("linux64", "3.9",): (
+ "25/82/da07cc3bb40554f1f82d7e24bfa7ffbfb05b50c16eb8d738ebb74b68af8f"
+ "/scipy-1.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl"
+ ),
+ ("linux64", "3.8",): (
+ "cf/28/5ac0afe5fb473a934ef6bc7953a98a3d2eacf9a8f456524f035f3a844ca4"
+ "/scipy-1.8.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl"
+ ),
+ ("linux64", "3.7",): (
+ "65/f9/f7a7e5009711579c72da2725174825e5056741bf4001815d097eef1b2e17"
+ "/scipy-1.5.2-cp37-cp37m-manylinux1_x86_64.whl"
+ ),
+ ("linux64", "3.6",): (
+ "2b/a8/f4c66eb529bb252d50e83dbf2909c6502e2f857550f22571ed8556f62d95"
+ "/scipy-1.5.2-cp36-cp36m-manylinux1_x86_64.whl"
+ ),
+ ("darwin", "3.10",): (
+ "7c/f3/47b882f8b7a4dbc38e8bc5d7befe3ad2da582ae2229745e1eac77217f3e4"
+ "/scipy-1.8.1-cp310-cp310-macosx_10_9_x86_64.whl"
+ ),
+ ("darwin", "3.9",): (
+ "b0/de/e8d273063e1b21ec82e4a09a9654c4dcbc3215abbd59b7038c4ff4272e9e"
+ "/scipy-1.8.1-cp39-cp39-macosx_10_9_x86_64.whl"
+ ),
+ ("darwin", "3.8",): (
+ "dd/cc/bb5a9705dd30e7f558358168c793084f80de7cca88b06c82dca9d765b225"
+ "/scipy-1.8.1-cp38-cp38-macosx_10_9_x86_64.whl"
+ ),
+ ("darwin", "3.7",): (
+ "bc/47/e71e7f198a0b547fe861520a0240e3171256822dae81fcc97a36b772303e"
+ "/scipy-1.5.2-cp37-cp37m-macosx_10_9_x86_64.whl"
+ ),
+ ("darwin", "3.6",): (
+ "00/c0/ddf03baa7ee2a3540d8fbab0fecff7cdd0595dffd91cda746caa95cb686d"
+ "/scipy-1.5.2-cp36-cp36m-macosx_10_9_x86_64.whl"
+ ),
+ ("win64", "3.10"): (
+ "31/c2/0b8758ebaeb43e089eb56168390824a830f9f419ae07d755d99a46e5a937"
+ "/scipy-1.8.1-cp310-cp310-win_amd64.whl"
+ ),
+ ("win64", "3.9"): (
+ "ba/a1/a8fa291b8ae6523866dd099af377bc508c280c8ca43a42483c76775ce3cd"
+ "/scipy-1.8.1-cp39-cp39-win_amd64.whl"
+ ),
+ ("win64", "3.8"): (
+ "8d/3e/e6f6fa6458e03ecd456ae6178529d4bd610a7c4999189f34d0668e4e69a6"
+ "/scipy-1.8.1-cp38-cp38-win_amd64.whl"
+ ),
+ ("win64", "3.7",): (
+ "66/80/d8a5050df5b4d8229e018f3222fe603ce7f92c026b78f4e05d69c3a6c43b"
+ "/scipy-1.5.2-cp37-cp37m-win_amd64.whl"
+ ),
+ ("win64", "3.6",): (
+ "fc/f6/3d455f8b376a0faf1081dbba38bbd594c074292bdec08feaac589f53bc06"
+ "/scipy-1.5.2-cp36-cp36m-win_amd64.whl"
+ ),
+}
+
+
def get_dependencies():
    """Return the pip requirements needed for visual-metrics processing."""
    # NOTE(review): raises KeyError when the current platform/Python pair has
    # no pinned wheel in NUMPY/SCIPY — confirm that is the intended failure
    # mode for unsupported platforms.
    return (
        "https://files.pythonhosted.org/packages/" + NUMPY[get_plat()],
        "https://files.pythonhosted.org/packages/" + SCIPY[get_plat()],
        "Pillow==%s" % _PILLOW_VERSION,
        "pyssim==%s" % _PYSSIM_VERSION,
        "influxdb==5.3.0",
        "grafana_api==1.0.3",
    )
diff --git a/python/mozperftest/mozperftest/test/noderunner.py b/python/mozperftest/mozperftest/test/noderunner.py
new file mode 100644
index 0000000000..4304609bff
--- /dev/null
+++ b/python/mozperftest/mozperftest/test/noderunner.py
@@ -0,0 +1,75 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import os
+import sys
+
+import mozpack.path as mozpath
+
+from mozperftest.layers import Layer
+from mozperftest.utils import silence
+
+
class NodeRunner(Layer):
    """Layer that locates Node.js and runs node subprocesses for tests."""

    name = "node"

    def __init__(self, env, mach_cmd):
        super(NodeRunner, self).__init__(env, mach_cmd)
        self.topsrcdir = mach_cmd.topsrcdir
        self._mach_context = mach_cmd._mach_context
        self.python_path = mach_cmd.virtualenv_manager.python_path

        from mozbuild.nodeutil import find_node_executable

        self.node_path = os.path.abspath(find_node_executable()[0])

    def setup(self):
        """Install the Node.js package."""
        self.verify_node_install()

    def node(self, args, line_handler=None):
        """Invoke node (interactively) with the given arguments."""
        return self.run_process(
            [self.node_path] + args,
            append_env=self.append_env(),
            pass_thru=False,  # Allow user to run Node interactively.
            ensure_exit_code=False,  # Don't throw on non-zero exit code.
            cwd=mozpath.join(self.topsrcdir),
            line_handler=line_handler,
        )

    def append_env(self, append_path=True):
        """Build the environment overrides used when spawning node."""
        # Ensure that bare `node` and `npm` in scripts, including post-install
        # scripts, finds the binary we're invoking with. Without this, it's
        # easy for compiled extensions to get mismatched versions of the Node.js
        # extension API.
        path = os.environ.get("PATH", "").split(os.pathsep) if append_path else []
        node_dir = os.path.dirname(self.node_path)
        path = [node_dir] + path

        return {
            "PATH": os.pathsep.join(path),
            # Bug 1560193: The JS library browsertime uses to execute commands
            # (execa) will muck up the PATH variable and put the directory that
            # node is in first in path. If this is globally-installed node,
            # that means `/usr/bin` will be inserted first which means that we
            # will get `/usr/bin/python` for `python`.
            #
            # Our fork of browsertime supports a `PYTHON` environment variable
            # that points to the exact python executable to use.
            "PYTHON": self.python_path,
        }

    def verify_node_install(self):
        """Check that Node.js is usable; raise ValueError when it is not."""
        # setup_helper lives in tools/lint/eslint and is shared with the
        # ESLint tooling.
        sys.path.append(mozpath.join(self.topsrcdir, "tools", "lint", "eslint"))
        import setup_helper

        with silence():
            node_valid = setup_helper.check_node_executables_valid()
        if not node_valid:
            # running again to get details printed out
            setup_helper.check_node_executables_valid()
            raise ValueError("Can't find Node. did you run ./mach bootstrap ?")

        return True
diff --git a/python/mozperftest/mozperftest/test/webpagetest.py b/python/mozperftest/mozperftest/test/webpagetest.py
new file mode 100644
index 0000000000..2dc6698b8d
--- /dev/null
+++ b/python/mozperftest/mozperftest/test/webpagetest.py
@@ -0,0 +1,413 @@
+import json
+import pathlib
+import re
+import time
+import traceback
+from threading import Thread
+
+import requests
+
+import mozperftest.utils as utils
+from mozperftest.layers import Layer
+from mozperftest.runner import HERE
+
# Browsers webpagetest.org can drive for our tests (case-sensitive).
ACCEPTED_BROWSERS = ["Chrome", "Firefox"]

# Connection-throttling profiles accepted by webpagetest.org (case-sensitive).
ACCEPTED_CONNECTIONS = [
    "DSL",
    "Cable",
    "FIOS",
    "Dial",
    "Edge",
    "2G",
    "3GSlow",
    "3GFast",
    "3G",
    "4G",
    "LTE",
    "Native",
    "custom",
]

# Statistics that can be extracted from a webpagetest run.
ACCEPTED_STATISTICS = ["average", "median", "standardDeviation"]
# Local file (next to the runner) holding the API key outside of CI.
WPT_KEY_FILE = "WPT_key.txt"
# statusText value WPT returns when the API key has expired.
WPT_API_EXPIRED_MESSAGE = "API key expired"
+
+
class WPTTimeOutError(Exception):
    """
    Raised when a request has not returned results within the configured
    timeout (for this code, ~6 hours by default).
    """

    pass
+
+
class WPTBrowserSelectionError(Exception):
    """
    Raised when an invalid browser option is requested for a test.
    The only browsers allowed are specified in the ACCEPTED_BROWSERS list at
    the top of this module; the browser must be a case-sensitive match.
    """

    pass
+
+
class WPTLocationSelectionError(Exception):
    """
    Raised when an invalid testing location is requested for a test.
    Acceptable locations are specified here: https://www.webpagetest.org/getTesters.php?f=html
    The location must be a case-sensitive match; for example, to test in
    Virginia, USA you would pass ec2-us-east1 as your location.
    """

    pass
+
+
class WPTInvalidConnectionSelection(Exception):
    """
    Raised when an invalid connection option is requested for a test.
    The only connections allowed are specified in the ACCEPTED_CONNECTIONS
    list at the top of this module; the type must be a case-sensitive match.
    """

    pass
+
+
class WPTDataProcessingError(Exception):
    """
    Raised when a value expected in a webpagetest result is missing.
    """

    pass
+
+
class WPTInvalidURLError(Exception):
    """
    Raised when an invalid website URL is requested for a test.
    A website must be in the format {domain_name}.{top_level_domain};
    for example "google.ca" and "youtube.com" are valid website URLs, but
    "google" and "youtube" are not.
    """

    pass
+
+
class WPTErrorWithWebsite(Exception):
    """
    Raised when the first/repeat view results returned do not match the
    request. For instance, requesting 3 runs with first and repeat view but
    getting back 3 first-view and 2 repeat-view tests raises this exception.
    """

    pass
+
+
class WPTInvalidStatisticsError(Exception):
    """
    Raised when no statistics are requested, or when a requested statistic
    is not one of the entries in the ACCEPTED_STATISTICS list at the top of
    this module. (Previous docstring was copy-pasted from another exception.)
    """

    pass
+
+
class WPTExpiredAPIKeyError(Exception):
    """
    Raised when WPT notifies us that our API key has expired.
    """

    pass
+
+
class PropagatingErrorThread(Thread):
    """A Thread that captures an exception raised by its target and
    re-raises it in the thread that calls join()."""

    def run(self):
        self.exc = None
        try:
            self._target(*self._args, **self._kwargs)
        except Exception as e:
            # Stash the failure; it is re-raised from join().
            self.exc = e

    def join(self, timeout=None):
        # Bug fix: the timeout argument was previously dropped instead of
        # being forwarded to Thread.join.
        super(PropagatingErrorThread, self).join(timeout)
        if self.exc:
            raise self.exc
+
+
class WebPageTestData:
    """Formats raw webpagetest values into the perftest results layout."""

    def open_data(self, data):
        points = [
            {"file": "webpagetest", "value": value, "xaxis": index}
            for index, value in enumerate(data["values"])
        ]
        return {
            "name": "webpagetest",
            "subtest": data["name"],
            "data": points,
            "shouldAlert": True,
        }

    def transform(self, data):
        # No post-processing is needed for webpagetest results.
        return data

    merge = transform
+
+
class WebPageTest(Layer):
    """
    This is the webpagetest layer, it is responsible for sending requests to run a webpagetest
    pageload test, receiving the results as well processing them into a useful data format.
    """

    name = "webpagetest"
    activated = False
    # Command-line toggles exposed by this layer.
    arguments = {
        "no-video": {
            "action": "store_true",
            "default": False,
            "help": "Disable video, required for calculating Speed Index and filmstrip view",
        },
        "no-images": {
            "action": "store_true",
            "default": False,
            "help": "Set to True to disable screenshot capturing, False by default",
        },
    }
+
    def __init__(self, env, mach_cmd):
        super(WebPageTest, self).__init__(env, mach_cmd)
        # On CI the API key comes from a Taskcluster secret; locally it is
        # read from a WPT_key.txt file next to the runner.
        if utils.ON_TRY:
            self.WPT_key = utils.get_tc_secret(wpt=True)["wpt_key"]
        else:
            self.WPT_key = pathlib.Path(HERE, WPT_KEY_FILE).open().read()
        # Defaults; may be overridden in pre_run_error_checks().
        self.statistic_types = ["average", "median", "standardDeviation"]
        self.timeout_limit = 21600  # seconds (~6 hours)
        self.wait_between_requests = 180  # seconds between result polls
+
    def run(self, metadata):
        """Entry point for the layer: run every site in the script's test_list."""
        options = metadata.script["options"]
        test_list = options["test_list"]
        self.statistic_types = options["test_parameters"]["statistics"]
        self.wpt_browser_metrics = options["browser_metrics"]
        self.pre_run_error_checks(options["test_parameters"], test_list)
        self.create_and_run_wpt_threaded_tests(test_list, metadata)
        # Best effort: the balance check must not fail the run.
        try:
            self.test_runs_left_this_month()
        except Exception:
            self.warning("testBalance check had an issue, please investigate")
        return metadata
+
    def pre_run_error_checks(self, options, test_list):
        """Validate browser/connection/statistics/location/URLs before any
        test is submitted, and apply per-test option overrides."""
        if options["browser"] not in ACCEPTED_BROWSERS:
            raise WPTBrowserSelectionError(
                "Invalid Browser Option Selected, please choose one of the following: "
                f"{ACCEPTED_BROWSERS}"
            )
        if options["connection"] not in ACCEPTED_CONNECTIONS:
            raise WPTInvalidConnectionSelection(
                "Invalid Connection Option Selected, please choose one of the following: "
                f"{ACCEPTED_CONNECTIONS}"
            )
        if not len(self.statistic_types):
            raise WPTInvalidStatisticsError(
                "No statistics provided please provide some"
            )
        for stat in self.statistic_types:
            if stat not in ACCEPTED_STATISTICS:
                raise WPTInvalidStatisticsError(
                    f"This is an invalid statistic, statistics can only be from "
                    f"the following list: {ACCEPTED_STATISTICS}"
                )

        # Per-test overrides of the defaults set in __init__.
        if "timeout_limit" in options.keys():
            self.timeout_limit = options["timeout_limit"]
        if "wait_between_requests" in options.keys():
            self.wait_between_requests = options["wait_between_requests"]
        # NOTE(review): this re-assigns self.statistic_types *after* the
        # validation loop above — the override is not validated; confirm
        # that is intentional.
        if "statistics" in options.keys():
            self.statistic_types = options["statistics"]

        options["capture_video"] = 0 if self.get_arg("no-video") else options["video"]
        options["noimages"] = 1 if self.get_arg("no-images") else options["noimages"]
        self.location_queue(options["location"])
        self.check_urls_are_valid(test_list)
+
+ def location_queue(self, location):
+ location_list = {}
+ try:
+ location_list = self.request_with_timeout(
+ "https://www.webpagetest.org/getLocations.php?f=json"
+ )["data"]
+ except Exception:
+ self.error(
+ "Error with getting location queue data, see below for more details"
+ )
+ self.info(traceback.format_exc())
+ if location and location not in location_list.keys():
+ raise WPTLocationSelectionError(
+ "Invalid location selected please choose one of the locations here: "
+ f"{location_list.keys()}"
+ )
+ self.info(
+ f"Test queue at {location}({location_list[location]['Label']}) is "
+ f"{location_list[location]['PendingTests']['Queued']}"
+ )
+
    def request_with_timeout(self, url):
        """Poll *url* until the WPT job reports success or the timeout hits.

        Raises WPTExpiredAPIKeyError when WPT reports an expired API key, and
        WPTTimeOutError after self.timeout_limit seconds.
        """
        requested_results = requests.get(url)
        results_of_request = json.loads(requested_results.text)
        start = time.monotonic()
        if (
            "statusText" in results_of_request.keys()
            and results_of_request["statusText"] == WPT_API_EXPIRED_MESSAGE
        ):
            raise WPTExpiredAPIKeyError("The API key has expired")
        # Keep polling while the HTTP layer is healthy (200) but the WPT job
        # itself is still pending (JSON statusCode != 200).
        while (
            requested_results.status_code == 200
            and time.monotonic() - start < self.timeout_limit
            and (
                "statusCode" in results_of_request.keys()
                and results_of_request["statusCode"] != 200
            )
        ):
            requested_results = requests.get(url)
            results_of_request = json.loads(requested_results.text)
            time.sleep(self.wait_between_requests)
        if time.monotonic() - start > self.timeout_limit:
            raise WPTTimeOutError(
                f"{url} test timed out after {self.timeout_limit} seconds"
            )
        return results_of_request
+
+    def check_urls_are_valid(self, test_list):
+        """Reject any test URL that does not contain a dot.
+
+        This is only a cheap sanity check (a hostname needs at least one
+        dot); it does not fully validate URLs.
+        """
+        for url in test_list:
+            if "." not in url:
+                raise WPTInvalidURLError(f"{url} is an invalid url")
+
+    def create_and_run_wpt_threaded_tests(self, test_list, metadata):
+        """Run one WPT test per website concurrently, one thread each.
+
+        PropagatingErrorThread re-raises a worker's exception from join(),
+        so a failure in any website's run surfaces here.
+        """
+        threads = []
+        for website in test_list:
+            t = PropagatingErrorThread(
+                target=self.create_and_run_wpt_tests, args=(website, metadata)
+            )
+            t.start()
+            threads.append(t)
+        for thread in threads:
+            thread.join()
+
+    def create_and_run_wpt_tests(self, website_to_be_tested, metadata):
+        """Submit a WPT test for one website, validate it, record results.
+
+        Pipeline: request + poll results, run post-run error checks, then
+        append the extracted metrics to `metadata`.
+        """
+        wpt_run = self.get_WPT_results(
+            website_to_be_tested, metadata.script["options"]["test_parameters"]
+        )
+        self.post_run_error_checks(
+            wpt_run, metadata.script["options"], website_to_be_tested
+        )
+        self.add_wpt_run_to_metadata(wpt_run, metadata, website_to_be_tested)
+
+    def get_WPT_results(self, website, options):
+        """Submit a WPT test request and poll until its JSON results exist.
+
+        The first request returns a "jsonUrl" pointing at the eventual
+        results; the second request polls that URL until the run finishes.
+        """
+        self.info(f"Testing: {website}")
+        wpt_test_request_link = self.create_wpt_request_link(options, website)
+        send_wpt_test_request = self.request_with_timeout(wpt_test_request_link)[
+            "data"
+        ]["jsonUrl"]
+        results_of_test = self.request_with_timeout(send_wpt_test_request)
+        return results_of_test
+
+    def create_wpt_request_link(self, options, website_to_be_tested):
+        """Build the runtest.php URL for one website from the options dict.
+
+        Everything past the first six option entries is appended verbatim as
+        extra &key=value query parameters.
+        """
+        test_parameters = ""
+        # NOTE(review): slicing items()[6:] relies on dict insertion order
+        # and on exactly six "structural" options (location/browser/etc.)
+        # coming first — fragile if the options schema changes. Confirm the
+        # producer of this dict guarantees that ordering.
+        for key_value_pair in list(options.items())[6:]:
+            test_parameters += "&{}={}".format(*key_value_pair)
+        return (
+            f"https://www.webpagetest.org/runtest.php?url={website_to_be_tested}&k={self.WPT_key}&"
+            f"location={options['location']}:{options['browser']}.{options['connection']}&"
+            f"f=json{test_parameters}"
+        )
+
+    def post_run_error_checks(self, results_of_test, options, url):
+        """Sanity-check a finished WPT run.
+
+        Logs the public summary link, warns when not every requested run
+        succeeded, and verifies the resulting browser/location match what
+        was submitted.
+        """
+        self.info(f"{url} test can be found here: {results_of_test['data']['summary']}")
+
+        # Warn when successful first-view runs — and repeat-view runs, unless
+        # the test was first-view-only — differ from the requested run count.
+        if results_of_test["data"]["testRuns"] != results_of_test["data"][
+            "successfulFVRuns"
+        ] or (
+            not results_of_test["data"]["fvonly"]
+            and results_of_test["data"]["testRuns"]
+            != results_of_test["data"]["successfulRVRuns"]
+        ):
+            """
+            This error is raised in 2 conditions:
+            1) If the testRuns requested does not equal the successfulFVRuns(Firstview runs)
+            2) If repeat view is enabled and if testRuns requested does not equal successfulFVRuns
+            and successfulRVRuns
+            """
+            # TODO: establish a threshold for failures, and consider failing see bug 1762470
+            self.warning(
+                f"Something went wrong with firstview/repeat view runs for: {url}"
+            )
+        self.confirm_correct_browser_and_location(
+            results_of_test["data"], options["test_parameters"]
+        )
+
+    def confirm_correct_browser_and_location(self, data, options):
+        """Raise if WPT ran the test on a different browser/location pair
+        than the one requested in the options."""
+        if data["location"] != f"{options['location']}:{options['browser']}":
+            raise WPTBrowserSelectionError(
+                "Resulting browser & location are not aligned with submitted browser & location"
+            )
+
+    def add_wpt_run_to_metadata(self, wbt_run, metadata, website):
+        """Convert one WPT run into a mozperftest result entry.
+
+        Skips the run silently when extraction returned None (a failed run).
+        The result name is "WebPageTest:" plus the leading token of the
+        website string (first regex word match).
+        """
+        requested_values = self.extract_desired_values_from_wpt_run(wbt_run)
+        if requested_values is not None:
+            metadata.add_result(
+                {
+                    "name": ("WebPageTest:" + re.match(r"(^.\w+)", website)[0]),
+                    "framework": {"name": "mozperftest"},
+                    "transformer": "mozperftest.test.webpagetest:WebPageTestData",
+                    "shouldAlert": True,
+                    "results": [
+                        {
+                            "values": [metric_value],
+                            "name": metric_name,
+                            "shouldAlert": True,
+                        }
+                        for metric_name, metric_value in requested_values.items()
+                    ],
+                }
+            )
+
+    def extract_desired_values_from_wpt_run(self, wpt_run):
+        """Flatten the requested statistics out of a WPT run payload.
+
+        Returns a dict keyed "metric.view.statistic" -> int, plus the
+        browser and WPT versions, or None when the run's statistics block
+        is a list (which indicates a failed run).
+        """
+        view_types = ["firstView"]
+        if not wpt_run["data"]["fvonly"]:
+            view_types.append("repeatView")
+        desired_values = {}
+        for statistic in self.statistic_types:
+            for view in view_types:
+                for value in self.wpt_browser_metrics:
+                    # A list here means WPT produced no per-view dict for
+                    # this statistic — treat the whole run as failed.
+                    if isinstance(wpt_run["data"][statistic][view], list):
+                        self.error(f"Fail {wpt_run['data']['url']}")
+                        return
+                    if value not in wpt_run["data"][statistic][view].keys():
+                        raise WPTDataProcessingError(
+                            f"{value} not found {wpt_run['data']['url']}"
+                        )
+                    desired_values[f"{value}.{view}.{statistic}"] = int(
+                        wpt_run["data"][statistic][view][value]
+                    )
+        try:
+            # NOTE(review): the unescaped "." in r"\d+.\d+" matches any
+            # character, not just a literal dot — r"\d+\.\d+" looks intended.
+            desired_values["browserVersion"] = float(
+                re.match(
+                    r"\d+.\d+",
+                    wpt_run["data"]["runs"]["1"]["firstView"]["browserVersion"],
+                )[0]
+            )
+            desired_values["webPagetestVersion"] = float(wpt_run["webPagetestVersion"])
+        except Exception:
+            # Version info is non-essential; log and return the metrics anyway.
+            self.error("Issue found with processing browser/WPT version")
+        return desired_values
+
+    def test_runs_left_this_month(self):
+        """Query testBalance.php and log how many API test runs remain."""
+        tests_left_this_month = self.request_with_timeout(
+            f"https://www.webpagetest.org/testBalance.php?k={self.WPT_key}&f=json"
+        )
+        self.info(
+            f"There are {tests_left_this_month['data']['remaining']} tests remaining"
+        )
diff --git a/python/mozperftest/mozperftest/test/xpcshell.py b/python/mozperftest/mozperftest/test/xpcshell.py
new file mode 100644
index 0000000000..0f4d4ea490
--- /dev/null
+++ b/python/mozperftest/mozperftest/test/xpcshell.py
@@ -0,0 +1,189 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import os
+from collections import defaultdict
+from distutils.dir_util import copy_tree
+from pathlib import Path
+
+from mozperftest.layers import Layer
+from mozperftest.utils import temp_dir
+
+
+class XPCShellTestError(Exception):
+    """Raised when an xpcshell test cycle fails (runTests returned False)."""
+
+    pass
+
+
+class NoPerfMetricsError(Exception):
+    """Raised when an xpcshell run emitted no "perfMetrics" log entries."""
+
+    pass
+
+
+class XPCShellData:
+    """Transformer used by the mozperftest metrics pipeline for xpcshell."""
+
+    def open_data(self, data):
+        """Reshape one result entry into the standardized notebook format,
+        with one data point per collected value (xaxis = sample index)."""
+        return {
+            "name": "xpcshell",
+            "subtest": data["name"],
+            "data": [
+                {"file": "xpcshell", "value": value, "xaxis": xaxis}
+                for xaxis, value in enumerate(data["values"])
+            ],
+        }
+
+    def transform(self, data):
+        """No-op: the data is already in its final shape."""
+        return data
+
+    # Merging is the same no-op as transforming.
+    merge = transform
+
+
+class XPCShell(Layer):
+    """Runs an xpcshell test."""
+
+    name = "xpcshell"
+    activated = True
+
+    arguments = {
+        "cycles": {"type": int, "default": 13, "help": "Number of full cycles"},
+        "binary": {
+            "type": str,
+            "default": None,
+            "help": (
+                "xpcshell binary path. If not provided, "
+                "looks for it in the source tree."
+            ),
+        },
+        "mozinfo": {
+            "type": str,
+            "default": None,
+            "help": (
+                "mozinfo binary path. If not provided, looks for it in the obj tree."
+            ),
+        },
+        "xre-path": {"type": str, "default": None, "help": "XRE path."},
+        "nodejs": {"type": str, "default": None, "help": "nodejs binary path."},
+    }
+
+    def __init__(self, env, mach_cmd):
+        super(XPCShell, self).__init__(env, mach_cmd)
+        self.topsrcdir = mach_cmd.topsrcdir
+        self._mach_context = mach_cmd._mach_context
+        self.python_path = mach_cmd.virtualenv_manager.python_path
+        self.topobjdir = mach_cmd.topobjdir
+        self.distdir = mach_cmd.distdir
+        self.bindir = mach_cmd.bindir
+        self.statedir = mach_cmd.statedir
+        # Accumulates "perfMetrics" dicts captured by log_raw() during runs.
+        self.metrics = []
+        # NOTE(review): duplicate assignment — topsrcdir was already set above.
+        self.topsrcdir = mach_cmd.topsrcdir
+
+    def setup(self):
+        pass
+
+    def run(self, metadata):
+        """Run the xpcshell test named in `metadata` for N cycles and
+        collect its "perfMetrics" log output as perftest results.
+
+        Raises FileNotFoundError when the test's xpcshell.ini manifest is
+        missing, XPCShellTestError when any cycle fails, and
+        NoPerfMetricsError when no metrics were emitted.
+        """
+        test = Path(metadata.script["filename"])
+
+        # let's grab the manifest
+        manifest = Path(test.parent, "xpcshell.ini")
+        if not manifest.exists():
+            raise FileNotFoundError(str(manifest))
+
+        nodejs = self.get_arg("nodejs")
+        if nodejs is not None:
+            os.environ["MOZ_NODE_PATH"] = nodejs
+
+        import runxpcshelltests
+
+        verbose = self.get_arg("verbose")
+        # Passing log=self routes the harness's structured log through our
+        # log_raw()/process_output() methods below.
+        xpcshell = runxpcshelltests.XPCShellTests(log=self)
+        kwargs = {}
+        kwargs["testPaths"] = test.name
+        kwargs["verbose"] = verbose
+        binary = self.get_arg("binary")
+        if binary is None:
+            binary = self.mach_cmd.get_binary_path("xpcshell")
+        kwargs["xpcshell"] = binary
+        binary = Path(binary)
+        mozinfo = self.get_arg("mozinfo")
+        if mozinfo is None:
+            # Look next to the binary first, then fall back to the objdir.
+            mozinfo = binary.parent / ".." / "mozinfo.json"
+            if not mozinfo.exists():
+                mozinfo = Path(self.topobjdir, "mozinfo.json")
+        else:
+            mozinfo = Path(mozinfo)
+
+        kwargs["mozInfo"] = str(mozinfo)
+        kwargs["symbolsPath"] = str(Path(self.distdir, "crashreporter-symbols"))
+        kwargs["logfiles"] = True
+        kwargs["profileName"] = "firefox"
+        plugins = binary.parent / "plugins"
+        if not plugins.exists():
+            plugins = Path(self.distdir, "plugins")
+        kwargs["pluginsPath"] = str(plugins)
+
+        modules = Path(self.topobjdir, "_tests", "modules")
+        if not modules.exists():
+            modules = binary.parent / "modules"
+
+        kwargs["testingModulesDir"] = str(modules)
+        kwargs["utility_path"] = self.bindir
+        kwargs["manifest"] = str(manifest)
+        kwargs["totalChunks"] = 1
+        xre_path = self.get_arg("xre-path")
+        if xre_path is not None:
+            self.info(f"Copying {xre_path} elements to {binary.parent}")
+            copy_tree(xre_path, str(binary.parent), update=True)
+
+        http3server = binary.parent / "http3server"
+        if http3server.exists():
+            kwargs["http3server"] = str(http3server)
+
+        cycles = self.get_arg("cycles", 1)
+        self.info("Running %d cycles" % cycles)
+
+        for cycle in range(cycles):
+            self.info("Cycle %d" % (cycle + 1))
+            # Fresh temp dir per cycle so cycles don't share state.
+            with temp_dir() as tmp:
+                kwargs["tempDir"] = tmp
+                if not xpcshell.runTests(kwargs):
+                    raise XPCShellTestError()
+
+        self.info("tests done.")
+
+        # Group the per-cycle metric values by metric name.
+        results = defaultdict(list)
+        for m in self.metrics:
+            for key, val in m.items():
+                results[key].append(val)
+
+        if len(results.items()) == 0:
+            raise NoPerfMetricsError(
+                "No perftest results were found in the xpcshell test. Results must be "
+                'reported using:\n info("perfMetrics", { metricName: metricValue });'
+            )
+
+        metadata.add_result(
+            {
+                "name": test.name,
+                "framework": {"name": "mozperftest"},
+                "transformer": "mozperftest.test.xpcshell:XPCShellData",
+                "results": [
+                    {"values": measures, "name": subtest}
+                    for subtest, measures in results.items()
+                ],
+            }
+        )
+
+        return metadata
+
+    def log_raw(self, data, **kw):
+        """Structured-log sink: capture "perfMetrics" entries, forward the
+        rest of the log lines to info()."""
+        if data["action"] != "log":
+            return
+        if data["message"].strip('"') != "perfMetrics":
+            self.info(data["message"])
+            return
+        self.metrics.append(data["extra"])
+
+    def process_output(self, procid, line, command):
+        self.info(line)
+
+    def dummy(self, *args, **kw):
+        # Harness callback stub — these events are not used by this layer.
+        pass
+
+    test_end = suite_start = suite_end = test_start = dummy
diff --git a/python/mozperftest/mozperftest/tests/__init__.py b/python/mozperftest/mozperftest/tests/__init__.py
new file mode 100644
index 0000000000..792d600548
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/__init__.py
@@ -0,0 +1 @@
+#
diff --git a/python/mozperftest/mozperftest/tests/conftest.py b/python/mozperftest/mozperftest/tests/conftest.py
new file mode 100644
index 0000000000..19435c5369
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/conftest.py
@@ -0,0 +1,153 @@
+import json
+import os
+import pathlib
+import sys
+from unittest import mock
+
+import pytest
+
+from mozperftest.metrics.notebook.perftestetl import PerftestETL
+from mozperftest.metrics.notebook.perftestnotebook import PerftestNotebook
+from mozperftest.tests.support import HERE, get_running_env
+from mozperftest.utils import temp_dir
+
+
+@pytest.fixture
+def patched_mozperftest_tools():
+    """Install a mock mozperftest_tools.side_by_side module for the test,
+    yielding the mock SideBySide instance; removed again on teardown."""
+    tools_mock = mock.MagicMock(name="tools-mock")
+    _module = mock.MagicMock(name="mozperftest_tools")
+    _module.SideBySide.return_value = tools_mock
+
+    try:
+        sys.modules["mozperftest_tools.side_by_side"] = _module
+        yield tools_mock
+    finally:
+        del sys.modules["mozperftest_tools.side_by_side"]
+
+
+@pytest.fixture(scope="session", autouse=True)
+def data():
+    """Three small browsertime-shaped payloads (firstPaint timings only)
+    used as raw inputs by the other fixtures."""
+    data_1 = {
+        "browserScripts": [
+            {"timings": {"firstPaint": 101}},
+            {"timings": {"firstPaint": 102}},
+            {"timings": {"firstPaint": 103}},
+        ],
+    }
+
+    data_2 = {
+        "browserScripts": [
+            {"timings": {"firstPaint": 201}},
+            {"timings": {"firstPaint": 202}},
+            {"timings": {"firstPaint": 203}},
+        ],
+    }
+
+    data_3 = {
+        "browserScripts": [
+            {"timings": {"firstPaint": 301}},
+            {"timings": {"firstPaint": 302}},
+            {"timings": {"firstPaint": 303}},
+        ],
+    }
+
+    yield {"data_1": data_1, "data_2": data_2, "data_3": data_3}
+
+
+@pytest.fixture(scope="session", autouse=True)
+def standarized_data():
+    """A minimal already-standardized notebook payload.
+
+    NOTE: the fixture name keeps the existing "standarized" spelling since
+    tests request it by name.
+    """
+    return {
+        "browsertime": [
+            {
+                "data": [
+                    {"value": 1, "xaxis": 1, "file": "file_1"},
+                    {"value": 2, "xaxis": 2, "file": "file_2"},
+                ],
+                "name": "name",
+                "subtest": "subtest",
+            }
+        ]
+    }
+
+
+@pytest.fixture(scope="session", autouse=True)
+def files(data):
+    """Materialize the `data` payloads as files in a session temp dir.
+
+    Yields the resource paths (two JSON files plus one .txt written via
+    str(), deliberately not valid JSON), the directory handles, and the
+    output path. The temp dir is removed when the context exits.
+    """
+    # Create a temporary directory.
+    with temp_dir() as td:
+        tmp_path = pathlib.Path(td)
+
+        dirs = {
+            "resources": tmp_path / "resources",
+            "output": tmp_path / "output",
+        }
+
+        for d in dirs.values():
+            d.mkdir(parents=True, exist_ok=True)
+
+        # Create temporary data files for tests.
+        def _create_temp_files(path, data):
+            path.touch(exist_ok=True)
+            path.write_text(data)
+            return path.resolve().as_posix()
+
+        resources = {}
+        json_1 = dirs["resources"] / "file_1.json"
+        resources["file_1"] = _create_temp_files(json_1, json.dumps(data["data_1"]))
+
+        json_2 = dirs["resources"] / "file_2.json"
+        resources["file_2"] = _create_temp_files(json_2, json.dumps(data["data_2"]))
+
+        txt_3 = dirs["resources"] / "file_3.txt"
+        resources["file_3"] = _create_temp_files(txt_3, str(data["data_3"]))
+
+        output = dirs["output"] / "output.json"
+
+        yield {
+            "resources": resources,
+            "dirs": dirs,
+            "output": output,
+        }
+
+
+@pytest.fixture(scope="session", autouse=True)
+def ptetls(files):
+    """Two PerftestETL instances over the same fixture files: one fed an
+    explicit file list, one fed the resources directory as a string."""
+    resources, dirs, output = files["resources"], files["dirs"], files["output"]
+    _, metadata, _ = get_running_env()
+    config = {"output": output}
+    file_group_list = {"group_1": list(resources.values())}
+    file_group_str = {"group_1": dirs["resources"].resolve().as_posix()}
+
+    yield {
+        "ptetl_list": PerftestETL(
+            file_groups=file_group_list,
+            config=config,
+            prefix="PerftestETL",
+            logger=metadata,
+            sort_files=True,
+        ),
+        "ptetl_str": PerftestETL(
+            file_groups=file_group_str,
+            config=config,
+            prefix="PerftestETL",
+            logger=metadata,
+            sort_files=True,
+        ),
+    }
+
+
+@pytest.fixture(scope="session", autouse=True)
+def ptnb(standarized_data):
+    """A PerftestNotebook built from the standardized fixture data."""
+    _, metadata, _ = get_running_env()
+    return PerftestNotebook(standarized_data, metadata, "PerftestNotebook")
+
+
+@pytest.fixture(scope="function", autouse=True)
+def perftestetl_plugin():
+    """Point PERFTESTETL_PLUGIN at the bundled plugin data dir for each
+    test, and unset it afterwards."""
+
+    ret = HERE / "data" / "perftestetl_plugin"
+
+    os.environ["PERFTESTETL_PLUGIN"] = ret.resolve().as_posix()
+
+    yield ret
+
+    del os.environ["PERFTESTETL_PLUGIN"]
diff --git a/python/mozperftest/mozperftest/tests/data/WPT_fakekey.txt b/python/mozperftest/mozperftest/tests/data/WPT_fakekey.txt
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/data/WPT_fakekey.txt
diff --git a/python/mozperftest/mozperftest/tests/data/browsertime-results-video/browsertime.json b/python/mozperftest/mozperftest/tests/data/browsertime-results-video/browsertime.json
new file mode 100644
index 0000000000..d2b2f83f2d
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/data/browsertime-results-video/browsertime.json
@@ -0,0 +1,991 @@
+[
+ {
+ "info": {
+ "browsertime": {
+ "version": "8.3.0"
+ },
+ "url": "https://www.bbc.com/news/world-middle-east-53598965",
+ "timestamp": "2020-08-03T11:18:37+02:00",
+ "connectivity": {
+ "engine": "external",
+ "profile": "native"
+ },
+ "extra": {},
+ "alias": "pageload"
+ },
+ "files": {
+ "video": [
+ "pages/www.bbc.com/news/world-middle-east-53598965/data/video/1.mp4"
+ ],
+ "screenshot": [],
+ "timeline": [],
+ "consoleLog": [],
+ "netLog": [],
+ "perfLog": []
+ },
+ "cdp": {
+ "performance": []
+ },
+ "timestamps": [
+ "2020-08-03T11:18:12+02:00"
+ ],
+ "browserScripts": [
+ {
+ "browser": {
+ "appConstants": {
+ "ACCESSIBILITY": true,
+ "ANDROID_PACKAGE_NAME": "org.mozilla.firefox",
+ "ASAN": false,
+ "ASAN_REPORTER": false,
+ "BROWSER_CHROME_URL": "chrome://browser/content/browser.xhtml",
+ "DEBUG": false,
+ "DEBUG_JS_MODULES": "",
+ "DLL_PREFIX": "lib",
+ "DLL_SUFFIX": ".dylib",
+ "EARLY_BETA_OR_EARLIER": true,
+ "HAVE_SHELL_SERVICE": true,
+ "HAVE_USR_LIB64_DIR": false,
+ "MENUBAR_CAN_AUTOHIDE": false,
+ "MOZILLA_OFFICIAL": false,
+ "MOZ_ALLOW_ADDON_SIDELOAD": false,
+ "MOZ_ALLOW_LEGACY_EXTENSIONS": false,
+ "MOZ_ANDROID_HISTORY": false,
+ "MOZ_APP_BASENAME": "Firefox",
+ "MOZ_APP_NAME": "firefox",
+ "MOZ_APP_VERSION": "81.0a1",
+ "MOZ_APP_VERSION_DISPLAY": "81.0a1",
+ "MOZ_BING_API_CLIENTID": "no-bing-api-clientid",
+ "MOZ_BING_API_KEY": "no-bing-api-key",
+ "MOZ_BITS_DOWNLOAD": false,
+ "MOZ_BUILDID": "2020073017",
+ "MOZ_BUILD_APP": "browser",
+ "MOZ_CODE_COVERAGE": false,
+ "MOZ_CRASHREPORTER": true,
+ "MOZ_DATA_REPORTING": true,
+ "MOZ_DEV_EDITION": false,
+ "MOZ_GECKO_PROFILER": true,
+ "MOZ_GOOGLE_LOCATION_SERVICE_API_KEY": "no-google-location-service-api-key",
+ "MOZ_GOOGLE_SAFEBROWSING_API_KEY": "no-google-safebrowsing-api-key",
+ "MOZ_MACBUNDLE_NAME": "Nightly.app",
+ "MOZ_MAINTENANCE_SERVICE": false,
+ "MOZ_MOZILLA_API_KEY": "no-mozilla-api-key",
+ "MOZ_NEW_NOTIFICATION_STORE": true,
+ "MOZ_NEW_XULSTORE": true,
+ "MOZ_NORMANDY": true,
+ "MOZ_OFFICIAL_BRANDING": false,
+ "MOZ_PLACES": true,
+ "MOZ_REQUIRE_SIGNING": false,
+ "MOZ_SANDBOX": false,
+ "MOZ_SERVICES_HEALTHREPORT": true,
+ "MOZ_SERVICES_SYNC": false,
+ "MOZ_SWITCHBOARD": false,
+ "MOZ_SYSTEM_NSS": false,
+ "MOZ_TELEMETRY_ON_BY_DEFAULT": false,
+ "MOZ_TELEMETRY_REPORTING": false,
+ "MOZ_UNSIGNED_SCOPES": 0,
+ "MOZ_UPDATER": true,
+ "MOZ_UPDATE_AGENT": false,
+ "MOZ_UPDATE_CHANNEL": "default",
+ "MOZ_WEBRTC": true,
+ "MOZ_WIDGET_GTK": false,
+ "MOZ_WIDGET_TOOLKIT": "cocoa",
+ "NIGHTLY_BUILD": true,
+ "OMNIJAR_NAME": "omni.ja",
+ "RELEASE_OR_BETA": false,
+ "SOURCE_REVISION_URL": "",
+ "TELEMETRY_PING_FORMAT_VERSION": 4,
+ "TSAN": false,
+ "XP_UNIX": true,
+ "isPlatformAndVersionAtLeast": {},
+ "isPlatformAndVersionAtMost": {},
+ "platform": "macosx",
+ "unixstyle": "other"
+ },
+ "asyncAppConstants": {
+ "ACCESSIBILITY": true,
+ "ANDROID_PACKAGE_NAME": "org.mozilla.firefox",
+ "ASAN": false,
+ "ASAN_REPORTER": false,
+ "BROWSER_CHROME_URL": "chrome://browser/content/browser.xhtml",
+ "DEBUG": false,
+ "DEBUG_JS_MODULES": "",
+ "DLL_PREFIX": "lib",
+ "DLL_SUFFIX": ".dylib",
+ "EARLY_BETA_OR_EARLIER": true,
+ "HAVE_SHELL_SERVICE": true,
+ "HAVE_USR_LIB64_DIR": false,
+ "MENUBAR_CAN_AUTOHIDE": false,
+ "MOZILLA_OFFICIAL": false,
+ "MOZ_ALLOW_ADDON_SIDELOAD": false,
+ "MOZ_ALLOW_LEGACY_EXTENSIONS": false,
+ "MOZ_ANDROID_HISTORY": false,
+ "MOZ_APP_BASENAME": "Firefox",
+ "MOZ_APP_NAME": "firefox",
+ "MOZ_APP_VERSION": "81.0a1",
+ "MOZ_APP_VERSION_DISPLAY": "81.0a1",
+ "MOZ_BING_API_CLIENTID": "no-bing-api-clientid",
+ "MOZ_BING_API_KEY": "no-bing-api-key",
+ "MOZ_BITS_DOWNLOAD": false,
+ "MOZ_BUILDID": "200730174322",
+ "MOZ_BUILD_APP": "browser",
+ "MOZ_CODE_COVERAGE": false,
+ "MOZ_CRASHREPORTER": true,
+ "MOZ_DATA_REPORTING": true,
+ "MOZ_DEV_EDITION": false,
+ "MOZ_GECKO_PROFILER": true,
+ "MOZ_GOOGLE_LOCATION_SERVICE_API_KEY": "no-google-location-service-api-key",
+ "MOZ_GOOGLE_SAFEBROWSING_API_KEY": "no-google-safebrowsing-api-key",
+ "MOZ_MACBUNDLE_NAME": "Nightly.app",
+ "MOZ_MAINTENANCE_SERVICE": false,
+ "MOZ_MOZILLA_API_KEY": "no-mozilla-api-key",
+ "MOZ_NEW_NOTIFICATION_STORE": true,
+ "MOZ_NEW_XULSTORE": true,
+ "MOZ_NORMANDY": true,
+ "MOZ_OFFICIAL_BRANDING": false,
+ "MOZ_PLACES": true,
+ "MOZ_REQUIRE_SIGNING": false,
+ "MOZ_SANDBOX": false,
+ "MOZ_SERVICES_HEALTHREPORT": true,
+ "MOZ_SERVICES_SYNC": false,
+ "MOZ_SWITCHBOARD": false,
+ "MOZ_SYSTEM_NSS": false,
+ "MOZ_TELEMETRY_ON_BY_DEFAULT": false,
+ "MOZ_TELEMETRY_REPORTING": false,
+ "MOZ_UNSIGNED_SCOPES": 0,
+ "MOZ_UPDATER": true,
+ "MOZ_UPDATE_AGENT": false,
+ "MOZ_UPDATE_CHANNEL": "default",
+ "MOZ_WEBRTC": true,
+ "MOZ_WIDGET_GTK": false,
+ "MOZ_WIDGET_TOOLKIT": "cocoa",
+ "NIGHTLY_BUILD": true,
+ "OMNIJAR_NAME": "omni.ja",
+ "RELEASE_OR_BETA": false,
+ "SOURCE_REVISION_URL": "",
+ "TELEMETRY_PING_FORMAT_VERSION": 4,
+ "TSAN": false,
+ "XP_UNIX": true,
+ "isPlatformAndVersionAtLeast": {},
+ "isPlatformAndVersionAtMost": {},
+ "platform": "macosx",
+ "unixstyle": "other"
+ },
+ "userAgent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:81.0) Gecko/20100101 Firefox/81.0",
+ "windowSize": "1366x768"
+ },
+ "pageinfo": {
+ "documentHeight": 8937,
+ "documentSize": {
+ "decodedBodySize": 270461,
+ "encodedBodySize": 67482,
+ "transferSize": 68337
+ },
+ "documentTitle": "Coronavirus: Iran cover-up of deaths revealed by data leak - BBC News",
+ "documentWidth": 1366,
+ "domElements": 1348,
+ "navigationStartTime": 1596629,
+ "nextHopProtocol": "h2",
+ "resources": {
+ "count": 91,
+ "duration": 26360.459999999992
+ },
+ "responsive": true,
+ "url": "https://www.bbc.com/news/world-middle-east-53598965",
+ "visualElements": {
+ "heroes": [
+ {
+ "filename": "_113766981_iranhospital.jpg",
+ "height": 363,
+ "name": "LargestImage",
+ "width": 646,
+ "x": 195,
+ "y": 403
+ },
+ {
+ "filename": null,
+ "height": 72,
+ "name": "Heading",
+ "width": 645,
+ "x": 195,
+ "y": 196
+ }
+ ],
+ "viewport": {
+ "height": 694,
+ "width": 1366
+ }
+ }
+ },
+ "timings": {
+ "firstPaint": 1084,
+ "loadEventEnd": 8274,
+ "navigationTiming": {
+ "connectStart": 20,
+ "domComplete": 8238,
+ "domContentLoadedEventEnd": 4165,
+ "domContentLoadedEventStart": 4159,
+ "domInteractive": 1415,
+ "domainLookupEnd": 20,
+ "domainLookupStart": 20,
+ "duration": 8274,
+ "fetchStart": 20,
+ "loadEventEnd": 8274,
+ "loadEventStart": 8264,
+ "redirectEnd": 0,
+ "redirectStart": 0,
+ "requestStart": 29,
+ "responseEnd": 117,
+ "responseStart": 117,
+ "secureConnectionStart": 0,
+ "startTime": 0,
+ "unloadEventEnd": 124,
+ "unloadEventStart": 120,
+ "workerStart": 0
+ },
+ "pageTimings": {
+ "backEndTime": 117,
+ "domContentLoadedTime": 4159,
+ "domInteractiveTime": 1415,
+ "domainLookupTime": 0,
+ "frontEndTime": 8147,
+ "pageDownloadTime": 0,
+ "pageLoadTime": 8264,
+ "redirectionTime": 0,
+ "serverConnectionTime": 0,
+ "serverResponseTime": 88
+ },
+ "rumSpeedIndex": 5542,
+ "serverTimings": [],
+ "timeToContentfulPaint": 1124,
+ "timeToDomContentFlushed": 4158,
+ "timeToFirstInteractive": 11858,
+ "userTimings": {
+ "marks": [],
+ "measures": []
+ }
+ }
+ }
+ ],
+ "visualMetrics": [],
+ "cpu": [],
+ "extras": [
+ {}
+ ],
+ "fullyLoaded": [],
+ "errors": [
+ []
+ ],
+ "statistics": {
+ "browser": {
+ "appConstants": {
+ "MOZ_BUILDID": {
+ "median": 2020073017,
+ "mean": 2020073017,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 2020073017,
+ "p10": 2020073017,
+ "p90": 2020073017,
+ "p99": 2020073017,
+ "max": 2020073017
+ },
+ "MOZ_UNSIGNED_SCOPES": {
+ "median": 0,
+ "mean": 0,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 0,
+ "p10": 0,
+ "p90": 0,
+ "p99": 0,
+ "max": 0
+ },
+ "TELEMETRY_PING_FORMAT_VERSION": {
+ "median": 4,
+ "mean": 4,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 4,
+ "p10": 4,
+ "p90": 4,
+ "p99": 4,
+ "max": 4
+ }
+ },
+ "asyncAppConstants": {
+ "MOZ_BUILDID": {
+ "median": 2020073017,
+ "mean": 2020073017,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 2020073017,
+ "p10": 2020073017,
+ "p90": 2020073017,
+ "p99": 2020073017,
+ "max": 2020073017
+ },
+ "MOZ_UNSIGNED_SCOPES": {
+ "median": 0,
+ "mean": 0,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 0,
+ "p10": 0,
+ "p90": 0,
+ "p99": 0,
+ "max": 0
+ },
+ "TELEMETRY_PING_FORMAT_VERSION": {
+ "median": 4,
+ "mean": 4,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 4,
+ "p10": 4,
+ "p90": 4,
+ "p99": 4,
+ "max": 4
+ }
+ }
+ },
+ "pageinfo": {
+ "documentHeight": {
+ "median": 8937,
+ "mean": 8937,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 8937,
+ "p10": 8937,
+ "p90": 8937,
+ "p99": 8937,
+ "max": 8937
+ },
+ "documentSize": {
+ "decodedBodySize": {
+ "median": 270461,
+ "mean": 270461,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 270461,
+ "p10": 270461,
+ "p90": 270461,
+ "p99": 270461,
+ "max": 270461
+ },
+ "encodedBodySize": {
+ "median": 67482,
+ "mean": 67482,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 67482,
+ "p10": 67482,
+ "p90": 67482,
+ "p99": 67482,
+ "max": 67482
+ },
+ "transferSize": {
+ "median": 68337,
+ "mean": 68337,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 68337,
+ "p10": 68337,
+ "p90": 68337,
+ "p99": 68337,
+ "max": 68337
+ }
+ },
+ "documentWidth": {
+ "median": 1366,
+ "mean": 1366,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 1366,
+ "p10": 1366,
+ "p90": 1366,
+ "p99": 1366,
+ "max": 1366
+ },
+ "domElements": {
+ "median": 1348,
+ "mean": 1348,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 1348,
+ "p10": 1348,
+ "p90": 1348,
+ "p99": 1348,
+ "max": 1348
+ },
+ "navigationStartTime": {
+ "median": 1596629,
+ "mean": 1596629,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 1596629,
+ "p10": 1596629,
+ "p90": 1596629,
+ "p99": 1596629,
+ "max": 1596629
+ },
+ "resources": {
+ "count": {
+ "median": 91,
+ "mean": 91,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 91,
+ "p10": 91,
+ "p90": 91,
+ "p99": 91,
+ "max": 91
+ },
+ "duration": {
+ "median": 26360,
+ "mean": 26360,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 26360,
+ "p10": 26360,
+ "p90": 26360,
+ "p99": 26360,
+ "max": 26360
+ }
+ },
+ "visualElements": {
+ "heroes": [
+ {
+ "height": {
+ "median": 363,
+ "mean": 363,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 363,
+ "p10": 363,
+ "p90": 363,
+ "p99": 363,
+ "max": 363
+ },
+ "width": {
+ "median": 646,
+ "mean": 646,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 646,
+ "p10": 646,
+ "p90": 646,
+ "p99": 646,
+ "max": 646
+ },
+ "x": {
+ "median": 195,
+ "mean": 195,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 195,
+ "p10": 195,
+ "p90": 195,
+ "p99": 195,
+ "max": 195
+ },
+ "y": {
+ "median": 403,
+ "mean": 403,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 403,
+ "p10": 403,
+ "p90": 403,
+ "p99": 403,
+ "max": 403
+ }
+ },
+ {
+ "height": {
+ "median": 72,
+ "mean": 72,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 72,
+ "p10": 72,
+ "p90": 72,
+ "p99": 72,
+ "max": 72
+ },
+ "width": {
+ "median": 645,
+ "mean": 645,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 645,
+ "p10": 645,
+ "p90": 645,
+ "p99": 645,
+ "max": 645
+ },
+ "x": {
+ "median": 195,
+ "mean": 195,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 195,
+ "p10": 195,
+ "p90": 195,
+ "p99": 195,
+ "max": 195
+ },
+ "y": {
+ "median": 196,
+ "mean": 196,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 196,
+ "p10": 196,
+ "p90": 196,
+ "p99": 196,
+ "max": 196
+ }
+ }
+ ],
+ "viewport": {
+ "height": {
+ "median": 694,
+ "mean": 694,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 694,
+ "p10": 694,
+ "p90": 694,
+ "p99": 694,
+ "max": 694
+ },
+ "width": {
+ "median": 1366,
+ "mean": 1366,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 1366,
+ "p10": 1366,
+ "p90": 1366,
+ "p99": 1366,
+ "max": 1366
+ }
+ }
+ }
+ },
+ "timings": {
+ "firstPaint": {
+ "median": 1084,
+ "mean": 1084,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 1084,
+ "p10": 1084,
+ "p90": 1084,
+ "p99": 1084,
+ "max": 1084
+ },
+ "loadEventEnd": {
+ "median": 8274,
+ "mean": 8274,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 8274,
+ "p10": 8274,
+ "p90": 8274,
+ "p99": 8274,
+ "max": 8274
+ },
+ "navigationTiming": {
+ "connectStart": {
+ "median": 20,
+ "mean": 20,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 20,
+ "p10": 20,
+ "p90": 20,
+ "p99": 20,
+ "max": 20
+ },
+ "domComplete": {
+ "median": 8238,
+ "mean": 8238,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 8238,
+ "p10": 8238,
+ "p90": 8238,
+ "p99": 8238,
+ "max": 8238
+ },
+ "domContentLoadedEventEnd": {
+ "median": 4165,
+ "mean": 4165,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 4165,
+ "p10": 4165,
+ "p90": 4165,
+ "p99": 4165,
+ "max": 4165
+ },
+ "domContentLoadedEventStart": {
+ "median": 4159,
+ "mean": 4159,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 4159,
+ "p10": 4159,
+ "p90": 4159,
+ "p99": 4159,
+ "max": 4159
+ },
+ "domInteractive": {
+ "median": 1415,
+ "mean": 1415,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 1415,
+ "p10": 1415,
+ "p90": 1415,
+ "p99": 1415,
+ "max": 1415
+ },
+ "domainLookupEnd": {
+ "median": 20,
+ "mean": 20,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 20,
+ "p10": 20,
+ "p90": 20,
+ "p99": 20,
+ "max": 20
+ },
+ "domainLookupStart": {
+ "median": 20,
+ "mean": 20,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 20,
+ "p10": 20,
+ "p90": 20,
+ "p99": 20,
+ "max": 20
+ },
+ "duration": {
+ "median": 8274,
+ "mean": 8274,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 8274,
+ "p10": 8274,
+ "p90": 8274,
+ "p99": 8274,
+ "max": 8274
+ },
+ "fetchStart": {
+ "median": 20,
+ "mean": 20,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 20,
+ "p10": 20,
+ "p90": 20,
+ "p99": 20,
+ "max": 20
+ },
+ "loadEventEnd": {
+ "median": 8274,
+ "mean": 8274,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 8274,
+ "p10": 8274,
+ "p90": 8274,
+ "p99": 8274,
+ "max": 8274
+ },
+ "loadEventStart": {
+ "median": 8264,
+ "mean": 8264,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 8264,
+ "p10": 8264,
+ "p90": 8264,
+ "p99": 8264,
+ "max": 8264
+ },
+ "redirectEnd": {
+ "median": 0,
+ "mean": 0,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 0,
+ "p10": 0,
+ "p90": 0,
+ "p99": 0,
+ "max": 0
+ },
+ "redirectStart": {
+ "median": 0,
+ "mean": 0,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 0,
+ "p10": 0,
+ "p90": 0,
+ "p99": 0,
+ "max": 0
+ },
+ "requestStart": {
+ "median": 29,
+ "mean": 29,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 29,
+ "p10": 29,
+ "p90": 29,
+ "p99": 29,
+ "max": 29
+ },
+ "responseEnd": {
+ "median": 117,
+ "mean": 117,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 117,
+ "p10": 117,
+ "p90": 117,
+ "p99": 117,
+ "max": 117
+ },
+ "responseStart": {
+ "median": 117,
+ "mean": 117,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 117,
+ "p10": 117,
+ "p90": 117,
+ "p99": 117,
+ "max": 117
+ },
+ "secureConnectionStart": {
+ "median": 0,
+ "mean": 0,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 0,
+ "p10": 0,
+ "p90": 0,
+ "p99": 0,
+ "max": 0
+ },
+ "startTime": {
+ "median": 0,
+ "mean": 0,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 0,
+ "p10": 0,
+ "p90": 0,
+ "p99": 0,
+ "max": 0
+ },
+ "unloadEventEnd": {
+ "median": 124,
+ "mean": 124,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 124,
+ "p10": 124,
+ "p90": 124,
+ "p99": 124,
+ "max": 124
+ },
+ "unloadEventStart": {
+ "median": 120,
+ "mean": 120,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 120,
+ "p10": 120,
+ "p90": 120,
+ "p99": 120,
+ "max": 120
+ },
+ "workerStart": {
+ "median": 0,
+ "mean": 0,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 0,
+ "p10": 0,
+ "p90": 0,
+ "p99": 0,
+ "max": 0
+ }
+ },
+ "pageTimings": {
+ "backEndTime": {
+ "median": 117,
+ "mean": 117,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 117,
+ "p10": 117,
+ "p90": 117,
+ "p99": 117,
+ "max": 117
+ },
+ "domContentLoadedTime": {
+ "median": 4159,
+ "mean": 4159,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 4159,
+ "p10": 4159,
+ "p90": 4159,
+ "p99": 4159,
+ "max": 4159
+ },
+ "domInteractiveTime": {
+ "median": 1415,
+ "mean": 1415,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 1415,
+ "p10": 1415,
+ "p90": 1415,
+ "p99": 1415,
+ "max": 1415
+ },
+ "domainLookupTime": {
+ "median": 0,
+ "mean": 0,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 0,
+ "p10": 0,
+ "p90": 0,
+ "p99": 0,
+ "max": 0
+ },
+ "frontEndTime": {
+ "median": 8147,
+ "mean": 8147,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 8147,
+ "p10": 8147,
+ "p90": 8147,
+ "p99": 8147,
+ "max": 8147
+ },
+ "pageDownloadTime": {
+ "median": 0,
+ "mean": 0,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 0,
+ "p10": 0,
+ "p90": 0,
+ "p99": 0,
+ "max": 0
+ },
+ "pageLoadTime": {
+ "median": 8264,
+ "mean": 8264,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 8264,
+ "p10": 8264,
+ "p90": 8264,
+ "p99": 8264,
+ "max": 8264
+ },
+ "redirectionTime": {
+ "median": 0,
+ "mean": 0,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 0,
+ "p10": 0,
+ "p90": 0,
+ "p99": 0,
+ "max": 0
+ },
+ "serverConnectionTime": {
+ "median": 0,
+ "mean": 0,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 0,
+ "p10": 0,
+ "p90": 0,
+ "p99": 0,
+ "max": 0
+ },
+ "serverResponseTime": {
+ "median": 88,
+ "mean": 88,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 88,
+ "p10": 88,
+ "p90": 88,
+ "p99": 88,
+ "max": 88
+ }
+ },
+ "rumSpeedIndex": {
+ "median": 5542,
+ "mean": 5542,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 5542,
+ "p10": 5542,
+ "p90": 5542,
+ "p99": 5542,
+ "max": 5542
+ },
+ "timeToContentfulPaint": {
+ "median": 1124,
+ "mean": 1124,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 1124,
+ "p10": 1124,
+ "p90": 1124,
+ "p99": 1124,
+ "max": 1124
+ },
+ "timeToDomContentFlushed": {
+ "median": 4158,
+ "mean": 4158,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 4158,
+ "p10": 4158,
+ "p90": 4158,
+ "p99": 4158,
+ "max": 4158
+ },
+ "timeToFirstInteractive": {
+ "median": 11858,
+ "mean": 11858,
+ "mdev": 0,
+ "stddev": 0,
+ "min": 11858,
+ "p10": 11858,
+ "p90": 11858,
+ "p99": 11858,
+ "max": 11858
+ }
+ }
+ }
+ }
+]
diff --git a/python/mozperftest/mozperftest/tests/data/browsertime-results-video/pages/www.bbc.com/data/video/1.mp4 b/python/mozperftest/mozperftest/tests/data/browsertime-results-video/pages/www.bbc.com/data/video/1.mp4
new file mode 100644
index 0000000000..69cd3c5cde
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/data/browsertime-results-video/pages/www.bbc.com/data/video/1.mp4
Binary files differ
diff --git a/python/mozperftest/mozperftest/tests/data/browsertime-results-video/pages/www.bbc.com/news/world-middle-east-53598965/data/video/1.mp4 b/python/mozperftest/mozperftest/tests/data/browsertime-results-video/pages/www.bbc.com/news/world-middle-east-53598965/data/video/1.mp4
new file mode 100644
index 0000000000..2bf1c13ba5
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/data/browsertime-results-video/pages/www.bbc.com/news/world-middle-east-53598965/data/video/1.mp4
Binary files differ
diff --git a/python/mozperftest/mozperftest/tests/data/browsertime-results-video/pages/www.bbc.com/news/world-us-canada-53599363/data/video/1.mp4 b/python/mozperftest/mozperftest/tests/data/browsertime-results-video/pages/www.bbc.com/news/world-us-canada-53599363/data/video/1.mp4
new file mode 100644
index 0000000000..01da00def4
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/data/browsertime-results-video/pages/www.bbc.com/news/world-us-canada-53599363/data/video/1.mp4
Binary files differ
diff --git a/python/mozperftest/mozperftest/tests/data/browsertime-results/browsertime.json b/python/mozperftest/mozperftest/tests/data/browsertime-results/browsertime.json
new file mode 100644
index 0000000000..8b760ae4b2
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/data/browsertime-results/browsertime.json
@@ -0,0 +1 @@
+[{"info":{"browsertime":{"version":"8.2.0"},"url":"https://www.sitespeed.io/","timestamp":"2020-04-01T13:44:11+02:00","connectivity":{"engine":"external","profile":"native"},"extra":{}},"files":{"video":[],"screenshot":[],"timeline":[],"consoleLog":[],"netLog":[],"perfLog":[]},"cdp":{"performance":[]},"timestamps":["2020-04-01T13:43:53+02:00"],"browserScripts":[{"browser":{"appConstants":{"ACCESSIBILITY":true,"ANDROID_PACKAGE_NAME":"org.mozilla.firefox","ASAN":false,"ASAN_REPORTER":false,"BROWSER_CHROME_URL":"chrome://browser/content/browser.xhtml","DEBUG":false,"DEBUG_JS_MODULES":"","DLL_PREFIX":"lib","DLL_SUFFIX":".dylib","EARLY_BETA_OR_EARLIER":false,"FENNEC_NIGHTLY":false,"HAVE_SHELL_SERVICE":true,"HAVE_USR_LIB64_DIR":false,"MENUBAR_CAN_AUTOHIDE":false,"MOZILLA_OFFICIAL":true,"MOZ_ALLOW_LEGACY_EXTENSIONS":false,"MOZ_ANDROID_ACTIVITY_STREAM":false,"MOZ_ANDROID_HISTORY":false,"MOZ_ANDROID_MOZILLA_ONLINE":false,"MOZ_APP_NAME":"firefox","MOZ_APP_VERSION":"69.0","MOZ_APP_VERSION_DISPLAY":"69.0","MOZ_BING_API_CLIENTID":"no-bing-api-clientid","MOZ_BING_API_KEY":"no-bing-api-key","MOZ_BITS_DOWNLOAD":false,"MOZ_BUILD_APP":"browser","MOZ_CODE_COVERAGE":false,"MOZ_CRASHREPORTER":true,"MOZ_DATA_REPORTING":true,"MOZ_DEV_EDITION":false,"MOZ_GECKO_PROFILER":true,"MOZ_GOOGLE_LOCATION_SERVICE_API_KEY":"AIzaSyB2h2OuRcUgy5N-5hsZqiPW6sH3n_rptiQ","MOZ_GOOGLE_SAFEBROWSING_API_KEY":"AIzaSyC7jsptDS3am4tPx4r3nxis7IMjBc5Dovo","MOZ_GRAPHENE":false,"MOZ_MACBUNDLE_NAME":"Firefox.app","MOZ_MAINTENANCE_SERVICE":false,"MOZ_MOZILLA_API_KEY":"7e40f68c-7938-4c5d-9f95-e61647c213eb","MOZ_NEW_CERT_STORAGE":false,"MOZ_NEW_NOTIFICATION_STORE":false,"MOZ_NEW_XULSTORE":false,"MOZ_OFFICIAL_BRANDING":true,"MOZ_PLACES":true,"MOZ_REQUIRE_SIGNING":true,"MOZ_SANDBOX":true,"MOZ_SERVICES_HEALTHREPORT":true,"MOZ_SERVICES_SYNC":false,"MOZ_SWITCHBOARD":false,"MOZ_SYSTEM_NSS":false,"MOZ_TELEMETRY_ON_BY_DEFAULT":false,"MOZ_TELEMETRY_REPORTING":true,"MOZ_UNSIGNED_SCOPES":0,"MOZ_UPDATER":true,"MOZ_UPDATE_CHANNEL":"re
lease","MOZ_WEBRTC":true,"MOZ_WIDGET_GTK":false,"MOZ_WIDGET_TOOLKIT":"cocoa","NIGHTLY_BUILD":false,"OMNIJAR_NAME":"omni.ja","RELEASE_OR_BETA":true,"SOURCE_REVISION_URL":"https://hg.mozilla.org/releases/mozilla-release/rev/cce4622026ab8e0130a0afc03f829f9b19ca38c2","TELEMETRY_PING_FORMAT_VERSION":4,"XP_UNIX":true,"isPlatformAndVersionAtLeast":{},"isPlatformAndVersionAtMost":{},"platform":"macosx"},"asyncAppConstants":{"ACCESSIBILITY":true,"ANDROID_PACKAGE_NAME":"org.mozilla.firefox","ASAN":false,"ASAN_REPORTER":false,"BROWSER_CHROME_URL":"chrome://browser/content/browser.xhtml","DEBUG":false,"DEBUG_JS_MODULES":"","DLL_PREFIX":"lib","DLL_SUFFIX":".dylib","EARLY_BETA_OR_EARLIER":false,"FENNEC_NIGHTLY":false,"HAVE_SHELL_SERVICE":true,"HAVE_USR_LIB64_DIR":false,"MENUBAR_CAN_AUTOHIDE":false,"MOZILLA_OFFICIAL":true,"MOZ_ALLOW_LEGACY_EXTENSIONS":false,"MOZ_ANDROID_ACTIVITY_STREAM":false,"MOZ_ANDROID_HISTORY":false,"MOZ_ANDROID_MOZILLA_ONLINE":false,"MOZ_APP_NAME":"firefox","MOZ_APP_VERSION":"69.0","MOZ_APP_VERSION_DISPLAY":"69.0","MOZ_BING_API_CLIENTID":"no-bing-api-clientid","MOZ_BING_API_KEY":"no-bing-api-key","MOZ_BITS_DOWNLOAD":false,"MOZ_BUILD_APP":"browser","MOZ_CODE_COVERAGE":false,"MOZ_CRASHREPORTER":true,"MOZ_DATA_REPORTING":true,"MOZ_DEV_EDITION":false,"MOZ_GECKO_PROFILER":true,"MOZ_GOOGLE_LOCATION_SERVICE_API_KEY":"AIzaSyB2h2OuRcUgy5N-5hsZqiPW6sH3n_rptiQ","MOZ_GOOGLE_SAFEBROWSING_API_KEY":"AIzaSyC7jsptDS3am4tPx4r3nxis7IMjBc5Dovo","MOZ_GRAPHENE":false,"MOZ_MACBUNDLE_NAME":"Firefox.app","MOZ_MAINTENANCE_SERVICE":false,"MOZ_MOZILLA_API_KEY":"7e40f68c-7938-4c5d-9f95-e61647c213eb","MOZ_NEW_CERT_STORAGE":false,"MOZ_NEW_NOTIFICATION_STORE":false,"MOZ_NEW_XULSTORE":false,"MOZ_OFFICIAL_BRANDING":true,"MOZ_PLACES":true,"MOZ_REQUIRE_SIGNING":true,"MOZ_SANDBOX":true,"MOZ_SERVICES_HEALTHREPORT":true,"MOZ_SERVICES_SYNC":false,"MOZ_SWITCHBOARD":false,"MOZ_SYSTEM_NSS":false,"MOZ_TELEMETRY_ON_BY_DEFAULT":false,"MOZ_TELEMETRY_REPORTING":true,"MOZ_UNSIGNED_SCOPES":0,"MOZ_UPDATER":tr
ue,"MOZ_UPDATE_CHANNEL":"release","MOZ_WEBRTC":true,"MOZ_WIDGET_GTK":false,"MOZ_WIDGET_TOOLKIT":"cocoa","NIGHTLY_BUILD":false,"OMNIJAR_NAME":"omni.ja","RELEASE_OR_BETA":true,"SOURCE_REVISION_URL":"https://hg.mozilla.org/releases/mozilla-release/rev/cce4622026ab8e0130a0afc03f829f9b19ca38c2","TELEMETRY_PING_FORMAT_VERSION":4,"XP_UNIX":true,"isPlatformAndVersionAtLeast":{},"isPlatformAndVersionAtMost":{},"platform":"macosx"},"userAgent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:69.0) Gecko/20100101 Firefox/69.0","windowSize":"1366x768"},"pageinfo":{"documentHeight":3836,"documentSize":{"decodedBodySize":34427,"encodedBodySize":9101,"transferSize":9772},"documentTitle":"Welcome to the wonderful world of Web Performance","documentWidth":1366,"domElements":347,"navigationStartTime":1585741433948,"nextHopProtocol":"h2","resources":{"count":13,"duration":6890.724426},"responsive":true,"url":"https://www.sitespeed.io/","visualElements":{"heroes":[{"filename":"team.png","height":433,"name":"LargestImage","width":500,"x":153,"y":50},{"filename":null,"height":173,"name":"Heading","width":510,"x":703,"y":66},{"filename":"sitespeed-logo-2c.png","height":50,"name":"header-logo","width":162,"x":153,"y":0},{"filename":"team.png","height":433,"name":"logo","width":500,"x":153,"y":50}],"viewport":{"height":694,"width":1366}}},"timings":{"firstPaint":1718,"loadEventEnd":1842,"navigationTiming":{"connectStart":213,"domComplete":1842,"domContentLoadedEventEnd":1565,"domContentLoadedEventStart":1565,"domInteractive":1183,"domainLookupEnd":213,"domainLookupStart":2,"duration":1842,"fetchStart":0,"loadEventEnd":1842,"loadEventStart":1842,"redirectEnd":0,"redirectStart":0,"requestStart":933,"responseEnd":1168,"responseStart":1025,"secureConnectionStart":399,"startTime":0,"unloadEventEnd":0,"unloadEventStart":0,"workerStart":0},"pageTimings":{"backEndTime":1025,"domContentLoadedTime":1565,"domInteractiveTime":1183,"domainLookupTime":211,"frontEndTime":674,"pageDownloadTime":143,"pageL
oadTime":1842,"redirectionTime":0,"serverConnectionTime":720,"serverResponseTime":235},"rumSpeedIndex":1669,"serverTimings":[],"timeToContentfulPaint":1771,"timeToDomContentFlushed":1733,"timeToFirstInteractive":1771,"userTimings":{"marks":[],"measures":[]}}}],"visualMetrics":[],"cpu":[],"extras":[{}],"fullyLoaded":[],"errors":[[]],"statistics":{"browser":{"appConstants":{"MOZ_APP_VERSION":{"median":69,"mean":69,"mdev":0,"stddev":0,"min":69,"p10":69,"p90":69,"p99":69,"max":69},"MOZ_APP_VERSION_DISPLAY":{"median":69,"mean":69,"mdev":0,"stddev":0,"min":69,"p10":69,"p90":69,"p99":69,"max":69},"MOZ_UNSIGNED_SCOPES":{"median":0,"mean":0,"mdev":0,"stddev":0,"min":0,"p10":0,"p90":0,"p99":0,"max":0},"TELEMETRY_PING_FORMAT_VERSION":{"median":4,"mean":4,"mdev":0,"stddev":0,"min":4,"p10":4,"p90":4,"p99":4,"max":4}},"asyncAppConstants":{"MOZ_APP_VERSION":{"median":69,"mean":69,"mdev":0,"stddev":0,"min":69,"p10":69,"p90":69,"p99":69,"max":69},"MOZ_APP_VERSION_DISPLAY":{"median":69,"mean":69,"mdev":0,"stddev":0,"min":69,"p10":69,"p90":69,"p99":69,"max":69},"MOZ_UNSIGNED_SCOPES":{"median":0,"mean":0,"mdev":0,"stddev":0,"min":0,"p10":0,"p90":0,"p99":0,"max":0},"TELEMETRY_PING_FORMAT_VERSION":{"median":4,"mean":4,"mdev":0,"stddev":0,"min":4,"p10":4,"p90":4,"p99":4,"max":4}}},"pageinfo":{"documentHeight":{"median":3836,"mean":3836,"mdev":0,"stddev":0,"min":3836,"p10":3836,"p90":3836,"p99":3836,"max":3836},"documentSize":{"decodedBodySize":{"median":34427,"mean":34427,"mdev":0,"stddev":0,"min":34427,"p10":34427,"p90":34427,"p99":34427,"max":34427},"encodedBodySize":{"median":9101,"mean":9101,"mdev":0,"stddev":0,"min":9101,"p10":9101,"p90":9101,"p99":9101,"max":9101},"transferSize":{"median":9772,"mean":9772,"mdev":0,"stddev":0,"min":9772,"p10":9772,"p90":9772,"p99":9772,"max":9772}},"documentWidth":{"median":1366,"mean":1366,"mdev":0,"stddev":0,"min":1366,"p10":1366,"p90":1366,"p99":1366,"max":1366},"domElements":{"median":347,"mean":347,"mdev":0,"stddev":0,"min":347,"p10":347,"p90":3
47,"p99":347,"max":347},"navigationStartTime":{"median":1585741433948,"mean":1585741433948,"mdev":0,"stddev":0,"min":1585741433948,"p10":1585741433948,"p90":1585741433948,"p99":1585741433948,"max":1585741433948},"resources":{"count":{"median":13,"mean":13,"mdev":0,"stddev":0,"min":13,"p10":13,"p90":13,"p99":13,"max":13},"duration":{"median":6891,"mean":6891,"mdev":0,"stddev":0,"min":6891,"p10":6891,"p90":6891,"p99":6891,"max":6891}},"visualElements":{"heroes":[{"height":{"median":433,"mean":433,"mdev":0,"stddev":0,"min":433,"p10":433,"p90":433,"p99":433,"max":433},"width":{"median":500,"mean":500,"mdev":0,"stddev":0,"min":500,"p10":500,"p90":500,"p99":500,"max":500},"x":{"median":153,"mean":153,"mdev":0,"stddev":0,"min":153,"p10":153,"p90":153,"p99":153,"max":153},"y":{"median":50,"mean":50,"mdev":0,"stddev":0,"min":50,"p10":50,"p90":50,"p99":50,"max":50}},{"height":{"median":173,"mean":173,"mdev":0,"stddev":0,"min":173,"p10":173,"p90":173,"p99":173,"max":173},"width":{"median":510,"mean":510,"mdev":0,"stddev":0,"min":510,"p10":510,"p90":510,"p99":510,"max":510},"x":{"median":703,"mean":703,"mdev":0,"stddev":0,"min":703,"p10":703,"p90":703,"p99":703,"max":703},"y":{"median":66,"mean":66,"mdev":0,"stddev":0,"min":66,"p10":66,"p90":66,"p99":66,"max":66}},{"height":{"median":50,"mean":50,"mdev":0,"stddev":0,"min":50,"p10":50,"p90":50,"p99":50,"max":50},"width":{"median":162,"mean":162,"mdev":0,"stddev":0,"min":162,"p10":162,"p90":162,"p99":162,"max":162},"x":{"median":153,"mean":153,"mdev":0,"stddev":0,"min":153,"p10":153,"p90":153,"p99":153,"max":153},"y":{"median":0,"mean":0,"mdev":0,"stddev":0,"min":0,"p10":0,"p90":0,"p99":0,"max":0}},{"height":{"median":433,"mean":433,"mdev":0,"stddev":0,"min":433,"p10":433,"p90":433,"p99":433,"max":433},"width":{"median":500,"mean":500,"mdev":0,"stddev":0,"min":500,"p10":500,"p90":500,"p99":500,"max":500},"x":{"median":153,"mean":153,"mdev":0,"stddev":0,"min":153,"p10":153,"p90":153,"p99":153,"max":153},"y":{"median":50,"mean":50,
"mdev":0,"stddev":0,"min":50,"p10":50,"p90":50,"p99":50,"max":50}}],"viewport":{"height":{"median":694,"mean":694,"mdev":0,"stddev":0,"min":694,"p10":694,"p90":694,"p99":694,"max":694},"width":{"median":1366,"mean":1366,"mdev":0,"stddev":0,"min":1366,"p10":1366,"p90":1366,"p99":1366,"max":1366}}}},"timings":{"firstPaint":{"median":1718,"mean":1718,"mdev":0,"stddev":0,"min":1718,"p10":1718,"p90":1718,"p99":1718,"max":1718},"loadEventEnd":{"median":1842,"mean":1842,"mdev":0,"stddev":0,"min":1842,"p10":1842,"p90":1842,"p99":1842,"max":1842},"navigationTiming":{"connectStart":{"median":213,"mean":213,"mdev":0,"stddev":0,"min":213,"p10":213,"p90":213,"p99":213,"max":213},"domComplete":{"median":1842,"mean":1842,"mdev":0,"stddev":0,"min":1842,"p10":1842,"p90":1842,"p99":1842,"max":1842},"domContentLoadedEventEnd":{"median":1565,"mean":1565,"mdev":0,"stddev":0,"min":1565,"p10":1565,"p90":1565,"p99":1565,"max":1565},"domContentLoadedEventStart":{"median":1565,"mean":1565,"mdev":0,"stddev":0,"min":1565,"p10":1565,"p90":1565,"p99":1565,"max":1565},"domInteractive":{"median":1183,"mean":1183,"mdev":0,"stddev":0,"min":1183,"p10":1183,"p90":1183,"p99":1183,"max":1183},"domainLookupEnd":{"median":213,"mean":213,"mdev":0,"stddev":0,"min":213,"p10":213,"p90":213,"p99":213,"max":213},"domainLookupStart":{"median":2,"mean":2,"mdev":0,"stddev":0,"min":2,"p10":2,"p90":2,"p99":2,"max":2},"duration":{"median":1842,"mean":1842,"mdev":0,"stddev":0,"min":1842,"p10":1842,"p90":1842,"p99":1842,"max":1842},"fetchStart":{"median":0,"mean":0,"mdev":0,"stddev":0,"min":0,"p10":0,"p90":0,"p99":0,"max":0},"loadEventEnd":{"median":1842,"mean":1842,"mdev":0,"stddev":0,"min":1842,"p10":1842,"p90":1842,"p99":1842,"max":1842},"loadEventStart":{"median":1842,"mean":1842,"mdev":0,"stddev":0,"min":1842,"p10":1842,"p90":1842,"p99":1842,"max":1842},"redirectEnd":{"median":0,"mean":0,"mdev":0,"stddev":0,"min":0,"p10":0,"p90":0,"p99":0,"max":0},"redirectStart":{"median":0,"mean":0,"mdev":0,"stddev":0,"min":0,"p
10":0,"p90":0,"p99":0,"max":0},"requestStart":{"median":933,"mean":933,"mdev":0,"stddev":0,"min":933,"p10":933,"p90":933,"p99":933,"max":933},"responseEnd":{"median":1168,"mean":1168,"mdev":0,"stddev":0,"min":1168,"p10":1168,"p90":1168,"p99":1168,"max":1168},"responseStart":{"median":1025,"mean":1025,"mdev":0,"stddev":0,"min":1025,"p10":1025,"p90":1025,"p99":1025,"max":1025},"secureConnectionStart":{"median":399,"mean":399,"mdev":0,"stddev":0,"min":399,"p10":399,"p90":399,"p99":399,"max":399},"startTime":{"median":0,"mean":0,"mdev":0,"stddev":0,"min":0,"p10":0,"p90":0,"p99":0,"max":0},"unloadEventEnd":{"median":0,"mean":0,"mdev":0,"stddev":0,"min":0,"p10":0,"p90":0,"p99":0,"max":0},"unloadEventStart":{"median":0,"mean":0,"mdev":0,"stddev":0,"min":0,"p10":0,"p90":0,"p99":0,"max":0},"workerStart":{"median":0,"mean":0,"mdev":0,"stddev":0,"min":0,"p10":0,"p90":0,"p99":0,"max":0}},"pageTimings":{"backEndTime":{"median":1025,"mean":1025,"mdev":0,"stddev":0,"min":1025,"p10":1025,"p90":1025,"p99":1025,"max":1025},"domContentLoadedTime":{"median":1565,"mean":1565,"mdev":0,"stddev":0,"min":1565,"p10":1565,"p90":1565,"p99":1565,"max":1565},"domInteractiveTime":{"median":1183,"mean":1183,"mdev":0,"stddev":0,"min":1183,"p10":1183,"p90":1183,"p99":1183,"max":1183},"domainLookupTime":{"median":211,"mean":211,"mdev":0,"stddev":0,"min":211,"p10":211,"p90":211,"p99":211,"max":211},"frontEndTime":{"median":674,"mean":674,"mdev":0,"stddev":0,"min":674,"p10":674,"p90":674,"p99":674,"max":674},"pageDownloadTime":{"median":143,"mean":143,"mdev":0,"stddev":0,"min":143,"p10":143,"p90":143,"p99":143,"max":143},"pageLoadTime":{"median":1842,"mean":1842,"mdev":0,"stddev":0,"min":1842,"p10":1842,"p90":1842,"p99":1842,"max":1842},"redirectionTime":{"median":0,"mean":0,"mdev":0,"stddev":0,"min":0,"p10":0,"p90":0,"p99":0,"max":0},"serverConnectionTime":{"median":720,"mean":720,"mdev":0,"stddev":0,"min":720,"p10":720,"p90":720,"p99":720,"max":720},"serverResponseTime":{"median":235,"mean":235,"mdev
":0,"stddev":0,"min":235,"p10":235,"p90":235,"p99":235,"max":235}},"rumSpeedIndex":{"median":1669,"mean":1669,"mdev":0,"stddev":0,"min":1669,"p10":1669,"p90":1669,"p99":1669,"max":1669},"timeToContentfulPaint":{"median":1771,"mean":1771,"mdev":0,"stddev":0,"min":1771,"p10":1771,"p90":1771,"p99":1771,"max":1771},"timeToDomContentFlushed":{"median":1733,"mean":1733,"mdev":0,"stddev":0,"min":1733,"p10":1733,"p90":1733,"p99":1733,"max":1733},"timeToFirstInteractive":{"median":1771,"mean":1771,"mdev":0,"stddev":0,"min":1771,"p10":1771,"p90":1771,"p99":1771,"max":1771}}}},{"info":{"browsertime":{"version":"8.2.0"},"url":"https://www.mozilla.org/en-US/","timestamp":"2020-04-01T13:44:11+02:00","connectivity":{"engine":"external","profile":"native"},"extra":{}},"files":{"video":[],"screenshot":[],"timeline":[],"consoleLog":[],"netLog":[],"perfLog":[]},"cdp":{"performance":[]},"timestamps":["2020-04-01T13:44:02+02:00"],"browserScripts":[{"browser":{"appConstants":{"ACCESSIBILITY":true,"ANDROID_PACKAGE_NAME":"org.mozilla.firefox","ASAN":false,"ASAN_REPORTER":false,"BROWSER_CHROME_URL":"chrome://browser/content/browser.xhtml","DEBUG":false,"DEBUG_JS_MODULES":"","DLL_PREFIX":"lib","DLL_SUFFIX":".dylib","EARLY_BETA_OR_EARLIER":false,"FENNEC_NIGHTLY":false,"HAVE_SHELL_SERVICE":true,"HAVE_USR_LIB64_DIR":false,"MENUBAR_CAN_AUTOHIDE":false,"MOZILLA_OFFICIAL":true,"MOZ_ALLOW_LEGACY_EXTENSIONS":false,"MOZ_ANDROID_ACTIVITY_STREAM":false,"MOZ_ANDROID_HISTORY":false,"MOZ_ANDROID_MOZILLA_ONLINE":false,"MOZ_APP_NAME":"firefox","MOZ_APP_VERSION":"69.0","MOZ_APP_VERSION_DISPLAY":"69.0","MOZ_BING_API_CLIENTID":"no-bing-api-clientid","MOZ_BING_API_KEY":"no-bing-api-key","MOZ_BITS_DOWNLOAD":false,"MOZ_BUILD_APP":"browser","MOZ_CODE_COVERAGE":false,"MOZ_CRASHREPORTER":true,"MOZ_DATA_REPORTING":true,"MOZ_DEV_EDITION":false,"MOZ_GECKO_PROFILER":true,"MOZ_GOOGLE_LOCATION_SERVICE_API_KEY":"AIzaSyB2h2OuRcUgy5N-5hsZqiPW6sH3n_rptiQ","MOZ_GOOGLE_SAFEBROWSING_API_KEY":"AIzaSyC7jsptDS3am4tPx4r3nxis7IMjBc5D
ovo","MOZ_GRAPHENE":false,"MOZ_MACBUNDLE_NAME":"Firefox.app","MOZ_MAINTENANCE_SERVICE":false,"MOZ_MOZILLA_API_KEY":"7e40f68c-7938-4c5d-9f95-e61647c213eb","MOZ_NEW_CERT_STORAGE":false,"MOZ_NEW_NOTIFICATION_STORE":false,"MOZ_NEW_XULSTORE":false,"MOZ_OFFICIAL_BRANDING":true,"MOZ_PLACES":true,"MOZ_REQUIRE_SIGNING":true,"MOZ_SANDBOX":true,"MOZ_SERVICES_HEALTHREPORT":true,"MOZ_SERVICES_SYNC":false,"MOZ_SWITCHBOARD":false,"MOZ_SYSTEM_NSS":false,"MOZ_TELEMETRY_ON_BY_DEFAULT":false,"MOZ_TELEMETRY_REPORTING":true,"MOZ_UNSIGNED_SCOPES":0,"MOZ_UPDATER":true,"MOZ_UPDATE_CHANNEL":"release","MOZ_WEBRTC":true,"MOZ_WIDGET_GTK":false,"MOZ_WIDGET_TOOLKIT":"cocoa","NIGHTLY_BUILD":false,"OMNIJAR_NAME":"omni.ja","RELEASE_OR_BETA":true,"SOURCE_REVISION_URL":"https://hg.mozilla.org/releases/mozilla-release/rev/cce4622026ab8e0130a0afc03f829f9b19ca38c2","TELEMETRY_PING_FORMAT_VERSION":4,"XP_UNIX":true,"isPlatformAndVersionAtLeast":{},"isPlatformAndVersionAtMost":{},"platform":"macosx"},"asyncAppConstants":{"ACCESSIBILITY":true,"ANDROID_PACKAGE_NAME":"org.mozilla.firefox","ASAN":false,"ASAN_REPORTER":false,"BROWSER_CHROME_URL":"chrome://browser/content/browser.xhtml","DEBUG":false,"DEBUG_JS_MODULES":"","DLL_PREFIX":"lib","DLL_SUFFIX":".dylib","EARLY_BETA_OR_EARLIER":false,"FENNEC_NIGHTLY":false,"HAVE_SHELL_SERVICE":true,"HAVE_USR_LIB64_DIR":false,"MENUBAR_CAN_AUTOHIDE":false,"MOZILLA_OFFICIAL":true,"MOZ_ALLOW_LEGACY_EXTENSIONS":false,"MOZ_ANDROID_ACTIVITY_STREAM":false,"MOZ_ANDROID_HISTORY":false,"MOZ_ANDROID_MOZILLA_ONLINE":false,"MOZ_APP_NAME":"firefox","MOZ_APP_VERSION":"69.0","MOZ_APP_VERSION_DISPLAY":"69.0","MOZ_BING_API_CLIENTID":"no-bing-api-clientid","MOZ_BING_API_KEY":"no-bing-api-key","MOZ_BITS_DOWNLOAD":false,"MOZ_BUILD_APP":"browser","MOZ_CODE_COVERAGE":false,"MOZ_CRASHREPORTER":true,"MOZ_DATA_REPORTING":true,"MOZ_DEV_EDITION":false,"MOZ_GECKO_PROFILER":true,"MOZ_GOOGLE_LOCATION_SERVICE_API_KEY":"AIzaSyB2h2OuRcUgy5N-5hsZqiPW6sH3n_rptiQ","MOZ_GOOGLE_SAFEBROWSING_API_KEY":"AIzaSyC7j
sptDS3am4tPx4r3nxis7IMjBc5Dovo","MOZ_GRAPHENE":false,"MOZ_MACBUNDLE_NAME":"Firefox.app","MOZ_MAINTENANCE_SERVICE":false,"MOZ_MOZILLA_API_KEY":"7e40f68c-7938-4c5d-9f95-e61647c213eb","MOZ_NEW_CERT_STORAGE":false,"MOZ_NEW_NOTIFICATION_STORE":false,"MOZ_NEW_XULSTORE":false,"MOZ_OFFICIAL_BRANDING":true,"MOZ_PLACES":true,"MOZ_REQUIRE_SIGNING":true,"MOZ_SANDBOX":true,"MOZ_SERVICES_HEALTHREPORT":true,"MOZ_SERVICES_SYNC":false,"MOZ_SWITCHBOARD":false,"MOZ_SYSTEM_NSS":false,"MOZ_TELEMETRY_ON_BY_DEFAULT":false,"MOZ_TELEMETRY_REPORTING":true,"MOZ_UNSIGNED_SCOPES":0,"MOZ_UPDATER":true,"MOZ_UPDATE_CHANNEL":"release","MOZ_WEBRTC":true,"MOZ_WIDGET_GTK":false,"MOZ_WIDGET_TOOLKIT":"cocoa","NIGHTLY_BUILD":false,"OMNIJAR_NAME":"omni.ja","RELEASE_OR_BETA":true,"SOURCE_REVISION_URL":"https://hg.mozilla.org/releases/mozilla-release/rev/cce4622026ab8e0130a0afc03f829f9b19ca38c2","TELEMETRY_PING_FORMAT_VERSION":4,"XP_UNIX":true,"isPlatformAndVersionAtLeast":{},"isPlatformAndVersionAtMost":{},"platform":"macosx"},"userAgent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:69.0) Gecko/20100101 Firefox/69.0","windowSize":"1366x768"},"pageinfo":{"documentHeight":6826,"documentSize":{"decodedBodySize":181123,"encodedBodySize":181123,"transferSize":183229},"documentTitle":"Internet for people, not profit — 
Mozilla","documentWidth":1366,"domElements":1456,"navigationStartTime":1585741442119,"nextHopProtocol":"h2","resources":{"count":39,"duration":10094.41237},"responsive":true,"url":"https://www.mozilla.org/en-US/","visualElements":{"heroes":[{"filename":"corona.ce92523a561d.jpg","height":434,"name":"LargestImage","width":771,"x":96,"y":570},{"filename":null,"height":352,"name":"Heading","width":1,"x":-1,"y":95}],"viewport":{"height":694,"width":1366}}},"timings":{"firstPaint":966,"loadEventEnd":2082,"navigationTiming":{"connectStart":178,"domComplete":2079,"domContentLoadedEventEnd":1083,"domContentLoadedEventStart":1082,"domInteractive":1081,"domainLookupEnd":178,"domainLookupStart":3,"duration":2082,"fetchStart":1,"loadEventEnd":2082,"loadEventStart":2079,"redirectEnd":0,"redirectStart":0,"requestStart":693,"responseEnd":745,"responseStart":745,"secureConnectionStart":322,"startTime":0,"unloadEventEnd":0,"unloadEventStart":0,"workerStart":0},"pageTimings":{"backEndTime":745,"domContentLoadedTime":1082,"domInteractiveTime":1081,"domainLookupTime":174,"frontEndTime":1334,"pageDownloadTime":0,"pageLoadTime":2079,"redirectionTime":0,"serverConnectionTime":515,"serverResponseTime":52},"rumSpeedIndex":1224,"serverTimings":[],"timeToContentfulPaint":1009,"timeToDomContentFlushed":1084,"timeToFirstInteractive":1082,"userTimings":{"marks":[],"measures":[]}}}],"visualMetrics":[],"cpu":[],"extras":[{}],"fullyLoaded":[],"errors":[[]],"statistics":{"browser":{"appConstants":{"MOZ_APP_VERSION":{"median":69,"mean":69,"mdev":0,"stddev":0,"min":69,"p10":69,"p90":69,"p99":69,"max":69},"MOZ_APP_VERSION_DISPLAY":{"median":69,"mean":69,"mdev":0,"stddev":0,"min":69,"p10":69,"p90":69,"p99":69,"max":69},"MOZ_UNSIGNED_SCOPES":{"median":0,"mean":0,"mdev":0,"stddev":0,"min":0,"p10":0,"p90":0,"p99":0,"max":0},"TELEMETRY_PING_FORMAT_VERSION":{"median":4,"mean":4,"mdev":0,"stddev":0,"min":4,"p10":4,"p90":4,"p99":4,"max":4}},"asyncAppConstants":{"MOZ_APP_VERSION":{"median":69,"mean":69,"mdev":0,
"stddev":0,"min":69,"p10":69,"p90":69,"p99":69,"max":69},"MOZ_APP_VERSION_DISPLAY":{"median":69,"mean":69,"mdev":0,"stddev":0,"min":69,"p10":69,"p90":69,"p99":69,"max":69},"MOZ_UNSIGNED_SCOPES":{"median":0,"mean":0,"mdev":0,"stddev":0,"min":0,"p10":0,"p90":0,"p99":0,"max":0},"TELEMETRY_PING_FORMAT_VERSION":{"median":4,"mean":4,"mdev":0,"stddev":0,"min":4,"p10":4,"p90":4,"p99":4,"max":4}}},"pageinfo":{"documentHeight":{"median":6826,"mean":6826,"mdev":0,"stddev":0,"min":6826,"p10":6826,"p90":6826,"p99":6826,"max":6826},"documentSize":{"decodedBodySize":{"median":181123,"mean":181123,"mdev":0,"stddev":0,"min":181123,"p10":181123,"p90":181123,"p99":181123,"max":181123},"encodedBodySize":{"median":181123,"mean":181123,"mdev":0,"stddev":0,"min":181123,"p10":181123,"p90":181123,"p99":181123,"max":181123},"transferSize":{"median":183229,"mean":183229,"mdev":0,"stddev":0,"min":183229,"p10":183229,"p90":183229,"p99":183229,"max":183229}},"documentWidth":{"median":1366,"mean":1366,"mdev":0,"stddev":0,"min":1366,"p10":1366,"p90":1366,"p99":1366,"max":1366},"domElements":{"median":1456,"mean":1456,"mdev":0,"stddev":0,"min":1456,"p10":1456,"p90":1456,"p99":1456,"max":1456},"navigationStartTime":{"median":1585741442119,"mean":1585741442119,"mdev":0,"stddev":0,"min":1585741442119,"p10":1585741442119,"p90":1585741442119,"p99":1585741442119,"max":1585741442119},"resources":{"count":{"median":39,"mean":39,"mdev":0,"stddev":0,"min":39,"p10":39,"p90":39,"p99":39,"max":39},"duration":{"median":10094,"mean":10094,"mdev":0,"stddev":0,"min":10094,"p10":10094,"p90":10094,"p99":10094,"max":10094}},"visualElements":{"heroes":[{"height":{"median":434,"mean":434,"mdev":0,"stddev":0,"min":434,"p10":434,"p90":434,"p99":434,"max":434},"width":{"median":771,"mean":771,"mdev":0,"stddev":0,"min":771,"p10":771,"p90":771,"p99":771,"max":771},"x":{"median":96,"mean":96,"mdev":0,"stddev":0,"min":96,"p10":96,"p90":96,"p99":96,"max":96},"y":{"median":570,"mean":570,"mdev":0,"stddev":0,"min":570,"p10":570,"
p90":570,"p99":570,"max":570}},{"height":{"median":352,"mean":352,"mdev":0,"stddev":0,"min":352,"p10":352,"p90":352,"p99":352,"max":352},"width":{"median":1,"mean":1,"mdev":0,"stddev":0,"min":1,"p10":1,"p90":1,"p99":1,"max":1},"x":{"median":-1,"mean":-1,"mdev":0,"stddev":0,"min":-1,"p10":-1,"p90":-1,"p99":-1,"max":-1},"y":{"median":95,"mean":95,"mdev":0,"stddev":0,"min":95,"p10":95,"p90":95,"p99":95,"max":95}}],"viewport":{"height":{"median":694,"mean":694,"mdev":0,"stddev":0,"min":694,"p10":694,"p90":694,"p99":694,"max":694},"width":{"median":1366,"mean":1366,"mdev":0,"stddev":0,"min":1366,"p10":1366,"p90":1366,"p99":1366,"max":1366}}}},"timings":{"firstPaint":{"median":966,"mean":966,"mdev":0,"stddev":0,"min":966,"p10":966,"p90":966,"p99":966,"max":966},"loadEventEnd":{"median":2082,"mean":2082,"mdev":0,"stddev":0,"min":2082,"p10":2082,"p90":2082,"p99":2082,"max":2082},"navigationTiming":{"connectStart":{"median":178,"mean":178,"mdev":0,"stddev":0,"min":178,"p10":178,"p90":178,"p99":178,"max":178},"domComplete":{"median":2079,"mean":2079,"mdev":0,"stddev":0,"min":2079,"p10":2079,"p90":2079,"p99":2079,"max":2079},"domContentLoadedEventEnd":{"median":1083,"mean":1083,"mdev":0,"stddev":0,"min":1083,"p10":1083,"p90":1083,"p99":1083,"max":1083},"domContentLoadedEventStart":{"median":1082,"mean":1082,"mdev":0,"stddev":0,"min":1082,"p10":1082,"p90":1082,"p99":1082,"max":1082},"domInteractive":{"median":1081,"mean":1081,"mdev":0,"stddev":0,"min":1081,"p10":1081,"p90":1081,"p99":1081,"max":1081},"domainLookupEnd":{"median":178,"mean":178,"mdev":0,"stddev":0,"min":178,"p10":178,"p90":178,"p99":178,"max":178},"domainLookupStart":{"median":3,"mean":3,"mdev":0,"stddev":0,"min":3,"p10":3,"p90":3,"p99":3,"max":3},"duration":{"median":2082,"mean":2082,"mdev":0,"stddev":0,"min":2082,"p10":2082,"p90":2082,"p99":2082,"max":2082},"fetchStart":{"median":1,"mean":1,"mdev":0,"stddev":0,"min":1,"p10":1,"p90":1,"p99":1,"max":1},"loadEventEnd":{"median":2082,"mean":2082,"mdev":0,"stddev":0
,"min":2082,"p10":2082,"p90":2082,"p99":2082,"max":2082},"loadEventStart":{"median":2079,"mean":2079,"mdev":0,"stddev":0,"min":2079,"p10":2079,"p90":2079,"p99":2079,"max":2079},"redirectEnd":{"median":0,"mean":0,"mdev":0,"stddev":0,"min":0,"p10":0,"p90":0,"p99":0,"max":0},"redirectStart":{"median":0,"mean":0,"mdev":0,"stddev":0,"min":0,"p10":0,"p90":0,"p99":0,"max":0},"requestStart":{"median":693,"mean":693,"mdev":0,"stddev":0,"min":693,"p10":693,"p90":693,"p99":693,"max":693},"responseEnd":{"median":745,"mean":745,"mdev":0,"stddev":0,"min":745,"p10":745,"p90":745,"p99":745,"max":745},"responseStart":{"median":745,"mean":745,"mdev":0,"stddev":0,"min":745,"p10":745,"p90":745,"p99":745,"max":745},"secureConnectionStart":{"median":322,"mean":322,"mdev":0,"stddev":0,"min":322,"p10":322,"p90":322,"p99":322,"max":322},"startTime":{"median":0,"mean":0,"mdev":0,"stddev":0,"min":0,"p10":0,"p90":0,"p99":0,"max":0},"unloadEventEnd":{"median":0,"mean":0,"mdev":0,"stddev":0,"min":0,"p10":0,"p90":0,"p99":0,"max":0},"unloadEventStart":{"median":0,"mean":0,"mdev":0,"stddev":0,"min":0,"p10":0,"p90":0,"p99":0,"max":0},"workerStart":{"median":0,"mean":0,"mdev":0,"stddev":0,"min":0,"p10":0,"p90":0,"p99":0,"max":0}},"pageTimings":{"backEndTime":{"median":745,"mean":745,"mdev":0,"stddev":0,"min":745,"p10":745,"p90":745,"p99":745,"max":745},"domContentLoadedTime":{"median":1082,"mean":1082,"mdev":0,"stddev":0,"min":1082,"p10":1082,"p90":1082,"p99":1082,"max":1082},"domInteractiveTime":{"median":1081,"mean":1081,"mdev":0,"stddev":0,"min":1081,"p10":1081,"p90":1081,"p99":1081,"max":1081},"domainLookupTime":{"median":174,"mean":174,"mdev":0,"stddev":0,"min":174,"p10":174,"p90":174,"p99":174,"max":174},"frontEndTime":{"median":1334,"mean":1334,"mdev":0,"stddev":0,"min":1334,"p10":1334,"p90":1334,"p99":1334,"max":1334},"pageDownloadTime":{"median":0,"mean":0,"mdev":0,"stddev":0,"min":0,"p10":0,"p90":0,"p99":0,"max":0},"pageLoadTime":{"median":2079,"mean":2079,"mdev":0,"stddev":0,"min":2079,"p1
0":2079,"p90":2079,"p99":2079,"max":2079},"redirectionTime":{"median":0,"mean":0,"mdev":0,"stddev":0,"min":0,"p10":0,"p90":0,"p99":0,"max":0},"serverConnectionTime":{"median":515,"mean":515,"mdev":0,"stddev":0,"min":515,"p10":515,"p90":515,"p99":515,"max":515},"serverResponseTime":{"median":52,"mean":52,"mdev":0,"stddev":0,"min":52,"p10":52,"p90":52,"p99":52,"max":52}},"rumSpeedIndex":{"median":1224,"mean":1224,"mdev":0,"stddev":0,"min":1224,"p10":1224,"p90":1224,"p99":1224,"max":1224},"timeToContentfulPaint":{"median":1009,"mean":1009,"mdev":0,"stddev":0,"min":1009,"p10":1009,"p90":1009,"p99":1009,"max":1009},"timeToDomContentFlushed":{"median":1084,"mean":1084,"mdev":0,"stddev":0,"min":1084,"p10":1084,"p90":1084,"p99":1084,"max":1084},"timeToFirstInteractive":{"median":1082,"mean":1082,"mdev":0,"stddev":0,"min":1082,"p10":1082,"p90":1082,"p99":1082,"max":1082}}}}] \ No newline at end of file
diff --git a/python/mozperftest/mozperftest/tests/data/failing-samples/perftest_doc_failure_example.js b/python/mozperftest/mozperftest/tests/data/failing-samples/perftest_doc_failure_example.js
new file mode 100644
index 0000000000..6cc40d87db
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/data/failing-samples/perftest_doc_failure_example.js
@@ -0,0 +1,40 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+/* eslint-env node */
+"use strict";
+
+var someVar;
+
+async function setUp(context) {
+ context.log.info("setUp example!");
+}
+
+async function test(context, commands) {
+ context.log.info("Test with setUp/tearDown example!");
+ await commands.measure.start("https://www.sitespeed.io/");
+ await commands.measure.start("https://www.mozilla.org/en-US/");
+}
+
+async function tearDown(context) {
+ context.log.info("tearDown example!");
+}
+
+
+module.exports = {
+ setUp,
+ tearDown,
+ test,
+ owner: "Performance Testing Team",
+ badName: "Example",
+ description: "The description of the example test.",
+ longDescription: `
+ This is a longer description of the test perhaps including information
+ about how it should be run locally or links to relevant information.
+ `,
+ usage: `
+ ./mach perftest python/mozperftest/mozperftest/tests/data/samples/perftest_example.js
+ `,
+ supportedBrowsers: ["Fenix nightly", "Geckoview_example", "Fennec", "Firefox"],
+ supportedPlatforms: ["Android", "Desktop"],
+};
diff --git a/python/mozperftest/mozperftest/tests/data/firefox.dmg b/python/mozperftest/mozperftest/tests/data/firefox.dmg
new file mode 100644
index 0000000000..f151b96871
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/data/firefox.dmg
Binary files differ
diff --git a/python/mozperftest/mozperftest/tests/data/home_activity.txt b/python/mozperftest/mozperftest/tests/data/home_activity.txt
new file mode 100644
index 0000000000..dae9cf996f
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/data/home_activity.txt
@@ -0,0 +1,2806 @@
+--------- beginning of main
+05-26 11:45:04.454 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@84a3577)
+05-26 11:45:04.456 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@84a3577)
+05-26 11:45:05.454 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@4ed35e4)
+05-26 11:45:05.455 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@4ed35e4)
+05-26 11:45:06.456 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@1c39a4d)
+05-26 11:45:06.457 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@1c39a4d)
+05-26 11:45:07.462 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@dda5902)
+05-26 11:45:07.463 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@dda5902)
+05-26 11:45:08.468 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@6a2b213)
+05-26 11:45:08.469 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@6a2b213)
+05-26 11:45:08.474 2482 7043 W ctxmgr : [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):IndoorOutdoorProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2482). Was: 3 for 57, account#-517948760#
+05-26 11:45:09.469 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@339cc50)
+05-26 11:45:09.470 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@339cc50)
+05-26 11:45:10.472 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@1bc8e49)
+05-26 11:45:10.473 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@1bc8e49)
+05-26 11:45:11.478 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@619034e)
+05-26 11:45:11.479 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@619034e)
+05-26 11:45:12.480 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@3fc5c6f)
+05-26 11:45:12.481 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@3fc5c6f)
+05-26 11:45:13.483 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@236c605)
+05-26 11:45:13.485 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@236c605)
+05-26 11:45:13.490 2482 7043 W ctxmgr : [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):IndoorOutdoorProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2482). Was: 3 for 57, account#-517948760#
+05-26 11:45:14.486 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@262bd81)
+05-26 11:45:14.488 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@262bd81)
+05-26 11:45:15.489 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@c8e2e26)
+05-26 11:45:15.491 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@c8e2e26)
+--------- beginning of system
+05-26 11:45:13.176 1876 1895 E BatteryExternalStatsWorker: no controller energy info supplied for wifi
+05-26 11:45:15.554 1746 1771 E storaged: getDiskStats failed with result NOT_SUPPORTED and size 0
+05-26 11:45:16.489 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@1fd6a67)
+05-26 11:45:16.491 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@1fd6a67)
+05-26 11:45:17.489 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@1fbe014)
+05-26 11:45:17.491 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@1fbe014)
+05-26 11:45:18.489 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@9fcb0bd)
+05-26 11:45:18.491 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@9fcb0bd)
+05-26 11:45:18.499 2482 7043 W ctxmgr : [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):IndoorOutdoorProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2482). Was: 3 for 57, account#-517948760#
+05-26 11:45:19.491 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@d9f06b2)
+05-26 11:45:19.492 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@d9f06b2)
+05-26 11:45:20.491 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@6c4603)
+05-26 11:45:20.492 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@6c4603)
+05-26 11:45:21.491 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@4c0e980)
+05-26 11:45:21.492 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@4c0e980)
+05-26 11:45:22.496 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@6489bb9)
+05-26 11:45:22.497 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@6489bb9)
+05-26 11:45:22.666 1666 1666 I lla.fenix.debu: Not late-enabling -Xcheck:jni (already on)
+05-26 11:45:22.668 1876 1893 I ActivityManager: Start proc 1666:org.mozilla.fenix.debug/u0a91 for service org.mozilla.fenix.debug/androidx.work.impl.background.systemjob.SystemJobService
+05-26 11:45:22.693 1666 1666 W lla.fenix.debu: Unexpected CPU variant for X86 using defaults: x86
+05-26 11:45:22.742 1666 1666 I lla.fenix.debu: The ClassLoaderContext is a special shared library.
+05-26 11:45:23.348 1666 1666 D FirebaseApp: Default FirebaseApp failed to initialize because no default options were found. This usually means that com.google.gms:google-services was not applied to your gradle project.
+05-26 11:45:23.348 1666 1666 I FirebaseInitProvider: FirebaseApp initialization unsuccessful
+05-26 11:45:23.498 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@1d508f1)
+05-26 11:45:23.499 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@1d508f1)
+05-26 11:45:23.510 2482 7043 W ctxmgr : [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):IndoorOutdoorProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2482). Was: 3 for 57, account#-517948760#
+05-26 11:45:23.566 1666 1666 D FenixApplication: Initializing Glean (uploadEnabled=true, isFennec=false)
+05-26 11:45:23.622 1666 1769 D RustNativeSupport: findMegazordLibraryName(viaduct, 0.59.0
+05-26 11:45:23.622 1666 1769 D RustNativeSupport: lib in use: none
+05-26 11:45:23.622 1666 1769 D RustNativeSupport: lib configured: megazord
+05-26 11:45:23.622 1666 1769 D RustNativeSupport: lib version configured: 0.59.0
+05-26 11:45:23.622 1666 1769 D RustNativeSupport: settled on megazord
+05-26 11:45:23.730 1666 1766 D libglean_ffi: glean_ffi: Android logging should be hooked up!
+05-26 11:45:23.738 1666 1766 I glean/Glean: Registering pings for mozilla.telemetry.glean.GleanMetrics.Pings
+05-26 11:45:23.743 1666 1766 I libglean_ffi: glean_core: Creating new Glean
+05-26 11:45:23.743 1666 1766 D libglean_ffi: glean_core::database: Database path: "/data/user/0/org.mozilla.fenix.debug/glean_data/db"
+05-26 11:45:23.743 1666 1766 I libglean_ffi: glean_core::database: Database initialized
+05-26 11:45:23.753 1666 1766 I libglean_ffi: glean_ffi: Glean initialized
+05-26 11:45:23.758 1666 1769 D RustNativeSupport: findMegazordLibraryName(rustlog, 0.59.0
+05-26 11:45:23.758 1666 1769 D RustNativeSupport: lib in use: none
+05-26 11:45:23.758 1666 1769 D RustNativeSupport: lib configured: megazord
+05-26 11:45:23.758 1666 1769 D RustNativeSupport: lib version configured: 0.59.0
+05-26 11:45:23.758 1666 1769 D RustNativeSupport: settled on megazord
+05-26 11:45:23.760 1666 1769 I rc_log_ffi::ios: rc_log adapter initialized!
+05-26 11:45:23.777 1666 1766 D glean/MetricsPingSched: The 'metrics' ping was last sent on Tue May 26 00:00:00 EDT 2020
+05-26 11:45:23.777 1666 1766 I glean/MetricsPingSched: The 'metrics' ping was already sent today, Tue May 26 11:45:23 EDT 2020.
+05-26 11:45:23.778 1666 1766 D glean/MetricsPingSched: Scheduling the 'metrics' ping in 58476240ms
+05-26 11:45:23.779 1666 1766 D libglean_ffi: glean_core: Clearing Lifetime::Application metrics
+05-26 11:45:23.788 11979 12094 I SessionStorage/AutoSave: Save: Periodic
+05-26 11:45:23.791 11979 12084 D SessionStorage/AutoSave: Saved state to disk [1ms]
+05-26 11:45:23.829 1666 1666 I GeckoRuntime: Adding debug configuration from: /data/local/tmp/org.mozilla.fenix.debug-geckoview-config.yaml
+05-26 11:45:23.829 1666 1666 D GeckoDebugConfig: Adding environment variables from debug config: {MOZ_CRASHREPORTER=1, MOZ_CRASHREPORTER_NO_REPORT=1, MOZ_CRASHREPORTER_SHUTDOWN=1}
+05-26 11:45:23.830 1666 1666 D GeckoDebugConfig: Adding arguments from debug config: [-marionette, -profile, /mnt/sdcard/org.mozilla.fenix.debug-geckodriver-profile]
+05-26 11:45:23.832 1666 1666 D GeckoThread: State changed to LAUNCHED
+05-26 11:45:23.832 1666 1791 I GeckoThread: preparing to run Gecko
+05-26 11:45:23.833 1666 1791 D GeckoThread: env var: MOZ_CRASHREPORTER=1
+05-26 11:45:23.833 1666 1791 D GeckoThread: env var: MOZ_CRASHREPORTER_NO_REPORT=1
+05-26 11:45:23.833 1666 1791 D GeckoThread: env var: MOZ_CRASHREPORTER_SHUTDOWN=1
+05-26 11:45:23.861 1666 1666 D GeckoRuntime: Lifecycle: onCreate
+05-26 11:45:23.878 1666 1791 D GeckoThread: State changed to MOZGLUE_READY
+05-26 11:45:23.889 1666 1791 W Settings: Setting animator_duration_scale has moved from android.provider.Settings.System to android.provider.Settings.Global, returning read-only global URI.
+05-26 11:45:23.892 1666 1791 E GeckoLibLoad: Load sqlite start
+05-26 11:45:23.906 1666 1791 E GeckoLibLoad: Load sqlite done
+05-26 11:45:23.906 1666 1791 E GeckoLibLoad: Load nss start
+05-26 11:45:23.906 1666 1791 E GeckoLibLoad: Load nss done
+05-26 11:45:23.963 1666 1791 E GeckoLibLoad: Loaded libs in 56.743000ms total, 10ms(80ms) user, 40ms(40ms) system, 0(0) faults
+05-26 11:45:23.963 1666 1791 D GeckoThread: State changed to LIBS_READY
+05-26 11:45:23.971 1666 1791 W GeckoThread: zerdatime 190844167 - runGecko
+05-26 11:45:23.977 1666 1791 D GeckoProfile: Loading profile at: null name: default
+05-26 11:45:23.978 1666 1791 D GeckoProfile: Found profile dir: /data/user/0/org.mozilla.fenix.debug/files/mozilla/u71tud99.default
+05-26 11:45:24.000 1666 1791 I Gecko:DumpUtils: Fifo watcher disabled via pref.
+05-26 11:45:24.025 1666 1791 D GeckoSysInfo: System memory: 1494MB.
+05-26 11:45:24.025 1666 1791 W lla.fenix.debu: Accessing hidden method Landroid/os/MessageQueue;->next()Landroid/os/Message; (light greylist, JNI)
+05-26 11:45:24.026 1666 1791 D StrictMode: StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/os/MessageQueue;->next()Landroid/os/Message;
+05-26 11:45:24.026 1666 1791 D StrictMode: at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+05-26 11:45:24.026 1666 1791 D StrictMode: at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+05-26 11:45:24.026 1666 1791 D StrictMode: at org.mozilla.gecko.mozglue.GeckoLoader.nativeRun(Native Method)
+05-26 11:45:24.026 1666 1791 D StrictMode: at org.mozilla.gecko.GeckoThread.run(GeckoThread.java:449)
+05-26 11:45:24.026 1666 1791 W lla.fenix.debu: Accessing hidden field Landroid/os/MessageQueue;->mMessages:Landroid/os/Message; (light greylist, JNI)
+05-26 11:45:24.026 1666 1791 D StrictMode: StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/os/MessageQueue;->mMessages:Landroid/os/Message;
+05-26 11:45:24.026 1666 1791 D StrictMode: at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+05-26 11:45:24.026 1666 1791 D StrictMode: at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+05-26 11:45:24.026 1666 1791 D StrictMode: at org.mozilla.gecko.mozglue.GeckoLoader.nativeRun(Native Method)
+05-26 11:45:24.026 1666 1791 D StrictMode: at org.mozilla.gecko.GeckoThread.run(GeckoThread.java:449)
+05-26 11:45:24.029 1666 1791 W lla.fenix.debu: Accessing hidden field Ljava/lang/Boolean;->value:Z (light greylist, JNI)
+05-26 11:45:24.030 1666 1791 D StrictMode: StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Ljava/lang/Boolean;->value:Z
+05-26 11:45:24.030 1666 1791 D StrictMode: at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+05-26 11:45:24.030 1666 1791 D StrictMode: at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+05-26 11:45:24.030 1666 1791 D StrictMode: at org.mozilla.gecko.mozglue.GeckoLoader.nativeRun(Native Method)
+05-26 11:45:24.030 1666 1791 D StrictMode: at org.mozilla.gecko.GeckoThread.run(GeckoThread.java:449)
+05-26 11:45:24.030 1666 1791 W lla.fenix.debu: Accessing hidden field Ljava/lang/Integer;->value:I (light greylist, JNI)
+05-26 11:45:24.031 1666 1791 D StrictMode: StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Ljava/lang/Integer;->value:I
+05-26 11:45:24.031 1666 1791 D StrictMode: at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+05-26 11:45:24.031 1666 1791 D StrictMode: at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+05-26 11:45:24.031 1666 1791 D StrictMode: at org.mozilla.gecko.mozglue.GeckoLoader.nativeRun(Native Method)
+05-26 11:45:24.031 1666 1791 D StrictMode: at org.mozilla.gecko.GeckoThread.run(GeckoThread.java:449)
+05-26 11:45:24.032 1666 1791 W lla.fenix.debu: Accessing hidden field Ljava/lang/Double;->value:D (light greylist, JNI)
+05-26 11:45:24.039 1666 1791 D StrictMode: StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Ljava/lang/Double;->value:D
+05-26 11:45:24.039 1666 1791 D StrictMode: at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+05-26 11:45:24.039 1666 1791 D StrictMode: at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+05-26 11:45:24.039 1666 1791 D StrictMode: at org.mozilla.gecko.mozglue.GeckoLoader.nativeRun(Native Method)
+05-26 11:45:24.039 1666 1791 D StrictMode: at org.mozilla.gecko.GeckoThread.run(GeckoThread.java:449)
+05-26 11:45:24.040 1666 1791 D GeckoThread: State changed to JNI_READY
+05-26 11:45:24.074 1666 1821 D ServiceAllocator: org.mozilla.gecko.process.GeckoChildProcessServices$tab0 updateBindings: BACKGROUND priority, 0 importance, 2 successful binds, 0 failed binds, 0 successful unbinds, 0 failed unbinds
+05-26 11:45:24.085 1831 1831 I enix.debug:tab: Not late-enabling -Xcheck:jni (already on)
+05-26 11:45:24.096 1876 1893 I ActivityManager: Start proc 1831:org.mozilla.fenix.debug:tab0/u0a91 for service org.mozilla.fenix.debug/org.mozilla.gecko.process.GeckoChildProcessServices$tab0
+05-26 11:45:24.106 1666 1666 D LeakCanary: Updated AppWatcher.config: Config(no changes)
+05-26 11:45:24.119 1831 1831 W enix.debug:tab: Unexpected CPU variant for X86 using defaults: x86
+05-26 11:45:24.175 1831 1831 I enix.debug:tab: The ClassLoaderContext is a special shared library.
+05-26 11:45:24.202 1666 1666 D LeakCanary: Updated LeakCanary.config: Config(no changes)
+05-26 11:45:24.213 1666 1666 D App : DebugMetricController: start
+05-26 11:45:24.213 1666 1666 D App : DebugMetricController: start
+05-26 11:45:24.216 1666 1666 W PushConfig: No firebase configuration found; cannot support push service.
+05-26 11:45:24.226 1666 1791 D GeckoViewStartup: observe: app-startup
+05-26 11:45:24.240 1666 1791 D GeckoViewConsole: enabled = true
+05-26 11:45:24.284 1666 1666 D StrictMode: StrictMode policy violation; ~duration=489 ms: android.os.strictmode.DiskReadViolation
+05-26 11:45:24.284 1666 1666 D StrictMode: at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+05-26 11:45:24.284 1666 1666 D StrictMode: at java.io.FileInputStream.<init>(FileInputStream.java:163)
+05-26 11:45:24.284 1666 1666 D StrictMode: at org.mozilla.gecko.util.DebugConfig.fromFile(DebugConfig.java:49)
+05-26 11:45:24.284 1666 1666 D StrictMode: at org.mozilla.geckoview.GeckoRuntime.init(GeckoRuntime.java:363)
+05-26 11:45:24.284 1666 1666 D StrictMode: at org.mozilla.geckoview.GeckoRuntime.create(GeckoRuntime.java:574)
+05-26 11:45:24.284 1666 1666 D StrictMode: at GeckoProvider.createRuntime(GeckoProvider.kt:58)
+05-26 11:45:24.284 1666 1666 D StrictMode: at GeckoProvider.getOrCreateRuntime(GeckoProvider.kt:28)
+05-26 11:45:24.284 1666 1666 D StrictMode: at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:79)
+05-26 11:45:24.284 1666 1666 D StrictMode: at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:57)
+05-26 11:45:24.284 1666 1666 D StrictMode: at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+05-26 11:45:24.284 1666 1666 D StrictMode: at org.mozilla.fenix.components.Core.getEngine(Unknown Source:7)
+05-26 11:45:24.284 1666 1666 D StrictMode: at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:128)
+05-26 11:45:24.284 1666 1666 D StrictMode: at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+05-26 11:45:24.284 1666 1666 D StrictMode: at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+05-26 11:45:24.284 1666 1666 D StrictMode: at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+05-26 11:45:24.284 1666 1666 D StrictMode: at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+05-26 11:45:24.284 1666 1666 D StrictMode: at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+05-26 11:45:24.284 1666 1666 D StrictMode: at android.os.Handler.dispatchMessage(Handler.java:106)
+05-26 11:45:24.284 1666 1666 D StrictMode: at android.os.Looper.loop(Looper.java:193)
+05-26 11:45:24.284 1666 1666 D StrictMode: at android.app.ActivityThread.main(ActivityThread.java:6669)
+05-26 11:45:24.284 1666 1666 D StrictMode: at java.lang.reflect.Method.invoke(Native Method)
+05-26 11:45:24.284 1666 1666 D StrictMode: at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+05-26 11:45:24.284 1666 1666 D StrictMode: at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+05-26 11:45:24.296 1666 1666 D StrictMode: StrictMode policy violation; ~duration=473 ms: android.os.strictmode.DiskReadViolation
+05-26 11:45:24.296 1666 1666 D StrictMode: at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+05-26 11:45:24.296 1666 1666 D StrictMode: at libcore.io.BlockGuardOs.read(BlockGuardOs.java:253)
+05-26 11:45:24.296 1666 1666 D StrictMode: at libcore.io.IoBridge.read(IoBridge.java:501)
+05-26 11:45:24.296 1666 1666 D StrictMode: at java.io.FileInputStream.read(FileInputStream.java:307)
+05-26 11:45:24.296 1666 1666 D StrictMode: at java.io.FilterInputStream.read(FilterInputStream.java:133)
+05-26 11:45:24.296 1666 1666 D StrictMode: at java.io.PushbackInputStream.read(PushbackInputStream.java:186)
+05-26 11:45:24.296 1666 1666 D StrictMode: at org.yaml.snakeyaml.reader.UnicodeReader.init(UnicodeReader.java:92)
+05-26 11:45:24.296 1666 1666 D StrictMode: at org.yaml.snakeyaml.reader.UnicodeReader.read(UnicodeReader.java:124)
+05-26 11:45:24.296 1666 1666 D StrictMode: at org.yaml.snakeyaml.reader.StreamReader.update(StreamReader.java:183)
+05-26 11:45:24.296 1666 1666 D StrictMode: at org.yaml.snakeyaml.reader.StreamReader.ensureEnoughData(StreamReader.java:176)
+05-26 11:45:24.296 1666 1666 D StrictMode: at org.yaml.snakeyaml.reader.StreamReader.ensureEnoughData(StreamReader.java:171)
+05-26 11:45:24.296 1666 1666 D StrictMode: at org.yaml.snakeyaml.reader.StreamReader.peek(StreamReader.java:126)
+05-26 11:45:24.296 1666 1666 D StrictMode: at org.yaml.snakeyaml.scanner.ScannerImpl.scanToNextToken(ScannerImpl.java:1177)
+05-26 11:45:24.296 1666 1666 D StrictMode: at org.yaml.snakeyaml.scanner.ScannerImpl.fetchMoreTokens(ScannerImpl.java:287)
+05-26 11:45:24.296 1666 1666 D StrictMode: at org.yaml.snakeyaml.scanner.ScannerImpl.checkToken(ScannerImpl.java:227)
+05-26 11:45:24.296 1666 1666 D StrictMode: at org.yaml.snakeyaml.parser.ParserImpl$ParseImplicitDocumentStart.produce(ParserImpl.java:195)
+05-26 11:45:24.296 1666 1666 D StrictMode: at org.yaml.snakeyaml.parser.ParserImpl.peekEvent(ParserImpl.java:158)
+05-26 11:45:24.296 1666 1666 D StrictMode: at org.yaml.snakeyaml.parser.ParserImpl.checkEvent(ParserImpl.java:148)
+05-26 11:45:24.296 1666 1666 D StrictMode: at org.yaml.snakeyaml.composer.Composer.getSingleNode(Composer.java:107)
+05-26 11:45:24.296 1666 1666 D StrictMode: at org.yaml.snakeyaml.constructor.BaseConstructor.getSingleData(BaseConstructor.java:141)
+05-26 11:45:24.296 1666 1666 D StrictMode: at org.yaml.snakeyaml.Yaml.loadFromReader(Yaml.java:525)
+05-26 11:45:24.296 1666 1666 D StrictMode: at org.yaml.snakeyaml.Yaml.load(Yaml.java:453)
+05-26 11:45:24.296 1666 1666 D StrictMode: at org.mozilla.gecko.util.DebugConfig.fromFile(DebugConfig.java:51)
+05-26 11:45:24.296 1666 1666 D StrictMode: at org.mozilla.geckoview.GeckoRuntime.init(GeckoRuntime.java:363)
+05-26 11:45:24.296 1666 1666 D StrictMode: at org.mozilla.geckoview.GeckoRuntime.create(GeckoRuntime.java:574)
+05-26 11:45:24.296 1666 1666 D StrictMode: at GeckoProvider.createRuntime(GeckoProvider.kt:58)
+05-26 11:45:24.296 1666 1666 D StrictMode: at GeckoProvider.getOrCreateRuntime(GeckoProvider.kt:28)
+05-26 11:45:24.296 1666 1666 D StrictMode: at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:79)
+05-26 11:45:24.296 1666 1666 D StrictMode: at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:57)
+05-26 11:45:24.296 1666 1666 D StrictMode: at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+05-26 11:45:24.296 1666 1666 D StrictMode: at org.mozilla.fenix.components.Core.getEngine(Unknown Source:7)
+05-26 11:45:24.296 1666 1666 D StrictMode: at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:128)
+05-26 11:45:24.296 1666 1666 D StrictMode: at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+05-26 11:45:24.296 1666 1666 D StrictMode: at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+05-26 11:45:24.296 1666 1666 D StrictMode: at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+05-26 11:45:24.296 1666 1666 D StrictMode: at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+05-26 11:45:24.296 1666 1666 D StrictMode: at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+05-26 11:45:24.296 1666 1666 D StrictMode: at android.os.Handler.dispatchMessage(Handler.java:106)
+05-26 11:45:24.296 1666 1666 D StrictMode: at android.os.Looper.loop(Looper.java:193)
+05-26 11:45:24.296 1666 1666 D StrictMode: at android.app.ActivityThread.main(ActivityThread.java:6669)
+05-26 11:45:24.296 1666 1666 D StrictMode: at java.lang.reflect.Method.invoke(Native Method)
+05-26 11:45:24.296 1666 1666 D StrictMode: at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+05-26 11:45:24.296 1666 1666 D StrictMode: at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+05-26 11:45:24.301 1666 1666 D StrictMode: StrictMode policy violation; ~duration=473 ms: android.os.strictmode.DiskReadViolation
+05-26 11:45:24.301 1666 1666 D StrictMode: at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+05-26 11:45:24.301 1666 1666 D StrictMode: at libcore.io.BlockGuardOs.read(BlockGuardOs.java:253)
+05-26 11:45:24.301 1666 1666 D StrictMode: at libcore.io.IoBridge.read(IoBridge.java:501)
+05-26 11:45:24.301 1666 1666 D StrictMode: at java.io.FileInputStream.read(FileInputStream.java:307)
+05-26 11:45:24.301 1666 1666 D StrictMode: at java.io.FilterInputStream.read(FilterInputStream.java:133)
+05-26 11:45:24.301 1666 1666 D StrictMode: at java.io.PushbackInputStream.read(PushbackInputStream.java:186)
+05-26 11:45:24.301 1666 1666 D StrictMode: at sun.nio.cs.StreamDecoder.readBytes(StreamDecoder.java:288)
+05-26 11:45:24.301 1666 1666 D StrictMode: at sun.nio.cs.StreamDecoder.implRead(StreamDecoder.java:351)
+05-26 11:45:24.301 1666 1666 D StrictMode: at sun.nio.cs.StreamDecoder.read(StreamDecoder.java:180)
+05-26 11:45:24.301 1666 1666 D StrictMode: at java.io.InputStreamReader.read(InputStreamReader.java:184)
+05-26 11:45:24.301 1666 1666 D StrictMode: at org.yaml.snakeyaml.reader.UnicodeReader.read(UnicodeReader.java:125)
+05-26 11:45:24.301 1666 1666 D StrictMode: at org.yaml.snakeyaml.reader.StreamReader.update(StreamReader.java:183)
+05-26 11:45:24.301 1666 1666 D StrictMode: at org.yaml.snakeyaml.reader.StreamReader.ensureEnoughData(StreamReader.java:176)
+05-26 11:45:24.301 1666 1666 D StrictMode: at org.yaml.snakeyaml.reader.StreamReader.ensureEnoughData(StreamReader.java:171)
+05-26 11:45:24.301 1666 1666 D StrictMode: at org.yaml.snakeyaml.reader.StreamReader.peek(StreamReader.java:126)
+05-26 11:45:24.301 1666 1666 D StrictMode: at org.yaml.snakeyaml.scanner.ScannerImpl.scanToNextToken(ScannerImpl.java:1177)
+05-26 11:45:24.301 1666 1666 D StrictMode: at org.yaml.snakeyaml.scanner.ScannerImpl.fetchMoreTokens(ScannerImpl.java:287)
+05-26 11:45:24.301 1666 1666 D StrictMode: at org.yaml.snakeyaml.scanner.ScannerImpl.checkToken(ScannerImpl.java:227)
+05-26 11:45:24.301 1666 1666 D StrictMode: at org.yaml.snakeyaml.parser.ParserImpl$ParseImplicitDocumentStart.produce(ParserImpl.java:195)
+05-26 11:45:24.301 1666 1666 D StrictMode: at org.yaml.snakeyaml.parser.ParserImpl.peekEvent(ParserImpl.java:158)
+05-26 11:45:24.301 1666 1666 D StrictMode: at org.yaml.snakeyaml.parser.ParserImpl.checkEvent(ParserImpl.java:148)
+05-26 11:45:24.301 1666 1666 D StrictMode: at org.yaml.snakeyaml.composer.Composer.getSingleNode(Composer.java:107)
+05-26 11:45:24.301 1666 1666 D StrictMode: at org.yaml.snakeyaml.constructor.BaseConstructor.getSingleData(BaseConstructor.java:141)
+05-26 11:45:24.301 1666 1666 D StrictMode: at org.yaml.snakeyaml.Yaml.loadFromReader(Yaml.java:525)
+05-26 11:45:24.301 1666 1666 D StrictMode: at org.yaml.snakeyaml.Yaml.load(Yaml.java:453)
+05-26 11:45:24.301 1666 1666 D StrictMode: at org.mozilla.gecko.util.DebugConfig.fromFile(DebugConfig.java:51)
+05-26 11:45:24.301 1666 1666 D StrictMode: at org.mozilla.geckoview.GeckoRuntime.init(GeckoRuntime.java:363)
+05-26 11:45:24.301 1666 1666 D StrictMode: at org.mozilla.geckoview.GeckoRuntime.create(GeckoRuntime.java:574)
+05-26 11:45:24.301 1666 1666 D StrictMode: at GeckoProvider.createRuntime(GeckoProvider.kt:58)
+05-26 11:45:24.301 1666 1666 D StrictMode: at GeckoProvider.getOrCreateRuntime(GeckoProvider.kt:28)
+05-26 11:45:24.301 1666 1666 D StrictMode: at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:79)
+05-26 11:45:24.301 1666 1666 D StrictMode: at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:57)
+05-26 11:45:24.301 1666 1666 D StrictMode: at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+05-26 11:45:24.301 1666 1666 D StrictMode: at org.mozilla.fenix.components.Core.getEngine(Unknown Source:7)
+05-26 11:45:24.301 1666 1666 D StrictMode: at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:128)
+05-26 11:45:24.301 1666 1666 D StrictMode: at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+05-26 11:45:24.301 1666 1666 D StrictMode: at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+05-26 11:45:24.301 1666 1666 D StrictMode: at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+05-26 11:45:24.301 1666 1666 D StrictMode: at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+05-26 11:45:24.301 1666 1666 D StrictMode: at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+05-26 11:45:24.301 1666 1666 D StrictMode: at android.os.Handler.dispatchMessage(Handler.java:106)
+05-26 11:45:24.301 1666 1666 D StrictMode: at android.os.Looper.loop(Looper.java:193)
+05-26 11:45:24.301 1666 1666 D StrictMode: at android.app.ActivityThread.main(ActivityThread.java:6669)
+05-26 11:45:24.301 1666 1666 D StrictMode: at java.lang.reflect.Method.invoke(Native Method)
+05-26 11:45:24.301 1666 1666 D StrictMode: at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+05-26 11:45:24.301 1666 1666 D StrictMode: at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+05-26 11:45:24.308 1666 1666 D StrictMode: StrictMode policy violation; ~duration=456 ms: android.os.strictmode.DiskReadViolation
+05-26 11:45:24.308 1666 1666 D StrictMode: at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+05-26 11:45:24.308 1666 1666 D StrictMode: at libcore.io.BlockGuardOs.read(BlockGuardOs.java:253)
+05-26 11:45:24.308 1666 1666 D StrictMode: at libcore.io.IoBridge.read(IoBridge.java:501)
+05-26 11:45:24.308 1666 1666 D StrictMode: at java.io.FileInputStream.read(FileInputStream.java:307)
+05-26 11:45:24.308 1666 1666 D StrictMode: at java.io.FilterInputStream.read(FilterInputStream.java:133)
+05-26 11:45:24.308 1666 1666 D StrictMode: at java.io.PushbackInputStream.read(PushbackInputStream.java:186)
+05-26 11:45:24.308 1666 1666 D StrictMode: at sun.nio.cs.StreamDecoder.readBytes(StreamDecoder.java:288)
+05-26 11:45:24.308 1666 1666 D StrictMode: at sun.nio.cs.StreamDecoder.implRead(StreamDecoder.java:351)
+05-26 11:45:24.308 1666 1666 D StrictMode: at sun.nio.cs.StreamDecoder.read(StreamDecoder.java:180)
+05-26 11:45:24.308 1666 1666 D StrictMode: at java.io.InputStreamReader.read(InputStreamReader.java:184)
+05-26 11:45:24.308 1666 1666 D StrictMode: at org.yaml.snakeyaml.reader.UnicodeReader.read(UnicodeReader.java:125)
+05-26 11:45:24.308 1666 1666 D StrictMode: at org.yaml.snakeyaml.reader.StreamReader.update(StreamReader.java:183)
+05-26 11:45:24.308 1666 1666 D StrictMode: at org.yaml.snakeyaml.reader.StreamReader.ensureEnoughData(StreamReader.java:176)
+05-26 11:45:24.308 1666 1666 D StrictMode: at org.yaml.snakeyaml.reader.StreamReader.peek(StreamReader.java:136)
+05-26 11:45:24.308 1666 1666 D StrictMode: at org.yaml.snakeyaml.scanner.ScannerImpl.scanPlain(ScannerImpl.java:1999)
+05-26 11:45:24.308 1666 1666 D StrictMode: at org.yaml.snakeyaml.scanner.ScannerImpl.fetchPlain(ScannerImpl.java:1044)
+05-26 11:45:24.308 1666 1666 D StrictMode: at org.yaml.snakeyaml.scanner.ScannerImpl.fetchMoreTokens(ScannerImpl.java:399)
+05-26 11:45:24.308 1666 1666 D StrictMode: at org.yaml.snakeyaml.scanner.ScannerImpl.checkToken(ScannerImpl.java:227)
+05-26 11:45:24.308 1666 1666 D StrictMode: at org.yaml.snakeyaml.parser.ParserImpl$ParseBlockSequenceEntry.produce(ParserImpl.java:504)
+05-26 11:45:24.308 1666 1666 D StrictMode: at org.yaml.snakeyaml.parser.ParserImpl.peekEvent(ParserImpl.java:158)
+05-26 11:45:24.308 1666 1666 D StrictMode: at org.yaml.snakeyaml.parser.ParserImpl.checkEvent(ParserImpl.java:148)
+05-26 11:45:24.308 1666 1666 D StrictMode: at org.yaml.snakeyaml.composer.Composer.composeSequenceNode(Composer.java:188)
+05-26 11:45:24.308 1666 1666 D StrictMode: at org.yaml.snakeyaml.composer.Composer.composeNode(Composer.java:142)
+05-26 11:45:24.308 1666 1666 D StrictMode: at org.yaml.snakeyaml.composer.Composer.composeValueNode(Composer.java:236)
+05-26 11:45:24.308 1666 1666 D StrictMode: at org.yaml.snakeyaml.composer.Composer.composeMappingChildren(Composer.java:227)
+05-26 11:45:24.308 1666 1666 D StrictMode: at org.yaml.snakeyaml.composer.Composer.composeMappingNode(Composer.java:215)
+05-26 11:45:24.308 1666 1666 D StrictMode: at org.yaml.snakeyaml.composer.Composer.composeNode(Composer.java:144)
+05-26 11:45:24.308 1666 1666 D StrictMode: at org.yaml.snakeyaml.composer.Composer.getNode(Composer.java:85)
+05-26 11:45:24.308 1666 1666 D StrictMode: at org.yaml.snakeyaml.composer.Composer.getSingleNode(Composer.java:108)
+05-26 11:45:24.308 1666 1666 D StrictMode: at org.yaml.snakeyaml.constructor.BaseConstructor.getSingleData(BaseConstructor.java:141)
+05-26 11:45:24.308 1666 1666 D StrictMode: at org.yaml.snakeyaml.Yaml.loadFromReader(Yaml.java:525)
+05-26 11:45:24.308 1666 1666 D StrictMode: at org.yaml.snakeyaml.Yaml.load(Yaml.java:453)
+05-26 11:45:24.308 1666 1666 D StrictMode: at org.mozilla.gecko.util.DebugConfig.fromFile(DebugConfig.java:51)
+05-26 11:45:24.308 1666 1666 D StrictMode: at org.mozilla.geckoview.GeckoRuntime.init(GeckoRuntime.java:363)
+05-26 11:45:24.308 1666 1666 D StrictMode: at org.mozilla.geckoview.GeckoRuntime.create(GeckoRuntime.java:574)
+05-26 11:45:24.308 1666 1666 D StrictMode: at GeckoProvider.createRuntime(GeckoProvider.kt:58)
+05-26 11:45:24.308 1666 1666 D StrictMode: at GeckoProvider.getOrCreateRuntime(GeckoProvider.kt:28)
+05-26 11:45:24.308 1666 1666 D StrictMode: at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:79)
+05-26 11:45:24.308 1666 1666 D StrictMode: at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:57)
+05-26 11:45:24.308 1666 1666 D StrictMode: at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+05-26 11:45:24.308 1666 1666 D StrictMode: at org.mozilla.fenix.components.Core.getEngine(Unknown Source:7)
+05-26 11:45:24.308 1666 1666 D StrictMode: at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:128)
+05-26 11:45:24.308 1666 1666 D StrictMode: at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+05-26 11:45:24.308 1666 1666 D StrictMode: at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+05-26 11:45:24.308 1666 1666 D StrictMode: at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+05-26 11:45:24.308 1666 1666 D StrictMode: at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+05-26 11:45:24.308 1666 1666 D StrictMode: at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+05-26 11:45:24.308 1666 1666 D StrictMode: at android.os.Handler.dispatchMessage(Handler.java:106)
+05-26 11:45:24.308 1666 1666 D StrictMode: at android.os.Looper.loop(Looper.java:193)
+05-26 11:45:24.308 1666 1666 D StrictMode: at android.app.ActivityThread.main(ActivityThread.java:6669)
+05-26 11:45:24.308 1666 1666 D StrictMode: at java.lang.reflect.Method.invoke(Native Method)
+05-26 11:45:24.308 1666 1666 D StrictMode: at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+05-26 11:45:24.308 1666 1666 D StrictMode: at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+05-26 11:45:24.318 1666 1666 D StrictMode: StrictMode policy violation; ~duration=413 ms: android.os.strictmode.DiskReadViolation
+05-26 11:45:24.318 1666 1666 D StrictMode: at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+05-26 11:45:24.318 1666 1666 D StrictMode: at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+05-26 11:45:24.318 1666 1666 D StrictMode: at java.io.File.exists(File.java:815)
+05-26 11:45:24.318 1666 1666 D StrictMode: at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:605)
+05-26 11:45:24.318 1666 1666 D StrictMode: at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:596)
+05-26 11:45:24.318 1666 1666 D StrictMode: at android.app.ContextImpl.getPreferencesDir(ContextImpl.java:552)
+05-26 11:45:24.318 1666 1666 D StrictMode: at android.app.ContextImpl.getSharedPreferencesPath(ContextImpl.java:747)
+05-26 11:45:24.318 1666 1666 D StrictMode: at android.app.ContextImpl.getSharedPreferences(ContextImpl.java:400)
+05-26 11:45:24.318 1666 1666 D StrictMode: at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+05-26 11:45:24.318 1666 1666 D StrictMode: at mozilla.components.browser.engine.gecko.GeckoEngine.<init>(GeckoEngine.kt:68)
+05-26 11:45:24.318 1666 1666 D StrictMode: at mozilla.components.browser.engine.gecko.GeckoEngine.<init>(GeckoEngine.kt:63)
+05-26 11:45:24.318 1666 1666 D StrictMode: at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:76)
+05-26 11:45:24.318 1666 1666 D StrictMode: at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:57)
+05-26 11:45:24.318 1666 1666 D StrictMode: at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+05-26 11:45:24.318 1666 1666 D StrictMode: at org.mozilla.fenix.components.Core.getEngine(Unknown Source:7)
+05-26 11:45:24.318 1666 1666 D StrictMode: at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:128)
+05-26 11:45:24.318 1666 1666 D StrictMode: at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+05-26 11:45:24.318 1666 1666 D StrictMode: at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+05-26 11:45:24.318 1666 1666 D StrictMode: at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+05-26 11:45:24.318 1666 1666 D StrictMode: at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+05-26 11:45:24.318 1666 1666 D StrictMode: at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+05-26 11:45:24.318 1666 1666 D StrictMode: at android.os.Handler.dispatchMessage(Handler.java:106)
+05-26 11:45:24.318 1666 1666 D StrictMode: at android.os.Looper.loop(Looper.java:193)
+05-26 11:45:24.318 1666 1666 D StrictMode: at android.app.ActivityThread.main(ActivityThread.java:6669)
+05-26 11:45:24.318 1666 1666 D StrictMode: at java.lang.reflect.Method.invoke(Native Method)
+05-26 11:45:24.318 1666 1666 D StrictMode: at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+05-26 11:45:24.318 1666 1666 D StrictMode: at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+05-26 11:45:24.326 1666 1666 D StrictMode: StrictMode policy violation; ~duration=189 ms: android.os.strictmode.DiskReadViolation
+05-26 11:45:24.326 1666 1666 D StrictMode: at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+05-26 11:45:24.326 1666 1666 D StrictMode: at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+05-26 11:45:24.326 1666 1666 D StrictMode: at java.io.File.exists(File.java:815)
+05-26 11:45:24.326 1666 1666 D StrictMode: at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:605)
+05-26 11:45:24.326 1666 1666 D StrictMode: at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:596)
+05-26 11:45:24.326 1666 1666 D StrictMode: at android.app.ContextImpl.getPreferencesDir(ContextImpl.java:552)
+05-26 11:45:24.326 1666 1666 D StrictMode: at android.app.ContextImpl.getSharedPreferencesPath(ContextImpl.java:747)
+05-26 11:45:24.326 1666 1666 D StrictMode: at android.app.ContextImpl.getSharedPreferences(ContextImpl.java:400)
+05-26 11:45:24.326 1666 1666 D StrictMode: at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+05-26 11:45:24.326 1666 1666 D StrictMode: at androidx.preference.PreferenceManager.getDefaultSharedPreferences(PreferenceManager.java:119)
+05-26 11:45:24.326 1666 1666 D StrictMode: at org.mozilla.fenix.DebugFenixApplication.setupLeakCanary(DebugFenixApplication.kt:15)
+05-26 11:45:24.326 1666 1666 D StrictMode: at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:140)
+05-26 11:45:24.326 1666 1666 D StrictMode: at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+05-26 11:45:24.326 1666 1666 D StrictMode: at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+05-26 11:45:24.326 1666 1666 D StrictMode: at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+05-26 11:45:24.326 1666 1666 D StrictMode: at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+05-26 11:45:24.326 1666 1666 D StrictMode: at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+05-26 11:45:24.326 1666 1666 D StrictMode: at android.os.Handler.dispatchMessage(Handler.java:106)
+05-26 11:45:24.326 1666 1666 D StrictMode: at android.os.Looper.loop(Looper.java:193)
+05-26 11:45:24.326 1666 1666 D StrictMode: at android.app.ActivityThread.main(ActivityThread.java:6669)
+05-26 11:45:24.326 1666 1666 D StrictMode: at java.lang.reflect.Method.invoke(Native Method)
+05-26 11:45:24.326 1666 1666 D StrictMode: at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+05-26 11:45:24.326 1666 1666 D StrictMode: at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+05-26 11:45:24.327 1666 1666 D StrictMode: StrictMode policy violation; ~duration=183 ms: android.os.strictmode.DiskReadViolation
+05-26 11:45:24.327 1666 1666 D StrictMode: at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+05-26 11:45:24.327 1666 1666 D StrictMode: at android.app.SharedPreferencesImpl.awaitLoadedLocked(SharedPreferencesImpl.java:256)
+05-26 11:45:24.327 1666 1666 D StrictMode: at android.app.SharedPreferencesImpl.getBoolean(SharedPreferencesImpl.java:325)
+05-26 11:45:24.327 1666 1666 D StrictMode: at org.mozilla.fenix.DebugFenixApplication.setupLeakCanary(DebugFenixApplication.kt:16)
+05-26 11:45:24.327 1666 1666 D StrictMode: at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:140)
+05-26 11:45:24.327 1666 1666 D StrictMode: at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+05-26 11:45:24.327 1666 1666 D StrictMode: at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+05-26 11:45:24.327 1666 1666 D StrictMode: at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+05-26 11:45:24.327 1666 1666 D StrictMode: at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+05-26 11:45:24.327 1666 1666 D StrictMode: at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+05-26 11:45:24.327 1666 1666 D StrictMode: at android.os.Handler.dispatchMessage(Handler.java:106)
+05-26 11:45:24.327 1666 1666 D StrictMode: at android.os.Looper.loop(Looper.java:193)
+05-26 11:45:24.327 1666 1666 D StrictMode: at android.app.ActivityThread.main(ActivityThread.java:6669)
+05-26 11:45:24.327 1666 1666 D StrictMode: at java.lang.reflect.Method.invoke(Native Method)
+05-26 11:45:24.327 1666 1666 D StrictMode: at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+05-26 11:45:24.327 1666 1666 D StrictMode: at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+05-26 11:45:24.328 1666 1666 D SessionManager: onTrimMemory(5): clearThumbnails=false, closeEngineSessions=false
+05-26 11:45:24.345 1666 1666 D LeakCanary: LeakCanary is running and ready to detect leaks
+05-26 11:45:24.359 1666 1791 I GeckoConsole: No chrome package registered for chrome://browser/content/built_in_addons.json
+05-26 11:45:24.475 1666 1769 I SupportedAddonsWorker: Trying to check for new supported add-ons
+05-26 11:45:24.499 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@334f06b)
+05-26 11:45:24.500 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@334f06b)
+05-26 11:45:24.502 1666 1791 D GeckoThread: State changed to PROFILE_READY
+05-26 11:45:24.531 1666 1791 D GeckoViewStartup: observe: profile-after-change
+05-26 11:45:24.534 1666 1791 D GeckoViewTelemetryController: setup - canRecordPrereleaseData true, canRecordReleaseData true
+05-26 11:45:24.551 1666 1791 D GeckoThread: State changed to RUNNING
+05-26 11:45:24.557 1666 1791 I Gecko : 1590507924557 Marionette TRACE Marionette enabled
+05-26 11:45:24.558 1666 1791 I Gecko : 1590507924558 Marionette TRACE Received observer notification profile-after-change
+05-26 11:45:24.560 1666 1791 I Gecko : -*- nsDNSServiceDiscovery.js : nsDNSServiceDiscovery
+05-26 11:45:24.584 1666 1791 I Gecko : 1590507924584 Marionette TRACE Received observer notification command-line-startup
+05-26 11:45:24.592 1666 1666 D GeckoNetworkManager: Incoming event enableNotifications for state OffNoListeners -> OffWithListeners
+05-26 11:45:24.596 1666 1666 D GeckoNetworkManager: New network state: UP, WIFI, WIFI
+05-26 11:45:24.604 1666 1791 D GeckoViewStartup: onEvent GeckoView:SetLocale
+05-26 11:45:24.611 1666 1791 D GeckoViewStartup: onEvent GeckoView:ResetUserPrefs
+05-26 11:45:24.623 1666 1791 D GeckoViewRemoteDebugger: onInit
+05-26 11:45:24.628 1666 1791 D GeckoViewConsole: enabled = false
+05-26 11:45:24.638 1666 1791 D GeckoViewStartup: onEvent GeckoView:SetLocale
+05-26 11:45:24.639 1666 1791 D GeckoViewStartup: onEvent GeckoView:SetDefaultPrefs
+05-26 11:45:24.666 1666 1791 I chatty : uid=10091(org.mozilla.fenix.debug) Gecko identical 9 lines
+05-26 11:45:24.666 1666 1791 D GeckoViewStartup: onEvent GeckoView:SetDefaultPrefs
+05-26 11:45:24.671 1666 1791 D GeckoViewConsole: onEvent GeckoView:RegisterWebExtension {"allowContentMessaging":true,"id":"webcompat@mozilla.com","locationUri":"resource://android/assets/extensions/webcompat/"}
+05-26 11:45:24.675 1666 1791 D GeckoViewConsole: onEvent GeckoView:WebExtension:List null
+05-26 11:45:24.685 1666 1791 D GeckoViewConsole: onEvent GeckoView:RegisterWebExtension {"allowContentMessaging":true,"id":"mozacBrowserIcons","locationUri":"resource://android/assets/extensions/browser-icons/"}
+05-26 11:45:24.686 1666 1791 D GeckoViewConsole: onEvent GeckoView:RegisterWebExtension {"allowContentMessaging":true,"id":"mozacBrowserAds","locationUri":"resource://android/assets/extensions/ads/"}
+05-26 11:45:24.687 1666 1791 D GeckoViewConsole: onEvent GeckoView:RegisterWebExtension {"allowContentMessaging":true,"id":"BrowserCookiesExtension","locationUri":"resource://android/assets/extensions/cookies/"}
+05-26 11:45:24.698 1666 1791 I Gecko : 1590507924697 Marionette TRACE Received observer notification marionette-startup-requested
+05-26 11:45:24.699 1666 1791 I Gecko : 1590507924699 Marionette TRACE Waiting until startup recorder finished recording startup scripts...
+05-26 11:45:24.719 1666 1791 I Gecko : 1590507924719 Marionette TRACE All scripts recorded.
+05-26 11:45:24.720 1666 1791 I Gecko : 1590507924720 Marionette DEBUG Setting recommended pref browser.safebrowsing.malware.enabled to false
+05-26 11:45:24.723 1666 1791 I Gecko : 1590507924723 Marionette DEBUG Setting recommended pref browser.safebrowsing.phishing.enabled to false
+05-26 11:45:24.725 1666 1791 I Gecko : 1590507924725 Marionette DEBUG Setting recommended pref browser.search.update to false
+05-26 11:45:24.725 1666 1791 I Gecko : 1590507924725 Marionette DEBUG Setting recommended pref browser.tabs.disableBackgroundZombification to false
+05-26 11:45:24.725 1666 1791 I Gecko : 1590507924725 Marionette DEBUG Setting recommended pref browser.tabs.remote.separatePrivilegedContentProcess to false
+05-26 11:45:24.726 1666 1791 I Gecko : 1590507924726 Marionette DEBUG Setting recommended pref network.http.prompt-temp-redirect to false
+05-26 11:45:24.808 1666 1791 I Gecko : 1590507924807 Marionette FATAL Remote protocol server failed to start: Error: Could not bind to port 2829 (NS_ERROR_SOCKET_ADDRESS_IN_USE)(chrome://marionette/content/server.js:94:17) JS Stack trace: set acceptConnections@server.js:94:17
+05-26 11:45:24.808 1666 1791 I Gecko : start@server.js:124:5
+05-26 11:45:24.808 1666 1791 I Gecko : init/<@marionette.js:510:21
+05-26 11:45:24.808 1666 1791 I Gecko : 1590507924808 Marionette DEBUG Resetting recommended pref browser.safebrowsing.malware.enabled
+05-26 11:45:24.812 1666 1791 I Gecko : 1590507924812 Marionette DEBUG Resetting recommended pref browser.safebrowsing.phishing.enabled
+05-26 11:45:24.815 1666 1791 I Gecko : 1590507924815 Marionette DEBUG Resetting recommended pref browser.search.update
+05-26 11:45:24.815 1666 1791 I Gecko : 1590507924815 Marionette DEBUG Resetting recommended pref browser.tabs.disableBackgroundZombification
+05-26 11:45:24.816 1666 1791 I Gecko : 1590507924815 Marionette DEBUG Resetting recommended pref browser.tabs.remote.separatePrivilegedContentProcess
+05-26 11:45:24.816 1666 1791 I Gecko : 1590507924816 Marionette DEBUG Resetting recommended pref network.http.prompt-temp-redirect
+05-26 11:45:24.825 1666 1791 D GeckoThread: State changed to EXITING
+05-26 11:45:24.849 1666 1791 E GeckoConsole: [JavaScript Error: "NetworkError when attempting to fetch resource."]
+05-26 11:45:24.849 1666 1791 E GeckoConsole: get@resource://services-settings/RemoteSettingsClient.jsm:348:12
+05-26 11:45:25.001 1666 1666 D App : Installed browser-icons extension
+05-26 11:45:25.023 1666 1791 I GeckoConsole: 1590507925022 addons.xpi WARN Exception running bootstrap method shutdown on default-theme@mozilla.org: [Exception... "Component returned failure code: 0x80004002 (NS_NOINTERFACE) [nsISupports.QueryInterface]" nsresult: "0x80004002 (NS_NOINTERFACE)" location: "JS frame :: resource://gre/modules/Extension.jsm :: shutdown :: line 2586" data: no] Stack trace: shutdown()@resource://gre/modules/Extension.jsm:2586
+05-26 11:45:25.023 1666 1791 I GeckoConsole: shutdown()@resource://gre/modules/Extension.jsm:1703
+05-26 11:45:25.023 1666 1791 I GeckoConsole: callBootstrapMethod()@resource://gre/modules/addons/XPIProvider.jsm:1819
+05-26 11:45:25.023 1666 1791 I GeckoConsole: _shutdown()@resource://gre/modules/addons/XPIProvider.jsm:1948
+05-26 11:45:25.023 1666 1791 I GeckoConsole: observe()@resource://gre/modules/AsyncShutdown.jsm:554
+05-26 11:45:25.030 1666 1791 D : HostConnection::get() New Host Connection established 0xd119ec00, tid 1791
+05-26 11:45:25.033 1831 1831 D GeckoThread: State changed to LAUNCHED
+05-26 11:45:25.033 1666 1791 I ConfigStore: android::hardware::configstore::V1_0::ISurfaceFlingerConfigs::hasWideColorDisplay retrieved: 0
+05-26 11:45:25.033 1666 1791 I ConfigStore: android::hardware::configstore::V1_0::ISurfaceFlingerConfigs::hasHDRDisplay retrieved: 0
+05-26 11:45:25.034 1831 1903 I GeckoThread: preparing to run Gecko
+05-26 11:45:25.035 1666 1791 E EGL_emulation: tid 1791: eglBindAPI(1259): error 0x300c (EGL_BAD_PARAMETER)
+05-26 11:45:25.037 1666 1791 D EGL_emulation: eglCreateContext: 0xe8b05f00: maj 3 min 0 rcv 3
+05-26 11:45:25.039 1666 1791 D EGL_emulation: eglMakeCurrent: 0xe8b05f00: ver 3 0 (tinfo 0xe8b03c50)
+05-26 11:45:25.129 1666 1791 E GeckoConsole: [JavaScript Error: "Error: Phase "profile-change-teardown" is finished, it is too late to register completion condition "ServiceWorkerShutdownBlocker: shutting down Service Workers"" {file: "resource://gre/modules/AsyncShutdown.jsm" line: 697}]
+05-26 11:45:25.129 1666 1791 E GeckoConsole: addBlocker@resource://gre/modules/AsyncShutdown.jsm:697:15
+05-26 11:45:25.129 1666 1791 E GeckoConsole: addBlocker@resource://gre/modules/AsyncShutdown.jsm:505:26
+05-26 11:45:25.129 1666 1791 E GeckoConsole: addBlocker@resource://gre/modules/AsyncShutdown.jsm:444:15
+05-26 11:45:25.129 1666 1791 E GeckoConsole: addBlocker@resource://gre/modules/nsAsyncShutdown.jsm:162:24
+05-26 11:45:25.129 1666 1791 E GeckoConsole: observe@resource://gre/modules/AsyncShutdown.jsm:554:16
+05-26 11:45:25.267 1666 1791 W GeckoConsole: [JavaScript Warning: "Security wrapper denied access to property "ONE_QUARTER" on privileged Javascript object. Support for exposing privileged objects to untrusted content via __exposedProps__ has been removed - use WebIDL bindings or Components.utils.cloneInto instead. Note that only the first denied property access from a given global object will be reported." {file: "moz-extension://9670d999-4378-46ae-b907-6d0c226b09b6/data/picture_in_picture_overrides.js" line: 26}]
+05-26 11:45:25.311 1666 1666 D mozac-webcompat: Installed WebCompat webextension: webcompat@mozilla.com
+05-26 11:45:25.381 1666 1791 E GeckoConsole: [JavaScript Error: "can't access property "startupData", state is undefined" {file: "resource://gre/modules/addons/XPIProvider.jsm" line: 3079}]
+05-26 11:45:25.381 1666 1791 E GeckoConsole: setStartupData@resource://gre/modules/addons/XPIProvider.jsm:3079:5
+05-26 11:45:25.381 1666 1791 E GeckoConsole: saveStartupData@resource://gre/modules/Extension.jsm:2035:17
+05-26 11:45:25.381 1666 1791 E GeckoConsole: _writePersistentListeners@resource://gre/modules/ExtensionCommon.jsm:2271:15
+05-26 11:45:25.381 1666 1791 E GeckoConsole: savePersistentListener@resource://gre/modules/ExtensionCommon.jsm:2362:18
+05-26 11:45:25.381 1666 1791 E GeckoConsole: addListener@resource://gre/modules/ExtensionCommon.jsm:2495:20
+05-26 11:45:25.381 1666 1791 E GeckoConsole: addListener@resource://gre/modules/ExtensionCommon.jsm:2550:38
+05-26 11:45:25.381 1666 1791 E GeckoConsole: recvAddListener@resource://gre/modules/ExtensionParent.jsm:1079:13
+05-26 11:45:25.381 1666 1791 E GeckoConsole: observe@resource://gre/modules/AsyncShutdown.jsm:554:16
+05-26 11:45:25.401 1666 1791 I chatty : uid=10091(org.mozilla.fenix.debug) Gecko identical 24 lines
+05-26 11:45:25.478 1666 1791 E GeckoConsole: [JavaScript Error: "can't access property "startupData", state is undefined" {file: "resource://gre/modules/addons/XPIProvider.jsm" line: 3079}]
+05-26 11:45:25.478 1666 1791 E GeckoConsole: setStartupData@resource://gre/modules/addons/XPIProvider.jsm:3079:5
+05-26 11:45:25.478 1666 1791 E GeckoConsole: saveStartupData@resource://gre/modules/Extension.jsm:2035:17
+05-26 11:45:25.478 1666 1791 E GeckoConsole: _writePersistentListeners@resource://gre/modules/ExtensionCommon.jsm:2271:15
+05-26 11:45:25.478 1666 1791 E GeckoConsole: savePersistentListener@resource://gre/modules/ExtensionCommon.jsm:2362:18
+05-26 11:45:25.478 1666 1791 E GeckoConsole: addListener@resource://gre/modules/ExtensionCommon.jsm:2495:20
+05-26 11:45:25.478 1666 1791 E GeckoConsole: addListener@resource://gre/modules/ExtensionCommon.jsm:2550:38
+05-26 11:45:25.478 1666 1791 E GeckoConsole: recvAddListener@resource://gre/modules/ExtensionParent.jsm:1079:13
+05-26 11:45:25.478 1666 1791 E GeckoConsole: observe@resource://gre/modules/AsyncShutdown.jsm:554:16
+05-26 11:45:25.499 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@c8d27c8)
+05-26 11:45:25.500 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@c8d27c8)
+05-26 11:45:25.518 1666 1791 I Gecko : 1590507925518 Marionette TRACE Received observer notification xpcom-will-shutdown
+05-26 11:45:25.537 1666 1666 I DefaultSupportedAddonsChecker: Register check for new supported add-ons
+05-26 11:45:25.662 1666 1732 I WM-WorkerWrapper: Work [ id=aa9d958b-bd64-47de-ad98-331e40daa20b, tags={ mozilla.components.feature.addons.migration.DefaultSupportedAddonsChecker.periodicWork, mozilla.components.feature.addons.migration.SupportedAddonsWorker } ] was cancelled
+05-26 11:45:25.662 1666 1732 I WM-WorkerWrapper: java.util.concurrent.CancellationException: Task was cancelled.
+05-26 11:45:25.662 1666 1732 I WM-WorkerWrapper: at androidx.work.impl.utils.futures.AbstractFuture.cancellationExceptionWithCause(AbstractFuture.java:1184)
+05-26 11:45:25.662 1666 1732 I WM-WorkerWrapper: at androidx.work.impl.utils.futures.AbstractFuture.getDoneValue(AbstractFuture.java:514)
+05-26 11:45:25.662 1666 1732 I WM-WorkerWrapper: at androidx.work.impl.utils.futures.AbstractFuture.get(AbstractFuture.java:475)
+05-26 11:45:25.662 1666 1732 I WM-WorkerWrapper: at androidx.work.impl.WorkerWrapper$2.run(WorkerWrapper.java:284)
+05-26 11:45:25.662 1666 1732 I WM-WorkerWrapper: at androidx.work.impl.utils.SerialExecutor$Task.run(SerialExecutor.java:91)
+05-26 11:45:25.662 1666 1732 I WM-WorkerWrapper: at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1167)
+05-26 11:45:25.662 1666 1732 I WM-WorkerWrapper: at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:641)
+05-26 11:45:25.662 1666 1732 I WM-WorkerWrapper: at java.lang.Thread.run(Thread.java:764)
+05-26 11:45:25.723 1666 1769 E SupportedAddonsWorker: An exception happened trying to check for new supported add-ons, re-schedule Job was cancelled
+05-26 11:45:25.723 1666 1769 E SupportedAddonsWorker: kotlinx.coroutines.JobCancellationException: Job was cancelled; job=JobImpl{Cancelling}@1e7bb6
+05-26 11:45:25.725 1666 1769 I SupportedAddonsWorker: Trying to check for new supported add-ons
+05-26 11:45:25.737 1922 1791 W google-breakpad: ExceptionHandler::WaitForContinueSignal waiting for continue signal...
+05-26 11:45:25.738 1666 1791 W google-breakpad: ExceptionHandler::GenerateDump cloned child
+05-26 11:45:25.739 1666 1791 W google-breakpad: 1922
+05-26 11:45:25.739 1666 1791 W google-breakpad:
+05-26 11:45:25.739 1666 1791 W google-breakpad: ExceptionHandler::SendContinueSignalToChild sent continue signal to child
+05-26 11:45:26.025 1876 17867 I ActivityManager: Process org.mozilla.fenix.debug (pid 1666) has died: vis TRNB
+05-26 11:45:26.025 1876 1894 W libprocessgroup: kill(-1666, 9) failed: No such process
+05-26 11:45:26.025 1876 1894 I libprocessgroup: Successfully killed process cgroup uid 10091 pid 1666 in 0ms
+05-26 11:45:26.025 1734 1734 I Zygote : Process 1666 exited due to signal (11)
+05-26 11:45:26.026 1831 1831 I ServiceChildProcess: Service has been unbound. Stopping.
+05-26 11:45:26.027 1876 17867 W ActivityManager: Scheduling restart of crashed service org.mozilla.fenix.debug/androidx.work.impl.background.systemjob.SystemJobService in 1000ms
+05-26 11:45:26.042 11979 11979 D SessionManager: onTrimMemory(5): clearThumbnails=false, closeEngineSessions=false
+05-26 11:45:26.047 1831 1831 I Process : Sending signal. PID: 1831 SIG: 9
+05-26 11:45:26.075 1734 1734 I Zygote : Process 1831 exited due to signal (9)
+05-26 11:45:26.075 1876 17867 I ActivityManager: Process org.mozilla.fenix.debug:tab0 (pid 1831) has died: fore SVC
+05-26 11:45:26.075 1876 1894 W libprocessgroup: kill(-1831, 9) failed: No such process
+05-26 11:45:26.075 1876 1894 I libprocessgroup: Successfully killed process cgroup uid 10091 pid 1831 in 0ms
+05-26 11:45:26.245 1876 17867 D WificondControl: Scan result ready event
+05-26 11:45:26.499 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@5bd01e3)
+05-26 11:45:26.500 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@5bd01e3)
+05-26 11:45:27.503 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@9f27e0)
+05-26 11:45:27.503 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@9f27e0)
+05-26 11:45:28.503 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@92a6a99)
+05-26 11:45:28.504 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@92a6a99)
+05-26 11:45:28.509 2482 7043 W ctxmgr : [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):IndoorOutdoorProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2482). Was: 3 for 57, account#-517948760#
+05-26 11:45:28.888 1876 1884 I system_server: Background concurrent copying GC freed 100792(4MB) AllocSpace objects, 26(1616KB) LOS objects, 15% free, 33MB/39MB, paused 489us total 136.582ms
+05-26 11:45:29.506 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@ee0315e)
+05-26 11:45:29.507 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@ee0315e)
+05-26 11:45:30.506 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@133d93f)
+05-26 11:45:30.507 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@133d93f)
+05-26 11:45:31.506 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@4fee20c)
+05-26 11:45:31.508 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@4fee20c)
+05-26 11:45:32.191 1876 4539 W system_server: Long monitor contention with owner PackageInstaller (1938) at boolean com.android.server.pm.PackageInstallerSession$3.handleMessage(android.os.Message)(PackageInstallerSession.java:292) waiters=0 in void com.android.server.pm.PackageInstallerSession.write(org.xmlpull.v1.XmlSerializer, java.io.File) for 944ms
+05-26 11:45:32.196 1928 1928 W id.defcontaine: Unexpected CPU variant for X86 using defaults: x86
+05-26 11:45:32.199 1876 1893 I ActivityManager: Start proc 1928:com.android.defcontainer/u0a13 for service com.android.defcontainer/.DefaultContainerService
+05-26 11:45:32.202 1876 2208 W system_server: Long monitor contention with owner Binder:1876_14 (4539) at void com.android.server.pm.PackageInstallerService$InternalCallback.onSessionSealedBlocking(com.android.server.pm.PackageInstallerSession)(PackageInstallerService.java:1136) waiters=0 in android.content.pm.PackageInstaller$SessionInfo com.android.server.pm.PackageInstallerService.getSessionInfo(int) for 958ms
+05-26 11:45:32.238 1928 1928 I id.defcontaine: The ClassLoaderContext is a special shared library.
+05-26 11:45:32.450 1876 1891 I ActivityManager: Force stopping org.mozilla.fennec_aurora appid=10092 user=-1: installPackageLI
+05-26 11:45:32.453 1876 1891 I ActivityManager: Killing 12055:org.mozilla.fennec_aurora:tab0/u0a92 (adj 100): stop org.mozilla.fennec_aurora
+05-26 11:45:32.454 1876 1891 W ActivityManager: Scheduling restart of crashed service org.mozilla.fennec_aurora/org.mozilla.gecko.process.GeckoChildProcessServices$tab0 in 1000ms
+05-26 11:45:32.454 1876 1894 W libprocessgroup: kill(-12055, 9) failed: No such process
+05-26 11:45:32.458 1876 1891 I ActivityManager: Killing 11979:org.mozilla.fennec_aurora/u0a92 (adj 0): stop org.mozilla.fennec_aurora
+05-26 11:45:32.471 1876 1891 W ActivityManager: Force removing ActivityRecord{f1cab5f u0 org.mozilla.fennec_aurora/org.mozilla.fenix.HomeActivity t281}: app died, no saved state
+05-26 11:45:32.494 1876 1894 W libprocessgroup: kill(-12055, 9) failed: No such process
+05-26 11:45:32.505 1734 1734 I Zygote : Process 12055 exited due to signal (9)
+05-26 11:45:32.506 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@191d7c2)
+05-26 11:45:32.507 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@191d7c2)
+05-26 11:45:32.509 1876 1914 I PackageManager: Update package org.mozilla.fennec_aurora code path from /data/app/org.mozilla.fennec_aurora-gZjm3mMAvE9P4O2xmylTiQ== to /data/app/org.mozilla.fennec_aurora-0Iby3PCtt7bInD2AjVGM2A==; Retain data and using new
+05-26 11:45:32.510 1876 1914 I PackageManager: Update package org.mozilla.fennec_aurora resource path from /data/app/org.mozilla.fennec_aurora-gZjm3mMAvE9P4O2xmylTiQ== to /data/app/org.mozilla.fennec_aurora-0Iby3PCtt7bInD2AjVGM2A==; Retain data and using new
+05-26 11:45:32.531 1734 1734 I Zygote : Process 11979 exited due to signal (9)
+05-26 11:45:32.534 1876 1895 E BatteryExternalStatsWorker: no controller energy info supplied for wifi
+05-26 11:45:32.534 1876 1961 W InputDispatcher: channel '2ce2f5d org.mozilla.fennec_aurora/org.mozilla.fenix.HomeActivity (server)' ~ Consumer closed input channel or an error occurred. events=0x9
+05-26 11:45:32.534 1876 1961 E InputDispatcher: channel '2ce2f5d org.mozilla.fennec_aurora/org.mozilla.fenix.HomeActivity (server)' ~ Channel is unrecoverably broken and will be disposed!
+05-26 11:45:32.536 1876 1894 W libprocessgroup: kill(-12055, 9) failed: No such process
+05-26 11:45:32.536 1876 1894 I libprocessgroup: Successfully killed process cgroup uid 10092 pid 12055 in 81ms
+05-26 11:45:32.536 1876 1894 W libprocessgroup: kill(-11979, 9) failed: No such process
+05-26 11:45:32.536 1876 1894 I libprocessgroup: Successfully killed process cgroup uid 10092 pid 11979 in 0ms
+05-26 11:45:32.552 1904 16172 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:32.554 1904 16172 D : HostConnection::get() New Host Connection established 0xe69102c0, tid 16172
+05-26 11:45:32.554 1904 16172 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:32.555 1904 16172 D : HostConnection::get() New Host Connection established 0xe69102c0, tid 16172
+05-26 11:45:32.555 1904 16172 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:32.555 1904 16172 D : HostConnection::get() New Host Connection established 0xe69102c0, tid 16172
+05-26 11:45:32.555 1904 16172 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:32.556 1904 16172 D : HostConnection::get() New Host Connection established 0xe69102c0, tid 16172
+05-26 11:45:32.556 1904 16172 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:32.556 1904 16172 D : HostConnection::get() New Host Connection established 0xe69102c0, tid 16172
+05-26 11:45:32.556 1904 16172 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:32.560 1876 2445 I WindowManager: WIN DEATH: Window{2ce2f5d u0 org.mozilla.fennec_aurora/org.mozilla.fenix.HomeActivity}
+05-26 11:45:32.561 1904 16172 D : HostConnection::get() New Host Connection established 0xe69102c0, tid 16172
+05-26 11:45:32.561 1876 2445 W InputDispatcher: Attempted to unregister already unregistered input channel '2ce2f5d org.mozilla.fennec_aurora/org.mozilla.fenix.HomeActivity (server)'
+05-26 11:45:32.568 1733 1967 W SurfaceFlinger: Attempting to destroy on removed layer: AppWindowToken{5d38675 token=Token{5e619ac ActivityRecord{f1cab5f u0 org.mozilla.fennec_aurora/org.mozilla.fenix.HomeActivity t281}}}#0
+05-26 11:45:32.583 1738 4197 D installd: Detected label change from u:object_r:app_data_file:s0 to u:object_r:app_data_file:s0:c92,c256,c512,c768 at /data/data/org.mozilla.fennec_aurora/code_cache; running recursive restorecon
+05-26 11:45:32.585 1738 4197 D installd: Detected label change from u:object_r:app_data_file:s0 to u:object_r:app_data_file:s0:c92,c256,c512,c768 at /data/user_de/0/org.mozilla.fennec_aurora/cache; running recursive restorecon
+05-26 11:45:32.589 1876 1914 I PackageManager.DexOptimizer: Running dexopt (dexoptNeeded=1) on: /data/app/org.mozilla.fennec_aurora-0Iby3PCtt7bInD2AjVGM2A==/base.apk pkg=org.mozilla.fennec_aurora isa=x86 dexoptFlags=boot_complete,profile_guided,public,enable_hidden_api_checks targetFilter=speed-profile oatDir=/data/app/org.mozilla.fennec_aurora-0Iby3PCtt7bInD2AjVGM2A==/oat classLoaderContext=PCL[/system/framework/org.apache.http.legacy.boot.jar]
+05-26 11:45:32.590 1738 4197 V installed: DexInv: --- BEGIN '/data/app/org.mozilla.fennec_aurora-0Iby3PCtt7bInD2AjVGM2A==/base.apk' ---
+05-26 11:45:32.613 1876 1899 W ActivityManager: setHasOverlayUi called on unknown pid: 11979
+05-26 11:45:32.629 1876 1892 W Looper : Slow dispatch took 171ms android.ui h=com.android.server.am.ActivityManagerService$UiHandler c=null m=53
+05-26 11:45:32.636 1623 5774 D gralloc_ranchu: gralloc_alloc: Creating ashmem region of size 9334784
+05-26 11:45:32.649 1623 5774 D gralloc_ranchu: gralloc_alloc: Creating ashmem region of size 9334784
+05-26 11:45:32.654 2002 2002 W dex2oat : Unexpected CPU variant for X86 using defaults: x86
+05-26 11:45:32.655 2002 2002 W dex2oat : Mismatch between dex2oat instruction set features (ISA: X86 Feature string: -ssse3,-sse4.1,-sse4.2,-avx,-avx2,-popcnt) and those of dex2oat executable (ISA: X86 Feature string: ssse3,-sse4.1,-sse4.2,-avx,-avx2,-popcnt) for the command line:
+05-26 11:45:32.655 2002 2002 W dex2oat : /system/bin/dex2oat --zip-fd=8 --zip-location=base.apk --input-vdex-fd=-1 --output-vdex-fd=10 --oat-fd=9 --oat-location=/data/app/org.mozilla.fennec_aurora-0Iby3PCtt7bInD2AjVGM2A==/oat/x86/base.odex --instruction-set=x86 --instruction-set-variant=x86 --instruction-set-features=default --runtime-arg -Xms64m --runtime-arg -Xmx512m --compiler-filter=speed-profile --swap-fd=11 --app-image-fd=12 --image-format=lz4 --classpath-dir=/data/app/org.mozilla.fennec_aurora-0Iby3PCtt7bInD2AjVGM2A== --class-loader-context=PCL[/system/framework/org.apache.http.legacy.boot.jar] --generate-mini-debug-info --compact-dex-level=none --runtime-arg -Xtarget-sdk-version:28 --runtime-arg -Xhidden-api-checks --compilation-reason=install
+05-26 11:45:32.655 2002 2002 I dex2oat : /system/bin/dex2oat --input-vdex-fd=-1 --output-vdex-fd=10 --compiler-filter=speed-profile --classpath-dir=/data/app/org.mozilla.fennec_aurora-0Iby3PCtt7bInD2AjVGM2A== --class-loader-context=PCL[/system/framework/org.apache.http.legacy.boot.jar] --generate-mini-debug-info --compact-dex-level=none --compilation-reason=install
+05-26 11:45:32.663 1623 5774 D gralloc_ranchu: gralloc_alloc: Creating ashmem region of size 9334784
+05-26 11:45:32.689 1733 1733 D SurfaceFlinger: duplicate layer name: changing com.google.android.apps.nexuslauncher/com.google.android.apps.nexuslauncher.NexusLauncherActivity to com.google.android.apps.nexuslauncher/com.google.android.apps.nexuslauncher.NexusLauncherActivity#1
+05-26 11:45:32.691 1623 5774 D gralloc_ranchu: gralloc_alloc: Creating ashmem region of size 9334784
+05-26 11:45:32.703 1623 5774 I chatty : uid=1000(system) HwBinder:1623_3 identical 1 line
+05-26 11:45:32.712 1623 5774 D gralloc_ranchu: gralloc_alloc: Creating ashmem region of size 9334784
+05-26 11:45:32.734 2000 2000 I GoogleInputMethod: onFinishInput() : Dummy InputConnection bound
+05-26 11:45:32.748 2499 2681 D EGL_emulation: eglMakeCurrent: 0xd35359e0: ver 3 0 (tinfo 0xd353b4f0)
+05-26 11:45:32.762 2000 2000 I GoogleInputMethod: onStartInput() : Dummy InputConnection bound
+05-26 11:45:32.792 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:32.794 2534 2825 D EGL_emulation: eglMakeCurrent: 0xe8b06aa0: ver 3 0 (tinfo 0xe8b03b50)
+05-26 11:45:32.812 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:32.821 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:32.823 2534 2534 W SessionLifecycleManager: Handover failed. Creating new session controller.
+05-26 11:45:32.855 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:32.985 2012 2265 I chatty : uid=10024(com.android.systemui) RenderThread identical 12 lines
+05-26 11:45:32.990 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:33.096 2534 2034 W LocationOracle: No location history returned by ContextManager
+05-26 11:45:33.263 2534 2534 I MicroDetectionWorker: #startMicroDetector [speakerMode: 0]
+05-26 11:45:33.267 2534 2534 I AudioController: Using mInputStreamFactoryBuilder
+05-26 11:45:33.268 2534 2534 I AudioController: Created new AudioSource
+05-26 11:45:33.275 2534 2534 I MicroDetectionWorker: onReady
+05-26 11:45:33.303 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:33.311 1876 2445 I GnssLocationProvider: WakeLock acquired by sendMessage(SET_REQUEST, 0, com.android.server.location.GnssLocationProvider$GpsRequest@69389c9)
+05-26 11:45:33.314 2482 6319 W ctxmgr : [AclManager]No 3 for (accnt=account#-517948760#, com.google.android.gms(10008):UserVelocityProducer, vrsn=13280022, 0, 3pPkg = null , 3pMdlId = null , pid = 2482). Was: 3 for 1, account#-517948760#
+05-26 11:45:33.333 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@b4318ce)
+05-26 11:45:33.340 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(SET_REQUEST, 0, com.android.server.location.GnssLocationProvider$GpsRequest@69389c9)
+05-26 11:45:33.341 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@b4318ce)
+05-26 11:45:33.344 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_LOCATION, 1, Location[gps 37.421998,-122.084000 hAcc=20 et=?!? alt=5.0 vel=0.0 bear=0.0 vAcc=??? sAcc=??? bAcc=???])
+05-26 11:45:33.346 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_LOCATION, 1, Location[gps 37.421998,-122.084000 hAcc=20 et=+2d5h0m53s542ms alt=5.0 vel=0.0 bear=0.0 vAcc=??? sAcc=??? bAcc=??? {Bundle[{satellites=0, maxCn0=0, meanCn0=0}]}])
+05-26 11:45:33.347 2534 2038 I MicroRecognitionRunner: Starting detection.
+05-26 11:45:33.357 2534 2010 I MicrophoneInputStream: mic_starting SR : 16000 CC : 16 SO : 6
+05-26 11:45:33.367 1631 1682 E : Request requires android.permission.RECORD_AUDIO
+05-26 11:45:33.367 1631 1682 E AudioPolicyIntefaceImpl: getInputForAttr permission denied: recording not allowed for uid 10039 pid 2534
+05-26 11:45:33.367 1631 1682 E AudioFlinger: createRecord() checkRecordThread_l failed
+05-26 11:45:33.367 2534 2010 E IAudioFlinger: createRecord returned error -22
+05-26 11:45:33.367 2534 2010 E AudioRecord: AudioFlinger could not create record track, status: -22
+05-26 11:45:33.367 2534 2010 E AudioRecord-JNI: Error creating AudioRecord instance: initialization check failed with status -22.
+05-26 11:45:33.370 2534 2010 E android.media.AudioRecord: Error code -20 when initializing native AudioRecord object.
+05-26 11:45:33.370 2534 2010 I MicrophoneInputStream: mic_started SR : 16000 CC : 16 SO : 6
+05-26 11:45:33.371 2534 2010 E ActivityThread: Failed to find provider info for com.google.android.apps.gsa.testing.ui.audio.recorded
+05-26 11:45:33.372 2534 2534 I MicroDetectionWorker: onReady
+05-26 11:45:33.373 2534 2038 W SpeechLevelGenerator: Really low audio levels detected. The audio input may have issues.
+05-26 11:45:33.376 2534 2010 I MicrophoneInputStream: mic_close SR : 16000 CC : 16 SO : 6
+05-26 11:45:33.383 2534 2038 I MicroRecognitionRunner: Detection finished
+05-26 11:45:33.383 2534 2038 W ErrorReporter: reportError [type: 211, code: 524300]: Error reading from input stream
+05-26 11:45:33.384 2534 2954 I MicroRecognitionRunner: Stopping hotword detection.
+05-26 11:45:33.390 2482 3056 I Places : Converted 0 out of 1 WiFi scans
+05-26 11:45:33.398 2534 2038 W ErrorProcessor: onFatalError, processing error from engine(4)
+05-26 11:45:33.398 2534 2038 W ErrorProcessor: com.google.android.apps.gsa.shared.speech.b.g: Error reading from input stream
+05-26 11:45:33.398 2534 2038 W ErrorProcessor: at com.google.android.apps.gsa.staticplugins.microdetection.d.k.a(SourceFile:91)
+05-26 11:45:33.398 2534 2038 W ErrorProcessor: at com.google.android.apps.gsa.staticplugins.microdetection.d.l.run(Unknown Source:14)
+05-26 11:45:33.398 2534 2038 W ErrorProcessor: at com.google.android.libraries.gsa.runner.a.a.b(SourceFile:32)
+05-26 11:45:33.398 2534 2038 W ErrorProcessor: at com.google.android.libraries.gsa.runner.a.c.call(Unknown Source:4)
+05-26 11:45:33.398 2534 2038 W ErrorProcessor: at java.util.concurrent.FutureTask.run(FutureTask.java:266)
+05-26 11:45:33.398 2534 2038 W ErrorProcessor: at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:458)
+05-26 11:45:33.398 2534 2038 W ErrorProcessor: at java.util.concurrent.FutureTask.run(FutureTask.java:266)
+05-26 11:45:33.398 2534 2038 W ErrorProcessor: at com.google.android.apps.gsa.shared.util.concurrent.b.g.run(Unknown Source:4)
+05-26 11:45:33.398 2534 2038 W ErrorProcessor: at com.google.android.apps.gsa.shared.util.concurrent.b.aw.run(SourceFile:4)
+05-26 11:45:33.398 2534 2038 W ErrorProcessor: at com.google.android.apps.gsa.shared.util.concurrent.b.aw.run(SourceFile:4)
+05-26 11:45:33.398 2534 2038 W ErrorProcessor: at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1167)
+05-26 11:45:33.398 2534 2038 W ErrorProcessor: at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:641)
+05-26 11:45:33.398 2534 2038 W ErrorProcessor: at java.lang.Thread.run(Thread.java:764)
+05-26 11:45:33.398 2534 2038 W ErrorProcessor: at com.google.android.apps.gsa.shared.util.concurrent.b.i.run(SourceFile:6)
+05-26 11:45:33.398 2534 2038 W ErrorProcessor: Caused by: com.google.android.apps.gsa.shared.exception.GsaIOException: Error code: 393238 | Buffer overflow, no available space.
+05-26 11:45:33.398 2534 2038 W ErrorProcessor: at com.google.android.apps.gsa.speech.audio.Tee.j(SourceFile:103)
+05-26 11:45:33.398 2534 2038 W ErrorProcessor: at com.google.android.apps.gsa.speech.audio.au.read(SourceFile:2)
+05-26 11:45:33.398 2534 2038 W ErrorProcessor: at java.io.InputStream.read(InputStream.java:101)
+05-26 11:45:33.398 2534 2038 W ErrorProcessor: at com.google.android.apps.gsa.speech.audio.ao.run(SourceFile:17)
+05-26 11:45:33.398 2534 2038 W ErrorProcessor: at com.google.android.apps.gsa.speech.audio.an.run(SourceFile:2)
+05-26 11:45:33.398 2534 2038 W ErrorProcessor: at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:458)
+05-26 11:45:33.398 2534 2038 W ErrorProcessor: ... 10 more
+05-26 11:45:33.398 2534 2038 I AudioController: internalShutdown
+05-26 11:45:33.402 2534 2534 I MicroDetector: Keeping mic open: false
+05-26 11:45:33.402 2534 2534 I MicroDetectionWorker: #onError(false)
+05-26 11:45:33.404 2534 2034 I DeviceStateChecker: DeviceStateChecker cancelled
+05-26 11:45:33.419 2482 2043 I PlaceInferenceEngine: No beacon scan available - ignoring candidates.
+05-26 11:45:33.447 2482 6319 W ctxmgr : [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):UserLocationProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2482). Was: 3 for 1, account#-517948760#
+05-26 11:45:33.468 2482 2508 I .gms.persisten: Background concurrent copying GC freed 164538(5MB) AllocSpace objects, 1(20KB) LOS objects, 41% free, 8MB/14MB, paused 8.204ms total 129.449ms
+05-26 11:45:33.506 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@41f985)
+05-26 11:45:33.508 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@41f985)
+05-26 11:45:33.516 2482 2044 W ctxmgr : [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):IndoorOutdoorProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2482). Was: 3 for 57, account#-517948760#
+05-26 11:45:33.520 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_LOCATION, 1, Location[gps 37.421998,-122.084000 hAcc=20 et=?!? alt=5.0 vel=0.0 bear=0.0 vAcc=??? sAcc=??? bAcc=???])
+05-26 11:45:33.521 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_LOCATION, 1, Location[gps 37.421998,-122.084000 hAcc=20 et=+2d5h0m53s716ms alt=5.0 vel=0.0 bear=0.0 vAcc=??? sAcc=??? bAcc=??? {Bundle[{satellites=0, maxCn0=0, meanCn0=0}]}])
+05-26 11:45:33.575 2482 6319 W ctxmgr : [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):UserLocationProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2482). Was: 3 for 1, account#-517948760#
+05-26 11:45:33.591 2482 3056 I Places : ?: PlacesBleScanner start() with priority 2
+05-26 11:45:33.601 2482 3056 I Places : ?: PlacesBleScanner start() with priority 2
+05-26 11:45:33.602 2482 3056 I PlaceInferenceEngine: [anon] Changed inference mode: 1
+05-26 11:45:33.613 2482 3056 I Places : Converted 0 out of 1 WiFi scans
+05-26 11:45:33.642 2482 3056 I Places : ?: PlacesBleScanner start() with priority 2
+05-26 11:45:33.658 2482 3056 I Places : ?: PlacesBleScanner start() with priority 2
+05-26 11:45:33.658 2482 3056 I PlaceInferenceEngine: [anon] Changed inference mode: 1
+05-26 11:45:33.662 2482 6319 I PlaceInferenceEngine: No beacon scan available - ignoring candidates.
+05-26 11:45:33.681 2002 2002 W dex2oat : Accessing hidden method Landroid/content/res/Resources$Theme;->rebase()V (dark greylist, linking)
+05-26 11:45:33.699 2482 3056 I Places : Converted 0 out of 1 WiFi scans
+05-26 11:45:33.718 2482 2043 I PlaceInferenceEngine: No beacon scan available - ignoring candidates.
+05-26 11:45:33.764 2482 6321 W ctxmgr : [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):PlacesProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2482). Was: 3 for 18, account#-517948760#
+05-26 11:45:34.506 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@b6301e7)
+05-26 11:45:34.507 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@b6301e7)
+05-26 11:45:34.507 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_LOCATION, 1, Location[gps 37.421998,-122.084000 hAcc=20 et=?!? alt=5.0 vel=0.0 bear=0.0 vAcc=??? sAcc=??? bAcc=???])
+05-26 11:45:34.507 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_LOCATION, 1, Location[gps 37.421998,-122.084000 hAcc=20 et=+2d5h0m54s703ms alt=5.0 vel=0.0 bear=0.0 vAcc=??? sAcc=??? bAcc=??? {Bundle[{satellites=0, maxCn0=0, meanCn0=0}]}])
+05-26 11:45:34.534 2482 7043 W ctxmgr : [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):UserLocationProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2482). Was: 3 for 1, account#-517948760#
+05-26 11:45:34.675 2002 2040 W dex2oat : Accessing hidden method Landroid/view/View;->setTransitionVisibility(I)V (dark greylist, linking)
+05-26 11:45:34.788 2002 2041 W dex2oat : Accessing hidden method Landroid/view/View;->setTransitionVisibility(I)V (dark greylist, linking)
+05-26 11:45:35.507 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@3666194)
+05-26 11:45:35.508 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@3666194)
+05-26 11:45:35.509 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_LOCATION, 1, Location[gps 37.421998,-122.084000 hAcc=20 et=?!? alt=5.0 vel=0.0 bear=0.0 vAcc=??? sAcc=??? bAcc=???])
+05-26 11:45:35.510 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_LOCATION, 1, Location[gps 37.421998,-122.084000 hAcc=20 et=+2d5h0m55s706ms alt=5.0 vel=0.0 bear=0.0 vAcc=??? sAcc=??? bAcc=??? {Bundle[{satellites=0, maxCn0=0, meanCn0=0}]}])
+05-26 11:45:35.522 1876 3809 I GnssLocationProvider: WakeLock acquired by sendMessage(SET_REQUEST, 0, com.android.server.location.GnssLocationProvider$GpsRequest@cad43d)
+05-26 11:45:35.522 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@7352432)
+05-26 11:45:35.522 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(SET_REQUEST, 0, com.android.server.location.GnssLocationProvider$GpsRequest@cad43d)
+05-26 11:45:35.524 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@7352432)
+05-26 11:45:35.534 2482 7043 W ctxmgr : [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):UserLocationProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2482). Was: 3 for 1, account#-517948760#
+05-26 11:45:36.509 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@353d583)
+05-26 11:45:36.510 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@353d583)
+05-26 11:45:37.510 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@c120300)
+05-26 11:45:37.510 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@c120300)
+05-26 11:45:37.759 2002 2002 W dex2oat : Accessing hidden method Landroid/content/res/Resources$Theme;->rebase()V (dark greylist, linking)
+05-26 11:45:37.797 2002 2002 I dex2oat : Explicit concurrent copying GC freed 51169(7MB) AllocSpace objects, 0(0B) LOS objects, 99% free, 1232B/1537KB, paused 882us total 4.299ms
+05-26 11:45:37.952 2002 2002 I dex2oat : dex2oat took 5.306s (15.105s cpu) (threads: 4) arena alloc=2832B (2832B) java alloc=17KB (17616B) native alloc=11MB (12219416B) free=3MB (3509224B)
+05-26 11:45:37.959 1738 4197 V installed: DexInv: --- END '/data/app/org.mozilla.fennec_aurora-0Iby3PCtt7bInD2AjVGM2A==/base.apk' (success) ---
+05-26 11:45:37.961 1876 1914 D PackageManager: Instant App installer not found with android.intent.action.INSTALL_INSTANT_APP_PACKAGE
+05-26 11:45:37.962 1876 1914 D PackageManager: Clear ephemeral installer activity
+05-26 11:45:38.079 1876 1914 I system_server: Explicit concurrent copying GC freed 97404(5MB) AllocSpace objects, 27(1336KB) LOS objects, 18% free, 26MB/32MB, paused 1.291ms total 116.530ms
+05-26 11:45:38.115 1738 4197 E : Couldn't opendir /data/app/vmdl1342695088.tmp: No such file or directory
+05-26 11:45:38.116 1738 4197 E installd: Failed to delete /data/app/vmdl1342695088.tmp: No such file or directory
+05-26 11:45:38.120 1876 1914 I ActivityManager: Force stopping org.mozilla.fennec_aurora appid=10092 user=0: pkg removed
+05-26 11:45:38.129 1876 1891 W BroadcastQueue: Background execution not allowed: receiving Intent { act=android.intent.action.PACKAGE_REMOVED dat=package:org.mozilla.fennec_aurora flg=0x4000010 (has extras) } to com.android.musicfx/.Compatibility$Receiver
+05-26 11:45:38.151 1876 1890 E system_server: No package ID 7f found for ID 0x7f08019f.
+05-26 11:45:38.151 1876 1890 E system_server: No package ID 7f found for ID 0x7f130122.
+05-26 11:45:38.151 1876 1890 E system_server: No package ID 7f found for ID 0x7f130122.
+05-26 11:45:38.153 1876 1890 E system_server: No package ID 7f found for ID 0x7f08019d.
+05-26 11:45:38.153 1876 1890 E system_server: No package ID 7f found for ID 0x7f130121.
+05-26 11:45:38.153 1876 1890 E system_server: No package ID 7f found for ID 0x7f130121.
+05-26 11:45:38.155 2054 2054 W ndroid.keychai: Unexpected CPU variant for X86 using defaults: x86
+05-26 11:45:38.158 1876 1893 I ActivityManager: Start proc 2054:com.android.keychain/1000 for service com.android.keychain/.KeyChainService
+05-26 11:45:38.176 1876 3809 W BroadcastQueue: Background execution not allowed: receiving Intent { act=android.intent.action.PACKAGE_REMOVED dat=package:org.mozilla.fennec_aurora flg=0x4000010 (has extras) } to com.google.android.googlequicksearchbox/com.google.android.apps.gsa.googlequicksearchbox.GelStubAppWatcher
+05-26 11:45:38.178 1876 1891 W BroadcastQueue: Background execution not allowed: receiving Intent { act=android.intent.action.PACKAGE_ADDED dat=package:org.mozilla.fennec_aurora flg=0x4000010 (has extras) } to com.android.musicfx/.Compatibility$Receiver
+05-26 11:45:38.179 2534 2034 I EventLogSendingHelper: Sending log events.
+05-26 11:45:38.215 2054 2054 I ndroid.keychai: The ClassLoaderContext is a special shared library.
+05-26 11:45:38.217 1876 3809 W BroadcastQueue: Background execution not allowed: receiving Intent { act=android.intent.action.PACKAGE_ADDED dat=package:org.mozilla.fennec_aurora flg=0x4000010 (has extras) } to com.google.android.googlequicksearchbox/com.google.android.apps.gsa.googlequicksearchbox.GelStubAppWatcher
+05-26 11:45:38.217 1876 1891 W BroadcastQueue: Background execution not allowed: receiving Intent { act=android.intent.action.PACKAGE_REPLACED dat=package:org.mozilla.fennec_aurora flg=0x4000010 (has extras) } to com.android.musicfx/.Compatibility$Receiver
+05-26 11:45:38.218 1876 1891 W BroadcastQueue: Background execution not allowed: receiving Intent { act=android.intent.action.PACKAGE_REPLACED dat=package:org.mozilla.fennec_aurora flg=0x4000010 (has extras) } to com.google.android.apps.photos/.account.full.FetchAccountPropertiesAppUpgradeBroadcastReceiver
+05-26 11:45:38.218 1876 1891 W BroadcastQueue: Background execution not allowed: receiving Intent { act=android.intent.action.PACKAGE_REPLACED dat=package:org.mozilla.fennec_aurora flg=0x4000010 (has extras) } to com.google.android.apps.photos/.account.full.SyncAccountsForLoginBroadcastReceiver
+05-26 11:45:38.218 1876 1891 W BroadcastQueue: Background execution not allowed: receiving Intent { act=android.intent.action.PACKAGE_REPLACED dat=package:org.mozilla.fennec_aurora flg=0x4000010 (has extras) } to com.google.android.apps.photos/.experiments.phenotype.full.PhenotypeAppUpgradeBroadcastReceiver
+05-26 11:45:38.218 1876 1891 W BroadcastQueue: Background execution not allowed: receiving Intent { act=android.intent.action.PACKAGE_REPLACED dat=package:org.mozilla.fennec_aurora flg=0x4000010 (has extras) } to com.google.android.apps.photos/.notificationchannels.AppUpdateBroadcastReceiver
+05-26 11:45:38.241 1876 4916 W BroadcastQueue: Background execution not allowed: receiving Intent { act=android.intent.action.PACKAGE_REPLACED dat=package:org.mozilla.fennec_aurora flg=0x4000010 (has extras) } to com.google.android.googlequicksearchbox/com.google.android.apps.gsa.googlequicksearchbox.GelStubAppWatcher
+05-26 11:45:38.264 2534 2034 I EventLogSendingHelper: Sending log events.
+05-26 11:45:38.281 1876 1962 I InputReader: Reconfiguring input devices. changes=0x00000010
+05-26 11:45:38.296 7086 2100 I Auth : [SupervisedAccountIntentOperation] onHandleIntent(): android.intent.action.PACKAGE_ADDED
+05-26 11:45:38.299 7086 2100 I Auth : [SupervisedAccountIntentOperation] This operation is disabled
+05-26 11:45:38.309 7086 2100 I ChromeSync: [Sync,SyncIntentOperation] Handling the intent: Intent { act=android.intent.action.PACKAGE_ADDED dat=package:org.mozilla.fennec_aurora flg=0x4000010 cmp=com.google.android.gms/.chimera.GmsIntentOperationService (has extras) }.
+05-26 11:45:38.339 1876 1962 I InputReader: Reconfiguring input devices. changes=0x00000010
+05-26 11:45:38.370 1876 1876 W Looper : Slow dispatch took 130ms main h=android.app.ActivityThread$H c=android.app.-$$Lambda$LoadedApk$ReceiverDispatcher$Args$_BumDX2UKsnxLVrE6UJsJZkotuA@a0fbbe1 m=0
+05-26 11:45:38.370 1876 1876 W Looper : Slow delivery took 235ms main h=android.app.ActivityThread$H c=android.app.-$$Lambda$LoadedApk$ReceiverDispatcher$Args$_BumDX2UKsnxLVrE6UJsJZkotuA@4f7bd06 m=0
+05-26 11:45:38.366 1876 1962 I InputReader: Reconfiguring input devices. changes=0x00000010
+05-26 11:45:38.375 7086 2094 W PeopleContactsSync: CP2 sync disabled by gservices.
+05-26 11:45:38.405 2534 2534 I MicroDetectionWorker: #startMicroDetector [speakerMode: 0]
+05-26 11:45:38.406 2534 2534 I AudioController: Using mInputStreamFactoryBuilder
+05-26 11:45:38.406 2534 2534 I AudioController: Created new AudioSource
+05-26 11:45:38.407 2534 2534 I MicroDetectionWorker: onReady
+05-26 11:45:38.412 2131 2131 D CarrierSvcBindHelper: No carrier app for: 0
+05-26 11:45:38.426 2131 2131 D CarrierSvcBindHelper: No carrier app for: 0
+05-26 11:45:38.488 2534 2034 I MicroRecognitionRunner: Starting detection.
+05-26 11:45:38.488 2534 2010 I MicrophoneInputStream: mic_starting SR : 16000 CC : 16 SO : 6
+05-26 11:45:38.492 1631 1682 E : Request requires android.permission.RECORD_AUDIO
+05-26 11:45:38.492 1631 1682 E AudioPolicyIntefaceImpl: getInputForAttr permission denied: recording not allowed for uid 10039 pid 2534
+05-26 11:45:38.492 1631 1682 E AudioFlinger: createRecord() checkRecordThread_l failed
+05-26 11:45:38.492 2534 2010 E IAudioFlinger: createRecord returned error -22
+05-26 11:45:38.492 2534 2010 E AudioRecord: AudioFlinger could not create record track, status: -22
+05-26 11:45:38.497 2534 2010 E AudioRecord-JNI: Error creating AudioRecord instance: initialization check failed with status -22.
+05-26 11:45:38.499 2131 2131 D ImsResolver: maybeAddedImsService, packageName: org.mozilla.fennec_aurora
+05-26 11:45:38.500 2534 2010 E android.media.AudioRecord: Error code -20 when initializing native AudioRecord object.
+05-26 11:45:38.500 2534 2010 I MicrophoneInputStream: mic_started SR : 16000 CC : 16 SO : 6
+05-26 11:45:38.502 2534 2010 E ActivityThread: Failed to find provider info for com.google.android.apps.gsa.testing.ui.audio.recorded
+05-26 11:45:38.502 2534 2534 I MicroDetectionWorker: onReady
+05-26 11:45:38.503 2131 2131 D CarrierConfigLoader: mHandler: 9 phoneId: 0
+05-26 11:45:38.506 2534 2010 I MicrophoneInputStream: mic_close SR : 16000 CC : 16 SO : 6
+05-26 11:45:38.515 2534 2034 I MicroRecognitionRunner: Detection finished
+05-26 11:45:38.515 2534 2034 W ErrorReporter: reportError [type: 211, code: 524300]: Error reading from input stream
+05-26 11:45:38.516 2534 2954 I MicroRecognitionRunner: Stopping hotword detection.
+05-26 11:45:38.516 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@3c2a392)
+05-26 11:45:38.517 2534 2034 W ErrorProcessor: onFatalError, processing error from engine(4)
+05-26 11:45:38.517 2534 2034 W ErrorProcessor: com.google.android.apps.gsa.shared.speech.b.g: Error reading from input stream
+05-26 11:45:38.517 2534 2034 W ErrorProcessor: at com.google.android.apps.gsa.staticplugins.microdetection.d.k.a(SourceFile:91)
+05-26 11:45:38.517 2534 2034 W ErrorProcessor: at com.google.android.apps.gsa.staticplugins.microdetection.d.l.run(Unknown Source:14)
+05-26 11:45:38.517 2534 2034 W ErrorProcessor: at com.google.android.libraries.gsa.runner.a.a.b(SourceFile:32)
+05-26 11:45:38.517 2534 2034 W ErrorProcessor: at com.google.android.libraries.gsa.runner.a.c.call(Unknown Source:4)
+05-26 11:45:38.517 2534 2034 W ErrorProcessor: at java.util.concurrent.FutureTask.run(FutureTask.java:266)
+05-26 11:45:38.517 2534 2034 W ErrorProcessor: at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:458)
+05-26 11:45:38.517 2534 2034 W ErrorProcessor: at java.util.concurrent.FutureTask.run(FutureTask.java:266)
+05-26 11:45:38.517 2534 2034 W ErrorProcessor: at com.google.android.apps.gsa.shared.util.concurrent.b.g.run(Unknown Source:4)
+05-26 11:45:38.517 2534 2034 W ErrorProcessor: at com.google.android.apps.gsa.shared.util.concurrent.b.aw.run(SourceFile:4)
+05-26 11:45:38.517 2534 2034 W ErrorProcessor: at com.google.android.apps.gsa.shared.util.concurrent.b.aw.run(SourceFile:4)
+05-26 11:45:38.517 2534 2034 W ErrorProcessor: at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1167)
+05-26 11:45:38.517 2534 2034 W ErrorProcessor: at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:641)
+05-26 11:45:38.517 2534 2034 W ErrorProcessor: at java.lang.Thread.run(Thread.java:764)
+05-26 11:45:38.517 2534 2034 W ErrorProcessor: at com.google.android.apps.gsa.shared.util.concurrent.b.i.run(SourceFile:6)
+05-26 11:45:38.517 2534 2034 W ErrorProcessor: Caused by: com.google.android.apps.gsa.shared.exception.GsaIOException: Error code: 393238 | Buffer overflow, no available space.
+05-26 11:45:38.517 2534 2034 W ErrorProcessor: at com.google.android.apps.gsa.speech.audio.Tee.j(SourceFile:103)
+05-26 11:45:38.517 2534 2034 W ErrorProcessor: at com.google.android.apps.gsa.speech.audio.au.read(SourceFile:2)
+05-26 11:45:38.517 2534 2034 W ErrorProcessor: at java.io.InputStream.read(InputStream.java:101)
+05-26 11:45:38.517 2534 2034 W ErrorProcessor: at com.google.android.apps.gsa.speech.audio.ao.run(SourceFile:17)
+05-26 11:45:38.517 2534 2034 W ErrorProcessor: at com.google.android.apps.gsa.speech.audio.an.run(SourceFile:2)
+05-26 11:45:38.517 2534 2034 W ErrorProcessor: at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:458)
+05-26 11:45:38.517 2534 2034 W ErrorProcessor: ... 10 more
+05-26 11:45:38.517 2534 2034 I AudioController: internalShutdown
+05-26 11:45:38.531 2534 2534 I MicroDetector: Keeping mic open: false
+05-26 11:45:38.531 2534 2534 I MicroDetectionWorker: #onError(false)
+05-26 11:45:38.531 2534 2038 I DeviceStateChecker: DeviceStateChecker cancelled
+05-26 11:45:38.590 7086 2099 W gle.android.gm: Long monitor contention with owner [dko] processing com.google.android.gms.growth.service.PackageActionReceiver for action android.intent.action.PACKAGE_REMOVED (2077) at boolean android.os.BinderProxy.transactNative(int, android.os.Parcel, android.os.Parcel, int)(Binder.java:-2) waiters=0 in void dkr.run() for 276ms
+05-26 11:45:38.714 7086 7246 I Icing : IndexChimeraService.getServiceInterface callingPackage=com.google.android.gms componentName=AppsCorpus serviceId=36
+05-26 11:45:38.720 7086 7237 I Icing : IndexChimeraService.getServiceInterface callingPackage=com.google.android.gms componentName=AppsCorpus serviceId=32
+05-26 11:45:38.866 1876 1876 W system_server: Long monitor contention with owner android.bg (1890) at android.content.res.Resources android.app.ResourcesManager.getOrCreateResources(android.os.IBinder, android.content.res.ResourcesKey, java.lang.ClassLoader)(ResourcesManager.java:736) waiters=0 in android.content.res.Resources android.app.ResourcesManager.getOrCreateResources(android.os.IBinder, android.content.res.ResourcesKey, java.lang.ClassLoader) for 110ms
+05-26 11:45:38.869 7086 7256 I Icing : Usage reports ok 1, Failed Usage reports 0, indexed 0, rejected 0, imm upload false
+05-26 11:45:38.889 7086 7256 I Icing : Usage reports ok 0, Failed Usage reports 0, indexed 0, rejected 0, imm upload false
+05-26 11:45:38.952 1876 1876 W Looper : Slow dispatch took 532ms main h=android.app.ActivityThread$H c=android.app.-$$Lambda$LoadedApk$ReceiverDispatcher$Args$_BumDX2UKsnxLVrE6UJsJZkotuA@4adfab m=0
+05-26 11:45:38.963 1876 1876 I Telecom : DefaultDialerCache: Refreshing default dialer for user 0: now com.google.android.dialer: DDC.oR@AYE
+05-26 11:45:38.977 1876 1892 D AutofillUI: destroySaveUiUiThread(): already destroyed
+05-26 11:45:38.982 1876 1890 D AutofillManagerServiceImpl: Set component for user 0 as AutofillServiceInfo[ServiceInfo{d54b1a1 com.google.android.gms.autofill.service.AutofillService}, settings:com.google.android.gms.autofill.ui.AutofillSettingsActivity, hasCompatPckgs:false]
+05-26 11:45:39.008 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@3c2a392)
+05-26 11:45:39.009 7086 7246 I Icing : IndexChimeraService.getServiceInterface callingPackage=com.google.android.gms componentName=AppsCorpus serviceId=32
+05-26 11:45:39.010 7086 7237 I Icing : IndexChimeraService.getServiceInterface callingPackage=com.google.android.gms componentName=AppsCorpus serviceId=36
+05-26 11:45:39.019 2482 6321 W ctxmgr : [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):IndoorOutdoorProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2482). Was: 3 for 57, account#-517948760#
+05-26 11:45:39.050 7086 7256 I Icing : Usage reports ok 0, Failed Usage reports 0, indexed 0, rejected 0, imm upload false
+05-26 11:45:39.066 7086 7256 I Icing : Usage reports ok 0, Failed Usage reports 0, indexed 0, rejected 0, imm upload false
+05-26 11:45:39.509 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@eb4dfc6)
+05-26 11:45:39.510 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@eb4dfc6)
+05-26 11:45:39.943 7086 11609 I Icing : Indexing com.google.android.gms-apps from com.google.android.gms
+05-26 11:45:40.042 7086 11609 I Icing : Indexing com.google.android.gms-internal.3p:MobileApplication from com.google.android.gms
+05-26 11:45:40.052 7086 11609 I Icing : Indexing done com.google.android.gms-apps
+05-26 11:45:40.056 7086 11609 I Icing : Indexing done com.google.android.gms-internal.3p:MobileApplication
+05-26 11:45:40.109 7086 11609 I Icing : Indexing com.google.android.gms-apps from com.google.android.gms
+05-26 11:45:40.112 7086 11609 I Icing : Indexing done com.google.android.gms-apps
+05-26 11:45:40.123 2000 2000 I IcingNotification: Received intent: Intent { act=com.google.android.gms.icing.IME_NOTIFICATION flg=0x10 pkg=com.google.android.inputmethod.latin (has extras) }
+05-26 11:45:40.126 2000 2000 I IcingNotification: Received intent: Intent { act=com.google.android.gms.icing.IME_NOTIFICATION flg=0x10 pkg=com.google.android.inputmethod.latin (has extras) }
+05-26 11:45:40.510 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@855cb87)
+05-26 11:45:40.511 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@855cb87)
+05-26 11:45:40.936 1876 1888 I ActivityManager: Force stopping org.mozilla.fennec_aurora appid=10092 user=0: clear data
+05-26 11:45:40.937 1876 1891 I ActivityManager: Force stopping org.mozilla.fennec_aurora appid=10092 user=-1: clearApplicationUserData
+05-26 11:45:40.941 1876 1876 W Looper : Drained
+05-26 11:45:40.946 1876 1888 D ZenLog : config: removeAutomaticZenRules,ZenModeConfig[user=0,allowAlarms=true,allowMedia=true,allowSystem=false,allowReminders=false,allowEvents=false,allowCalls=true,allowRepeatCallers=true,allowMessages=false,allowCallsFrom=stars,allowMessagesFrom=contacts,suppressedVisualEffects=511,areChannelsBypassingDnd=false,automaticRules={EVENTS_DEFAULT_RULE=ZenRule[enabled=false,snoozing=false,name=Event,zenMode=ZEN_MODE_IMPORTANT_INTERRUPTIONS,conditionId=condition://android/event?userId=-10000&calendar=&reply=1,condition=Condition[id=condition://android/event?userId=-10000&calendar=&reply=1,summary=...,line1=...,line2=...,icon=0,state=STATE_FALSE,flags=2],component=ComponentInfo{android/com.android.server.notification.EventConditionProvider},id=EVENTS_DEFAULT_RULE,creationTime=1587308662810,enabler=null], EVERY_NIGHT_DEFAULT_RULE=ZenRule[enabled=false,snoozing=false,name=Sleeping,zenMode=ZEN_MODE_IMPORTANT_INTERRUPTIONS,conditionId=condition://android/schedule?days=1.2.3.4.5.6.7&start=22.0&end=7.0&exitAtAlarm=true,condition=Condition[id=condition://android/schedule?days=1.2.3.4.5.6.7&start=22.0&end=7.0&exitAtAlarm=true,summary=...,line1=...,line2=...,icon=0,state=STATE_FALSE,flags=2],component=ComponentInfo{android/com.android.server.notification.ScheduleConditionProvider},id=EVERY_NIGHT_DEFAULT_RULE,creationTime=1587308662810,enabler=null]},manualRule=null],Diff[]
+05-26 11:45:40.946 1876 1888 I ConditionProviders: Disallowing condition provider org.mozilla.fennec_aurora
+05-26 11:45:40.946 1876 1876 D ZenLog : set_zen_mode: off,removeAutomaticZenRules
+05-26 11:45:40.971 1739 1739 I keystore: clear_uid 10092
+05-26 11:45:41.009 2482 2482 I GeofencerStateMachine: removeGeofences: removeRequest=RemoveGeofencingRequest[REMOVE_ALL packageName=org.mozilla.fennec_aurora]
+05-26 11:45:41.010 2131 2131 D CarrierSvcBindHelper: No carrier app for: 0
+05-26 11:45:41.011 1876 1890 E system_server: No package ID 7f found for ID 0x7f08019f.
+05-26 11:45:41.011 1876 1890 E system_server: No package ID 7f found for ID 0x7f130122.
+05-26 11:45:41.011 1876 1890 E system_server: No package ID 7f found for ID 0x7f130122.
+05-26 11:45:41.011 1876 1890 E system_server: No package ID 7f found for ID 0x7f08019d.
+05-26 11:45:41.011 1876 1890 E system_server: No package ID 7f found for ID 0x7f130121.
+05-26 11:45:41.011 1876 1890 E system_server: No package ID 7f found for ID 0x7f130121.
+05-26 11:45:41.020 2116 2116 W oid.documentsu: Unexpected CPU variant for X86 using defaults: x86
+05-26 11:45:41.023 1876 1893 I ActivityManager: Start proc 2116:com.android.documentsui/u0a37 for broadcast com.android.documentsui/.PackageReceiver
+05-26 11:45:41.031 1876 1876 I GnssLocationProvider: WakeLock acquired by sendMessage(SET_REQUEST, 0, com.android.server.location.GnssLocationProvider$GpsRequest@8410e20)
+05-26 11:45:41.032 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(SET_REQUEST, 0, com.android.server.location.GnssLocationProvider$GpsRequest@8410e20)
+05-26 11:45:41.034 1560 1573 D vold : Remounting 10092 as mode read
+05-26 11:45:41.066 1560 1573 D vold : Remounting 10092 as mode write
+05-26 11:45:41.067 1876 1876 I GnssLocationProvider: WakeLock acquired by sendMessage(SET_REQUEST, 0, com.android.server.location.GnssLocationProvider$GpsRequest@902a47f)
+05-26 11:45:41.068 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(SET_REQUEST, 0, com.android.server.location.GnssLocationProvider$GpsRequest@902a47f)
+05-26 11:45:41.073 2116 2116 I oid.documentsu: The ClassLoaderContext is a special shared library.
+05-26 11:45:41.158 2171 2171 W oid.apps.photo: Unexpected CPU variant for X86 using defaults: x86
+05-26 11:45:41.160 1876 1893 I ActivityManager: Start proc 2171:com.google.android.apps.photos/u0a61 for broadcast com.google.android.apps.photos/com.google.android.libraries.social.mediastoresync.reset.impl.MediaStoreClearedReceiver
+05-26 11:45:41.178 1876 3809 I ActivityManager: Force stopping org.mozilla.fennec_aurora appid=10092 user=-1: set debug app
+05-26 11:45:41.214 1876 1893 I ActivityManager: Start proc 2211:com.google.android.apps.docs/u0a69 for content provider com.google.android.apps.docs/.storagebackend.StorageBackendContentProvider
+05-26 11:45:41.215 2211 2211 W droid.apps.doc: Unexpected CPU variant for X86 using defaults: x86
+05-26 11:45:41.221 2171 2171 I oid.apps.photo: The ClassLoaderContext is a special shared library.
+05-26 11:45:41.226 1876 1888 I ActivityManager: START u0 {flg=0x10000000 cmp=org.mozilla.fennec_aurora/org.mozilla.fenix.HomeActivity (has extras)} from uid 0
+05-26 11:45:41.263 1876 2453 I ActivityManager: Killing 21133:com.google.android.dialer/u0a28 (adj 906): empty for 3589s
+05-26 11:45:41.264 1876 1894 W libprocessgroup: kill(-21133, 9) failed: No such process
+05-26 11:45:41.278 2211 2211 I droid.apps.doc: The ClassLoaderContext is a special shared library.
+05-26 11:45:41.286 1623 5774 D gralloc_ranchu: gralloc_alloc: Creating ashmem region of size 9334784
+05-26 11:45:41.290 1876 1893 I ActivityManager: Start proc 2233:org.mozilla.fennec_aurora/u0a92 for activity org.mozilla.fennec_aurora/org.mozilla.fenix.HomeActivity
+05-26 11:45:41.292 2211 2211 I droid.apps.doc: The ClassLoaderContext is a special shared library.
+05-26 11:45:41.295 2233 2233 W a.fennec_auror: Unexpected CPU variant for X86 using defaults: x86
+05-26 11:45:41.301 1733 2119 D : HostConnection::get() New Host Connection established 0xede98140, tid 2119
+05-26 11:45:41.303 1876 1940 D : HostConnection::get() New Host Connection established 0xd3ab0cc0, tid 1940
+05-26 11:45:41.305 1876 1894 W libprocessgroup: kill(-21133, 9) failed: No such process
+05-26 11:45:41.324 1734 1734 I Zygote : Process 21133 exited due to signal (9)
+05-26 11:45:41.344 1876 1894 W libprocessgroup: kill(-21133, 9) failed: No such process
+05-26 11:45:41.344 1876 1894 I libprocessgroup: Successfully killed process cgroup uid 10028 pid 21133 in 80ms
+05-26 11:45:41.379 2233 2233 W ActivityThread: Application org.mozilla.fennec_aurora can be debugged on port 8100...
+05-26 11:45:41.382 2233 2233 I a.fennec_auror: The ClassLoaderContext is a special shared library.
+05-26 11:45:41.408 1904 1911 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:41.409 1904 1911 D : HostConnection::get() New Host Connection established 0xe6d43140, tid 1911
+05-26 11:45:41.409 1904 1911 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:41.410 1904 1911 D : HostConnection::get() New Host Connection established 0xe6d43140, tid 1911
+05-26 11:45:41.410 1904 1911 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:41.410 1904 1911 D : HostConnection::get() New Host Connection established 0xe6d43140, tid 1911
+05-26 11:45:41.410 1904 1911 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:41.412 2534 2825 D EGL_emulation: eglMakeCurrent: 0xe8b06aa0: ver 3 0 (tinfo 0xe8b03b50)
+05-26 11:45:41.415 1733 1749 D : HostConnection::get() New Host Connection established 0xede98980, tid 1749
+05-26 11:45:41.415 1904 1911 D : HostConnection::get() New Host Connection established 0xe6d43140, tid 1911
+05-26 11:45:41.415 1733 1749 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:41.415 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:41.416 1733 1749 D : HostConnection::get() New Host Connection established 0xede98980, tid 1749
+05-26 11:45:41.416 1733 1749 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:41.416 1733 1749 D : HostConnection::get() New Host Connection established 0xede98980, tid 1749
+05-26 11:45:41.417 1733 1749 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:41.417 1733 1749 D : HostConnection::get() New Host Connection established 0xede98980, tid 1749
+05-26 11:45:41.417 1733 1749 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:41.426 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:41.429 2499 2681 D EGL_emulation: eglMakeCurrent: 0xd35359e0: ver 3 0 (tinfo 0xd353b4f0)
+05-26 11:45:41.431 1733 2119 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:41.431 1733 2119 D : HostConnection::get() New Host Connection established 0xede98140, tid 2119
+05-26 11:45:41.431 1733 2119 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:41.432 1733 2119 D : HostConnection::get() New Host Connection established 0xede98140, tid 2119
+05-26 11:45:41.432 1733 2119 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:41.432 1733 2119 D : HostConnection::get() New Host Connection established 0xede98140, tid 2119
+05-26 11:45:41.433 1733 2119 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:41.435 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:41.479 2012 2265 I chatty : uid=10024(com.android.systemui) RenderThread identical 5 lines
+05-26 11:45:41.486 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:41.496 2233 2233 D FirebaseApp: com.google.firebase.auth.FirebaseAuth is not linked. Skipping initialization.
+05-26 11:45:41.496 2233 2233 D FirebaseApp: com.google.firebase.crash.FirebaseCrash is not linked. Skipping initialization.
+05-26 11:45:41.496 2233 2233 I FirebaseInitProvider: FirebaseApp initialization successful
+05-26 11:45:41.496 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:41.505 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:41.516 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@c8be314)
+05-26 11:45:41.517 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@c8be314)
+05-26 11:45:41.521 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:41.533 2012 2265 I chatty : uid=10024(com.android.systemui) RenderThread identical 2 lines
+05-26 11:45:41.538 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:41.541 2233 2282 W a.fennec_auror: Accessing hidden method Landroid/content/res/Resources$Theme;->rebase()V (dark greylist, linking)
+05-26 11:45:41.573 2233 2233 D FennecProfile: profiles.ini: false
+05-26 11:45:41.573 2233 2233 D FennecProfile: No profiles found
+05-26 11:45:41.580 1876 16736 I ActivityManager: Killing 12513:com.google.android.partnersetup/u0a23 (adj 906): empty for 2263s
+05-26 11:45:41.580 1876 1894 W libprocessgroup: kill(-12513, 9) failed: No such process
+05-26 11:45:41.580 2233 2233 D FennecMigrator: No migrations to run. Fennec install - false.
+05-26 11:45:41.590 7086 2094 I LocationSettingsChecker: Removing dialog suppression flag for package org.mozilla.fennec_aurora
+05-26 11:45:41.592 2233 2233 D FenixApplication: Initializing Glean (uploadEnabled=true, isFennec=true)
+05-26 11:45:41.610 7086 11609 I Icing : doRemovePackageData org.mozilla.fennec_aurora
+05-26 11:45:41.625 1876 1894 W libprocessgroup: kill(-12513, 9) failed: No such process
+05-26 11:45:41.635 1734 1734 I Zygote : Process 12513 exited due to signal (9)
+05-26 11:45:41.647 2233 2306 D RustNativeSupport: findMegazordLibraryName(viaduct, 0.58.1
+05-26 11:45:41.647 2233 2306 D RustNativeSupport: lib in use: none
+05-26 11:45:41.647 2233 2306 D RustNativeSupport: lib configured: megazord
+05-26 11:45:41.647 2233 2306 D RustNativeSupport: lib version configured: 0.58.1
+05-26 11:45:41.647 2233 2306 D RustNativeSupport: settled on megazord
+05-26 11:45:41.668 1876 1894 W libprocessgroup: kill(-12513, 9) failed: No such process
+05-26 11:45:41.668 1876 1894 I libprocessgroup: Successfully killed process cgroup uid 10023 pid 12513 in 87ms
+05-26 11:45:41.708 2211 2321 I GAv4 : Google Analytics 10.2.98 is starting up. To enable debug logging on a device run:
+05-26 11:45:41.708 2211 2321 I GAv4 : adb shell setprop log.tag.GAv4 DEBUG
+05-26 11:45:41.708 2211 2321 I GAv4 : adb logcat -s GAv4
+05-26 11:45:41.723 2211 2325 W GAv4 : AnalyticsReceiver is not registered or is disabled. Register the receiver for reliable dispatching on non-Google Play devices. See http://goo.gl/8Rd3yj for instructions.
+05-26 11:45:41.742 2233 2233 W ActivityThread: ClassLoader.loadClass: The class loader returned by Thread.getContextClassLoader() may fail for processes that host multiple applications. You should explicitly specify a context class loader. For example: Thread.setContextClassLoader(getClass().getClassLoader());
+05-26 11:45:41.756 2233 2233 I GeckoRuntime: Adding debug configuration from: /data/local/tmp/org.mozilla.fennec_aurora-geckoview-config.yaml
+05-26 11:45:41.756 2233 2233 D GeckoDebugConfig: Adding environment variables from debug config: {MOZ_CRASHREPORTER=1, MOZ_CRASHREPORTER_NO_REPORT=1, MOZ_CRASHREPORTER_SHUTDOWN=1}
+05-26 11:45:41.757 2233 2233 D GeckoDebugConfig: Adding arguments from debug config: [-marionette, -profile, /mnt/sdcard/org.mozilla.fennec_aurora-geckodriver-profile]
+05-26 11:45:41.757 2233 2233 D GeckoThread: State changed to LAUNCHED
+05-26 11:45:41.758 2233 2328 I GeckoThread: preparing to run Gecko
+05-26 11:45:41.760 2233 2282 I FA : Collection disabled with firebase_analytics_collection_enabled=0
+05-26 11:45:41.760 2233 2328 D GeckoThread: env var: MOZ_CRASHREPORTER=1
+05-26 11:45:41.760 2233 2328 D GeckoThread: env var: MOZ_CRASHREPORTER_NO_REPORT=1
+05-26 11:45:41.760 2233 2328 D GeckoThread: env var: MOZ_CRASHREPORTER_SHUTDOWN=1
+05-26 11:45:41.768 2211 2211 W FieldDefinition: Ignoring isIndexed constraint as field also has uniqueness constraint (on just this field, and therefore SQLite will have to create an index on that. For field: com.google.android.apps.docs.database.common.FieldDefinition$a@caa0d29
+05-26 11:45:41.774 2233 2282 I FA : App measurement is starting up, version: 12780
+05-26 11:45:41.774 2233 2282 I FA : To enable debug logging run: adb shell setprop log.tag.FA VERBOSE
+05-26 11:45:41.777 2233 2282 I FA : To enable faster debug mode event logging run:
+05-26 11:45:41.777 2233 2282 I FA : adb shell setprop debug.firebase.analytics.app org.mozilla.fennec_aurora
+05-26 11:45:41.781 2211 2325 W GAv4 : CampaignTrackingReceiver is not registered, not exported or is disabled. Installation campaign tracking is not possible. See http://goo.gl/8Rd3yj for instructions.
+05-26 11:45:41.784 2233 2233 D GeckoRuntime: Lifecycle: onCreate
+05-26 11:45:41.788 2211 2325 W GAv4 : AnalyticsService not registered in the app manifest. Hits might not be delivered reliably. See http://goo.gl/8Rd3yj for instructions.
+05-26 11:45:41.836 2211 2228 W droid.apps.doc: Long wait of 3.298ms for Thread[14,tid=2325,Suspended,Thread*=0xe8b5c000,peer=0x1354c3e0,"measurement-1"] suspension!
+05-26 11:45:41.904 2233 2306 D RustNativeSupport: findMegazordLibraryName(rustlog, 0.58.1
+05-26 11:45:41.904 2233 2306 D RustNativeSupport: lib in use: none
+05-26 11:45:41.904 2233 2306 D RustNativeSupport: lib configured: megazord
+05-26 11:45:41.904 2233 2306 D RustNativeSupport: lib version configured: 0.58.1
+05-26 11:45:41.904 2233 2306 D RustNativeSupport: settled on megazord
+05-26 11:45:41.904 2233 2295 D libglean_ffi: glean_ffi: Android logging should be hooked up!
+05-26 11:45:41.905 2233 2295 I glean/Glean: Registering pings for mozilla.telemetry.glean.GleanMetrics.Pings
+05-26 11:45:41.906 2233 2295 I libglean_ffi: glean_core: Creating new Glean
+05-26 11:45:41.906 2233 2295 D libglean_ffi: glean_core::database: Database path: "/data/user/0/org.mozilla.fennec_aurora/glean_data/db"
+05-26 11:45:41.907 2233 2295 I libglean_ffi: glean_core::database: Database initialized
+05-26 11:45:41.907 2233 2328 D GeckoThread: State changed to MOZGLUE_READY
+05-26 11:45:41.912 2233 2306 I rc_log_ffi::ios: rc_log adapter initialized!
+05-26 11:45:41.915 2233 2233 D GleanMetricsService: Enabling Glean.
+05-26 11:45:41.917 2233 2233 I AdjustMetricsService: No adjust token defined
+05-26 11:45:41.918 2233 2233 D PushConfig: Creating push configuration for autopush.
+05-26 11:45:41.927 2233 2233 I App : AutoPushFeature is configured, initializing it...
+05-26 11:45:41.928 2233 2233 I AutoPushFeature: Checking validity of push subscriptions.
+05-26 11:45:41.932 2233 2233 D FennecProfile: profiles.ini: false
+05-26 11:45:41.932 2233 2233 D FennecProfile: No profiles found
+05-26 11:45:41.942 2233 2328 W Settings: Setting animator_duration_scale has moved from android.provider.Settings.System to android.provider.Settings.Global, returning read-only global URI.
+05-26 11:45:41.944 2233 2328 E GeckoLibLoad: Load sqlite start
+05-26 11:45:41.980 2233 2306 W [WARNING][Leanplum]: [com.leanplum.internal.ActionManager::getLocationManager::8]: Geofencing support requires leanplum-location module and Google Play Services v8.1 and higher.
+05-26 11:45:41.980 2233 2306 W [WARNING][Leanplum]: Add this to your build.gradle file:
+05-26 11:45:41.980 2233 2306 W [WARNING][Leanplum]: implementation 'com.google.android.gms:play-services-location:8.3.0+'
+05-26 11:45:41.980 2233 2306 W [WARNING][Leanplum]: implementation 'com.leanplum:leanplum-location:+'
+05-26 11:45:41.986 2233 2328 E GeckoLibLoad: Load sqlite done
+05-26 11:45:41.986 2233 2328 E GeckoLibLoad: Load nss start
+05-26 11:45:41.986 2233 2328 E GeckoLibLoad: Load nss done
+05-26 11:45:41.991 2233 2295 I libglean_ffi: glean_ffi: Glean initialized
+05-26 11:45:42.007 2233 2306 I [INFO][Leanplum]: [com.leanplum.monitoring.ExceptionHandler::setContext::6]: LeanplumExceptionHandler could not initialize Exception Reporting.This is expected if you have not included the leanplum-monitoring module
+05-26 11:45:42.049 2116 2158 I ProvidersCache: Provider returned no roots. Possibly naughty: com.google.android.apps.docs.storage
+05-26 11:45:42.057 2233 2233 D FennecMigrator: This is not a Fennec installation. No migration needed.
+05-26 11:45:42.070 2233 2270 I a.fennec_auror: Background concurrent copying GC freed 13546(2032KB) AllocSpace objects, 4(208KB) LOS objects, 49% free, 2MB/4MB, paused 1.316ms total 192.952ms
+05-26 11:45:42.101 2233 2233 W a.fennec_auror: Accessing hidden method Landroid/content/res/Resources$Theme;->rebase()V (dark greylist, reflection)
+05-26 11:45:42.105 2233 2233 I ResourcesCompat: Failed to retrieve rebase() method
+05-26 11:45:42.105 2233 2233 I ResourcesCompat: java.lang.NoSuchMethodException: rebase []
+05-26 11:45:42.105 2233 2233 I ResourcesCompat: at java.lang.Class.getMethod(Class.java:2068)
+05-26 11:45:42.105 2233 2233 I ResourcesCompat: at java.lang.Class.getDeclaredMethod(Class.java:2047)
+05-26 11:45:42.105 2233 2233 I ResourcesCompat: at androidx.core.content.res.ResourcesCompat$ThemeCompat$ImplApi23.rebase(ResourcesCompat.java:3)
+05-26 11:45:42.105 2233 2233 I ResourcesCompat: at androidx.appcompat.app.AppCompatActivity.attachBaseContext(AppCompatActivity.java:80)
+05-26 11:45:42.105 2233 2233 I ResourcesCompat: at mozilla.components.support.locale.LocaleAwareAppCompatActivity.attachBaseContext(LocaleAwareAppCompatActivity.kt:2)
+05-26 11:45:42.105 2233 2233 I ResourcesCompat: at android.app.Activity.attach(Activity.java:7051)
+05-26 11:45:42.105 2233 2233 I ResourcesCompat: at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2873)
+05-26 11:45:42.105 2233 2233 I ResourcesCompat: at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3048)
+05-26 11:45:42.105 2233 2233 I ResourcesCompat: at android.app.servertransaction.LaunchActivityItem.execute(LaunchActivityItem.java:78)
+05-26 11:45:42.105 2233 2233 I ResourcesCompat: at android.app.servertransaction.TransactionExecutor.executeCallbacks(TransactionExecutor.java:108)
+05-26 11:45:42.105 2233 2233 I ResourcesCompat: at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:68)
+05-26 11:45:42.105 2233 2233 I ResourcesCompat: at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1808)
+05-26 11:45:42.105 2233 2233 I ResourcesCompat: at android.os.Handler.dispatchMessage(Handler.java:106)
+05-26 11:45:42.105 2233 2233 I ResourcesCompat: at android.os.Looper.loop(Looper.java:193)
+05-26 11:45:42.105 2233 2233 I ResourcesCompat: at android.app.ActivityThread.main(ActivityThread.java:6669)
+05-26 11:45:42.105 2233 2233 I ResourcesCompat: at java.lang.reflect.Method.invoke(Native Method)
+05-26 11:45:42.105 2233 2233 I ResourcesCompat: at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+05-26 11:45:42.105 2233 2233 I ResourcesCompat: at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+05-26 11:45:42.139 2233 2347 I [INFO][Leanplum]: [com.leanplum.LeanplumFcmProvider::isManifestSetup::11]: Firebase Messaging is setup correctly.
+05-26 11:45:42.314 2233 2328 E GeckoLibLoad: Loaded libs in 328.210000ms total, 10ms(260ms) user, 140ms(330ms) system, 9(42) faults
+05-26 11:45:42.314 2233 2328 D GeckoThread: State changed to LIBS_READY
+05-26 11:45:42.316 2233 2328 W GeckoThread: zerdatime 190862512 - runGecko
+05-26 11:45:42.317 2233 2328 D GeckoProfile: Loading profile at: null name: default
+05-26 11:45:42.317 2233 2328 D GeckoProfile: Created new profile dir.
+05-26 11:45:42.321 2233 2328 I GeckoProfile: Enqueuing profile init.
+05-26 11:45:42.327 2233 2328 D GeckoProfile: Found profile dir: /data/user/0/org.mozilla.fennec_aurora/files/mozilla/wftld4kh.default
+05-26 11:45:42.327 2233 2328 D GeckoProfile: Attempting to write new client ID properties
+05-26 11:45:42.329 2233 2328 D GeckoProfile: Creating profile dir: /data/user/0/org.mozilla.fennec_aurora/files/mozilla/wftld4kh.default
+05-26 11:45:42.338 1876 3809 I ActivityManager: Killing 27012:com.google.android.youtube/u0a72 (adj 906): empty for 1936s
+05-26 11:45:42.340 1876 1894 W libprocessgroup: kill(-27012, 9) failed: No such process
+05-26 11:45:42.384 1876 1894 W libprocessgroup: kill(-27012, 9) failed: No such process
+05-26 11:45:42.391 2233 2328 I Gecko:DumpUtils: Fifo watcher disabled via pref.
+05-26 11:45:42.401 2233 2295 I glean/MetricsPingSched: The application just updated. Send metrics ping now.
+05-26 11:45:42.402 2233 2295 I glean/MetricsPingSched: Collecting the 'metrics' ping, now = Tue May 26 11:45:42 EDT 2020, startup = true, reason = upgrade
+05-26 11:45:42.403 2233 2295 I libglean_ffi: glean_core::ping: Collecting metrics
+05-26 11:45:42.403 2233 2295 I libglean_ffi: glean_core::ping: Storage for metrics empty. Bailing out.
+05-26 11:45:42.403 2233 2295 I libglean_ffi: glean_core: No content for ping 'metrics', therefore no ping queued.
+05-26 11:45:42.404 2233 2295 D glean/MetricsPingSched: Scheduling the 'metrics' ping in 58457608ms
+05-26 11:45:42.407 1876 2734 D ConnectivityService: ConnectivityService NetworkRequestInfo binderDied(NetworkRequest [ LISTEN id=233, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10072] ], android.os.BinderProxy@f0d2037)
+05-26 11:45:42.408 1734 1734 I Zygote : Process 27012 exited due to signal (9)
+05-26 11:45:42.430 1876 1894 W libprocessgroup: kill(-27012, 9) failed: No such process
+05-26 11:45:42.430 1876 1894 I libprocessgroup: Successfully killed process cgroup uid 10072 pid 27012 in 89ms
+05-26 11:45:42.438 1740 16222 W MediaAnalyticsItem: Failed to record: [1:drm.vendor.Google.WidevineCDM:5770:-1:com.google.android.youtube:0:-1:1:0:0:] [forcenew=0]
+05-26 11:45:42.438 1740 16222 E PluginMetricsReporting: selfrecord() returned false. sessioId 5770
+05-26 11:45:42.487 2233 2233 I FirefoxAccountStateMachine: Enabling/updating sync with a new SyncConfig: SyncConfig(supportedEngines=[mozilla.components.service.fxa.SyncEngine$History@83c4938, mozilla.components.service.fxa.SyncEngine$Bookmarks@94a6d11, mozilla.components.service.fxa.SyncEngine$Passwords@e11ce76], syncPeriodInMinutes=240)
+05-26 11:45:42.491 2233 2233 I BgSyncManager: Periodic syncing enabled at a 240 interval
+05-26 11:45:42.492 2233 2233 I FirefoxAccountStateMachine: Sync is enabled
+05-26 11:45:42.496 2233 2328 D GeckoSysInfo: System memory: 1494MB.
+05-26 11:45:42.499 2233 2328 D GeckoThread: State changed to JNI_READY
+05-26 11:45:42.510 2233 2384 I FirefoxAccountStateMachine: Processing event Event$Init for state Start. Next state is Start
+05-26 11:45:42.513 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@fb8e6c5)
+05-26 11:45:42.514 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@fb8e6c5)
+05-26 11:45:42.530 2233 2233 E ActivityThread: Failed to find provider info for org.mozilla.fennec_aurora.fxa.auth
+05-26 11:45:42.536 2233 2383 D ServiceAllocator: org.mozilla.gecko.process.GeckoChildProcessServices$tab0 updateBindings: BACKGROUND priority, 0 importance, 2 successful binds, 0 failed binds, 0 successful unbinds, 0 failed unbinds
+05-26 11:45:42.542 2393 2393 W nec_aurora:tab: Unexpected CPU variant for X86 using defaults: x86
+05-26 11:45:42.543 1876 1893 I ActivityManager: Start proc 2393:org.mozilla.fennec_aurora:tab0/u0a92 for service org.mozilla.fennec_aurora/org.mozilla.gecko.process.GeckoChildProcessServices$tab0
+05-26 11:45:42.567 2233 2233 D GeckoRuntime: Lifecycle: onStart
+05-26 11:45:42.578 2233 2233 D GeckoRuntime: Lifecycle: onResume
+05-26 11:45:42.581 2393 2393 I nec_aurora:tab: The ClassLoaderContext is a special shared library.
+05-26 11:45:42.582 1739 1739 I keystore: del USRPKEY_org.mozilla.fennec_aurora 10092
+05-26 11:45:42.582 2233 2233 D GeckoNetworkManager: Incoming event start for state OffNoListeners -> OnNoListeners
+05-26 11:45:42.583 1739 1739 I keystore: del USRCERT_org.mozilla.fennec_aurora 10092
+05-26 11:45:42.584 1739 1739 I keystore: del CACERT_org.mozilla.fennec_aurora 10092
+05-26 11:45:42.584 2233 2233 D GeckoNetworkManager: New network state: UP, WIFI, WIFI
+05-26 11:45:42.585 2233 2233 D OpenGLRenderer: Skia GL Pipeline
+05-26 11:45:42.599 2233 2347 D NetworkSecurityConfig: No Network Security Config specified, using platform default
+05-26 11:45:42.603 1733 2205 E SurfaceFlinger: ro.sf.lcd_density must be defined as a build property
+05-26 11:45:42.639 2233 2384 I FirefoxAccountStateMachine: Ran 'Event$Init' side-effects for state Start, got successive event Event$AccountNotFound
+05-26 11:45:42.640 2233 2384 I FirefoxAccountStateMachine: Processing event Event$AccountNotFound for state Start. Next state is NotAuthenticated
+05-26 11:45:42.641 2233 2384 D RustNativeSupport: findMegazordLibraryName(fxaclient, 0.58.1
+05-26 11:45:42.641 2233 2384 D RustNativeSupport: lib in use: none
+05-26 11:45:42.641 2233 2384 D RustNativeSupport: lib configured: megazord
+05-26 11:45:42.641 2233 2384 D RustNativeSupport: lib version configured: 0.58.1
+05-26 11:45:42.641 2233 2384 D RustNativeSupport: settled on megazord
+05-26 11:45:42.642 2233 2384 D fxaclient_ffi: fxa_new
+05-26 11:45:42.668 2233 2295 I libglean_ffi: glean_core::ping: Collecting baseline
+05-26 11:45:42.678 2393 2393 D GeckoThread: State changed to LAUNCHED
+05-26 11:45:42.684 2233 2384 W FirefoxAccountStateMachine: Got invalid event Event$Init for state NotAuthenticated.
+05-26 11:45:42.685 2233 2233 D GeckoNetworkManager: Incoming event receivedUpdate for state OnNoListeners -> OnNoListeners
+05-26 11:45:42.688 2233 2233 D GeckoNetworkManager: New network state: UP, WIFI, WIFI
+05-26 11:45:42.692 2393 2429 I GeckoThread: preparing to run Gecko
+05-26 11:45:42.696 2233 2295 D libglean_ffi: glean_core::ping: Storing ping '61b99eaa-d33b-4833-b80d-f6b14990a92f' at '/data/user/0/org.mozilla.fennec_aurora/glean_data/pending_pings/61b99eaa-d33b-4833-b80d-f6b14990a92f'
+05-26 11:45:42.696 2233 2295 I libglean_ffi: glean_core: The ping 'baseline' was submitted and will be sent as soon as possible
+05-26 11:45:42.707 7086 7097 I gle.android.gm: Background concurrent copying GC freed 3920(431KB) AllocSpace objects, 12(624KB) LOS objects, 49% free, 3MB/6MB, paused 5.464ms total 61.729ms
+05-26 11:45:42.756 1876 2734 D ConnectivityService: requestNetwork for uid/pid:10092/2233 NetworkRequest [ TRACK_DEFAULT id=234, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10092] ]
+05-26 11:45:42.756 1876 1975 D WIFI : got request NetworkRequest [ TRACK_DEFAULT id=234, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10092] ] with score 60
+05-26 11:45:42.757 1876 1975 D WIFI_UT : got request NetworkRequest [ TRACK_DEFAULT id=234, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10092] ] with score 60
+05-26 11:45:42.763 2131 2131 D PhoneSwitcherNetworkRequstListener: got request NetworkRequest [ TRACK_DEFAULT id=234, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10092] ] with score 60
+05-26 11:45:42.795 1623 5774 D gralloc_ranchu: gralloc_alloc: Creating ashmem region of size 9334784
+05-26 11:45:42.807 1623 1623 D gralloc_ranchu: gralloc_alloc: Creating ashmem region of size 9334784
+05-26 11:45:42.816 1623 5774 D gralloc_ranchu: gralloc_alloc: Creating ashmem region of size 9334784
+05-26 11:45:42.822 2233 2270 I a.fennec_auror: Background concurrent copying GC freed 9948(1151KB) AllocSpace objects, 18(936KB) LOS objects, 49% free, 3MB/6MB, paused 1.727ms total 105.781ms
+05-26 11:45:42.856 2233 2233 D MigrationPushRenewer: Migration state: NONE
+05-26 11:45:42.856 2233 2233 D MigrationTelemetryListener: Migration state: NONE
+05-26 11:45:42.859 2233 2419 D : HostConnection::get() New Host Connection established 0xd0db54c0, tid 2419
+05-26 11:45:42.869 2233 2419 I ConfigStore: android::hardware::configstore::V1_0::ISurfaceFlingerConfigs::hasWideColorDisplay retrieved: 0
+05-26 11:45:42.870 2233 2419 I ConfigStore: android::hardware::configstore::V1_0::ISurfaceFlingerConfigs::hasHDRDisplay retrieved: 0
+05-26 11:45:42.870 2233 2419 I OpenGLRenderer: Initialized EGL, version 1.4
+05-26 11:45:42.870 2233 2419 D OpenGLRenderer: Swap behavior 1
+05-26 11:45:42.870 2233 2419 W OpenGLRenderer: Failed to choose config with EGL_SWAP_BEHAVIOR_PRESERVED, retrying without...
+05-26 11:45:42.870 2233 2419 D OpenGLRenderer: Swap behavior 0
+05-26 11:45:42.873 2233 2419 D EGL_emulation: eglCreateContext: 0xc8c71840: maj 3 min 0 rcv 3
+05-26 11:45:42.875 2233 2419 D EGL_emulation: eglMakeCurrent: 0xc8c71840: ver 3 0 (tinfo 0xc8c501c0)
+05-26 11:45:42.879 1733 2205 E SurfaceFlinger: ro.sf.lcd_density must be defined as a build property
+05-26 11:45:42.960 2233 2419 D EGL_emulation: eglMakeCurrent: 0xc8c71840: ver 3 0 (tinfo 0xc8c501c0)
+05-26 11:45:43.006 2233 2468 D glean/PingUploadWorker: Processing persisted pings at /data/user/0/org.mozilla.fennec_aurora/glean_data/pending_pings
+05-26 11:45:43.008 1876 1899 I ActivityManager: Displayed org.mozilla.fennec_aurora/org.mozilla.fenix.HomeActivity: +1s743ms
+05-26 11:45:43.014 2000 2000 I GoogleInputMethod: onFinishInput() : Dummy InputConnection bound
+05-26 11:45:43.015 2000 2000 I GoogleInputMethod: onStartInput() : Dummy InputConnection bound
+05-26 11:45:43.018 2233 2468 D glean/PingUploadWorker: Processing ping: 61b99eaa-d33b-4833-b80d-f6b14990a92f
+05-26 11:45:43.018 2233 2233 D GeckoNetworkManager: Incoming event enableNotifications for state OnNoListeners -> OnWithListeners
+05-26 11:45:43.020 2233 2468 D glean/ConceptFetchHttpUploader: Submitting ping to: https://incoming.telemetry.mozilla.org/submit/org-mozilla-fennec-aurora/baseline/1/61b99eaa-d33b-4833-b80d-f6b14990a92f
+05-26 11:45:43.020 2233 2233 D GeckoNetworkManager: New network state: UP, WIFI, WIFI
+05-26 11:45:43.021 1876 1888 W ActivityManager: Receiver with filter android.content.IntentFilter@ffe2a5d already registered for pid 2233, callerPackage is org.mozilla.fennec_aurora
+05-26 11:45:43.031 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:43.033 2233 2233 E ActivityThread: Failed to find provider info for org.mozilla.fennec_aurora.fxa.auth
+05-26 11:45:43.044 2233 2233 D GeckoNetworkManager: Incoming event receivedUpdate for state OnWithListeners -> OnWithListeners
+05-26 11:45:43.049 2233 2233 D GeckoNetworkManager: New network state: UP, WIFI, WIFI
+05-26 11:45:43.133 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:43.180 2534 2038 I EventLogSendingHelper: Sending log events.
+05-26 11:45:43.191 1876 1940 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:43.192 1876 1940 D : HostConnection::get() New Host Connection established 0xd3ab0cc0, tid 1940
+05-26 11:45:43.194 1876 1940 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:43.194 1733 1749 W SurfaceFlinger: Attempting to set client state on removed layer: Splash Screen org.mozilla.fennec_aurora#0
+05-26 11:45:43.194 1733 1749 W SurfaceFlinger: Attempting to destroy on removed layer: Splash Screen org.mozilla.fennec_aurora#0
+05-26 11:45:43.207 1904 16172 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:43.208 1904 16172 D : HostConnection::get() New Host Connection established 0xe69102c0, tid 16172
+05-26 11:45:43.208 1904 16172 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:43.215 1904 16172 D : HostConnection::get() New Host Connection established 0xe69102c0, tid 16172
+05-26 11:45:43.239 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:43.259 2534 2038 I EventLogSendingHelper: Sending log events.
+05-26 11:45:43.342 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:43.344 2233 2233 I [INFO][Leanplum]: [com.leanplum.LeanplumCloudMessagingProvider::onRegistrationIdReceived::3]: Device registered for push notifications with registration token, erw6jzriO4Q:APA91bGwMfmPLEEo-IkdAvopxKRRxGkhNuoO0bTDwwhHPSQAPWUmrt76JeND8tsHPVb2qmlqFBOwOjLA9BrSWDJDtFrMAQ_yujW2xnawPYawyHt1AanpWHGqbi8rQmoX_D4DVLXwLNqm
+05-26 11:45:43.368 1733 1733 W SurfaceFlinger: couldn't log to binary event log: overflow.
+05-26 11:45:43.448 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:43.468 2233 2486 I [INFO][Leanplum]: [com.leanplum.LeanplumCloudMessagingProvider::onRegistrationIdReceived::3]: Device registered for push notifications with registration token, erw6jzriO4Q:APA91bGwMfmPLEEo-IkdAvopxKRRxGkhNuoO0bTDwwhHPSQAPWUmrt76JeND8tsHPVb2qmlqFBOwOjLA9BrSWDJDtFrMAQ_yujW2xnawPYawyHt1AanpWHGqbi8rQmoX_D4DVLXwLNqm
+05-26 11:45:43.469 2233 2488 I AutoPushFeature: Received a new registration token from push service.
+05-26 11:45:43.473 2233 2488 D RustNativeSupport: findMegazordLibraryName(push, 0.58.1
+05-26 11:45:43.473 2233 2488 D RustNativeSupport: lib in use: none
+05-26 11:45:43.473 2233 2488 D RustNativeSupport: lib configured: megazord
+05-26 11:45:43.473 2233 2488 D RustNativeSupport: lib version configured: 0.58.1
+05-26 11:45:43.473 2233 2488 D RustNativeSupport: settled on megazord
+05-26 11:45:43.483 2233 2328 D GeckoThread: State changed to PROFILE_READY
+05-26 11:45:43.516 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@e1dd1fc)
+05-26 11:45:43.518 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@e1dd1fc)
+05-26 11:45:43.530 2534 2006 I PBSessionCacheImpl: Deleted sessionId[290782929457] from persistence.
+05-26 11:45:43.550 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:43.553 2534 2534 W SearchService: Abort, client detached.
+05-26 11:45:43.563 2233 2328 D GeckoThread: State changed to RUNNING
+05-26 11:45:43.571 2233 2328 I Gecko : -*- nsDNSServiceDiscovery.js : nsDNSServiceDiscovery
+05-26 11:45:43.597 2534 2034 I WorkController: WorkProxy is not enqueued because WorkController is disposed: WorkProxy{Name=context::j, WorkerId=context, id=b8d54f6}
+05-26 11:45:43.597 2534 2034 I WorkController: WorkProxy is not enqueued because WorkController is disposed: WorkProxy{Name=context::m, WorkerId=context, id=75450f7}
+05-26 11:45:43.598 2534 2034 I WorkController: WorkProxy is not enqueued because WorkController is disposed: WorkProxy{Name=context::n, WorkerId=context, id=59beb64}
+05-26 11:45:43.598 2233 2328 I Gecko : 1590507943598 Marionette TRACE Marionette enabled
+05-26 11:45:43.599 2534 2034 I WorkController: WorkProxy is not enqueued because WorkController is disposed: WorkProxy{Name=context::p, WorkerId=context, id=873e1cd}
+05-26 11:45:43.605 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:43.656 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:43.751 2233 2233 I DefaultSupportedAddonsChecker: Register check for new supported add-ons
+05-26 11:45:43.761 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:43.764 2233 2328 I Gecko : 1590507943764 Marionette TRACE Received observer notification marionette-startup-requested
+05-26 11:45:43.765 2233 2328 I Gecko : 1590507943765 Marionette TRACE Waiting until startup recorder finished recording startup scripts...
+05-26 11:45:43.786 2233 2351 I SupportedAddonsWorker: Trying to check for new supported add-ons
+05-26 11:45:43.865 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:43.968 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:43.992 2233 2233 D App : Installed browser-icons extension
+05-26 11:45:44.040 2233 2328 D : HostConnection::get() New Host Connection established 0xd0dc10c0, tid 2328
+05-26 11:45:44.042 2233 2328 E EGL_emulation: tid 2328: eglBindAPI(1259): error 0x300c (EGL_BAD_PARAMETER)
+05-26 11:45:44.043 2233 2328 D EGL_emulation: eglCreateContext: 0xe5e99040: maj 3 min 0 rcv 3
+05-26 11:45:44.045 2233 2328 D EGL_emulation: eglMakeCurrent: 0xe5e99040: ver 3 0 (tinfo 0xe71134f0)
+05-26 11:45:44.074 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:44.180 4313 4313 I chatty : uid=0(root) /system/bin/adbd identical 1 line
+05-26 11:45:44.282 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:44.353 2233 2468 D glean/ConceptFetchHttpUploader: Ping successfully sent (200)
+05-26 11:45:44.354 2233 2468 D glean/PingUploadWorker: 61b99eaa-d33b-4833-b80d-f6b14990a92f was deleted: true
+05-26 11:45:44.361 2233 2378 I WM-WorkerWrapper: Worker result SUCCESS for Work [ id=6de17969-b2aa-4d55-8d3d-9d1360d3ed18, tags={ mozilla.telemetry.glean.scheduler.PingUploadWorker, mozac_service_glean_ping_upload_worker } ]
+05-26 11:45:44.386 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:44.400 1876 1978 D ConnectivityService: releasing NetworkRequest [ TRACK_DEFAULT id=234, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10092] ] (release request)
+05-26 11:45:44.489 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:44.517 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@8c7f483)
+05-26 11:45:44.517 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@8c7f483)
+05-26 11:45:44.523 2482 7043 W ctxmgr : [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):IndoorOutdoorProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2482). Was: 3 for 57, account#-517948760#
+05-26 11:45:44.592 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:44.694 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:44.782 2211 2323 I droid.apps.doc: The ClassLoaderContext is a special shared library.
+05-26 11:45:44.785 2211 2323 I chatty : uid=10069(com.google.android.apps.docs) TaskSchedulerLo identical 1 line
+05-26 11:45:44.790 2211 2323 I droid.apps.doc: The ClassLoaderContext is a special shared library.
+05-26 11:45:44.797 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:44.798 2211 2323 I droid.apps.doc: The ClassLoaderContext is a special shared library.
+05-26 11:45:44.826 2211 2323 V NativeCrypto: Registering com/google/android/gms/org/conscrypt/NativeCrypto's 284 native methods...
+05-26 11:45:44.863 2211 2323 D NetworkSecurityConfig: No Network Security Config specified, using platform default
+05-26 11:45:44.872 2211 2323 I ProviderInstaller: Installed default security provider GmsCore_OpenSSL
+05-26 11:45:44.899 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:44.976 2233 2233 D mozac-webcompat: Installed WebCompat webextension: webcompat@mozilla.com
+05-26 11:45:45.001 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:45.107 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:45.108 2233 2328 I Gecko : 1590507945108 Marionette TRACE All scripts recorded.
+05-26 11:45:45.109 2233 2328 I Gecko : 1590507945109 Marionette DEBUG Setting recommended pref apz.content_response_timeout to 60000
+05-26 11:45:45.109 2233 2328 I Gecko : 1590507945109 Marionette DEBUG Setting recommended pref browser.contentblocking.introCount to 99
+05-26 11:45:45.110 2233 2328 I Gecko : 1590507945109 Marionette DEBUG Setting recommended pref browser.download.panel.shown to true
+05-26 11:45:45.110 2233 2328 I Gecko : 1590507945110 Marionette DEBUG Setting recommended pref browser.newtabpage.enabled to false
+05-26 11:45:45.110 2233 2328 I Gecko : 1590507945110 Marionette DEBUG Setting recommended pref browser.safebrowsing.malware.enabled to false
+05-26 11:45:45.115 2233 2328 I Gecko : 1590507945115 Marionette DEBUG Setting recommended pref browser.safebrowsing.phishing.enabled to false
+05-26 11:45:45.118 2233 2328 I Gecko : 1590507945118 Marionette DEBUG Setting recommended pref browser.search.update to false
+05-26 11:45:45.118 2233 2328 I Gecko : 1590507945118 Marionette DEBUG Setting recommended pref browser.tabs.disableBackgroundZombification to false
+05-26 11:45:45.118 2233 2328 I Gecko : 1590507945118 Marionette DEBUG Setting recommended pref browser.tabs.remote.separatePrivilegedContentProcess to false
+05-26 11:45:45.119 2233 2328 I Gecko : 1590507945119 Marionette DEBUG Setting recommended pref browser.tabs.unloadOnLowMemory to false
+05-26 11:45:45.119 2233 2328 I Gecko : 1590507945119 Marionette DEBUG Setting recommended pref browser.tabs.warnOnCloseOtherTabs to false
+05-26 11:45:45.119 2233 2328 I Gecko : 1590507945119 Marionette DEBUG Setting recommended pref browser.tabs.warnOnOpen to false
+05-26 11:45:45.119 2233 2328 I Gecko : 1590507945119 Marionette DEBUG Setting recommended pref browser.usedOnWindows10.introURL to
+05-26 11:45:45.120 2233 2328 I Gecko : 1590507945120 Marionette DEBUG Setting recommended pref browser.urlbar.suggest.searches to false
+05-26 11:45:45.120 2233 2328 I Gecko : 1590507945120 Marionette DEBUG Setting recommended pref dom.disable_beforeunload to true
+05-26 11:45:45.120 2233 2328 I Gecko : 1590507945120 Marionette DEBUG Setting recommended pref dom.file.createInChild to true
+05-26 11:45:45.121 2233 2328 I Gecko : 1590507945121 Marionette DEBUG Setting recommended pref extensions.getAddons.cache.enabled to false
+05-26 11:45:45.121 2233 2328 I Gecko : 1590507945121 Marionette DEBUG Setting recommended pref network.http.prompt-temp-redirect to false
+05-26 11:45:45.121 2233 2328 I Gecko : 1590507945121 Marionette DEBUG Setting recommended pref security.notification_enable_delay to 0
+05-26 11:45:45.122 2233 2328 I Gecko : 1590507945121 Marionette DEBUG Setting recommended pref signon.autofillForms to false
+05-26 11:45:45.122 2233 2328 I Gecko : 1590507945122 Marionette DEBUG Setting recommended pref signon.rememberSignons to false
+05-26 11:45:45.122 2233 2328 I Gecko : 1590507945122 Marionette DEBUG Setting recommended pref toolkit.cosmeticAnimations.enabled to false
+05-26 11:45:45.187 2233 2328 I Gecko : 1590507945187 Marionette INFO Listening on port 2829
+05-26 11:45:45.188 2233 2328 I Gecko : 1590507945188 Marionette DEBUG Marionette is listening
+05-26 11:45:45.213 2233 2328 I Gecko : 1590507945213 Marionette DEBUG Accepted connection 0 from 127.0.0.1:56174
+05-26 11:45:45.219 2233 2328 I Gecko : 1590507945219 Marionette DEBUG 0 -> [0,1,"WebDriver:NewSession",{"browserName":"firefox","pageLoadStrategy":"none"}]
+05-26 11:45:45.518 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@9f53600)
+05-26 11:45:45.519 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@9f53600)
+05-26 11:45:45.589 2233 2318 I WM-WorkerWrapper: Worker result SUCCESS for Work [ id=5f498c6b-af92-4b6b-8e5b-2a362b3bd663, tags={ mozilla.components.feature.addons.migration.DefaultSupportedAddonsChecker.periodicWork, mozilla.components.feature.addons.migration.SupportedAddonsWorker } ]
+05-26 11:45:46.522 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@7eb3c7e)
+05-26 11:45:46.524 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@7eb3c7e)
+05-26 11:45:47.524 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@30b55df)
+05-26 11:45:47.525 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@30b55df)
+05-26 11:45:47.838 2233 2233 I glean/Glean: Registering pings for org.mozilla.fenix.GleanMetrics.Pings
+05-26 11:45:47.883 2233 2233 I FenixApplication: Kicking-off account manager...
+05-26 11:45:47.883 2233 2233 I FenixApplication: 'Kicking-off account manager' took 0 ms
+05-26 11:45:47.886 2233 2305 I FenixApplication: Running post-visual completeness tasks...
+05-26 11:45:47.886 2233 2305 I FenixApplication: Storage initialization...
+05-26 11:45:47.890 2233 2305 I PlacesHistoryStorage: Warming up places storage...
+05-26 11:45:47.903 2233 2305 D RustNativeSupport: findMegazordLibraryName(places, 0.58.1
+05-26 11:45:47.903 2233 2305 D RustNativeSupport: lib in use: none
+05-26 11:45:47.903 2233 2305 D RustNativeSupport: lib configured: megazord
+05-26 11:45:47.903 2233 2305 D RustNativeSupport: lib version configured: 0.58.1
+05-26 11:45:47.903 2233 2305 D RustNativeSupport: settled on megazord
+05-26 11:45:47.908 2233 2305 D places_ffi: places_api_new
+05-26 11:45:47.945 2233 2305 D places::db::schema: Creating schema
+05-26 11:45:48.023 2233 2351 I App : ActivationPing - generating ping with the hashed id
+05-26 11:45:48.024 2233 2351 I App : ActivationPing - generating ping (has `identifier`: true)
+05-26 11:45:48.034 2233 2305 D sql_support::conn_ext: Transaction commited after 87.947ms
+05-26 11:45:48.041 2233 2305 D places_ffi: places_connection_new
+05-26 11:45:48.041 2233 2295 I libglean_ffi: glean_core::ping: Collecting activation
+05-26 11:45:48.046 2233 2305 D places_ffi: places_connection_new
+05-26 11:45:48.056 2233 2305 I PlacesHistoryStorage: 'Warming up places storage' took 166 ms
+05-26 11:45:48.059 2233 2305 I PlacesBookmarksStorage: Warming up places storage...
+05-26 11:45:48.062 2233 2305 D places_ffi: places_connection_new
+05-26 11:45:48.072 2233 2305 I PlacesBookmarksStorage: 'Warming up places storage' took 13 ms
+05-26 11:45:48.075 2233 2295 D libglean_ffi: glean_core::ping: Storing ping 'bc58f9d5-110e-442c-ae4f-6dbee99fe25d' at '/data/user/0/org.mozilla.fennec_aurora/glean_data/pending_pings/bc58f9d5-110e-442c-ae4f-6dbee99fe25d'
+05-26 11:45:48.077 2233 2295 I libglean_ffi: glean_core: The ping 'activation' was submitted and will be sent as soon as possible
+05-26 11:45:48.111 1739 1739 I keystore: 1 0
+05-26 11:45:48.134 2233 2305 I SyncableLoginsStorage: Warming up storage...
+05-26 11:45:48.166 1876 4289 D ConnectivityService: requestNetwork for uid/pid:10092/2233 NetworkRequest [ TRACK_DEFAULT id=235, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10092] ]
+05-26 11:45:48.170 1876 1975 D WIFI : got request NetworkRequest [ TRACK_DEFAULT id=235, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10092] ] with score 60
+05-26 11:45:48.170 1876 1975 D WIFI_UT : got request NetworkRequest [ TRACK_DEFAULT id=235, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10092] ] with score 60
+05-26 11:45:48.174 2233 2305 D RustNativeSupport: findMegazordLibraryName(logins, 0.58.1
+05-26 11:45:48.190 2233 2305 D RustNativeSupport: lib in use: none
+05-26 11:45:48.190 2131 2131 D PhoneSwitcherNetworkRequstListener: got request NetworkRequest [ TRACK_DEFAULT id=235, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10092] ] with score 60
+05-26 11:45:48.191 2233 2305 D RustNativeSupport: lib configured: megazord
+05-26 11:45:48.191 2233 2305 D RustNativeSupport: lib version configured: 0.58.1
+05-26 11:45:48.191 2233 2305 D RustNativeSupport: settled on megazord
+05-26 11:45:48.193 2233 2305 D logins_ffi: sync15_passwords_state_new
+05-26 11:45:48.219 2233 2601 D glean/PingUploadWorker: Processing persisted pings at /data/user/0/org.mozilla.fennec_aurora/glean_data/pending_pings
+05-26 11:45:48.227 2233 2601 D glean/PingUploadWorker: Processing ping: bc58f9d5-110e-442c-ae4f-6dbee99fe25d
+05-26 11:45:48.230 2233 2601 D glean/ConceptFetchHttpUploader: Submitting ping to: https://incoming.telemetry.mozilla.org/submit/org-mozilla-fennec-aurora/activation/1/bc58f9d5-110e-442c-ae4f-6dbee99fe25d
+05-26 11:45:48.230 2233 2305 D logins::schema: Creating schema
+05-26 11:45:48.526 2233 2305 I SyncableLoginsStorage: 'Warming up storage' took 391 ms
+05-26 11:45:48.526 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@2ea12e1)
+05-26 11:45:48.527 2233 2305 I FenixApplication: 'Storage initialization' took 640 ms
+05-26 11:45:48.527 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@2ea12e1)
+05-26 11:45:48.749 2233 2601 D glean/ConceptFetchHttpUploader: Ping successfully sent (200)
+05-26 11:45:48.750 2233 2601 D glean/PingUploadWorker: bc58f9d5-110e-442c-ae4f-6dbee99fe25d was deleted: true
+05-26 11:45:48.753 2233 2318 I WM-WorkerWrapper: Worker result SUCCESS for Work [ id=bc69b5a8-e4e5-43bf-b478-16556bcd27b1, tags={ mozilla.telemetry.glean.scheduler.PingUploadWorker, mozac_service_glean_ping_upload_worker } ]
+05-26 11:45:48.760 1876 1978 D ConnectivityService: releasing NetworkRequest [ TRACK_DEFAULT id=235, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10092] ] (release request)
+05-26 11:45:48.903 1876 1893 I ActivityManager: Start proc 2605:com.google.android.dialer/u0a28 for service com.google.android.dialer/com.google.android.apps.dialer.spam.inapp.SpamJobService
+05-26 11:45:48.904 2605 2605 W .android.diale: Unexpected CPU variant for X86 using defaults: x86
+05-26 11:45:48.955 2605 2605 W System : ClassLoader referenced unknown path: /system/framework/com.google.android.dialer.support.jar
+05-26 11:45:49.180 2605 2605 I Dialer : GoogleDialerApplication.onCreate - enter
+05-26 11:45:49.202 2605 2605 I Dialer : DialerExecutorModule.newThread - creating low priority thread
+05-26 11:45:49.244 2605 2605 I Dialer : SpamModule.provideSpam - using in-app spam
+05-26 11:45:49.252 2605 2605 I Dialer : SpamModule.provideSpam - using in-app spam
+05-26 11:45:49.254 2605 2605 I Dialer : CallLogConfigImpl.schedulePollingJob - scheduling
+05-26 11:45:49.260 2605 2605 I Dialer : DialerApplication.initializeAnnotatedCallLog - framework not enabled
+05-26 11:45:49.302 2605 2605 E .android.diale: The String#value field is not present on Android versions >= 6.0
+05-26 11:45:49.311 2605 2605 I Dialer : NonUiTaskBuilder.newThread - creating serial thread
+05-26 11:45:49.312 2605 2605 I Dialer : DialerExecutorModule.newThread - creating low priority thread
+05-26 11:45:49.313 2605 2605 I Dialer : ShortcutsJobScheduler.scheduleAllJobs - enter
+05-26 11:45:49.321 2605 2605 I Dialer : ShortcutsJobScheduler.scheduleAllJobs - enabling shortcuts
+05-26 11:45:49.321 2605 2605 I Dialer : PeriodicJobService.schedulePeriodicJob - enter
+05-26 11:45:49.323 2605 2605 I Dialer : PeriodicJobService.schedulePeriodicJob - job already scheduled.
+05-26 11:45:49.325 2605 2630 I Dialer : SpamJobScheduler.scheduleJob - job with id SPAM_JOB_WIFI is already scheduled
+05-26 11:45:49.326 2605 2605 I Dialer : GoogleDialerApplication.onCreate - register new client
+05-26 11:45:49.329 2605 2630 I Dialer : SpamJobScheduler.scheduleJob - job with id SPAM_JOB_ANY_NETWORK is already scheduled
+05-26 11:45:49.361 2605 2605 I Dialer : GoogleDialerApplication.onCreate - registered new client
+05-26 11:45:49.376 2605 2605 I Dialer : SpamJobScheduler.scheduleJob - job with id SPAM_JOB_WIFI is already scheduled
+05-26 11:45:49.528 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@e013eb6)
+05-26 11:45:49.529 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@e013eb6)
+05-26 11:45:49.552 2482 6321 W ctxmgr : [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):IndoorOutdoorProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2482). Was: 3 for 57, account#-517948760#
+05-26 11:45:49.570 2605 2605 I Dialer : Flags.register - phenotype register status: true
+05-26 11:45:49.668 1876 1884 I system_server: Background concurrent copying GC freed 116473(5MB) AllocSpace objects, 53(2MB) LOS objects, 19% free, 25MB/31MB, paused 2.716ms total 271.820ms
+05-26 11:45:49.675 2605 2629 I Dialer : DialerExecutorModule.newThread - creating low priority thread
+05-26 11:45:49.678 2605 2605 I Dialer : Flags.register - commit succeeded: true
+05-26 11:45:50.530 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@e6757b7)
+05-26 11:45:50.532 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@e6757b7)
+05-26 11:45:50.931 1876 4289 I ActivityManager: Force stopping org.mozilla.fennec_aurora appid=10092 user=0: clear data
+05-26 11:45:50.932 1876 4289 I ActivityManager: Killing 2393:org.mozilla.fennec_aurora:tab0/u0a92 (adj 100): stop org.mozilla.fennec_aurora
+05-26 11:45:50.933 1876 4289 W ActivityManager: Scheduling restart of crashed service org.mozilla.fennec_aurora/org.mozilla.gecko.process.GeckoChildProcessServices$tab0 in 1000ms
+05-26 11:45:50.934 1876 1894 W libprocessgroup: kill(-2393, 9) failed: No such process
+05-26 11:45:50.939 1876 4289 I ActivityManager: Killing 2233:org.mozilla.fennec_aurora/u0a92 (adj 0): stop org.mozilla.fennec_aurora
+05-26 11:45:50.946 1876 4289 W ActivityManager: Force removing ActivityRecord{a5ac676 u0 org.mozilla.fennec_aurora/org.mozilla.fenix.HomeActivity t282}: app died, no saved state
+05-26 11:45:50.971 1904 1911 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:50.972 1904 1911 D : HostConnection::get() New Host Connection established 0xe6d43140, tid 1911
+05-26 11:45:50.973 1904 1911 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:50.973 1904 1911 D : HostConnection::get() New Host Connection established 0xe6d43140, tid 1911
+05-26 11:45:50.974 1904 1911 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:50.974 1904 1911 D : HostConnection::get() New Host Connection established 0xe6d43140, tid 1911
+05-26 11:45:50.975 1904 1911 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:50.975 1904 1911 D : HostConnection::get() New Host Connection established 0xe6d43140, tid 1911
+05-26 11:45:50.975 1904 1911 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:50.976 1876 1894 W libprocessgroup: kill(-2393, 9) failed: No such process
+05-26 11:45:50.976 1904 1911 D : HostConnection::get() New Host Connection established 0xe6d43140, tid 1911
+05-26 11:45:50.976 1904 1911 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:50.983 1904 1911 D : HostConnection::get() New Host Connection established 0xe6d43140, tid 1911
+05-26 11:45:50.993 1876 4289 D ZenLog : config: removeAutomaticZenRules,ZenModeConfig[user=0,allowAlarms=true,allowMedia=true,allowSystem=false,allowReminders=false,allowEvents=false,allowCalls=true,allowRepeatCallers=true,allowMessages=false,allowCallsFrom=stars,allowMessagesFrom=contacts,suppressedVisualEffects=511,areChannelsBypassingDnd=false,automaticRules={EVENTS_DEFAULT_RULE=ZenRule[enabled=false,snoozing=false,name=Event,zenMode=ZEN_MODE_IMPORTANT_INTERRUPTIONS,conditionId=condition://android/event?userId=-10000&calendar=&reply=1,condition=Condition[id=condition://android/event?userId=-10000&calendar=&reply=1,summary=...,line1=...,line2=...,icon=0,state=STATE_FALSE,flags=2],component=ComponentInfo{android/com.android.server.notification.EventConditionProvider},id=EVENTS_DEFAULT_RULE,creationTime=1587308662810,enabler=null], EVERY_NIGHT_DEFAULT_RULE=ZenRule[enabled=false,snoozing=false,name=Sleeping,zenMode=ZEN_MODE_IMPORTANT_INTERRUPTIONS,conditionId=condition://android/schedule?days=1.2.3.4.5.6.7&start=22.0&end=7.0&exitAtAlarm=true,condition=Condition[id=condition://android/schedule?days=1.2.3.4.5.6.7&start=22.0&end=7.0&exitAtAlarm=true,summary=...,line1=...,line2=...,icon=0,state=STATE_FALSE,flags=2],component=ComponentInfo{android/com.android.server.notification.ScheduleConditionProvider},id=EVERY_NIGHT_DEFAULT_RULE,creationTime=1587308662810,enabler=null]},manualRule=null],Diff[]
+05-26 11:45:50.993 1876 4289 I ConditionProviders: Disallowing condition provider org.mozilla.fennec_aurora
+05-26 11:45:50.997 1734 1734 I Zygote : Process 2393 exited due to signal (9)
+05-26 11:45:50.999 1876 1876 D ZenLog : set_zen_mode: off,removeAutomaticZenRules
+05-26 11:45:51.015 1876 1894 W libprocessgroup: kill(-2393, 9) failed: No such process
+05-26 11:45:51.015 1876 1894 I libprocessgroup: Successfully killed process cgroup uid 10092 pid 2393 in 81ms
+05-26 11:45:51.016 1876 1894 W libprocessgroup: kill(-2233, 9) failed: No such process
+05-26 11:45:51.039 1623 5774 D gralloc_ranchu: gralloc_alloc: Creating ashmem region of size 9334784
+05-26 11:45:51.056 1876 1894 W libprocessgroup: kill(-2233, 9) failed: No such process
+05-26 11:45:51.058 1733 1759 D : HostConnection::get() New Host Connection established 0xed960600, tid 1759
+05-26 11:45:51.059 1623 5774 D gralloc_ranchu: gralloc_alloc: Creating ashmem region of size 9334784
+05-26 11:45:51.071 1623 5774 D gralloc_ranchu: gralloc_alloc: Creating ashmem region of size 9334784
+05-26 11:45:51.096 1876 1894 W libprocessgroup: kill(-2233, 9) failed: No such process
+05-26 11:45:51.102 1739 1739 I keystore: clear_uid 10092
+05-26 11:45:51.120 1733 1733 D SurfaceFlinger: duplicate layer name: changing com.google.android.apps.nexuslauncher/com.google.android.apps.nexuslauncher.NexusLauncherActivity to com.google.android.apps.nexuslauncher/com.google.android.apps.nexuslauncher.NexusLauncherActivity#1
+05-26 11:45:51.131 1623 5774 D gralloc_ranchu: gralloc_alloc: Creating ashmem region of size 9334784
+05-26 11:45:51.135 1876 1894 W libprocessgroup: kill(-2233, 9) failed: No such process
+05-26 11:45:51.150 2534 2825 D EGL_emulation: eglMakeCurrent: 0xe8b06aa0: ver 3 0 (tinfo 0xe8b03b50)
+05-26 11:45:51.150 1876 1961 W InputDispatcher: channel '894e079 org.mozilla.fennec_aurora/org.mozilla.fenix.HomeActivity (server)' ~ Consumer closed input channel or an error occurred. events=0x9
+05-26 11:45:51.151 1876 1961 E InputDispatcher: channel '894e079 org.mozilla.fennec_aurora/org.mozilla.fenix.HomeActivity (server)' ~ Channel is unrecoverably broken and will be disposed!
+05-26 11:45:51.155 1733 1749 D : HostConnection::get() New Host Connection established 0xed960a40, tid 1749
+05-26 11:45:51.165 1623 5774 D gralloc_ranchu: gralloc_alloc: Creating ashmem region of size 9334784
+05-26 11:45:51.175 1876 1894 W libprocessgroup: kill(-2233, 9) failed: No such process
+05-26 11:45:51.182 1623 1623 D gralloc_ranchu: gralloc_alloc: Creating ashmem region of size 9334784
+05-26 11:45:51.193 1876 1891 I ActivityManager: Force stopping org.mozilla.fennec_aurora appid=10092 user=-1: clearApplicationUserData
+05-26 11:45:51.198 1733 1759 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:51.199 1733 1759 D : HostConnection::get() New Host Connection established 0xed960600, tid 1759
+05-26 11:45:51.202 1733 1759 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:51.203 1733 1759 D : HostConnection::get() New Host Connection established 0xede98480, tid 1759
+05-26 11:45:51.203 1733 1759 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:51.204 1733 1759 D : HostConnection::get() New Host Connection established 0xede98480, tid 1759
+05-26 11:45:51.214 1733 1759 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:51.215 1876 1894 W libprocessgroup: kill(-2233, 9) failed: No such process
+05-26 11:45:51.218 1876 7720 I WindowManager: WIN DEATH: Window{894e079 u0 org.mozilla.fennec_aurora/org.mozilla.fenix.HomeActivity}
+05-26 11:45:51.219 1876 7720 W InputDispatcher: Attempted to unregister already unregistered input channel '894e079 org.mozilla.fennec_aurora/org.mozilla.fenix.HomeActivity (server)'
+05-26 11:45:51.235 1733 2205 W SurfaceFlinger: Attempting to destroy on removed layer: Task=282#0
+05-26 11:45:51.249 1734 1734 I Zygote : Process 2233 exited due to signal (9)
+05-26 11:45:51.254 1876 1894 W libprocessgroup: kill(-2233, 9) failed: No such process
+05-26 11:45:51.254 1876 1894 I libprocessgroup: Successfully killed process cgroup uid 10092 pid 2233 in 238ms
+05-26 11:45:51.260 1876 1899 W ActivityManager: setHasOverlayUi called on unknown pid: 2233
+05-26 11:45:51.263 2000 2000 I GoogleInputMethod: onFinishInput() : Dummy InputConnection bound
+05-26 11:45:51.263 2499 2681 D EGL_emulation: eglMakeCurrent: 0xd35359e0: ver 3 0 (tinfo 0xd353b4f0)
+05-26 11:45:51.265 2000 2000 I GoogleInputMethod: onStartInput() : Dummy InputConnection bound
+05-26 11:45:51.300 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:51.327 2012 2265 I chatty : uid=10024(com.android.systemui) RenderThread identical 1 line
+05-26 11:45:51.403 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:51.428 2482 2482 I GeofencerStateMachine: removeGeofences: removeRequest=RemoveGeofencingRequest[REMOVE_ALL packageName=org.mozilla.fennec_aurora]
+05-26 11:45:51.433 2131 2131 D CarrierSvcBindHelper: No carrier app for: 0
+05-26 11:45:51.438 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:51.463 2012 2265 I chatty : uid=10024(com.android.systemui) RenderThread identical 3 lines
+05-26 11:45:51.479 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:51.481 1876 1890 E system_server: No package ID 7f found for ID 0x7f08019f.
+05-26 11:45:51.481 1876 1890 E system_server: No package ID 7f found for ID 0x7f130122.
+05-26 11:45:51.481 1876 1890 E system_server: No package ID 7f found for ID 0x7f130122.
+05-26 11:45:51.481 1876 1890 E system_server: No package ID 7f found for ID 0x7f08019d.
+05-26 11:45:51.481 1876 1890 E system_server: No package ID 7f found for ID 0x7f130121.
+05-26 11:45:51.481 1876 1890 E system_server: No package ID 7f found for ID 0x7f130121.
+05-26 11:45:51.484 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:51.493 2534 2534 W SessionLifecycleManager: Handover failed. Creating new session controller.
+05-26 11:45:51.499 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:51.508 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:51.538 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@8dd9b95)
+05-26 11:45:51.540 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@8dd9b95)
+05-26 11:45:51.542 1876 5181 I ActivityManager: Force stopping org.mozilla.fennec_aurora appid=10092 user=0: from pid 2663
+05-26 11:45:51.557 2131 2131 D CarrierSvcBindHelper: No carrier app for: 0
+05-26 11:45:51.590 1560 1573 D vold : Remounting 10092 as mode read
+05-26 11:45:51.600 1876 1876 I GnssLocationProvider: WakeLock acquired by sendMessage(SET_REQUEST, 0, com.android.server.location.GnssLocationProvider$GpsRequest@4971377)
+05-26 11:45:51.602 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(SET_REQUEST, 0, com.android.server.location.GnssLocationProvider$GpsRequest@4971377)
+05-26 11:45:51.641 1876 1876 V SettingsProvider: Notifying for 0: content://settings/global/debug_app
+05-26 11:45:51.673 1876 1876 I GnssLocationProvider: WakeLock acquired by sendMessage(SET_REQUEST, 0, com.android.server.location.GnssLocationProvider$GpsRequest@dfab250)
+05-26 11:45:51.674 1560 1573 D vold : Remounting 10092 as mode write
+05-26 11:45:51.676 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(SET_REQUEST, 0, com.android.server.location.GnssLocationProvider$GpsRequest@dfab250)
+05-26 11:45:51.682 2116 2662 I ProvidersCache: Provider returned no roots. Possibly naughty: com.google.android.apps.docs.storage
+05-26 11:45:51.691 7086 2683 I LocationSettingsChecker: Removing dialog suppression flag for package org.mozilla.fennec_aurora
+05-26 11:45:51.738 7086 7124 I Icing : doRemovePackageData org.mozilla.fennec_aurora
+05-26 11:45:51.742 2534 2697 W LocationOracle: No location history returned by ContextManager
+05-26 11:45:51.781 2482 11975 W ctxmgr : [AclManager]No 3 for (accnt=account#-517948760#, com.google.android.gms(10008):UserVelocityProducer, vrsn=13280022, 0, 3pPkg = null , 3pMdlId = null , pid = 2482). Was: 3 for 1, account#-517948760#
+05-26 11:45:51.791 2534 2534 I MicroDetectionWorker: #startMicroDetector [speakerMode: 0]
+05-26 11:45:51.791 2534 2534 I AudioController: Using mInputStreamFactoryBuilder
+05-26 11:45:51.791 2534 2534 I AudioController: Created new AudioSource
+05-26 11:45:51.793 2534 2534 I MicroDetectionWorker: onReady
+05-26 11:45:51.795 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:51.808 2534 2016 I MicroRecognitionRunner: Starting detection.
+05-26 11:45:51.817 2534 2004 I MicrophoneInputStream: mic_starting SR : 16000 CC : 16 SO : 6
+05-26 11:45:51.819 1631 1683 E : Request requires android.permission.RECORD_AUDIO
+05-26 11:45:51.819 1631 1683 E AudioPolicyIntefaceImpl: getInputForAttr permission denied: recording not allowed for uid 10039 pid 2534
+05-26 11:45:51.819 1631 1683 E AudioFlinger: createRecord() checkRecordThread_l failed
+05-26 11:45:51.819 2534 2004 E IAudioFlinger: createRecord returned error -22
+05-26 11:45:51.819 2534 2004 E AudioRecord: AudioFlinger could not create record track, status: -22
+05-26 11:45:51.819 2534 2004 E AudioRecord-JNI: Error creating AudioRecord instance: initialization check failed with status -22.
+05-26 11:45:51.820 2534 2004 E android.media.AudioRecord: Error code -20 when initializing native AudioRecord object.
+05-26 11:45:51.820 2534 2004 I MicrophoneInputStream: mic_started SR : 16000 CC : 16 SO : 6
+05-26 11:45:51.820 2534 2004 E ActivityThread: Failed to find provider info for com.google.android.apps.gsa.testing.ui.audio.recorded
+05-26 11:45:51.821 2534 2534 I MicroDetectionWorker: onReady
+05-26 11:45:51.830 2534 2016 W SpeechLevelGenerator: Really low audio levels detected. The audio input may have issues.
+05-26 11:45:51.833 2534 2004 I MicrophoneInputStream: mic_close SR : 16000 CC : 16 SO : 6
+05-26 11:45:51.838 2534 2016 I MicroRecognitionRunner: Detection finished
+05-26 11:45:51.839 2534 2016 W ErrorReporter: reportError [type: 211, code: 524300]: Error reading from input stream
+05-26 11:45:51.841 2534 2954 I MicroRecognitionRunner: Stopping hotword detection.
+05-26 11:45:51.842 2534 2016 W ErrorProcessor: onFatalError, processing error from engine(4)
+05-26 11:45:51.842 2534 2016 W ErrorProcessor: com.google.android.apps.gsa.shared.speech.b.g: Error reading from input stream
+05-26 11:45:51.842 2534 2016 W ErrorProcessor: at com.google.android.apps.gsa.staticplugins.microdetection.d.k.a(SourceFile:91)
+05-26 11:45:51.842 2534 2016 W ErrorProcessor: at com.google.android.apps.gsa.staticplugins.microdetection.d.l.run(Unknown Source:14)
+05-26 11:45:51.842 2534 2016 W ErrorProcessor: at com.google.android.libraries.gsa.runner.a.a.b(SourceFile:32)
+05-26 11:45:51.842 2534 2016 W ErrorProcessor: at com.google.android.libraries.gsa.runner.a.c.call(Unknown Source:4)
+05-26 11:45:51.842 2534 2016 W ErrorProcessor: at java.util.concurrent.FutureTask.run(FutureTask.java:266)
+05-26 11:45:51.842 2534 2016 W ErrorProcessor: at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:458)
+05-26 11:45:51.842 2534 2016 W ErrorProcessor: at java.util.concurrent.FutureTask.run(FutureTask.java:266)
+05-26 11:45:51.842 2534 2016 W ErrorProcessor: at com.google.android.apps.gsa.shared.util.concurrent.b.g.run(Unknown Source:4)
+05-26 11:45:51.842 2534 2016 W ErrorProcessor: at com.google.android.apps.gsa.shared.util.concurrent.b.aw.run(SourceFile:4)
+05-26 11:45:51.842 2534 2016 W ErrorProcessor: at com.google.android.apps.gsa.shared.util.concurrent.b.aw.run(SourceFile:4)
+05-26 11:45:51.842 2534 2016 W ErrorProcessor: at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1167)
+05-26 11:45:51.842 2534 2016 W ErrorProcessor: at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:641)
+05-26 11:45:51.842 2534 2016 W ErrorProcessor: at java.lang.Thread.run(Thread.java:764)
+05-26 11:45:51.842 2534 2016 W ErrorProcessor: at com.google.android.apps.gsa.shared.util.concurrent.b.i.run(SourceFile:6)
+05-26 11:45:51.842 2534 2016 W ErrorProcessor: Caused by: com.google.android.apps.gsa.shared.exception.GsaIOException: Error code: 393238 | Buffer overflow, no available space.
+05-26 11:45:51.842 2534 2016 W ErrorProcessor: at com.google.android.apps.gsa.speech.audio.Tee.j(SourceFile:103)
+05-26 11:45:51.842 2534 2016 W ErrorProcessor: at com.google.android.apps.gsa.speech.audio.au.read(SourceFile:2)
+05-26 11:45:51.842 2534 2016 W ErrorProcessor: at java.io.InputStream.read(InputStream.java:101)
+05-26 11:45:51.842 2534 2016 W ErrorProcessor: at com.google.android.apps.gsa.speech.audio.ao.run(SourceFile:17)
+05-26 11:45:51.842 2534 2016 W ErrorProcessor: at com.google.android.apps.gsa.speech.audio.an.run(SourceFile:2)
+05-26 11:45:51.842 2534 2016 W ErrorProcessor: at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:458)
+05-26 11:45:51.842 2534 2016 W ErrorProcessor: ... 10 more
+05-26 11:45:51.842 2534 2016 I AudioController: internalShutdown
+05-26 11:45:51.846 2534 2534 I MicroDetector: Keeping mic open: false
+05-26 11:45:51.846 2534 2534 I MicroDetectionWorker: #onError(false)
+05-26 11:45:51.847 2534 2697 I DeviceStateChecker: DeviceStateChecker cancelled
+05-26 11:45:51.879 1876 16736 I ActivityManager: Force stopping org.mozilla.fennec_aurora appid=10092 user=-1: set debug app
+05-26 11:45:51.879 1876 1876 V SettingsProvider: Notifying for 0: content://settings/global/debug_app
+05-26 11:45:51.910 1876 3012 I ActivityManager: START u0 {flg=0x10000000 cmp=org.mozilla.fennec_aurora/org.mozilla.fenix.HomeActivity (has extras)} from uid 0
+05-26 11:45:51.961 1876 1893 I ActivityManager: Start proc 2726:org.mozilla.fennec_aurora/u0a92 for activity org.mozilla.fennec_aurora/org.mozilla.fenix.HomeActivity
+05-26 11:45:51.964 1623 1623 D gralloc_ranchu: gralloc_alloc: Creating ashmem region of size 9334784
+05-26 11:45:51.967 2726 2726 W a.fennec_auror: Unexpected CPU variant for X86 using defaults: x86
+05-26 11:45:51.969 2482 3056 I Places : ?: PlacesBleScanner start() with priority 2
+05-26 11:45:51.979 1876 1940 D : HostConnection::get() New Host Connection established 0xcf15cd00, tid 1940
+05-26 11:45:52.016 2482 3056 I Places : ?: PlacesBleScanner start() with priority 2
+05-26 11:45:52.020 2482 3056 I PlaceInferenceEngine: [anon] Changed inference mode: 1
+05-26 11:45:52.033 2726 2726 W ActivityThread: Application org.mozilla.fennec_aurora can be debugged on port 8100...
+05-26 11:45:52.036 2726 2726 I a.fennec_auror: The ClassLoaderContext is a special shared library.
+05-26 11:45:52.046 2482 3056 I Places : Converted 0 out of 1 WiFi scans
+05-26 11:45:52.059 1904 1911 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:52.060 1904 1911 D : HostConnection::get() New Host Connection established 0xe6d43140, tid 1911
+05-26 11:45:52.060 1904 1911 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:52.061 1904 1911 D : HostConnection::get() New Host Connection established 0xe6d43140, tid 1911
+05-26 11:45:52.061 1904 1911 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:52.062 1904 1911 D : HostConnection::get() New Host Connection established 0xe6d43140, tid 1911
+05-26 11:45:52.062 1904 1911 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:52.063 1623 1623 D gralloc_ranchu: gralloc_alloc: Creating ashmem region of size 9334784
+05-26 11:45:52.063 2534 2825 D EGL_emulation: eglMakeCurrent: 0xe8b06aa0: ver 3 0 (tinfo 0xe8b03b50)
+05-26 11:45:52.066 1733 2119 D : HostConnection::get() New Host Connection established 0xed9604c0, tid 2119
+05-26 11:45:52.067 1733 2119 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:52.067 1733 2119 D : HostConnection::get() New Host Connection established 0xed9604c0, tid 2119
+05-26 11:45:52.070 2499 2681 D EGL_emulation: eglMakeCurrent: 0xd35359e0: ver 3 0 (tinfo 0xd353b4f0)
+05-26 11:45:52.074 1733 2205 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:52.075 1733 2205 D : HostConnection::get() New Host Connection established 0xe9a2f7c0, tid 2205
+05-26 11:45:52.075 1733 2205 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:52.075 1733 2205 D : HostConnection::get() New Host Connection established 0xe9a2f7c0, tid 2205
+05-26 11:45:52.076 1733 2205 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:52.076 1733 2205 D : HostConnection::get() New Host Connection established 0xe9a2f7c0, tid 2205
+05-26 11:45:52.078 1733 2119 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:52.079 1733 2119 D : HostConnection::get() New Host Connection established 0xed9604c0, tid 2119
+05-26 11:45:52.079 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:52.079 1733 2119 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:52.079 1733 2119 D : HostConnection::get() New Host Connection established 0xed9604c0, tid 2119
+05-26 11:45:52.080 1904 1911 D : HostConnection::get() New Host Connection established 0xe6d43140, tid 1911
+05-26 11:45:52.086 1733 2205 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:52.088 1733 2119 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:52.090 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:52.095 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:52.102 2482 6321 I PlaceInferenceEngine: No beacon scan available - ignoring candidates.
+05-26 11:45:52.106 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:52.128 2012 2265 I chatty : uid=10024(com.android.systemui) RenderThread identical 2 lines
+05-26 11:45:52.132 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:52.134 2482 3056 I Places : ?: PlacesBleScanner start() with priority 2
+05-26 11:45:52.143 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:52.147 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:52.155 2482 3056 I Places : ?: PlacesBleScanner start() with priority 2
+05-26 11:45:52.162 2482 3056 I PlaceInferenceEngine: [anon] Changed inference mode: 1
+05-26 11:45:52.163 2726 2726 D FirebaseApp: com.google.firebase.auth.FirebaseAuth is not linked. Skipping initialization.
+05-26 11:45:52.164 2726 2726 D FirebaseApp: com.google.firebase.crash.FirebaseCrash is not linked. Skipping initialization.
+05-26 11:45:52.165 2726 2726 I FirebaseInitProvider: FirebaseApp initialization successful
+05-26 11:45:52.167 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:52.179 2012 2265 I chatty : uid=10024(com.android.systemui) RenderThread identical 2 lines
+05-26 11:45:52.183 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:52.190 2482 3056 I Places : Converted 0 out of 1 WiFi scans
+05-26 11:45:52.191 2726 2752 W a.fennec_auror: Accessing hidden method Landroid/content/res/Resources$Theme;->rebase()V (dark greylist, linking)
+05-26 11:45:52.199 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:52.204 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:52.223 2726 2726 D FennecProfile: profiles.ini: false
+05-26 11:45:52.223 2726 2726 D FennecProfile: No profiles found
+05-26 11:45:52.227 2726 2726 D FennecMigrator: No migrations to run. Fennec install - false.
+05-26 11:45:52.235 2726 2726 D FenixApplication: Initializing Glean (uploadEnabled=true, isFennec=true)
+05-26 11:45:52.263 2482 6321 I PlaceInferenceEngine: No beacon scan available - ignoring candidates.
+05-26 11:45:52.264 2482 6319 W ctxmgr : [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):PlacesProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2482). Was: 3 for 18, account#-517948760#
+05-26 11:45:52.280 2726 2762 D RustNativeSupport: findMegazordLibraryName(viaduct, 0.58.1
+05-26 11:45:52.280 2726 2762 D RustNativeSupport: lib in use: none
+05-26 11:45:52.280 2726 2762 D RustNativeSupport: lib configured: megazord
+05-26 11:45:52.280 2726 2762 D RustNativeSupport: lib version configured: 0.58.1
+05-26 11:45:52.280 2726 2762 D RustNativeSupport: settled on megazord
+05-26 11:45:52.338 2726 2752 I FA : Collection disabled with firebase_analytics_collection_enabled=0
+05-26 11:45:52.339 2726 2752 I FA : App measurement is starting up, version: 12780
+05-26 11:45:52.339 2726 2752 I FA : To enable debug logging run: adb shell setprop log.tag.FA VERBOSE
+05-26 11:45:52.342 2726 2752 I FA : To enable faster debug mode event logging run:
+05-26 11:45:52.342 2726 2752 I FA : adb shell setprop debug.firebase.analytics.app org.mozilla.fennec_aurora
+05-26 11:45:52.366 2726 2726 W ActivityThread: ClassLoader.loadClass: The class loader returned by Thread.getContextClassLoader() may fail for processes that host multiple applications. You should explicitly specify a context class loader. For example: Thread.setContextClassLoader(getClass().getClassLoader());
+05-26 11:45:52.377 2726 2726 I GeckoRuntime: Adding debug configuration from: /data/local/tmp/org.mozilla.fennec_aurora-geckoview-config.yaml
+05-26 11:45:52.377 2726 2726 D GeckoDebugConfig: Adding environment variables from debug config: {MOZ_CRASHREPORTER=1, MOZ_CRASHREPORTER_NO_REPORT=1, MOZ_CRASHREPORTER_SHUTDOWN=1}
+05-26 11:45:52.377 2726 2726 D GeckoDebugConfig: Adding arguments from debug config: [-marionette, -profile, /mnt/sdcard/org.mozilla.fennec_aurora-geckodriver-profile]
+05-26 11:45:52.378 2726 2726 D GeckoThread: State changed to LAUNCHED
+05-26 11:45:52.390 2726 2773 I GeckoThread: preparing to run Gecko
+05-26 11:45:52.399 2726 2773 D GeckoThread: env var: MOZ_CRASHREPORTER=1
+05-26 11:45:52.399 2726 2773 D GeckoThread: env var: MOZ_CRASHREPORTER_NO_REPORT=1
+05-26 11:45:52.399 2726 2773 D GeckoThread: env var: MOZ_CRASHREPORTER_SHUTDOWN=1
+05-26 11:45:52.409 2726 2726 D GeckoRuntime: Lifecycle: onCreate
+05-26 11:45:52.479 2726 2773 D GeckoThread: State changed to MOZGLUE_READY
+05-26 11:45:52.480 2726 2744 I a.fennec_auror: Background concurrent copying GC freed 13284(2MB) AllocSpace objects, 9(436KB) LOS objects, 49% free, 1904KB/3MB, paused 3.706ms total 105.789ms
+05-26 11:45:52.484 2726 2760 D libglean_ffi: glean_ffi: Android logging should be hooked up!
+05-26 11:45:52.485 2726 2762 D RustNativeSupport: findMegazordLibraryName(rustlog, 0.58.1
+05-26 11:45:52.485 2726 2762 D RustNativeSupport: lib in use: none
+05-26 11:45:52.485 2726 2760 I glean/Glean: Registering pings for mozilla.telemetry.glean.GleanMetrics.Pings
+05-26 11:45:52.485 2726 2762 D RustNativeSupport: lib configured: megazord
+05-26 11:45:52.485 2726 2762 D RustNativeSupport: lib version configured: 0.58.1
+05-26 11:45:52.486 2726 2762 D RustNativeSupport: settled on megazord
+05-26 11:45:52.486 2726 2760 I libglean_ffi: glean_core: Creating new Glean
+05-26 11:45:52.486 2726 2762 I rc_log_ffi::ios: rc_log adapter initialized!
+05-26 11:45:52.486 2726 2760 D libglean_ffi: glean_core::database: Database path: "/data/user/0/org.mozilla.fennec_aurora/glean_data/db"
+05-26 11:45:52.487 2726 2760 I libglean_ffi: glean_core::database: Database initialized
+05-26 11:45:52.488 2726 2726 D GleanMetricsService: Enabling Glean.
+05-26 11:45:52.488 2726 2773 W Settings: Setting animator_duration_scale has moved from android.provider.Settings.System to android.provider.Settings.Global, returning read-only global URI.
+05-26 11:45:52.490 2726 2726 I AdjustMetricsService: No adjust token defined
+05-26 11:45:52.490 2726 2726 D PushConfig: Creating push configuration for autopush.
+05-26 11:45:52.497 2726 2773 E GeckoLibLoad: Load sqlite start
+05-26 11:45:52.499 2726 2726 I App : AutoPushFeature is configured, initializing it...
+05-26 11:45:52.502 2726 2726 I AutoPushFeature: Checking validity of push subscriptions.
+05-26 11:45:52.519 2726 2726 D FennecProfile: profiles.ini: false
+05-26 11:45:52.520 2726 2726 D FennecProfile: No profiles found
+05-26 11:45:52.522 2726 2760 I libglean_ffi: glean_ffi: Glean initialized
+05-26 11:45:52.523 2726 2773 E GeckoLibLoad: Load sqlite done
+05-26 11:45:52.524 2726 2773 E GeckoLibLoad: Load nss start
+05-26 11:45:52.524 2726 2773 E GeckoLibLoad: Load nss done
+05-26 11:45:52.530 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@99c3fe3)
+05-26 11:45:52.530 2726 2762 W [WARNING][Leanplum]: [com.leanplum.internal.ActionManager::getLocationManager::8]: Geofencing support requires leanplum-location module and Google Play Services v8.1 and higher.
+05-26 11:45:52.530 2726 2762 W [WARNING][Leanplum]: Add this to your build.gradle file:
+05-26 11:45:52.530 2726 2762 W [WARNING][Leanplum]: implementation 'com.google.android.gms:play-services-location:8.3.0+'
+05-26 11:45:52.530 2726 2762 W [WARNING][Leanplum]: implementation 'com.leanplum:leanplum-location:+'
+05-26 11:45:52.530 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@99c3fe3)
+05-26 11:45:52.533 2726 2762 I [INFO][Leanplum]: [com.leanplum.monitoring.ExceptionHandler::setContext::6]: LeanplumExceptionHandler could not initialize Exception Reporting.This is expected if you have not included the leanplum-monitoring module
+05-26 11:45:52.562 2726 2726 D FennecMigrator: This is not a Fennec installation. No migration needed.
+05-26 11:45:52.584 2726 2782 I [INFO][Leanplum]: [com.leanplum.LeanplumFcmProvider::isManifestSetup::11]: Firebase Messaging is setup correctly.
+05-26 11:45:52.592 2726 2726 W a.fennec_auror: Accessing hidden method Landroid/content/res/Resources$Theme;->rebase()V (dark greylist, reflection)
+05-26 11:45:52.595 2726 2726 I ResourcesCompat: Failed to retrieve rebase() method
+05-26 11:45:52.595 2726 2726 I ResourcesCompat: java.lang.NoSuchMethodException: rebase []
+05-26 11:45:52.595 2726 2726 I ResourcesCompat: at java.lang.Class.getMethod(Class.java:2068)
+05-26 11:45:52.595 2726 2726 I ResourcesCompat: at java.lang.Class.getDeclaredMethod(Class.java:2047)
+05-26 11:45:52.595 2726 2726 I ResourcesCompat: at androidx.core.content.res.ResourcesCompat$ThemeCompat$ImplApi23.rebase(ResourcesCompat.java:3)
+05-26 11:45:52.595 2726 2726 I ResourcesCompat: at androidx.appcompat.app.AppCompatActivity.attachBaseContext(AppCompatActivity.java:80)
+05-26 11:45:52.595 2726 2726 I ResourcesCompat: at mozilla.components.support.locale.LocaleAwareAppCompatActivity.attachBaseContext(LocaleAwareAppCompatActivity.kt:2)
+05-26 11:45:52.595 2726 2726 I ResourcesCompat: at android.app.Activity.attach(Activity.java:7051)
+05-26 11:45:52.595 2726 2726 I ResourcesCompat: at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2873)
+05-26 11:45:52.595 2726 2726 I ResourcesCompat: at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3048)
+05-26 11:45:52.595 2726 2726 I ResourcesCompat: at android.app.servertransaction.LaunchActivityItem.execute(LaunchActivityItem.java:78)
+05-26 11:45:52.595 2726 2726 I ResourcesCompat: at android.app.servertransaction.TransactionExecutor.executeCallbacks(TransactionExecutor.java:108)
+05-26 11:45:52.595 2726 2726 I ResourcesCompat: at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:68)
+05-26 11:45:52.595 2726 2726 I ResourcesCompat: at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1808)
+05-26 11:45:52.595 2726 2726 I ResourcesCompat: at android.os.Handler.dispatchMessage(Handler.java:106)
+05-26 11:45:52.595 2726 2726 I ResourcesCompat: at android.os.Looper.loop(Looper.java:193)
+05-26 11:45:52.595 2726 2726 I ResourcesCompat: at android.app.ActivityThread.main(ActivityThread.java:6669)
+05-26 11:45:52.595 2726 2726 I ResourcesCompat: at java.lang.reflect.Method.invoke(Native Method)
+05-26 11:45:52.595 2726 2726 I ResourcesCompat: at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+05-26 11:45:52.595 2726 2726 I ResourcesCompat: at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+05-26 11:45:52.609 2726 2773 E GeckoLibLoad: Loaded libs in 84.685000ms total, 0ms(60ms) user, 30ms(100ms) system, 0(0) faults
+05-26 11:45:52.610 2726 2773 D GeckoThread: State changed to LIBS_READY
+05-26 11:45:52.612 2726 2773 W GeckoThread: zerdatime 190872808 - runGecko
+05-26 11:45:52.613 2726 2773 D GeckoProfile: Loading profile at: null name: default
+05-26 11:45:52.614 2726 2773 D GeckoProfile: Created new profile dir.
+05-26 11:45:52.614 2726 2773 I GeckoProfile: Enqueuing profile init.
+05-26 11:45:52.616 2726 2773 D GeckoProfile: Found profile dir: /data/user/0/org.mozilla.fennec_aurora/files/mozilla/acwxvl2q.default
+05-26 11:45:52.616 2726 2773 D GeckoProfile: Attempting to write new client ID properties
+05-26 11:45:52.617 2726 2773 D GeckoProfile: Creating profile dir: /data/user/0/org.mozilla.fennec_aurora/files/mozilla/acwxvl2q.default
+05-26 11:45:52.671 2726 2760 I glean/MetricsPingSched: The application just updated. Send metrics ping now.
+05-26 11:45:52.672 2726 2760 I glean/MetricsPingSched: Collecting the 'metrics' ping, now = Tue May 26 11:45:52 EDT 2020, startup = true, reason = upgrade
+05-26 11:45:52.672 2726 2760 I libglean_ffi: glean_core::ping: Collecting metrics
+05-26 11:45:52.672 2726 2760 I libglean_ffi: glean_core::ping: Storage for metrics empty. Bailing out.
+05-26 11:45:52.672 2726 2760 I libglean_ffi: glean_core: No content for ping 'metrics', therefore no ping queued.
+05-26 11:45:52.673 2726 2760 D glean/MetricsPingSched: Scheduling the 'metrics' ping in 58447335ms
+05-26 11:45:52.678 2726 2773 I Gecko:DumpUtils: Fifo watcher disabled via pref.
+05-26 11:45:52.738 2726 2773 D GeckoSysInfo: System memory: 1494MB.
+05-26 11:45:52.740 2726 2773 D GeckoThread: State changed to JNI_READY
+05-26 11:45:52.753 2726 2812 D ServiceAllocator: org.mozilla.gecko.process.GeckoChildProcessServices$tab0 updateBindings: BACKGROUND priority, 0 importance, 2 successful binds, 0 failed binds, 0 successful unbinds, 0 failed unbinds
+05-26 11:45:52.759 2819 2819 W nec_aurora:tab: Unexpected CPU variant for X86 using defaults: x86
+05-26 11:45:52.763 1876 1893 I ActivityManager: Start proc 2819:org.mozilla.fennec_aurora:tab0/u0a92 for service org.mozilla.fennec_aurora/org.mozilla.gecko.process.GeckoChildProcessServices$tab0
+05-26 11:45:52.794 2819 2819 I nec_aurora:tab: The ClassLoaderContext is a special shared library.
+05-26 11:45:52.803 2726 2726 I FirefoxAccountStateMachine: Enabling/updating sync with a new SyncConfig: SyncConfig(supportedEngines=[mozilla.components.service.fxa.SyncEngine$History@5c45c02, mozilla.components.service.fxa.SyncEngine$Bookmarks@42ff913, mozilla.components.service.fxa.SyncEngine$Passwords@e030750], syncPeriodInMinutes=240)
+05-26 11:45:52.806 2726 2726 I BgSyncManager: Periodic syncing enabled at a 240 interval
+05-26 11:45:52.807 2726 2726 I FirefoxAccountStateMachine: Sync is enabled
+05-26 11:45:52.831 2726 2844 I FirefoxAccountStateMachine: Processing event Event$Init for state Start. Next state is Start
+05-26 11:45:52.842 2819 2819 D GeckoThread: State changed to LAUNCHED
+05-26 11:45:52.843 2819 2847 I GeckoThread: preparing to run Gecko
+05-26 11:45:52.849 2726 2726 E ActivityThread: Failed to find provider info for org.mozilla.fennec_aurora.fxa.auth
+05-26 11:45:52.873 1739 1739 I keystore: del USRPKEY_org.mozilla.fennec_aurora 10092
+05-26 11:45:52.874 1739 1739 I keystore: del USRCERT_org.mozilla.fennec_aurora 10092
+05-26 11:45:52.874 1739 1739 I keystore: del CACERT_org.mozilla.fennec_aurora 10092
+05-26 11:45:52.884 2726 2726 D GeckoRuntime: Lifecycle: onStart
+05-26 11:45:52.895 2726 2726 D GeckoRuntime: Lifecycle: onResume
+05-26 11:45:52.899 2726 2726 D GeckoNetworkManager: Incoming event start for state OffNoListeners -> OnNoListeners
+05-26 11:45:52.900 2726 2726 D GeckoNetworkManager: New network state: UP, WIFI, WIFI
+05-26 11:45:52.905 2726 2844 I FirefoxAccountStateMachine: Ran 'Event$Init' side-effects for state Start, got successive event Event$AccountNotFound
+05-26 11:45:52.905 2726 2844 I FirefoxAccountStateMachine: Processing event Event$AccountNotFound for state Start. Next state is NotAuthenticated
+05-26 11:45:52.906 2726 2726 D OpenGLRenderer: Skia GL Pipeline
+05-26 11:45:52.911 2726 2844 D RustNativeSupport: findMegazordLibraryName(fxaclient, 0.58.1
+05-26 11:45:52.911 2726 2844 D RustNativeSupport: lib in use: none
+05-26 11:45:52.911 2726 2844 D RustNativeSupport: lib configured: megazord
+05-26 11:45:52.912 2726 2844 D RustNativeSupport: lib version configured: 0.58.1
+05-26 11:45:52.912 2726 2844 D RustNativeSupport: settled on megazord
+05-26 11:45:52.913 2726 2844 D fxaclient_ffi: fxa_new
+05-26 11:45:52.920 2726 2782 D NetworkSecurityConfig: No Network Security Config specified, using platform default
+05-26 11:45:52.927 1733 2119 E SurfaceFlinger: ro.sf.lcd_density must be defined as a build property
+05-26 11:45:52.972 2726 2760 I libglean_ffi: glean_core::ping: Collecting baseline
+05-26 11:45:52.979 2726 2726 D GeckoNetworkManager: Incoming event receivedUpdate for state OnNoListeners -> OnNoListeners
+05-26 11:45:52.979 2726 2844 W FirefoxAccountStateMachine: Got invalid event Event$Init for state NotAuthenticated.
+05-26 11:45:52.981 2726 2726 D GeckoNetworkManager: New network state: UP, WIFI, WIFI
+05-26 11:45:52.995 2726 2760 D libglean_ffi: glean_core::ping: Storing ping 'f465c424-af4c-4a9b-bfc4-c1f4a2161777' at '/data/user/0/org.mozilla.fennec_aurora/glean_data/pending_pings/f465c424-af4c-4a9b-bfc4-c1f4a2161777'
+05-26 11:45:52.995 2726 2760 I libglean_ffi: glean_core: The ping 'baseline' was submitted and will be sent as soon as possible
+05-26 11:45:53.041 2726 2744 I a.fennec_auror: Background concurrent copying GC freed 8507(1030KB) AllocSpace objects, 24(1248KB) LOS objects, 49% free, 3MB/6MB, paused 878us total 103.693ms
+05-26 11:45:53.053 1623 1623 D gralloc_ranchu: gralloc_alloc: Creating ashmem region of size 9334784
+05-26 11:45:53.061 1733 2205 D : HostConnection::get() New Host Connection established 0xed960980, tid 2205
+05-26 11:45:53.061 1623 1623 D gralloc_ranchu: gralloc_alloc: Creating ashmem region of size 9334784
+05-26 11:45:53.088 1876 16736 D ConnectivityService: requestNetwork for uid/pid:10092/2726 NetworkRequest [ TRACK_DEFAULT id=236, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10092] ]
+05-26 11:45:53.068 1623 1623 D gralloc_ranchu: gralloc_alloc: Creating ashmem region of size 9334784
+05-26 11:45:53.090 1876 1975 D WIFI : got request NetworkRequest [ TRACK_DEFAULT id=236, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10092] ] with score 60
+05-26 11:45:53.090 1876 1975 D WIFI_UT : got request NetworkRequest [ TRACK_DEFAULT id=236, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10092] ] with score 60
+05-26 11:45:53.091 2131 2131 D PhoneSwitcherNetworkRequstListener: got request NetworkRequest [ TRACK_DEFAULT id=236, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10092] ] with score 60
+05-26 11:45:53.139 2726 2853 D : HostConnection::get() New Host Connection established 0xe5eeb400, tid 2853
+05-26 11:45:53.147 2726 2853 I ConfigStore: android::hardware::configstore::V1_0::ISurfaceFlingerConfigs::hasWideColorDisplay retrieved: 0
+05-26 11:45:53.147 2726 2853 I ConfigStore: android::hardware::configstore::V1_0::ISurfaceFlingerConfigs::hasHDRDisplay retrieved: 0
+05-26 11:45:53.147 2726 2853 I OpenGLRenderer: Initialized EGL, version 1.4
+05-26 11:45:53.147 2726 2853 D OpenGLRenderer: Swap behavior 1
+05-26 11:45:53.147 2726 2853 W OpenGLRenderer: Failed to choose config with EGL_SWAP_BEHAVIOR_PRESERVED, retrying without...
+05-26 11:45:53.147 2726 2853 D OpenGLRenderer: Swap behavior 0
+05-26 11:45:53.148 2726 2853 D EGL_emulation: eglCreateContext: 0xd25033c0: maj 3 min 0 rcv 3
+05-26 11:45:53.149 2726 2853 D EGL_emulation: eglMakeCurrent: 0xd25033c0: ver 3 0 (tinfo 0xe7113e50)
+05-26 11:45:53.152 1733 2205 E SurfaceFlinger: ro.sf.lcd_density must be defined as a build property
+05-26 11:45:53.231 2726 2853 D EGL_emulation: eglMakeCurrent: 0xd25033c0: ver 3 0 (tinfo 0xe7113e50)
+05-26 11:45:53.271 2726 2726 D MigrationPushRenewer: Migration state: NONE
+05-26 11:45:53.271 2726 2726 D MigrationTelemetryListener: Migration state: NONE
+05-26 11:45:53.271 2000 2000 I GoogleInputMethod: onFinishInput() : Dummy InputConnection bound
+05-26 11:45:53.272 2000 2000 I GoogleInputMethod: onStartInput() : Dummy InputConnection bound
+05-26 11:45:53.275 2726 2726 D GeckoNetworkManager: Incoming event enableNotifications for state OnNoListeners -> OnWithListeners
+05-26 11:45:53.277 2726 2726 D GeckoNetworkManager: New network state: UP, WIFI, WIFI
+05-26 11:45:53.277 1876 16736 W ActivityManager: Receiver with filter android.content.IntentFilter@513011f already registered for pid 2726, callerPackage is org.mozilla.fennec_aurora
+05-26 11:45:53.283 2726 2882 D glean/PingUploadWorker: Processing persisted pings at /data/user/0/org.mozilla.fennec_aurora/glean_data/pending_pings
+05-26 11:45:53.283 2726 2882 D glean/PingUploadWorker: Processing ping: f465c424-af4c-4a9b-bfc4-c1f4a2161777
+05-26 11:45:53.284 2726 2882 D glean/ConceptFetchHttpUploader: Submitting ping to: https://incoming.telemetry.mozilla.org/submit/org-mozilla-fennec-aurora/baseline/1/f465c424-af4c-4a9b-bfc4-c1f4a2161777
+05-26 11:45:53.285 1876 1899 I ActivityManager: Displayed org.mozilla.fennec_aurora/org.mozilla.fenix.HomeActivity: +1s325ms
+05-26 11:45:53.287 2726 2726 E ActivityThread: Failed to find provider info for org.mozilla.fennec_aurora.fxa.auth
+05-26 11:45:53.291 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:53.307 2726 2726 D GeckoNetworkManager: Incoming event receivedUpdate for state OnWithListeners -> OnWithListeners
+05-26 11:45:53.307 2726 2726 D GeckoNetworkManager: New network state: UP, WIFI, WIFI
+05-26 11:45:53.396 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:53.462 1876 1940 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:53.463 1876 1940 D : HostConnection::get() New Host Connection established 0xcf15cd00, tid 1940
+05-26 11:45:53.463 1876 1940 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:53.464 1876 1940 D : HostConnection::get() New Host Connection established 0xcf15cd00, tid 1940
+05-26 11:45:53.465 1876 1940 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:53.466 1876 1940 D : HostConnection::get() New Host Connection established 0xcf15cd00, tid 1940
+05-26 11:45:53.467 1876 1940 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:53.467 1733 1759 W SurfaceFlinger: Attempting to set client state on removed layer: Splash Screen org.mozilla.fennec_aurora#0
+05-26 11:45:53.467 1733 1759 W SurfaceFlinger: Attempting to destroy on removed layer: Splash Screen org.mozilla.fennec_aurora#0
+05-26 11:45:53.477 1904 1911 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:53.478 1904 1911 D : HostConnection::get() New Host Connection established 0xe6d43140, tid 1911
+05-26 11:45:53.478 1904 1911 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:53.479 1904 1911 D : HostConnection::get() New Host Connection established 0xe6d43140, tid 1911
+05-26 11:45:53.479 1904 1911 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:53.479 1904 1911 D : HostConnection::get() New Host Connection established 0xe6d43140, tid 1911
+05-26 11:45:53.479 1904 1911 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:45:53.487 1904 1911 D : HostConnection::get() New Host Connection established 0xe6d43140, tid 1911
+05-26 11:45:53.498 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:53.529 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@ca37570)
+05-26 11:45:53.530 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@ca37570)
+05-26 11:45:53.546 2726 2726 I [INFO][Leanplum]: [com.leanplum.LeanplumCloudMessagingProvider::onRegistrationIdReceived::3]: Device registered for push notifications with registration token, cLw1ufUAypU:APA91bHwfEq1krMCzf_jHv5X8dBYH06lrQ2wpvlxNyYV8_MykbUmRTposZZdoFxensMQrmcByfnWHr-LSb3YUIsBTDKhSf42XmbTB1pI8PFxQQT0aoQU6n1Xba4dyepXUu_1ENC-UDHA
+05-26 11:45:53.600 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:53.669 2726 2744 I a.fennec_auror: Background concurrent copying GC freed 20991(1619KB) AllocSpace objects, 35(892KB) LOS objects, 49% free, 4MB/8MB, paused 2.056ms total 174.646ms
+05-26 11:45:53.674 2726 2896 I [INFO][Leanplum]: [com.leanplum.LeanplumCloudMessagingProvider::onRegistrationIdReceived::3]: Device registered for push notifications with registration token, cLw1ufUAypU:APA91bHwfEq1krMCzf_jHv5X8dBYH06lrQ2wpvlxNyYV8_MykbUmRTposZZdoFxensMQrmcByfnWHr-LSb3YUIsBTDKhSf42XmbTB1pI8PFxQQT0aoQU6n1Xba4dyepXUu_1ENC-UDHA
+05-26 11:45:53.689 2726 2897 I AutoPushFeature: Received a new registration token from push service.
+05-26 11:45:53.691 2726 2897 D RustNativeSupport: findMegazordLibraryName(push, 0.58.1
+05-26 11:45:53.691 2726 2897 D RustNativeSupport: lib in use: none
+05-26 11:45:53.691 2726 2897 D RustNativeSupport: lib configured: megazord
+05-26 11:45:53.691 2726 2897 D RustNativeSupport: lib version configured: 0.58.1
+05-26 11:45:53.691 2726 2897 D RustNativeSupport: settled on megazord
+05-26 11:45:53.702 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:53.715 1733 1733 W SurfaceFlinger: couldn't log to binary event log: overflow.
+05-26 11:45:53.771 2726 2773 D GeckoThread: State changed to PROFILE_READY
+05-26 11:45:53.802 2726 2773 D GeckoThread: State changed to RUNNING
+05-26 11:45:53.807 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:53.810 2726 2773 I Gecko : -*- nsDNSServiceDiscovery.js : nsDNSServiceDiscovery
+05-26 11:45:53.825 2534 2004 I PBSessionCacheImpl: Deleted sessionId[290782986459] from persistence.
+05-26 11:45:53.832 2726 2773 I Gecko : 1590507953832 Marionette TRACE Marionette enabled
+05-26 11:45:53.836 2534 2534 W SearchService: Abort, client detached.
+05-26 11:45:53.915 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:53.929 2726 2726 I DefaultSupportedAddonsChecker: Register check for new supported add-ons
+05-26 11:45:53.941 2726 2773 I Gecko : 1590507953941 Marionette TRACE Received observer notification marionette-startup-requested
+05-26 11:45:53.942 2726 2773 I Gecko : 1590507953942 Marionette TRACE Waiting until startup recorder finished recording startup scripts...
+05-26 11:45:53.976 2726 2858 I SupportedAddonsWorker: Trying to check for new supported add-ons
+05-26 11:45:54.017 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:54.125 4313 4313 I chatty : uid=0(root) /system/bin/adbd identical 1 line
+05-26 11:45:54.229 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:54.241 2726 2726 D App : Installed browser-icons extension
+05-26 11:45:54.315 2726 2773 D : HostConnection::get() New Host Connection established 0xd2559d80, tid 2773
+05-26 11:45:54.317 2726 2773 E EGL_emulation: tid 2773: eglBindAPI(1259): error 0x300c (EGL_BAD_PARAMETER)
+05-26 11:45:54.319 2726 2773 D EGL_emulation: eglCreateContext: 0xee947400: maj 3 min 0 rcv 3
+05-26 11:45:54.321 2726 2773 D EGL_emulation: eglMakeCurrent: 0xee947400: ver 3 0 (tinfo 0xe7113aa0)
+05-26 11:45:54.331 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:54.435 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:54.501 2726 2882 D glean/ConceptFetchHttpUploader: Ping successfully sent (200)
+05-26 11:45:54.501 2726 2882 D glean/PingUploadWorker: f465c424-af4c-4a9b-bfc4-c1f4a2161777 was deleted: true
+05-26 11:45:54.505 2726 2770 I WM-WorkerWrapper: Worker result SUCCESS for Work [ id=82564a1f-761f-43fa-a170-fb28e9f6e99e, tags={ mozilla.telemetry.glean.scheduler.PingUploadWorker, mozac_service_glean_ping_upload_worker } ]
+05-26 11:45:54.511 1876 1978 D ConnectivityService: releasing NetworkRequest [ TRACK_DEFAULT id=236, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10092] ] (release request)
+05-26 11:45:54.531 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@383f15d)
+05-26 11:45:54.531 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@383f15d)
+05-26 11:45:54.539 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:54.641 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:54.714 2726 2726 D mozac-webcompat: Installed WebCompat webextension: webcompat@mozilla.com
+05-26 11:45:54.724 2726 2773 I Gecko : 1590507954724 Marionette TRACE All scripts recorded.
+05-26 11:45:54.725 2726 2773 I Gecko : 1590507954725 Marionette DEBUG Setting recommended pref apz.content_response_timeout to 60000
+05-26 11:45:54.725 2726 2773 I Gecko : 1590507954725 Marionette DEBUG Setting recommended pref browser.contentblocking.introCount to 99
+05-26 11:45:54.725 2726 2773 I Gecko : 1590507954725 Marionette DEBUG Setting recommended pref browser.download.panel.shown to true
+05-26 11:45:54.725 2726 2773 I Gecko : 1590507954725 Marionette DEBUG Setting recommended pref browser.newtabpage.enabled to false
+05-26 11:45:54.726 2726 2773 I Gecko : 1590507954726 Marionette DEBUG Setting recommended pref browser.safebrowsing.malware.enabled to false
+05-26 11:45:54.730 2726 2773 I Gecko : 1590507954730 Marionette DEBUG Setting recommended pref browser.safebrowsing.phishing.enabled to false
+05-26 11:45:54.733 2726 2773 I Gecko : 1590507954733 Marionette DEBUG Setting recommended pref browser.search.update to false
+05-26 11:45:54.733 2726 2773 I Gecko : 1590507954733 Marionette DEBUG Setting recommended pref browser.tabs.disableBackgroundZombification to false
+05-26 11:45:54.733 2726 2773 I Gecko : 1590507954733 Marionette DEBUG Setting recommended pref browser.tabs.remote.separatePrivilegedContentProcess to false
+05-26 11:45:54.734 2726 2773 I Gecko : 1590507954734 Marionette DEBUG Setting recommended pref browser.tabs.unloadOnLowMemory to false
+05-26 11:45:54.734 2726 2773 I Gecko : 1590507954734 Marionette DEBUG Setting recommended pref browser.tabs.warnOnCloseOtherTabs to false
+05-26 11:45:54.734 2726 2773 I Gecko : 1590507954734 Marionette DEBUG Setting recommended pref browser.tabs.warnOnOpen to false
+05-26 11:45:54.734 2726 2773 I Gecko : 1590507954734 Marionette DEBUG Setting recommended pref browser.usedOnWindows10.introURL to
+05-26 11:45:54.735 2726 2773 I Gecko : 1590507954735 Marionette DEBUG Setting recommended pref browser.urlbar.suggest.searches to false
+05-26 11:45:54.736 2726 2773 I Gecko : 1590507954736 Marionette DEBUG Setting recommended pref dom.disable_beforeunload to true
+05-26 11:45:54.736 2726 2773 I Gecko : 1590507954736 Marionette DEBUG Setting recommended pref dom.file.createInChild to true
+05-26 11:45:54.736 2726 2773 I Gecko : 1590507954736 Marionette DEBUG Setting recommended pref extensions.getAddons.cache.enabled to false
+05-26 11:45:54.736 2726 2773 I Gecko : 1590507954736 Marionette DEBUG Setting recommended pref network.http.prompt-temp-redirect to false
+05-26 11:45:54.737 2726 2773 I Gecko : 1590507954737 Marionette DEBUG Setting recommended pref security.notification_enable_delay to 0
+05-26 11:45:54.738 2726 2773 I Gecko : 1590507954737 Marionette DEBUG Setting recommended pref signon.autofillForms to false
+05-26 11:45:54.738 2726 2773 I Gecko : 1590507954738 Marionette DEBUG Setting recommended pref signon.rememberSignons to false
+05-26 11:45:54.738 2726 2773 I Gecko : 1590507954738 Marionette DEBUG Setting recommended pref toolkit.cosmeticAnimations.enabled to false
+05-26 11:45:54.743 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:45:54.809 2726 2773 I Gecko : 1590507954808 Marionette INFO Listening on port 2829
+05-26 11:45:54.809 2726 2773 I Gecko : 1590507954809 Marionette DEBUG Marionette is listening
+05-26 11:45:54.868 2726 2773 I Gecko : 1590507954868 Marionette DEBUG Accepted connection 0 from 127.0.0.1:56755
+05-26 11:45:54.879 2726 2773 I Gecko : 1590507954879 Marionette DEBUG 0 -> [0,1,"WebDriver:NewSession",{"browserName":"firefox","pageLoadStrategy":"none"}]
+05-26 11:45:55.535 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@da64ad2)
+05-26 11:45:55.536 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@da64ad2)
+05-26 11:45:55.540 2482 6319 W ctxmgr : [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):IndoorOutdoorProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2482). Was: 3 for 57, account#-517948760#
+05-26 11:45:55.567 2726 2770 I WM-WorkerWrapper: Worker result SUCCESS for Work [ id=1c2acf6f-dff9-4397-a0d6-bcd151200102, tags={ mozilla.components.feature.addons.migration.DefaultSupportedAddonsChecker.periodicWork, mozilla.components.feature.addons.migration.SupportedAddonsWorker } ]
+05-26 11:45:56.065 2958 2958 I lla.fenix.debu: Not late-enabling -Xcheck:jni (already on)
+05-26 11:45:56.071 1876 1893 I ActivityManager: Start proc 2958:org.mozilla.fenix.debug/u0a91 for service org.mozilla.fenix.debug/androidx.work.impl.background.systemjob.SystemJobService
+05-26 11:45:56.085 2958 2958 W lla.fenix.debu: Unexpected CPU variant for X86 using defaults: x86
+05-26 11:45:56.143 2958 2958 I lla.fenix.debu: The ClassLoaderContext is a special shared library.
+05-26 11:45:56.540 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@7677aff)
+05-26 11:45:56.540 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@7677aff)
+05-26 11:45:56.555 1876 4916 E memtrack: Couldn't load memtrack module
+05-26 11:45:56.555 1876 4916 W android.os.Debug: failed to get memory consumption info: -1
+05-26 11:45:56.786 2534 2016 I EventLogSendingHelper: Sending log events.
+05-26 11:45:56.796 2958 2958 D FirebaseApp: Default FirebaseApp failed to initialize because no default options were found. This usually means that com.google.gms:google-services was not applied to your gradle project.
+05-26 11:45:56.797 2958 2958 I FirebaseInitProvider: FirebaseApp initialization unsuccessful
+05-26 11:45:56.962 2958 2958 D FenixApplication: Initializing Glean (uploadEnabled=true, isFennec=false)
+05-26 11:45:57.003 2958 2984 D RustNativeSupport: findMegazordLibraryName(viaduct, 0.59.0
+05-26 11:45:57.003 2958 2984 D RustNativeSupport: lib in use: none
+05-26 11:45:57.003 2958 2984 D RustNativeSupport: lib configured: megazord
+05-26 11:45:57.003 2958 2984 D RustNativeSupport: lib version configured: 0.59.0
+05-26 11:45:57.003 2958 2984 D RustNativeSupport: settled on megazord
+05-26 11:45:57.158 2958 2983 D libglean_ffi: glean_ffi: Android logging should be hooked up!
+05-26 11:45:57.162 2958 2984 D RustNativeSupport: findMegazordLibraryName(rustlog, 0.59.0
+05-26 11:45:57.163 2958 2984 D RustNativeSupport: lib in use: none
+05-26 11:45:57.163 2958 2984 D RustNativeSupport: lib configured: megazord
+05-26 11:45:57.163 2958 2984 D RustNativeSupport: lib version configured: 0.59.0
+05-26 11:45:57.163 2958 2984 D RustNativeSupport: settled on megazord
+05-26 11:45:57.171 2958 2983 I glean/Glean: Registering pings for mozilla.telemetry.glean.GleanMetrics.Pings
+05-26 11:45:57.176 2958 2984 I rc_log_ffi::ios: rc_log adapter initialized!
+05-26 11:45:57.176 2958 2983 I libglean_ffi: glean_core: Creating new Glean
+05-26 11:45:57.182 2958 2983 D libglean_ffi: glean_core::database: Database path: "/data/user/0/org.mozilla.fenix.debug/glean_data/db"
+05-26 11:45:57.186 2958 2983 I libglean_ffi: glean_core::database: Database initialized
+05-26 11:45:57.195 2958 2983 I libglean_ffi: glean_ffi: Glean initialized
+05-26 11:45:57.235 2958 2958 I GeckoRuntime: Adding debug configuration from: /data/local/tmp/org.mozilla.fenix.debug-geckoview-config.yaml
+05-26 11:45:57.235 2958 2958 D GeckoDebugConfig: Adding environment variables from debug config: {MOZ_CRASHREPORTER=1, MOZ_CRASHREPORTER_NO_REPORT=1, MOZ_CRASHREPORTER_SHUTDOWN=1}
+05-26 11:45:57.236 2958 2958 D GeckoDebugConfig: Adding arguments from debug config: [-marionette, -profile, /mnt/sdcard/org.mozilla.fenix.debug-geckodriver-profile]
+05-26 11:45:57.241 2958 2958 D GeckoThread: State changed to LAUNCHED
+05-26 11:45:57.243 2958 2993 I GeckoThread: preparing to run Gecko
+05-26 11:45:57.243 2958 2983 D glean/MetricsPingSched: The 'metrics' ping was last sent on Tue May 26 00:00:00 EDT 2020
+05-26 11:45:57.243 2958 2993 D GeckoThread: env var: MOZ_CRASHREPORTER=1
+05-26 11:45:57.243 2958 2993 D GeckoThread: env var: MOZ_CRASHREPORTER_NO_REPORT=1
+05-26 11:45:57.244 2958 2993 D GeckoThread: env var: MOZ_CRASHREPORTER_SHUTDOWN=1
+05-26 11:45:57.244 2958 2983 I glean/MetricsPingSched: The 'metrics' ping was already sent today, Tue May 26 11:45:57 EDT 2020.
+05-26 11:45:57.244 2958 2983 D glean/MetricsPingSched: Scheduling the 'metrics' ping in 58442800ms
+05-26 11:45:57.247 2958 2983 D libglean_ffi: glean_core: Clearing Lifetime::Application metrics
+05-26 11:45:57.268 2958 2958 D GeckoRuntime: Lifecycle: onCreate
+05-26 11:45:57.305 2958 2993 D GeckoThread: State changed to MOZGLUE_READY
+05-26 11:45:57.323 2958 2993 W Settings: Setting animator_duration_scale has moved from android.provider.Settings.System to android.provider.Settings.Global, returning read-only global URI.
+05-26 11:45:57.328 2958 2993 E GeckoLibLoad: Load sqlite start
+05-26 11:45:57.350 2958 2993 E GeckoLibLoad: Load sqlite done
+05-26 11:45:57.350 2958 2993 E GeckoLibLoad: Load nss start
+05-26 11:45:57.350 2958 2993 E GeckoLibLoad: Load nss done
+05-26 11:45:57.499 2958 2958 D LeakCanary: Updated AppWatcher.config: Config(no changes)
+05-26 11:45:57.542 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@b66791)
+05-26 11:45:57.543 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@b66791)
+05-26 11:45:57.555 2958 2993 E GeckoLibLoad: Loaded libs in 204.632000ms total, 10ms(220ms) user, 90ms(130ms) system, 37(37) faults
+05-26 11:45:57.557 2958 2993 D GeckoThread: State changed to LIBS_READY
+05-26 11:45:57.561 2958 2958 D LeakCanary: Updated LeakCanary.config: Config(no changes)
+05-26 11:45:57.564 2958 2993 W GeckoThread: zerdatime 190877760 - runGecko
+05-26 11:45:57.568 2958 2958 D App : DebugMetricController: start
+05-26 11:45:57.568 2958 2958 D App : DebugMetricController: start
+05-26 11:45:57.570 2958 2993 D GeckoProfile: Loading profile at: null name: default
+05-26 11:45:57.570 2958 2958 W PushConfig: No firebase configuration found; cannot support push service.
+05-26 11:45:57.570 2958 2993 D GeckoProfile: Found profile dir: /data/user/0/org.mozilla.fenix.debug/files/mozilla/u71tud99.default
+05-26 11:45:57.604 2958 2958 D StrictMode: StrictMode policy violation; ~duration=417 ms: android.os.strictmode.DiskReadViolation
+05-26 11:45:57.604 2958 2958 D StrictMode: at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+05-26 11:45:57.604 2958 2958 D StrictMode: at java.io.FileInputStream.<init>(FileInputStream.java:163)
+05-26 11:45:57.604 2958 2958 D StrictMode: at org.mozilla.gecko.util.DebugConfig.fromFile(DebugConfig.java:49)
+05-26 11:45:57.604 2958 2958 D StrictMode: at org.mozilla.geckoview.GeckoRuntime.init(GeckoRuntime.java:363)
+05-26 11:45:57.604 2958 2958 D StrictMode: at org.mozilla.geckoview.GeckoRuntime.create(GeckoRuntime.java:574)
+05-26 11:45:57.604 2958 2958 D StrictMode: at GeckoProvider.createRuntime(GeckoProvider.kt:58)
+05-26 11:45:57.604 2958 2958 D StrictMode: at GeckoProvider.getOrCreateRuntime(GeckoProvider.kt:28)
+05-26 11:45:57.604 2958 2958 D StrictMode: at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:79)
+05-26 11:45:57.604 2958 2958 D StrictMode: at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:57)
+05-26 11:45:57.604 2958 2958 D StrictMode: at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+05-26 11:45:57.604 2958 2958 D StrictMode: at org.mozilla.fenix.components.Core.getEngine(Unknown Source:7)
+05-26 11:45:57.604 2958 2958 D StrictMode: at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:128)
+05-26 11:45:57.604 2958 2958 D StrictMode: at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+05-26 11:45:57.604 2958 2958 D StrictMode: at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+05-26 11:45:57.604 2958 2958 D StrictMode: at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+05-26 11:45:57.604 2958 2958 D StrictMode: at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+05-26 11:45:57.604 2958 2958 D StrictMode: at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+05-26 11:45:57.604 2958 2958 D StrictMode: at android.os.Handler.dispatchMessage(Handler.java:106)
+05-26 11:45:57.604 2958 2958 D StrictMode: at android.os.Looper.loop(Looper.java:193)
+05-26 11:45:57.604 2958 2958 D StrictMode: at android.app.ActivityThread.main(ActivityThread.java:6669)
+05-26 11:45:57.604 2958 2958 D StrictMode: at java.lang.reflect.Method.invoke(Native Method)
+05-26 11:45:57.604 2958 2958 D StrictMode: at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+05-26 11:45:57.604 2958 2958 D StrictMode: at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+05-26 11:45:57.608 2958 2958 D StrictMode: StrictMode policy violation; ~duration=400 ms: android.os.strictmode.DiskReadViolation
+05-26 11:45:57.608 2958 2958 D StrictMode: at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+05-26 11:45:57.608 2958 2958 D StrictMode: at libcore.io.BlockGuardOs.read(BlockGuardOs.java:253)
+05-26 11:45:57.608 2958 2958 D StrictMode: at libcore.io.IoBridge.read(IoBridge.java:501)
+05-26 11:45:57.608 2958 2958 D StrictMode: at java.io.FileInputStream.read(FileInputStream.java:307)
+05-26 11:45:57.608 2958 2958 D StrictMode: at java.io.FilterInputStream.read(FilterInputStream.java:133)
+05-26 11:45:57.608 2958 2958 D StrictMode: at java.io.PushbackInputStream.read(PushbackInputStream.java:186)
+05-26 11:45:57.608 2958 2958 D StrictMode: at org.yaml.snakeyaml.reader.UnicodeReader.init(UnicodeReader.java:92)
+05-26 11:45:57.608 2958 2958 D StrictMode: at org.yaml.snakeyaml.reader.UnicodeReader.read(UnicodeReader.java:124)
+05-26 11:45:57.608 2958 2958 D StrictMode: at org.yaml.snakeyaml.reader.StreamReader.update(StreamReader.java:183)
+05-26 11:45:57.608 2958 2958 D StrictMode: at org.yaml.snakeyaml.reader.StreamReader.ensureEnoughData(StreamReader.java:176)
+05-26 11:45:57.608 2958 2958 D StrictMode: at org.yaml.snakeyaml.reader.StreamReader.ensureEnoughData(StreamReader.java:171)
+05-26 11:45:57.608 2958 2958 D StrictMode: at org.yaml.snakeyaml.reader.StreamReader.peek(StreamReader.java:126)
+05-26 11:45:57.608 2958 2958 D StrictMode: at org.yaml.snakeyaml.scanner.ScannerImpl.scanToNextToken(ScannerImpl.java:1177)
+05-26 11:45:57.608 2958 2958 D StrictMode: at org.yaml.snakeyaml.scanner.ScannerImpl.fetchMoreTokens(ScannerImpl.java:287)
+05-26 11:45:57.608 2958 2958 D StrictMode: at org.yaml.snakeyaml.scanner.ScannerImpl.checkToken(ScannerImpl.java:227)
+05-26 11:45:57.608 2958 2958 D StrictMode: at org.yaml.snakeyaml.parser.ParserImpl$ParseImplicitDocumentStart.produce(ParserImpl.java:195)
+05-26 11:45:57.608 2958 2958 D StrictMode: at org.yaml.snakeyaml.parser.ParserImpl.peekEvent(ParserImpl.java:158)
+05-26 11:45:57.608 2958 2958 D StrictMode: at org.yaml.snakeyaml.parser.ParserImpl.checkEvent(ParserImpl.java:148)
+05-26 11:45:57.608 2958 2958 D StrictMode: at org.yaml.snakeyaml.composer.Composer.getSingleNode(Composer.java:107)
+05-26 11:45:57.608 2958 2958 D StrictMode: at org.yaml.snakeyaml.constructor.BaseConstructor.getSingleData(BaseConstructor.java:141)
+05-26 11:45:57.608 2958 2958 D StrictMode: at org.yaml.snakeyaml.Yaml.loadFromReader(Yaml.java:525)
+05-26 11:45:57.608 2958 2958 D StrictMode: at org.yaml.snakeyaml.Yaml.load(Yaml.java:453)
+05-26 11:45:57.608 2958 2958 D StrictMode: at org.mozilla.gecko.util.DebugConfig.fromFile(DebugConfig.java:51)
+05-26 11:45:57.608 2958 2958 D StrictMode: at org.mozilla.geckoview.GeckoRuntime.init(GeckoRuntime.java:363)
+05-26 11:45:57.608 2958 2958 D StrictMode: at org.mozilla.geckoview.GeckoRuntime.create(GeckoRuntime.java:574)
+05-26 11:45:57.608 2958 2958 D StrictMode: at GeckoProvider.createRuntime(GeckoProvider.kt:58)
+05-26 11:45:57.608 2958 2958 D StrictMode: at GeckoProvider.getOrCreateRuntime(GeckoProvider.kt:28)
+05-26 11:45:57.608 2958 2958 D StrictMode: at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:79)
+05-26 11:45:57.608 2958 2958 D StrictMode: at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:57)
+05-26 11:45:57.608 2958 2958 D StrictMode: at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+05-26 11:45:57.608 2958 2958 D StrictMode: at org.mozilla.fenix.components.Core.getEngine(Unknown Source:7)
+05-26 11:45:57.608 2958 2958 D StrictMode: at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:128)
+05-26 11:45:57.608 2958 2958 D StrictMode: at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+05-26 11:45:57.608 2958 2958 D StrictMode: at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+05-26 11:45:57.608 2958 2958 D StrictMode: at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+05-26 11:45:57.608 2958 2958 D StrictMode: at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+05-26 11:45:57.608 2958 2958 D StrictMode: at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+05-26 11:45:57.608 2958 2958 D StrictMode: at android.os.Handler.dispatchMessage(Handler.java:106)
+05-26 11:45:57.608 2958 2958 D StrictMode: at android.os.Looper.loop(Looper.java:193)
+05-26 11:45:57.608 2958 2958 D StrictMode: at android.app.ActivityThread.main(ActivityThread.java:6669)
+05-26 11:45:57.608 2958 2958 D StrictMode: at java.lang.reflect.Method.invoke(Native Method)
+05-26 11:45:57.608 2958 2958 D StrictMode: at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+05-26 11:45:57.608 2958 2958 D StrictMode: at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+05-26 11:45:57.611 2958 2958 D StrictMode: StrictMode policy violation; ~duration=398 ms: android.os.strictmode.DiskReadViolation
+05-26 11:45:57.611 2958 2958 D StrictMode: at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+05-26 11:45:57.611 2958 2958 D StrictMode: at libcore.io.BlockGuardOs.read(BlockGuardOs.java:253)
+05-26 11:45:57.611 2958 2958 D StrictMode: at libcore.io.IoBridge.read(IoBridge.java:501)
+05-26 11:45:57.611 2958 2958 D StrictMode: at java.io.FileInputStream.read(FileInputStream.java:307)
+05-26 11:45:57.611 2958 2958 D StrictMode: at java.io.FilterInputStream.read(FilterInputStream.java:133)
+05-26 11:45:57.611 2958 2958 D StrictMode: at java.io.PushbackInputStream.read(PushbackInputStream.java:186)
+05-26 11:45:57.611 2958 2958 D StrictMode: at sun.nio.cs.StreamDecoder.readBytes(StreamDecoder.java:288)
+05-26 11:45:57.611 2958 2958 D StrictMode: at sun.nio.cs.StreamDecoder.implRead(StreamDecoder.java:351)
+05-26 11:45:57.611 2958 2958 D StrictMode: at sun.nio.cs.StreamDecoder.read(StreamDecoder.java:180)
+05-26 11:45:57.611 2958 2958 D StrictMode: at java.io.InputStreamReader.read(InputStreamReader.java:184)
+05-26 11:45:57.611 2958 2958 D StrictMode: at org.yaml.snakeyaml.reader.UnicodeReader.read(UnicodeReader.java:125)
+05-26 11:45:57.611 2958 2958 D StrictMode: at org.yaml.snakeyaml.reader.StreamReader.update(StreamReader.java:183)
+05-26 11:45:57.611 2958 2958 D StrictMode: at org.yaml.snakeyaml.reader.StreamReader.ensureEnoughData(StreamReader.java:176)
+05-26 11:45:57.611 2958 2958 D StrictMode: at org.yaml.snakeyaml.reader.StreamReader.ensureEnoughData(StreamReader.java:171)
+05-26 11:45:57.611 2958 2958 D StrictMode: at org.yaml.snakeyaml.reader.StreamReader.peek(StreamReader.java:126)
+05-26 11:45:57.611 2958 2958 D StrictMode: at org.yaml.snakeyaml.scanner.ScannerImpl.scanToNextToken(ScannerImpl.java:1177)
+05-26 11:45:57.611 2958 2958 D StrictMode: at org.yaml.snakeyaml.scanner.ScannerImpl.fetchMoreTokens(ScannerImpl.java:287)
+05-26 11:45:57.611 2958 2958 D StrictMode: at org.yaml.snakeyaml.scanner.ScannerImpl.checkToken(ScannerImpl.java:227)
+05-26 11:45:57.611 2958 2958 D StrictMode: at org.yaml.snakeyaml.parser.ParserImpl$ParseImplicitDocumentStart.produce(ParserImpl.java:195)
+05-26 11:45:57.611 2958 2958 D StrictMode: at org.yaml.snakeyaml.parser.ParserImpl.peekEvent(ParserImpl.java:158)
+05-26 11:45:57.611 2958 2958 D StrictMode: at org.yaml.snakeyaml.parser.ParserImpl.checkEvent(ParserImpl.java:148)
+05-26 11:45:57.611 2958 2958 D StrictMode: at org.yaml.snakeyaml.composer.Composer.getSingleNode(Composer.java:107)
+05-26 11:45:57.611 2958 2958 D StrictMode: at org.yaml.snakeyaml.constructor.BaseConstructor.getSingleData(BaseConstructor.java:141)
+05-26 11:45:57.611 2958 2958 D StrictMode: at org.yaml.snakeyaml.Yaml.loadFromReader(Yaml.java:525)
+05-26 11:45:57.611 2958 2958 D StrictMode: at org.yaml.snakeyaml.Yaml.load(Yaml.java:453)
+05-26 11:45:57.611 2958 2958 D StrictMode: at org.mozilla.gecko.util.DebugConfig.fromFile(DebugConfig.java:51)
+05-26 11:45:57.611 2958 2958 D StrictMode: at org.mozilla.geckoview.GeckoRuntime.init(GeckoRuntime.java:363)
+05-26 11:45:57.611 2958 2958 D StrictMode: at org.mozilla.geckoview.GeckoRuntime.create(GeckoRuntime.java:574)
+05-26 11:45:57.611 2958 2958 D StrictMode: at GeckoProvider.createRuntime(GeckoProvider.kt:58)
+05-26 11:45:57.611 2958 2958 D StrictMode: at GeckoProvider.getOrCreateRuntime(GeckoProvider.kt:28)
+05-26 11:45:57.611 2958 2958 D StrictMode: at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:79)
+05-26 11:45:57.611 2958 2958 D StrictMode: at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:57)
+05-26 11:45:57.611 2958 2958 D StrictMode: at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+05-26 11:45:57.611 2958 2958 D StrictMode: at org.mozilla.fenix.components.Core.getEngine(Unknown Source:7)
+05-26 11:45:57.611 2958 2958 D StrictMode: at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:128)
+05-26 11:45:57.611 2958 2958 D StrictMode: at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+05-26 11:45:57.611 2958 2958 D StrictMode: at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+05-26 11:45:57.611 2958 2958 D StrictMode: at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+05-26 11:45:57.611 2958 2958 D StrictMode: at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+05-26 11:45:57.611 2958 2958 D StrictMode: at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+05-26 11:45:57.611 2958 2958 D StrictMode: at android.os.Handler.dispatchMessage(Handler.java:106)
+05-26 11:45:57.611 2958 2958 D StrictMode: at android.os.Looper.loop(Looper.java:193)
+05-26 11:45:57.611 2958 2958 D StrictMode: at android.app.ActivityThread.main(ActivityThread.java:6669)
+05-26 11:45:57.611 2958 2958 D StrictMode: at java.lang.reflect.Method.invoke(Native Method)
+05-26 11:45:57.611 2958 2958 D StrictMode: at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+05-26 11:45:57.611 2958 2958 D StrictMode: at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+05-26 11:45:57.620 2958 2958 D StrictMode: StrictMode policy violation; ~duration=374 ms: android.os.strictmode.DiskReadViolation
+05-26 11:45:57.620 2958 2958 D StrictMode: at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+05-26 11:45:57.620 2958 2958 D StrictMode: at libcore.io.BlockGuardOs.read(BlockGuardOs.java:253)
+05-26 11:45:57.620 2958 2958 D StrictMode: at libcore.io.IoBridge.read(IoBridge.java:501)
+05-26 11:45:57.620 2958 2958 D StrictMode: at java.io.FileInputStream.read(FileInputStream.java:307)
+05-26 11:45:57.620 2958 2958 D StrictMode: at java.io.FilterInputStream.read(FilterInputStream.java:133)
+05-26 11:45:57.620 2958 2958 D StrictMode: at java.io.PushbackInputStream.read(PushbackInputStream.java:186)
+05-26 11:45:57.620 2958 2958 D StrictMode: at sun.nio.cs.StreamDecoder.readBytes(StreamDecoder.java:288)
+05-26 11:45:57.620 2958 2958 D StrictMode: at sun.nio.cs.StreamDecoder.implRead(StreamDecoder.java:351)
+05-26 11:45:57.620 2958 2958 D StrictMode: at sun.nio.cs.StreamDecoder.read(StreamDecoder.java:180)
+05-26 11:45:57.620 2958 2958 D StrictMode: at java.io.InputStreamReader.read(InputStreamReader.java:184)
+05-26 11:45:57.620 2958 2958 D StrictMode: at org.yaml.snakeyaml.reader.UnicodeReader.read(UnicodeReader.java:125)
+05-26 11:45:57.620 2958 2958 D StrictMode: at org.yaml.snakeyaml.reader.StreamReader.update(StreamReader.java:183)
+05-26 11:45:57.620 2958 2958 D StrictMode: at org.yaml.snakeyaml.reader.StreamReader.ensureEnoughData(StreamReader.java:176)
+05-26 11:45:57.620 2958 2958 D StrictMode: at org.yaml.snakeyaml.reader.StreamReader.peek(StreamReader.java:136)
+05-26 11:45:57.620 2958 2958 D StrictMode: at org.yaml.snakeyaml.scanner.ScannerImpl.scanPlain(ScannerImpl.java:1999)
+05-26 11:45:57.620 2958 2958 D StrictMode: at org.yaml.snakeyaml.scanner.ScannerImpl.fetchPlain(ScannerImpl.java:1044)
+05-26 11:45:57.620 2958 2958 D StrictMode: at org.yaml.snakeyaml.scanner.ScannerImpl.fetchMoreTokens(ScannerImpl.java:399)
+05-26 11:45:57.620 2958 2958 D StrictMode: at org.yaml.snakeyaml.scanner.ScannerImpl.checkToken(ScannerImpl.java:227)
+05-26 11:45:57.620 2958 2958 D StrictMode: at org.yaml.snakeyaml.parser.ParserImpl$ParseBlockSequenceEntry.produce(ParserImpl.java:504)
+05-26 11:45:57.620 2958 2958 D StrictMode: at org.yaml.snakeyaml.parser.ParserImpl.peekEvent(ParserImpl.java:158)
+05-26 11:45:57.620 2958 2958 D StrictMode: at org.yaml.snakeyaml.parser.ParserImpl.checkEvent(ParserImpl.java:148)
+05-26 11:45:57.620 2958 2958 D StrictMode: at org.yaml.snakeyaml.composer.Composer.composeSequenceNode(Composer.java:188)
+05-26 11:45:57.620 2958 2958 D StrictMode: at org.yaml.snakeyaml.composer.Composer.composeNode(Composer.java:142)
+05-26 11:45:57.620 2958 2958 D StrictMode: at org.yaml.snakeyaml.composer.Composer.composeValueNode(Composer.java:236)
+05-26 11:45:57.620 2958 2958 D StrictMode: at org.yaml.snakeyaml.composer.Composer.composeMappingChildren(Composer.java:227)
+05-26 11:45:57.620 2958 2958 D StrictMode: at org.yaml.snakeyaml.composer.Composer.composeMappingNode(Composer.java:215)
+05-26 11:45:57.620 2958 2958 D StrictMode: at org.yaml.snakeyaml.composer.Composer.composeNode(Composer.java:144)
+05-26 11:45:57.620 2958 2958 D StrictMode: at org.yaml.snakeyaml.composer.Composer.getNode(Composer.java:85)
+05-26 11:45:57.620 2958 2958 D StrictMode: at org.yaml.snakeyaml.composer.Composer.getSingleNode(Composer.java:108)
+05-26 11:45:57.620 2958 2958 D StrictMode: at org.yaml.snakeyaml.constructor.BaseConstructor.getSingleData(BaseConstructor.java:141)
+05-26 11:45:57.620 2958 2958 D StrictMode: at org.yaml.snakeyaml.Yaml.loadFromReader(Yaml.java:525)
+05-26 11:45:57.620 2958 2958 D StrictMode: at org.yaml.snakeyaml.Yaml.load(Yaml.java:453)
+05-26 11:45:57.620 2958 2958 D StrictMode: at org.mozilla.gecko.util.DebugConfig.fromFile(DebugConfig.java:51)
+05-26 11:45:57.620 2958 2958 D StrictMode: at org.mozilla.geckoview.GeckoRuntime.init(GeckoRuntime.java:363)
+05-26 11:45:57.620 2958 2958 D StrictMode: at org.mozilla.geckoview.GeckoRuntime.create(GeckoRuntime.java:574)
+05-26 11:45:57.620 2958 2958 D StrictMode: at GeckoProvider.createRuntime(GeckoProvider.kt:58)
+05-26 11:45:57.620 2958 2958 D StrictMode: at GeckoProvider.getOrCreateRuntime(GeckoProvider.kt:28)
+05-26 11:45:57.620 2958 2958 D StrictMode: at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:79)
+05-26 11:45:57.620 2958 2958 D StrictMode: at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:57)
+05-26 11:45:57.620 2958 2958 D StrictMode: at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+05-26 11:45:57.620 2958 2958 D StrictMode: at org.mozilla.fenix.components.Core.getEngine(Unknown Source:7)
+05-26 11:45:57.620 2958 2958 D StrictMode: at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:128)
+05-26 11:45:57.620 2958 2958 D StrictMode: at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+05-26 11:45:57.620 2958 2958 D StrictMode: at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+05-26 11:45:57.620 2958 2958 D StrictMode: at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+05-26 11:45:57.620 2958 2958 D StrictMode: at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+05-26 11:45:57.620 2958 2958 D StrictMode: at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+05-26 11:45:57.620 2958 2958 D StrictMode: at android.os.Handler.dispatchMessage(Handler.java:106)
+05-26 11:45:57.620 2958 2958 D StrictMode: at android.os.Looper.loop(Looper.java:193)
+05-26 11:45:57.620 2958 2958 D StrictMode: at android.app.ActivityThread.main(ActivityThread.java:6669)
+05-26 11:45:57.620 2958 2958 D StrictMode: at java.lang.reflect.Method.invoke(Native Method)
+05-26 11:45:57.620 2958 2958 D StrictMode: at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+05-26 11:45:57.620 2958 2958 D StrictMode: at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+05-26 11:45:57.625 2958 2958 D StrictMode: StrictMode policy violation; ~duration=327 ms: android.os.strictmode.DiskReadViolation
+05-26 11:45:57.625 2958 2958 D StrictMode: at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+05-26 11:45:57.625 2958 2958 D StrictMode: at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+05-26 11:45:57.625 2958 2958 D StrictMode: at java.io.File.exists(File.java:815)
+05-26 11:45:57.625 2958 2958 D StrictMode: at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:605)
+05-26 11:45:57.625 2958 2958 D StrictMode: at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:596)
+05-26 11:45:57.625 2958 2958 D StrictMode: at android.app.ContextImpl.getPreferencesDir(ContextImpl.java:552)
+05-26 11:45:57.625 2958 2958 D StrictMode: at android.app.ContextImpl.getSharedPreferencesPath(ContextImpl.java:747)
+05-26 11:45:57.625 2958 2958 D StrictMode: at android.app.ContextImpl.getSharedPreferences(ContextImpl.java:400)
+05-26 11:45:57.625 2958 2958 D StrictMode: at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+05-26 11:45:57.625 2958 2958 D StrictMode: at mozilla.components.browser.engine.gecko.GeckoEngine.<init>(GeckoEngine.kt:68)
+05-26 11:45:57.625 2958 2958 D StrictMode: at mozilla.components.browser.engine.gecko.GeckoEngine.<init>(GeckoEngine.kt:63)
+05-26 11:45:57.625 2958 2958 D StrictMode: at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:76)
+05-26 11:45:57.625 2958 2958 D StrictMode: at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:57)
+05-26 11:45:57.625 2958 2958 D StrictMode: at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+05-26 11:45:57.625 2958 2958 D StrictMode: at org.mozilla.fenix.components.Core.getEngine(Unknown Source:7)
+05-26 11:45:57.625 2958 2958 D StrictMode: at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:128)
+05-26 11:45:57.625 2958 2958 D StrictMode: at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+05-26 11:45:57.625 2958 2958 D StrictMode: at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+05-26 11:45:57.625 2958 2958 D StrictMode: at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+05-26 11:45:57.625 2958 2958 D StrictMode: at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+05-26 11:45:57.625 2958 2958 D StrictMode: at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+05-26 11:45:57.625 2958 2958 D StrictMode: at android.os.Handler.dispatchMessage(Handler.java:106)
+05-26 11:45:57.625 2958 2958 D StrictMode: at android.os.Looper.loop(Looper.java:193)
+05-26 11:45:57.625 2958 2958 D StrictMode: at android.app.ActivityThread.main(ActivityThread.java:6669)
+05-26 11:45:57.625 2958 2958 D StrictMode: at java.lang.reflect.Method.invoke(Native Method)
+05-26 11:45:57.625 2958 2958 D StrictMode: at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+05-26 11:45:57.625 2958 2958 D StrictMode: at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+05-26 11:45:57.629 2958 2958 D StrictMode: StrictMode policy violation; ~duration=110 ms: android.os.strictmode.DiskReadViolation
+05-26 11:45:57.629 2958 2958 D StrictMode: at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+05-26 11:45:57.629 2958 2958 D StrictMode: at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+05-26 11:45:57.629 2958 2958 D StrictMode: at java.io.File.exists(File.java:815)
+05-26 11:45:57.629 2958 2958 D StrictMode: at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:605)
+05-26 11:45:57.629 2958 2958 D StrictMode: at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:596)
+05-26 11:45:57.629 2958 2958 D StrictMode: at android.app.ContextImpl.getPreferencesDir(ContextImpl.java:552)
+05-26 11:45:57.629 2958 2958 D StrictMode: at android.app.ContextImpl.getSharedPreferencesPath(ContextImpl.java:747)
+05-26 11:45:57.629 2958 2958 D StrictMode: at android.app.ContextImpl.getSharedPreferences(ContextImpl.java:400)
+05-26 11:45:57.629 2958 2958 D StrictMode: at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+05-26 11:45:57.629 2958 2958 D StrictMode: at androidx.preference.PreferenceManager.getDefaultSharedPreferences(PreferenceManager.java:119)
+05-26 11:45:57.629 2958 2958 D StrictMode: at org.mozilla.fenix.DebugFenixApplication.setupLeakCanary(DebugFenixApplication.kt:15)
+05-26 11:45:57.629 2958 2958 D StrictMode: at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:140)
+05-26 11:45:57.629 2958 2958 D StrictMode: at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+05-26 11:45:57.629 2958 2958 D StrictMode: at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+05-26 11:45:57.629 2958 2958 D StrictMode: at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+05-26 11:45:57.629 2958 2958 D StrictMode: at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+05-26 11:45:57.629 2958 2958 D StrictMode: at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+05-26 11:45:57.629 2958 2958 D StrictMode: at android.os.Handler.dispatchMessage(Handler.java:106)
+05-26 11:45:57.629 2958 2958 D StrictMode: at android.os.Looper.loop(Looper.java:193)
+05-26 11:45:57.629 2958 2958 D StrictMode: at android.app.ActivityThread.main(ActivityThread.java:6669)
+05-26 11:45:57.629 2958 2958 D StrictMode: at java.lang.reflect.Method.invoke(Native Method)
+05-26 11:45:57.629 2958 2958 D StrictMode: at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+05-26 11:45:57.629 2958 2958 D StrictMode: at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+05-26 11:45:57.630 2958 2958 D StrictMode: StrictMode policy violation; ~duration=107 ms: android.os.strictmode.DiskReadViolation
+05-26 11:45:57.630 2958 2958 D StrictMode: at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+05-26 11:45:57.630 2958 2958 D StrictMode: at android.app.SharedPreferencesImpl.awaitLoadedLocked(SharedPreferencesImpl.java:256)
+05-26 11:45:57.630 2958 2958 D StrictMode: at android.app.SharedPreferencesImpl.getBoolean(SharedPreferencesImpl.java:325)
+05-26 11:45:57.630 2958 2958 D StrictMode: at org.mozilla.fenix.DebugFenixApplication.setupLeakCanary(DebugFenixApplication.kt:16)
+05-26 11:45:57.630 2958 2958 D StrictMode: at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:140)
+05-26 11:45:57.630 2958 2958 D StrictMode: at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+05-26 11:45:57.630 2958 2958 D StrictMode: at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+05-26 11:45:57.630 2958 2958 D StrictMode: at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+05-26 11:45:57.630 2958 2958 D StrictMode: at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+05-26 11:45:57.630 2958 2958 D StrictMode: at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+05-26 11:45:57.630 2958 2958 D StrictMode: at android.os.Handler.dispatchMessage(Handler.java:106)
+05-26 11:45:57.630 2958 2958 D StrictMode: at android.os.Looper.loop(Looper.java:193)
+05-26 11:45:57.630 2958 2958 D StrictMode: at android.app.ActivityThread.main(ActivityThread.java:6669)
+05-26 11:45:57.630 2958 2958 D StrictMode: at java.lang.reflect.Method.invoke(Native Method)
+05-26 11:45:57.630 2958 2958 D StrictMode: at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+05-26 11:45:57.630 2958 2958 D StrictMode: at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+05-26 11:45:57.635 2958 2958 D LeakCanary: LeakCanary is running and ready to detect leaks
+05-26 11:45:57.700 2958 2984 I SupportedAddonsWorker: Trying to check for new supported add-ons
+05-26 11:45:57.782 2958 2993 I Gecko:DumpUtils: Fifo watcher disabled via pref.
+05-26 11:45:58.027 2958 2993 D GeckoSysInfo: System memory: 1494MB.
+05-26 11:45:58.027 2958 2993 W lla.fenix.debu: Accessing hidden method Landroid/os/MessageQueue;->next()Landroid/os/Message; (light greylist, JNI)
+05-26 11:45:58.028 2958 2993 D StrictMode: StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/os/MessageQueue;->next()Landroid/os/Message;
+05-26 11:45:58.028 2958 2993 D StrictMode: at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+05-26 11:45:58.028 2958 2993 D StrictMode: at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+05-26 11:45:58.028 2958 2993 D StrictMode: at org.mozilla.gecko.mozglue.GeckoLoader.nativeRun(Native Method)
+05-26 11:45:58.028 2958 2993 D StrictMode: at org.mozilla.gecko.GeckoThread.run(GeckoThread.java:449)
+05-26 11:45:58.028 2958 2993 W lla.fenix.debu: Accessing hidden field Landroid/os/MessageQueue;->mMessages:Landroid/os/Message; (light greylist, JNI)
+05-26 11:45:58.029 2958 2993 D StrictMode: StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/os/MessageQueue;->mMessages:Landroid/os/Message;
+05-26 11:45:58.029 2958 2993 D StrictMode: at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+05-26 11:45:58.029 2958 2993 D StrictMode: at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+05-26 11:45:58.029 2958 2993 D StrictMode: at org.mozilla.gecko.mozglue.GeckoLoader.nativeRun(Native Method)
+05-26 11:45:58.029 2958 2993 D StrictMode: at org.mozilla.gecko.GeckoThread.run(GeckoThread.java:449)
+05-26 11:45:58.030 2958 2993 W lla.fenix.debu: Accessing hidden field Ljava/lang/Boolean;->value:Z (light greylist, JNI)
+05-26 11:45:58.030 2958 2993 D StrictMode: StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Ljava/lang/Boolean;->value:Z
+05-26 11:45:58.030 2958 2993 D StrictMode: at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+05-26 11:45:58.030 2958 2993 D StrictMode: at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+05-26 11:45:58.030 2958 2993 D StrictMode: at org.mozilla.gecko.mozglue.GeckoLoader.nativeRun(Native Method)
+05-26 11:45:58.030 2958 2993 D StrictMode: at org.mozilla.gecko.GeckoThread.run(GeckoThread.java:449)
+05-26 11:45:58.030 2958 2993 W lla.fenix.debu: Accessing hidden field Ljava/lang/Integer;->value:I (light greylist, JNI)
+05-26 11:45:58.031 2958 2993 D StrictMode: StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Ljava/lang/Integer;->value:I
+05-26 11:45:58.031 2958 2993 D StrictMode: at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+05-26 11:45:58.031 2958 2993 D StrictMode: at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+05-26 11:45:58.031 2958 2993 D StrictMode: at org.mozilla.gecko.mozglue.GeckoLoader.nativeRun(Native Method)
+05-26 11:45:58.031 2958 2993 D StrictMode: at org.mozilla.gecko.GeckoThread.run(GeckoThread.java:449)
+05-26 11:45:58.031 2958 2993 W lla.fenix.debu: Accessing hidden field Ljava/lang/Double;->value:D (light greylist, JNI)
+05-26 11:45:58.031 2958 2993 D StrictMode: StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Ljava/lang/Double;->value:D
+05-26 11:45:58.031 2958 2993 D StrictMode: at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+05-26 11:45:58.031 2958 2993 D StrictMode: at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+05-26 11:45:58.031 2958 2993 D StrictMode: at org.mozilla.gecko.mozglue.GeckoLoader.nativeRun(Native Method)
+05-26 11:45:58.031 2958 2993 D StrictMode: at org.mozilla.gecko.GeckoThread.run(GeckoThread.java:449)
+05-26 11:45:58.035 2958 2993 D GeckoThread: State changed to JNI_READY
+05-26 11:45:58.045 2958 3014 D ServiceAllocator: org.mozilla.gecko.process.GeckoChildProcessServices$tab0 updateBindings: BACKGROUND priority, 0 importance, 2 successful binds, 0 failed binds, 0 successful unbinds, 0 failed unbinds
+05-26 11:45:58.054 3020 3020 I enix.debug:tab: Not late-enabling -Xcheck:jni (already on)
+05-26 11:45:58.056 1876 1893 I ActivityManager: Start proc 3020:org.mozilla.fenix.debug:tab0/u0a91 for service org.mozilla.fenix.debug/org.mozilla.gecko.process.GeckoChildProcessServices$tab0
+05-26 11:45:58.089 3020 3020 W enix.debug:tab: Unexpected CPU variant for X86 using defaults: x86
+05-26 11:45:58.093 2726 2726 I glean/Glean: Registering pings for org.mozilla.fenix.GleanMetrics.Pings
+05-26 11:45:58.110 2726 2726 I FenixApplication: Kicking-off account manager...
+05-26 11:45:58.110 2726 2726 I FenixApplication: 'Kicking-off account manager' took 0 ms
+05-26 11:45:58.114 2726 2951 I FenixApplication: Running post-visual completeness tasks...
+05-26 11:45:58.114 2726 2951 I FenixApplication: Storage initialization...
+05-26 11:45:58.115 2726 2951 I PlacesHistoryStorage: Warming up places storage...
+05-26 11:45:58.116 2726 2951 D RustNativeSupport: findMegazordLibraryName(places, 0.58.1
+05-26 11:45:58.116 2726 2951 D RustNativeSupport: lib in use: none
+05-26 11:45:58.116 2726 2951 D RustNativeSupport: lib configured: megazord
+05-26 11:45:58.116 2726 2951 D RustNativeSupport: lib version configured: 0.58.1
+05-26 11:45:58.116 2726 2951 D RustNativeSupport: settled on megazord
+05-26 11:45:58.117 2726 2951 D places_ffi: places_api_new
+05-26 11:45:58.147 3020 3020 I enix.debug:tab: The ClassLoaderContext is a special shared library.
+05-26 11:45:58.149 2726 2951 D places::db::schema: Creating schema
+05-26 11:45:58.160 2726 2858 I App : ActivationPing - generating ping with the hashed id
+05-26 11:45:58.161 2726 2858 I App : ActivationPing - generating ping (has `identifier`: true)
+05-26 11:45:58.168 2726 2760 I libglean_ffi: glean_core::ping: Collecting activation
+05-26 11:45:58.185 2726 2760 D libglean_ffi: glean_core::ping: Storing ping '74e89a59-9fb9-4c30-b711-7573f532c7e1' at '/data/user/0/org.mozilla.fennec_aurora/glean_data/pending_pings/74e89a59-9fb9-4c30-b711-7573f532c7e1'
+05-26 11:45:58.185 2726 2760 I libglean_ffi: glean_core: The ping 'activation' was submitted and will be sent as soon as possible
+05-26 11:45:58.207 1876 4916 D ConnectivityService: requestNetwork for uid/pid:10092/2726 NetworkRequest [ TRACK_DEFAULT id=237, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10092] ]
+05-26 11:45:58.209 1876 1975 D WIFI : got request NetworkRequest [ TRACK_DEFAULT id=237, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10092] ] with score 60
+05-26 11:45:58.209 1876 1975 D WIFI_UT : got request NetworkRequest [ TRACK_DEFAULT id=237, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10092] ] with score 60
+05-26 11:45:58.210 2131 2131 D PhoneSwitcherNetworkRequstListener: got request NetworkRequest [ TRACK_DEFAULT id=237, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10092] ] with score 60
+05-26 11:45:58.214 2726 2951 D sql_support::conn_ext: Transaction commited after 65.158ms
+05-26 11:45:58.216 2726 2951 D places_ffi: places_connection_new
+05-26 11:45:58.218 2726 2951 D places_ffi: places_connection_new
+05-26 11:45:58.220 2726 2951 I PlacesHistoryStorage: 'Warming up places storage' took 104 ms
+05-26 11:45:58.220 2726 2951 I PlacesBookmarksStorage: Warming up places storage...
+05-26 11:45:58.220 2726 2951 D places_ffi: places_connection_new
+05-26 11:45:58.226 2726 2951 I PlacesBookmarksStorage: 'Warming up places storage' took 6 ms
+05-26 11:45:58.228 2726 3042 D glean/PingUploadWorker: Processing persisted pings at /data/user/0/org.mozilla.fennec_aurora/glean_data/pending_pings
+05-26 11:45:58.228 2726 3042 D glean/PingUploadWorker: Processing ping: 74e89a59-9fb9-4c30-b711-7573f532c7e1
+05-26 11:45:58.230 2726 3042 D glean/ConceptFetchHttpUploader: Submitting ping to: https://incoming.telemetry.mozilla.org/submit/org-mozilla-fennec-aurora/activation/1/74e89a59-9fb9-4c30-b711-7573f532c7e1
+05-26 11:45:58.246 1739 1739 I keystore: 1 0
+05-26 11:45:58.253 2726 2951 I SyncableLoginsStorage: Warming up storage...
+05-26 11:45:58.260 2726 2951 D RustNativeSupport: findMegazordLibraryName(logins, 0.58.1
+05-26 11:45:58.260 2726 2951 D RustNativeSupport: lib in use: none
+05-26 11:45:58.260 2726 2951 D RustNativeSupport: lib configured: megazord
+05-26 11:45:58.260 2726 2951 D RustNativeSupport: lib version configured: 0.58.1
+05-26 11:45:58.260 2726 2951 D RustNativeSupport: settled on megazord
+05-26 11:45:58.263 2726 2951 D logins_ffi: sync15_passwords_state_new
+05-26 11:45:58.293 1876 1890 E memtrack: Couldn't load memtrack module
+05-26 11:45:58.293 1876 1890 W android.os.Debug: failed to get memory consumption info: -1
+05-26 11:45:58.299 2726 2951 D logins::schema: Creating schema
+05-26 11:45:58.311 2958 2993 D GeckoViewStartup: observe: app-startup
+05-26 11:45:58.334 2958 2993 D GeckoViewConsole: enabled = true
+05-26 11:45:58.411 2726 2951 I SyncableLoginsStorage: 'Warming up storage' took 157 ms
+05-26 11:45:58.411 2726 2951 I FenixApplication: 'Storage initialization' took 296 ms
+05-26 11:45:58.416 2958 2993 I GeckoConsole: No chrome package registered for chrome://browser/content/built_in_addons.json
+05-26 11:45:58.506 2958 2993 D GeckoThread: State changed to PROFILE_READY
+05-26 11:45:58.526 2958 2993 D GeckoViewStartup: observe: profile-after-change
+05-26 11:45:58.528 2958 2993 D GeckoViewTelemetryController: setup - canRecordPrereleaseData true, canRecordReleaseData true
+05-26 11:45:58.542 2958 2993 D GeckoThread: State changed to RUNNING
+05-26 11:45:58.547 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@94e6ffb)
+05-26 11:45:58.548 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@94e6ffb)
+05-26 11:45:58.551 2958 2993 I Gecko : 1590507958551 Marionette TRACE Marionette enabled
+05-26 11:45:58.551 2958 2993 I Gecko : 1590507958551 Marionette TRACE Received observer notification profile-after-change
+05-26 11:45:58.557 2958 2993 I Gecko : -*- nsDNSServiceDiscovery.js : nsDNSServiceDiscovery
+05-26 11:45:58.599 2726 3042 D glean/ConceptFetchHttpUploader: Ping successfully sent (200)
+05-26 11:45:58.599 2726 3042 D glean/PingUploadWorker: 74e89a59-9fb9-4c30-b711-7573f532c7e1 was deleted: true
+05-26 11:45:58.601 2726 2765 I WM-WorkerWrapper: Worker result SUCCESS for Work [ id=7537e864-720e-48e9-9923-7ae430e3fcdc, tags={ mozilla.telemetry.glean.scheduler.PingUploadWorker, mozac_service_glean_ping_upload_worker } ]
+05-26 11:45:58.606 1876 1978 D ConnectivityService: releasing NetworkRequest [ TRACK_DEFAULT id=237, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10092] ] (release request)
+05-26 11:45:58.611 2958 2993 I Gecko : 1590507958611 Marionette TRACE Received observer notification command-line-startup
+05-26 11:45:58.629 2958 2958 D GeckoNetworkManager: Incoming event enableNotifications for state OffNoListeners -> OffWithListeners
+05-26 11:45:58.636 2958 2958 D GeckoNetworkManager: New network state: UP, WIFI, WIFI
+05-26 11:45:58.636 2958 2993 D GeckoViewStartup: onEvent GeckoView:SetLocale
+05-26 11:45:58.636 2958 2993 D GeckoViewStartup: onEvent GeckoView:ResetUserPrefs
+05-26 11:45:58.640 2958 2993 D GeckoViewRemoteDebugger: onInit
+05-26 11:45:58.641 2958 2993 D GeckoViewConsole: enabled = false
+05-26 11:45:58.655 2958 2993 D GeckoViewStartup: onEvent GeckoView:SetLocale
+05-26 11:45:58.656 2958 2993 D GeckoViewStartup: onEvent GeckoView:SetDefaultPrefs
+05-26 11:45:58.675 2958 2993 I chatty : uid=10091(org.mozilla.fenix.debug) Gecko identical 9 lines
+05-26 11:45:58.676 2958 2993 D GeckoViewStartup: onEvent GeckoView:SetDefaultPrefs
+05-26 11:45:58.680 2958 2993 D GeckoViewConsole: onEvent GeckoView:RegisterWebExtension {"allowContentMessaging":true,"id":"webcompat@mozilla.com","locationUri":"resource://android/assets/extensions/webcompat/"}
+05-26 11:45:58.684 2958 2993 D GeckoViewConsole: onEvent GeckoView:WebExtension:List null
+05-26 11:45:58.693 2958 2993 D GeckoViewConsole: onEvent GeckoView:RegisterWebExtension {"allowContentMessaging":true,"id":"mozacBrowserIcons","locationUri":"resource://android/assets/extensions/browser-icons/"}
+05-26 11:45:58.695 2958 2993 D GeckoViewConsole: onEvent GeckoView:RegisterWebExtension {"allowContentMessaging":true,"id":"mozacBrowserAds","locationUri":"resource://android/assets/extensions/ads/"}
+05-26 11:45:58.695 2958 2993 D GeckoViewConsole: onEvent GeckoView:RegisterWebExtension {"allowContentMessaging":true,"id":"BrowserCookiesExtension","locationUri":"resource://android/assets/extensions/cookies/"}
+05-26 11:45:58.749 2958 2993 I Gecko : 1590507958749 Marionette TRACE Received observer notification marionette-startup-requested
+05-26 11:45:58.750 2958 2993 I Gecko : 1590507958750 Marionette TRACE Waiting until startup recorder finished recording startup scripts...
+05-26 11:45:58.764 2958 2993 I Gecko : 1590507958764 Marionette TRACE All scripts recorded.
+05-26 11:45:58.768 2958 2993 I Gecko : 1590507958768 Marionette DEBUG Setting recommended pref browser.safebrowsing.malware.enabled to false
+05-26 11:45:58.770 2958 2993 I Gecko : 1590507958770 Marionette DEBUG Setting recommended pref browser.safebrowsing.phishing.enabled to false
+05-26 11:45:58.772 2958 2993 I Gecko : 1590507958771 Marionette DEBUG Setting recommended pref browser.search.update to false
+05-26 11:45:58.772 2958 2993 I Gecko : 1590507958772 Marionette DEBUG Setting recommended pref browser.tabs.disableBackgroundZombification to false
+05-26 11:45:58.772 2958 2993 I Gecko : 1590507958772 Marionette DEBUG Setting recommended pref browser.tabs.remote.separatePrivilegedContentProcess to false
+05-26 11:45:58.773 2958 2993 I Gecko : 1590507958773 Marionette DEBUG Setting recommended pref network.http.prompt-temp-redirect to false
+05-26 11:45:58.857 2958 2993 I Gecko : 1590507958853 Marionette FATAL Remote protocol server failed to start: Error: Could not bind to port 2829 (NS_ERROR_SOCKET_ADDRESS_IN_USE)(chrome://marionette/content/server.js:94:17) JS Stack trace: set acceptConnections@server.js:94:17
+05-26 11:45:58.857 2958 2993 I Gecko : start@server.js:124:5
+05-26 11:45:58.857 2958 2993 I Gecko : init/<@marionette.js:510:21
+05-26 11:45:58.857 2958 2993 I Gecko : 1590507958857 Marionette DEBUG Resetting recommended pref browser.safebrowsing.malware.enabled
+05-26 11:45:58.863 2958 2993 I Gecko : 1590507958863 Marionette DEBUG Resetting recommended pref browser.safebrowsing.phishing.enabled
+05-26 11:45:58.865 2958 2993 I Gecko : 1590507958865 Marionette DEBUG Resetting recommended pref browser.search.update
+05-26 11:45:58.866 2958 2993 I Gecko : 1590507958866 Marionette DEBUG Resetting recommended pref browser.tabs.disableBackgroundZombification
+05-26 11:45:58.866 2958 2993 I Gecko : 1590507958866 Marionette DEBUG Resetting recommended pref browser.tabs.remote.separatePrivilegedContentProcess
+05-26 11:45:58.866 2958 2993 I Gecko : 1590507958866 Marionette DEBUG Resetting recommended pref network.http.prompt-temp-redirect
+05-26 11:45:58.879 2958 2993 D GeckoThread: State changed to EXITING
+05-26 11:45:58.896 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:45:58.965 2958 2993 E GeckoConsole: [JavaScript Error: "NetworkError when attempting to fetch resource."]
+05-26 11:45:58.965 2958 2993 E GeckoConsole: get@resource://services-settings/RemoteSettingsClient.jsm:348:12
+05-26 11:45:58.978 3020 3020 D GeckoThread: State changed to LAUNCHED
+05-26 11:45:58.979 3020 3074 I GeckoThread: preparing to run Gecko
+05-26 11:45:59.080 2958 2958 D App : Installed browser-icons extension
+05-26 11:45:59.095 2958 2993 I GeckoConsole: 1590507959095 addons.xpi WARN Exception running bootstrap method shutdown on default-theme@mozilla.org: [Exception... "Component returned failure code: 0x80004002 (NS_NOINTERFACE) [nsISupports.QueryInterface]" nsresult: "0x80004002 (NS_NOINTERFACE)" location: "JS frame :: resource://gre/modules/Extension.jsm :: shutdown :: line 2586" data: no] Stack trace: shutdown()@resource://gre/modules/Extension.jsm:2586
+05-26 11:45:59.095 2958 2993 I GeckoConsole: shutdown()@resource://gre/modules/Extension.jsm:1703
+05-26 11:45:59.095 2958 2993 I GeckoConsole: callBootstrapMethod()@resource://gre/modules/addons/XPIProvider.jsm:1819
+05-26 11:45:59.095 2958 2993 I GeckoConsole: _shutdown()@resource://gre/modules/addons/XPIProvider.jsm:1948
+05-26 11:45:59.095 2958 2993 I GeckoConsole: observe()@resource://gre/modules/AsyncShutdown.jsm:554
+05-26 11:45:59.106 2958 2993 D : HostConnection::get() New Host Connection established 0xe6b53580, tid 2993
+05-26 11:45:59.109 2958 2993 I ConfigStore: android::hardware::configstore::V1_0::ISurfaceFlingerConfigs::hasWideColorDisplay retrieved: 0
+05-26 11:45:59.109 2958 2993 I ConfigStore: android::hardware::configstore::V1_0::ISurfaceFlingerConfigs::hasHDRDisplay retrieved: 0
+05-26 11:45:59.110 2958 2993 E EGL_emulation: tid 2993: eglBindAPI(1259): error 0x300c (EGL_BAD_PARAMETER)
+05-26 11:45:59.111 2958 2993 D EGL_emulation: eglCreateContext: 0xea8dc120: maj 3 min 0 rcv 3
+05-26 11:45:59.112 2958 2993 D EGL_emulation: eglMakeCurrent: 0xea8dc120: ver 3 0 (tinfo 0xee8fd9d0)
+05-26 11:45:59.388 2958 2993 E GeckoConsole: [JavaScript Error: "Error: Phase "profile-change-teardown" is finished, it is too late to register completion condition "ServiceWorkerShutdownBlocker: shutting down Service Workers"" {file: "resource://gre/modules/AsyncShutdown.jsm" line: 697}]
+05-26 11:45:59.388 2958 2993 E GeckoConsole: addBlocker@resource://gre/modules/AsyncShutdown.jsm:697:15
+05-26 11:45:59.388 2958 2993 E GeckoConsole: addBlocker@resource://gre/modules/AsyncShutdown.jsm:505:26
+05-26 11:45:59.388 2958 2993 E GeckoConsole: addBlocker@resource://gre/modules/AsyncShutdown.jsm:444:15
+05-26 11:45:59.388 2958 2993 E GeckoConsole: addBlocker@resource://gre/modules/nsAsyncShutdown.jsm:162:24
+05-26 11:45:59.388 2958 2993 E GeckoConsole: observe@resource://gre/modules/AsyncShutdown.jsm:554:16
+05-26 11:45:59.541 2958 2993 W GeckoConsole: [JavaScript Warning: "Security wrapper denied access to property "ONE_QUARTER" on privileged Javascript object. Support for exposing privileged objects to untrusted content via __exposedProps__ has been removed - use WebIDL bindings or Components.utils.cloneInto instead. Note that only the first denied property access from a given global object will be reported." {file: "moz-extension://9670d999-4378-46ae-b907-6d0c226b09b6/data/picture_in_picture_overrides.js" line: 26}]
+05-26 11:45:59.547 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@3a65818)
+05-26 11:45:59.548 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@3a65818)
+05-26 11:45:59.576 2958 2958 D mozac-webcompat: Installed WebCompat webextension: webcompat@mozilla.com
+05-26 11:45:59.636 2958 2993 E GeckoConsole: [JavaScript Error: "can't access property "startupData", state is undefined" {file: "resource://gre/modules/addons/XPIProvider.jsm" line: 3079}]
+05-26 11:45:59.636 2958 2993 E GeckoConsole: setStartupData@resource://gre/modules/addons/XPIProvider.jsm:3079:5
+05-26 11:45:59.636 2958 2993 E GeckoConsole: saveStartupData@resource://gre/modules/Extension.jsm:2035:17
+05-26 11:45:59.636 2958 2993 E GeckoConsole: _writePersistentListeners@resource://gre/modules/ExtensionCommon.jsm:2271:15
+05-26 11:45:59.636 2958 2993 E GeckoConsole: savePersistentListener@resource://gre/modules/ExtensionCommon.jsm:2362:18
+05-26 11:45:59.636 2958 2993 E GeckoConsole: addListener@resource://gre/modules/ExtensionCommon.jsm:2495:20
+05-26 11:45:59.636 2958 2993 E GeckoConsole: addListener@resource://gre/modules/ExtensionCommon.jsm:2550:38
+05-26 11:45:59.636 2958 2993 E GeckoConsole: recvAddListener@resource://gre/modules/ExtensionParent.jsm:1079:13
+05-26 11:45:59.636 2958 2993 E GeckoConsole: observe@resource://gre/modules/AsyncShutdown.jsm:554:16
+05-26 11:45:59.657 2958 2993 I chatty : uid=10091(org.mozilla.fenix.debug) Gecko identical 24 lines
+05-26 11:45:59.705 2958 2993 E GeckoConsole: [JavaScript Error: "can't access property "startupData", state is undefined" {file: "resource://gre/modules/addons/XPIProvider.jsm" line: 3079}]
+05-26 11:45:59.705 2958 2993 E GeckoConsole: setStartupData@resource://gre/modules/addons/XPIProvider.jsm:3079:5
+05-26 11:45:59.705 2958 2993 E GeckoConsole: saveStartupData@resource://gre/modules/Extension.jsm:2035:17
+05-26 11:45:59.705 2958 2993 E GeckoConsole: _writePersistentListeners@resource://gre/modules/ExtensionCommon.jsm:2271:15
+05-26 11:45:59.705 2958 2993 E GeckoConsole: savePersistentListener@resource://gre/modules/ExtensionCommon.jsm:2362:18
+05-26 11:45:59.705 2958 2993 E GeckoConsole: addListener@resource://gre/modules/ExtensionCommon.jsm:2495:20
+05-26 11:45:59.705 2958 2993 E GeckoConsole: addListener@resource://gre/modules/ExtensionCommon.jsm:2550:38
+05-26 11:45:59.705 2958 2993 E GeckoConsole: recvAddListener@resource://gre/modules/ExtensionParent.jsm:1079:13
+05-26 11:45:59.705 2958 2993 E GeckoConsole: observe@resource://gre/modules/AsyncShutdown.jsm:554:16
+05-26 11:45:59.748 2958 2993 I Gecko : 1590507959748 Marionette TRACE Received observer notification xpcom-will-shutdown
+05-26 11:45:59.759 2958 2958 I DefaultSupportedAddonsChecker: Register check for new supported add-ons
+05-26 11:45:59.875 2958 2979 I WM-WorkerWrapper: Work [ id=c00de910-17a6-4ace-93f7-da1dd8387abe, tags={ mozilla.components.feature.addons.migration.DefaultSupportedAddonsChecker.periodicWork, mozilla.components.feature.addons.migration.SupportedAddonsWorker } ] was cancelled
+05-26 11:45:59.875 2958 2979 I WM-WorkerWrapper: java.util.concurrent.CancellationException: Task was cancelled.
+05-26 11:45:59.875 2958 2979 I WM-WorkerWrapper: at androidx.work.impl.utils.futures.AbstractFuture.cancellationExceptionWithCause(AbstractFuture.java:1184)
+05-26 11:45:59.875 2958 2979 I WM-WorkerWrapper: at androidx.work.impl.utils.futures.AbstractFuture.getDoneValue(AbstractFuture.java:514)
+05-26 11:45:59.875 2958 2979 I WM-WorkerWrapper: at androidx.work.impl.utils.futures.AbstractFuture.get(AbstractFuture.java:475)
+05-26 11:45:59.875 2958 2979 I WM-WorkerWrapper: at androidx.work.impl.WorkerWrapper$2.run(WorkerWrapper.java:284)
+05-26 11:45:59.875 2958 2979 I WM-WorkerWrapper: at androidx.work.impl.utils.SerialExecutor$Task.run(SerialExecutor.java:91)
+05-26 11:45:59.875 2958 2979 I WM-WorkerWrapper: at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1167)
+05-26 11:45:59.875 2958 2979 I WM-WorkerWrapper: at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:641)
+05-26 11:45:59.875 2958 2979 I WM-WorkerWrapper: at java.lang.Thread.run(Thread.java:764)
+05-26 11:45:59.899 2958 2987 I SupportedAddonsWorker: Trying to check for new supported add-ons
+05-26 11:45:59.906 2958 2984 E SupportedAddonsWorker: An exception happened trying to check for new supported add-ons, re-schedule Job was cancelled
+05-26 11:45:59.906 2958 2984 E SupportedAddonsWorker: kotlinx.coroutines.JobCancellationException: Job was cancelled; job=JobImpl{Cancelling}@1e7bb6
+05-26 11:45:59.959 1876 16736 I ActivityManager: Process org.mozilla.fenix.debug (pid 2958) has died: vis TRNB
+05-26 11:45:59.961 1876 1894 W libprocessgroup: kill(-2958, 9) failed: No such process
+05-26 11:45:59.961 1876 1894 I libprocessgroup: Successfully killed process cgroup uid 10091 pid 2958 in 0ms
+05-26 11:45:59.961 1734 1734 I Zygote : Process 2958 exited cleanly (0)
+05-26 11:45:59.961 1876 16736 W ActivityManager: Scheduling restart of crashed service org.mozilla.fenix.debug/androidx.work.impl.background.systemjob.SystemJobService in 1000ms
+05-26 11:45:59.962 3020 3020 I ServiceChildProcess: Service has been unbound. Stopping.
+05-26 11:45:59.973 3020 3020 I Process : Sending signal. PID: 3020 SIG: 9
+05-26 11:46:00.005 1876 4916 I ActivityManager: Process org.mozilla.fenix.debug:tab0 (pid 3020) has died: fore SVC
+05-26 11:46:00.006 1734 1734 I Zygote : Process 3020 exited due to signal (9)
+05-26 11:46:00.006 1876 1894 W libprocessgroup: kill(-3020, 9) failed: No such process
+05-26 11:46:00.006 1876 1894 I libprocessgroup: Successfully killed process cgroup uid 10091 pid 3020 in 0ms
+05-26 11:46:00.549 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@1041230)
+05-26 11:46:00.551 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@1041230)
+05-26 11:46:00.556 2482 7043 W ctxmgr : [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):IndoorOutdoorProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2482). Was: 3 for 57, account#-517948760#
+05-26 11:46:00.937 1876 2445 I ActivityManager: Force stopping org.mozilla.fennec_aurora appid=10092 user=0: clear data
+05-26 11:46:00.937 1876 2445 I ActivityManager: Killing 2819:org.mozilla.fennec_aurora:tab0/u0a92 (adj 100): stop org.mozilla.fennec_aurora
+05-26 11:46:00.937 1876 2445 W ActivityManager: Scheduling restart of crashed service org.mozilla.fennec_aurora/org.mozilla.gecko.process.GeckoChildProcessServices$tab0 in 1000ms
+05-26 11:46:00.937 1876 1894 W libprocessgroup: kill(-2819, 9) failed: No such process
+05-26 11:46:00.940 1876 2445 I ActivityManager: Killing 2726:org.mozilla.fennec_aurora/u0a92 (adj 0): stop org.mozilla.fennec_aurora
+05-26 11:46:00.942 1876 2445 W ActivityManager: Force removing ActivityRecord{6297cb2 u0 org.mozilla.fennec_aurora/org.mozilla.fenix.HomeActivity t283}: app died, no saved state
+05-26 11:46:00.947 1904 1907 D hwcomposer: hw_composer sent 66 syncs in 61s
+05-26 11:46:00.966 1904 1911 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:46:00.967 1904 1911 D : HostConnection::get() New Host Connection established 0xe6d43140, tid 1911
+05-26 11:46:00.967 1904 1911 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:46:00.967 1904 1911 D : HostConnection::get() New Host Connection established 0xe6d43140, tid 1911
+05-26 11:46:00.968 1904 1911 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:46:00.968 1904 1911 D : HostConnection::get() New Host Connection established 0xe6d43140, tid 1911
+05-26 11:46:00.968 1904 1911 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:46:00.968 1904 1911 D : HostConnection::get() New Host Connection established 0xe6d43140, tid 1911
+05-26 11:46:00.969 1904 1911 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:46:00.969 1904 1911 D : HostConnection::get() New Host Connection established 0xe6d43140, tid 1911
+05-26 11:46:00.969 1904 1911 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:46:00.974 1876 1894 W libprocessgroup: kill(-2819, 9) failed: No such process
+05-26 11:46:00.975 1734 1734 I Zygote : Process 2819 exited due to signal (9)
+05-26 11:46:00.976 1876 1894 W libprocessgroup: kill(2819, 9) failed: No such process
+05-26 11:46:00.989 1904 1911 D : HostConnection::get() New Host Connection established 0xe6d43140, tid 1911
+05-26 11:46:00.994 1623 1623 D gralloc_ranchu: gralloc_alloc: Creating ashmem region of size 9334784
+05-26 11:46:01.003 1876 1891 I ActivityManager: Force stopping org.mozilla.fennec_aurora appid=10092 user=-1: clearApplicationUserData
+05-26 11:46:01.012 1623 5774 D gralloc_ranchu: gralloc_alloc: Creating ashmem region of size 9334784
+05-26 11:46:01.014 1876 1894 W libprocessgroup: kill(-2819, 9) failed: No such process
+05-26 11:46:01.014 1876 1894 I libprocessgroup: Successfully killed process cgroup uid 10092 pid 2819 in 77ms
+05-26 11:46:01.015 1876 1894 W libprocessgroup: kill(-2726, 9) failed: No such process
+05-26 11:46:01.019 1876 2445 D ZenLog : config: removeAutomaticZenRules,ZenModeConfig[user=0,allowAlarms=true,allowMedia=true,allowSystem=false,allowReminders=false,allowEvents=false,allowCalls=true,allowRepeatCallers=true,allowMessages=false,allowCallsFrom=stars,allowMessagesFrom=contacts,suppressedVisualEffects=511,areChannelsBypassingDnd=false,automaticRules={EVENTS_DEFAULT_RULE=ZenRule[enabled=false,snoozing=false,name=Event,zenMode=ZEN_MODE_IMPORTANT_INTERRUPTIONS,conditionId=condition://android/event?userId=-10000&calendar=&reply=1,condition=Condition[id=condition://android/event?userId=-10000&calendar=&reply=1,summary=...,line1=...,line2=...,icon=0,state=STATE_FALSE,flags=2],component=ComponentInfo{android/com.android.server.notification.EventConditionProvider},id=EVENTS_DEFAULT_RULE,creationTime=1587308662810,enabler=null], EVERY_NIGHT_DEFAULT_RULE=ZenRule[enabled=false,snoozing=false,name=Sleeping,zenMode=ZEN_MODE_IMPORTANT_INTERRUPTIONS,conditionId=condition://android/schedule?days=1.2.3.4.5.6.7&start=22.0&end=7.0&exitAtAlarm=true,condition=Condition[id=condition://android/schedule?days=1.2.3.4.5.6.7&start=22.0&end=7.0&exitAtAlarm=true,summary=...,line1=...,line2=...,icon=0,state=STATE_FALSE,flags=2],component=ComponentInfo{android/com.android.server.notification.ScheduleConditionProvider},id=EVERY_NIGHT_DEFAULT_RULE,creationTime=1587308662810,enabler=null]},manualRule=null],Diff[]
+05-26 11:46:01.019 1876 2445 I ConditionProviders: Disallowing condition provider org.mozilla.fennec_aurora
+05-26 11:46:01.020 1876 1876 D ZenLog : set_zen_mode: off,removeAutomaticZenRules
+05-26 11:46:01.025 1739 1739 I keystore: clear_uid 10092
+05-26 11:46:01.033 1623 5774 D gralloc_ranchu: gralloc_alloc: Creating ashmem region of size 9334784
+05-26 11:46:01.050 1876 1961 W InputDispatcher: channel '3c6531a org.mozilla.fennec_aurora/org.mozilla.fenix.HomeActivity (server)' ~ Consumer closed input channel or an error occurred. events=0x9
+05-26 11:46:01.050 1876 1961 E InputDispatcher: channel '3c6531a org.mozilla.fennec_aurora/org.mozilla.fenix.HomeActivity (server)' ~ Channel is unrecoverably broken and will be disposed!
+05-26 11:46:01.060 1876 1894 W libprocessgroup: kill(-2726, 9) failed: No such process
+05-26 11:46:01.063 1733 1733 D SurfaceFlinger: duplicate layer name: changing com.google.android.apps.nexuslauncher/com.google.android.apps.nexuslauncher.NexusLauncherActivity to com.google.android.apps.nexuslauncher/com.google.android.apps.nexuslauncher.NexusLauncherActivity#1
+05-26 11:46:01.066 1623 5774 D gralloc_ranchu: gralloc_alloc: Creating ashmem region of size 9334784
+05-26 11:46:01.071 1734 1734 I Zygote : Process 2726 exited due to signal (9)
+05-26 11:46:01.072 1733 2119 D : HostConnection::get() New Host Connection established 0xede98680, tid 2119
+05-26 11:46:01.072 1733 2119 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:46:01.072 1733 2119 D : HostConnection::get() New Host Connection established 0xede98680, tid 2119
+05-26 11:46:01.077 1876 5182 I WindowManager: WIN DEATH: Window{3c6531a u0 org.mozilla.fennec_aurora/org.mozilla.fenix.HomeActivity}
+05-26 11:46:01.077 1876 5182 W InputDispatcher: Attempted to unregister already unregistered input channel '3c6531a org.mozilla.fennec_aurora/org.mozilla.fenix.HomeActivity (server)'
+05-26 11:46:01.081 1876 8003 I ActivityManager: Force stopping org.mozilla.fennec_aurora appid=10092 user=0: from pid 3095
+05-26 11:46:01.084 1733 1759 W SurfaceFlinger: Attempting to destroy on removed layer: AppWindowToken{c874f80 token=Token{7548403 ActivityRecord{6297cb2 u0 org.mozilla.fennec_aurora/org.mozilla.fenix.HomeActivity t283}}}#0
+05-26 11:46:01.084 1733 1759 W SurfaceFlinger: Attempting to destroy on removed layer: Task=283#0
+05-26 11:46:01.087 1623 1623 D gralloc_ranchu: gralloc_alloc: Creating ashmem region of size 9334784
+05-26 11:46:01.089 1733 2119 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:46:01.091 1733 2119 D : HostConnection::get() New Host Connection established 0xecc60840, tid 2119
+05-26 11:46:01.091 1733 2119 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:46:01.092 1733 2119 D : HostConnection::get() New Host Connection established 0xecc60840, tid 2119
+05-26 11:46:01.092 1733 2119 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:46:01.096 1623 5774 D gralloc_ranchu: gralloc_alloc: Creating ashmem region of size 9334784
+05-26 11:46:01.096 2534 2825 D EGL_emulation: eglMakeCurrent: 0xe8b06aa0: ver 3 0 (tinfo 0xe8b03b50)
+05-26 11:46:01.100 1876 1890 E memtrack: Couldn't load memtrack module
+05-26 11:46:01.100 1876 1890 W android.os.Debug: failed to get memory consumption info: -1
+05-26 11:46:01.101 1876 1899 W ActivityManager: setHasOverlayUi called on unknown pid: 2726
+05-26 11:46:01.104 1876 1894 W libprocessgroup: kill(-2726, 9) failed: No such process
+05-26 11:46:01.104 1876 1894 I libprocessgroup: Successfully killed process cgroup uid 10092 pid 2726 in 89ms
+05-26 11:46:01.113 2131 2131 D CarrierSvcBindHelper: No carrier app for: 0
+05-26 11:46:01.116 2000 2000 I GoogleInputMethod: onFinishInput() : Dummy InputConnection bound
+05-26 11:46:01.121 2000 2000 I GoogleInputMethod: onStartInput() : Dummy InputConnection bound
+05-26 11:46:01.135 2534 2534 W SessionLifecycleManager: Handover failed. Creating new session controller.
+05-26 11:46:01.141 2499 2681 D EGL_emulation: eglMakeCurrent: 0xd35359e0: ver 3 0 (tinfo 0xd353b4f0)
+05-26 11:46:01.161 2482 2482 I GeofencerStateMachine: removeGeofences: removeRequest=RemoveGeofencingRequest[REMOVE_ALL packageName=org.mozilla.fennec_aurora]
+05-26 11:46:01.163 1876 1890 E system_server: No package ID 7f found for ID 0x7f08019f.
+05-26 11:46:01.163 1876 1890 E system_server: No package ID 7f found for ID 0x7f130122.
+05-26 11:46:01.163 1876 1890 E system_server: No package ID 7f found for ID 0x7f130122.
+05-26 11:46:01.163 1876 1890 E system_server: No package ID 7f found for ID 0x7f08019d.
+05-26 11:46:01.166 2131 2131 D CarrierSvcBindHelper: No carrier app for: 0
+05-26 11:46:01.166 1876 1890 E system_server: No package ID 7f found for ID 0x7f130121.
+05-26 11:46:01.167 1876 1890 E system_server: No package ID 7f found for ID 0x7f130121.
+05-26 11:46:01.225 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:46:01.231 1876 1876 I GnssLocationProvider: WakeLock acquired by sendMessage(SET_REQUEST, 0, com.android.server.location.GnssLocationProvider$GpsRequest@85f09b6)
+05-26 11:46:01.232 1560 1573 D vold : Remounting 10092 as mode read
+05-26 11:46:01.247 2534 2038 W LocationOracle: No location history returned by ContextManager
+05-26 11:46:01.248 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:46:01.248 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(SET_REQUEST, 0, com.android.server.location.GnssLocationProvider$GpsRequest@85f09b6)
+05-26 11:46:01.261 1876 1876 V SettingsProvider: Notifying for 0: content://settings/global/debug_app
+05-26 11:46:01.264 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:46:01.267 7086 2682 I LocationSettingsChecker: Removing dialog suppression flag for package org.mozilla.fennec_aurora
+05-26 11:46:01.277 1876 1890 E memtrack: Couldn't load memtrack module
+05-26 11:46:01.277 1876 1890 W android.os.Debug: failed to get memory consumption info: -1
+05-26 11:46:01.278 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:46:01.291 2012 2265 I chatty : uid=10024(com.android.systemui) RenderThread identical 1 line
+05-26 11:46:01.308 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:46:01.320 7086 11609 I Icing : doRemovePackageData org.mozilla.fennec_aurora
+05-26 11:46:01.325 1560 1573 D vold : Remounting 10092 as mode write
+05-26 11:46:01.326 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:46:01.328 1876 1876 I GnssLocationProvider: WakeLock acquired by sendMessage(SET_REQUEST, 0, com.android.server.location.GnssLocationProvider$GpsRequest@37bd5af)
+05-26 11:46:01.341 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(SET_REQUEST, 0, com.android.server.location.GnssLocationProvider$GpsRequest@37bd5af)
+05-26 11:46:01.341 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:46:01.377 2012 2265 I chatty : uid=10024(com.android.systemui) RenderThread identical 2 lines
+05-26 11:46:01.399 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:46:01.413 2482 6321 W ctxmgr : [AclManager]No 3 for (accnt=account#-517948760#, com.google.android.gms(10008):UserVelocityProducer, vrsn=13280022, 0, 3pPkg = null , 3pMdlId = null , pid = 2482). Was: 3 for 1, account#-517948760#
+05-26 11:46:01.418 2534 2534 I MicroDetectionWorker: #startMicroDetector [speakerMode: 0]
+05-26 11:46:01.419 2534 2534 I AudioController: Using mInputStreamFactoryBuilder
+05-26 11:46:01.420 2534 2534 I AudioController: Created new AudioSource
+05-26 11:46:01.421 2534 2534 I MicroDetectionWorker: onReady
+05-26 11:46:01.425 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:46:01.489 2534 2034 I MicroRecognitionRunner: Starting detection.
+05-26 11:46:01.490 2116 3100 I ProvidersCache: Provider returned no roots. Possibly naughty: com.google.android.apps.docs.storage
+05-26 11:46:01.501 1876 8003 I ActivityManager: Force stopping org.mozilla.fennec_aurora appid=10092 user=-1: set debug app
+05-26 11:46:01.502 1876 1876 V SettingsProvider: Notifying for 0: content://settings/global/debug_app
+05-26 11:46:01.506 2534 2004 I MicrophoneInputStream: mic_starting SR : 16000 CC : 16 SO : 6
+05-26 11:46:01.509 1631 1682 E : Request requires android.permission.RECORD_AUDIO
+05-26 11:46:01.509 1631 1682 E AudioPolicyIntefaceImpl: getInputForAttr permission denied: recording not allowed for uid 10039 pid 2534
+05-26 11:46:01.510 1631 1682 E AudioFlinger: createRecord() checkRecordThread_l failed
+05-26 11:46:01.510 2534 2004 E IAudioFlinger: createRecord returned error -22
+05-26 11:46:01.510 2534 2004 E AudioRecord: AudioFlinger could not create record track, status: -22
+05-26 11:46:01.510 2534 2004 E AudioRecord-JNI: Error creating AudioRecord instance: initialization check failed with status -22.
+05-26 11:46:01.512 2534 2004 E android.media.AudioRecord: Error code -20 when initializing native AudioRecord object.
+05-26 11:46:01.512 2534 2004 I MicrophoneInputStream: mic_started SR : 16000 CC : 16 SO : 6
+05-26 11:46:01.512 2534 2004 E ActivityThread: Failed to find provider info for com.google.android.apps.gsa.testing.ui.audio.recorded
+05-26 11:46:01.514 2534 2534 I MicroDetectionWorker: onReady
+05-26 11:46:01.516 2534 2004 I MicrophoneInputStream: mic_close SR : 16000 CC : 16 SO : 6
+05-26 11:46:01.517 2534 2034 I MicroRecognitionRunner: Detection finished
+05-26 11:46:01.517 2534 2034 W ErrorReporter: reportError [type: 211, code: 524300]: Error reading from input stream
+05-26 11:46:01.520 2534 2954 I MicroRecognitionRunner: Stopping hotword detection.
+05-26 11:46:01.527 2534 2034 W ErrorProcessor: onFatalError, processing error from engine(4)
+05-26 11:46:01.527 2534 2034 W ErrorProcessor: com.google.android.apps.gsa.shared.speech.b.g: Error reading from input stream
+05-26 11:46:01.527 2534 2034 W ErrorProcessor: at com.google.android.apps.gsa.staticplugins.microdetection.d.k.a(SourceFile:91)
+05-26 11:46:01.527 2534 2034 W ErrorProcessor: at com.google.android.apps.gsa.staticplugins.microdetection.d.l.run(Unknown Source:14)
+05-26 11:46:01.527 2534 2034 W ErrorProcessor: at com.google.android.libraries.gsa.runner.a.a.b(SourceFile:32)
+05-26 11:46:01.527 2534 2034 W ErrorProcessor: at com.google.android.libraries.gsa.runner.a.c.call(Unknown Source:4)
+05-26 11:46:01.527 2534 2034 W ErrorProcessor: at java.util.concurrent.FutureTask.run(FutureTask.java:266)
+05-26 11:46:01.527 2534 2034 W ErrorProcessor: at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:458)
+05-26 11:46:01.527 2534 2034 W ErrorProcessor: at java.util.concurrent.FutureTask.run(FutureTask.java:266)
+05-26 11:46:01.527 2534 2034 W ErrorProcessor: at com.google.android.apps.gsa.shared.util.concurrent.b.g.run(Unknown Source:4)
+05-26 11:46:01.527 2534 2034 W ErrorProcessor: at com.google.android.apps.gsa.shared.util.concurrent.b.aw.run(SourceFile:4)
+05-26 11:46:01.527 2534 2034 W ErrorProcessor: at com.google.android.apps.gsa.shared.util.concurrent.b.aw.run(SourceFile:4)
+05-26 11:46:01.527 2534 2034 W ErrorProcessor: at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1167)
+05-26 11:46:01.527 2534 2034 W ErrorProcessor: at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:641)
+05-26 11:46:01.527 2534 2034 W ErrorProcessor: at java.lang.Thread.run(Thread.java:764)
+05-26 11:46:01.527 2534 2034 W ErrorProcessor: at com.google.android.apps.gsa.shared.util.concurrent.b.i.run(SourceFile:6)
+05-26 11:46:01.527 2534 2034 W ErrorProcessor: Caused by: com.google.android.apps.gsa.shared.exception.GsaIOException: Error code: 393238 | Buffer overflow, no available space.
+05-26 11:46:01.527 2534 2034 W ErrorProcessor: at com.google.android.apps.gsa.speech.audio.Tee.j(SourceFile:103)
+05-26 11:46:01.527 2534 2034 W ErrorProcessor: at com.google.android.apps.gsa.speech.audio.au.read(SourceFile:2)
+05-26 11:46:01.527 2534 2034 W ErrorProcessor: at java.io.InputStream.read(InputStream.java:101)
+05-26 11:46:01.527 2534 2034 W ErrorProcessor: at com.google.android.apps.gsa.speech.audio.ao.run(SourceFile:17)
+05-26 11:46:01.527 2534 2034 W ErrorProcessor: at com.google.android.apps.gsa.speech.audio.an.run(SourceFile:2)
+05-26 11:46:01.527 2534 2034 W ErrorProcessor: at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:458)
+05-26 11:46:01.527 2534 2034 W ErrorProcessor: ... 10 more
+05-26 11:46:01.527 2534 2034 I AudioController: internalShutdown
+05-26 11:46:01.528 2534 2534 I MicroDetector: Keeping mic open: false
+05-26 11:46:01.528 2534 2534 I MicroDetectionWorker: #onError(false)
+05-26 11:46:01.528 2534 2038 I DeviceStateChecker: DeviceStateChecker cancelled
+05-26 11:46:01.539 1876 3809 I ActivityManager: START u0 {flg=0x10000000 cmp=org.mozilla.fennec_aurora/org.mozilla.fenix.HomeActivity (has extras)} from uid 0
+05-26 11:46:01.548 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@89b4c66)
+05-26 11:46:01.572 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@89b4c66)
+05-26 11:46:01.580 3143 3143 W a.fennec_auror: Unexpected CPU variant for X86 using defaults: x86
+05-26 11:46:01.586 1876 1893 I ActivityManager: Start proc 3143:org.mozilla.fennec_aurora/u0a92 for activity org.mozilla.fennec_aurora/org.mozilla.fenix.HomeActivity
+05-26 11:46:01.599 1623 5774 D gralloc_ranchu: gralloc_alloc: Creating ashmem region of size 9334784
+05-26 11:46:01.607 1876 1940 D : HostConnection::get() New Host Connection established 0xc8655580, tid 1940
+05-26 11:46:01.617 2482 3056 I Places : ?: PlacesBleScanner start() with priority 2
+05-26 11:46:01.631 3143 3143 W ActivityThread: Application org.mozilla.fennec_aurora can be debugged on port 8100...
+05-26 11:46:01.633 3143 3143 I a.fennec_auror: The ClassLoaderContext is a special shared library.
+05-26 11:46:01.634 2482 3056 I Places : ?: PlacesBleScanner start() with priority 2
+05-26 11:46:01.635 1623 5774 D gralloc_ranchu: gralloc_alloc: Creating ashmem region of size 9334784
+05-26 11:46:01.637 2482 3056 I PlaceInferenceEngine: [anon] Changed inference mode: 1
+05-26 11:46:01.671 2499 2681 D EGL_emulation: eglMakeCurrent: 0xd35359e0: ver 3 0 (tinfo 0xd353b4f0)
+05-26 11:46:01.674 1733 1967 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:46:01.675 1733 1967 D : HostConnection::get() New Host Connection established 0xed960bc0, tid 1967
+05-26 11:46:01.675 1733 1967 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:46:01.676 1733 1967 D : HostConnection::get() New Host Connection established 0xed960bc0, tid 1967
+05-26 11:46:01.676 1733 1967 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:46:01.679 1733 1967 D : HostConnection::get() New Host Connection established 0xed960bc0, tid 1967
+05-26 11:46:01.679 1733 1967 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:46:01.687 1904 1911 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:46:01.688 1904 1911 D : HostConnection::get() New Host Connection established 0xe6d43140, tid 1911
+05-26 11:46:01.688 1904 1911 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:46:01.689 1904 1911 D : HostConnection::get() New Host Connection established 0xe6d43140, tid 1911
+05-26 11:46:01.690 1904 1911 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:46:01.690 1904 1911 D : HostConnection::get() New Host Connection established 0xe6d43140, tid 1911
+05-26 11:46:01.690 1904 1911 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:46:01.692 2534 2825 D EGL_emulation: eglMakeCurrent: 0xe8b06aa0: ver 3 0 (tinfo 0xe8b03b50)
+05-26 11:46:01.694 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:46:01.696 1904 1911 D : HostConnection::get() New Host Connection established 0xe6d43140, tid 1911
+05-26 11:46:01.696 1733 2205 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:46:01.697 1733 2205 D : HostConnection::get() New Host Connection established 0xed960980, tid 2205
+05-26 11:46:01.697 1733 2205 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:46:01.697 1733 2205 D : HostConnection::get() New Host Connection established 0xed960980, tid 2205
+05-26 11:46:01.697 1733 2205 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:46:01.698 1733 2205 D : HostConnection::get() New Host Connection established 0xed960980, tid 2205
+05-26 11:46:01.698 1733 2205 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:46:01.707 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:46:01.714 2012 2265 I chatty : uid=10024(com.android.systemui) RenderThread identical 1 line
+05-26 11:46:01.726 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:46:01.731 2482 3056 I Places : ?: PlacesBleScanner start() with priority 2
+05-26 11:46:01.734 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:46:01.742 2012 2265 I chatty : uid=10024(com.android.systemui) RenderThread identical 1 line
+05-26 11:46:01.748 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:46:01.751 2482 3056 I Places : ?: PlacesBleScanner start() with priority 2
+05-26 11:46:01.753 2534 2034 I EventLogSendingHelper: Sending log events.
+05-26 11:46:01.755 2482 3056 I PlaceInferenceEngine: [anon] Changed inference mode: 1
+05-26 11:46:01.763 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:46:01.796 2012 2265 I chatty : uid=10024(com.android.systemui) RenderThread identical 4 lines
+05-26 11:46:01.804 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:46:01.805 3143 3163 W a.fennec_auror: Accessing hidden method Landroid/content/res/Resources$Theme;->rebase()V (dark greylist, linking)
+05-26 11:46:01.807 2482 3056 I Places : Converted 0 out of 1 WiFi scans
+05-26 11:46:01.809 3143 3143 D FirebaseApp: com.google.firebase.auth.FirebaseAuth is not linked. Skipping initialization.
+05-26 11:46:01.810 3143 3143 D FirebaseApp: com.google.firebase.crash.FirebaseCrash is not linked. Skipping initialization.
+05-26 11:46:01.810 3143 3143 I FirebaseInitProvider: FirebaseApp initialization successful
+05-26 11:46:01.816 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:46:01.825 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:46:01.834 2482 11975 I PlaceInferenceEngine: No beacon scan available - ignoring candidates.
+05-26 11:46:01.868 3143 3143 D FennecProfile: profiles.ini: false
+05-26 11:46:01.871 3143 3143 D FennecProfile: No profiles found
+05-26 11:46:01.877 3143 3143 D FennecMigrator: No migrations to run. Fennec install - false.
+05-26 11:46:01.888 3143 3143 D FenixApplication: Initializing Glean (uploadEnabled=true, isFennec=true)
+05-26 11:46:01.901 2482 6321 W ctxmgr : [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):PlacesProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2482). Was: 3 for 18, account#-517948760#
+05-26 11:46:01.916 3143 3175 D RustNativeSupport: findMegazordLibraryName(viaduct, 0.58.1
+05-26 11:46:01.916 3143 3175 D RustNativeSupport: lib in use: none
+05-26 11:46:01.916 3143 3175 D RustNativeSupport: lib configured: megazord
+05-26 11:46:01.916 3143 3175 D RustNativeSupport: lib version configured: 0.58.1
+05-26 11:46:01.916 3143 3175 D RustNativeSupport: settled on megazord
+05-26 11:46:01.967 3143 3163 I FA : Collection disabled with firebase_analytics_collection_enabled=0
+05-26 11:46:01.969 3143 3163 I FA : App measurement is starting up, version: 12780
+05-26 11:46:01.970 3143 3163 I FA : To enable debug logging run: adb shell setprop log.tag.FA VERBOSE
+05-26 11:46:01.973 3143 3163 I FA : To enable faster debug mode event logging run:
+05-26 11:46:01.973 3143 3163 I FA : adb shell setprop debug.firebase.analytics.app org.mozilla.fennec_aurora
+05-26 11:46:02.036 3143 3173 D libglean_ffi: glean_ffi: Android logging should be hooked up!
+05-26 11:46:02.037 3143 3173 I glean/Glean: Registering pings for mozilla.telemetry.glean.GleanMetrics.Pings
+05-26 11:46:02.038 3143 3173 I libglean_ffi: glean_core: Creating new Glean
+05-26 11:46:02.038 3143 3173 D libglean_ffi: glean_core::database: Database path: "/data/user/0/org.mozilla.fennec_aurora/glean_data/db"
+05-26 11:46:02.042 3143 3143 W ActivityThread: ClassLoader.loadClass: The class loader returned by Thread.getContextClassLoader() may fail for processes that host multiple applications. You should explicitly specify a context class loader. For example: Thread.setContextClassLoader(getClass().getClassLoader());
+05-26 11:46:02.043 3143 3143 I GeckoRuntime: Adding debug configuration from: /data/local/tmp/org.mozilla.fennec_aurora-geckoview-config.yaml
+05-26 11:46:02.043 3143 3143 D GeckoDebugConfig: Adding environment variables from debug config: {MOZ_CRASHREPORTER=1, MOZ_CRASHREPORTER_NO_REPORT=1, MOZ_CRASHREPORTER_SHUTDOWN=1}
+05-26 11:46:02.043 3143 3143 D GeckoDebugConfig: Adding arguments from debug config: [-marionette, -profile, /mnt/sdcard/org.mozilla.fennec_aurora-geckodriver-profile]
+05-26 11:46:02.043 3143 3143 D GeckoThread: State changed to LAUNCHED
+05-26 11:46:02.044 3143 3175 D RustNativeSupport: findMegazordLibraryName(rustlog, 0.58.1
+05-26 11:46:02.045 3143 3175 D RustNativeSupport: lib in use: none
+05-26 11:46:02.045 3143 3175 D RustNativeSupport: lib configured: megazord
+05-26 11:46:02.045 3143 3175 D RustNativeSupport: lib version configured: 0.58.1
+05-26 11:46:02.045 3143 3175 D RustNativeSupport: settled on megazord
+05-26 11:46:02.048 3143 3175 I rc_log_ffi::ios: rc_log adapter initialized!
+05-26 11:46:02.049 3143 3179 I GeckoThread: preparing to run Gecko
+05-26 11:46:02.050 3143 3173 I libglean_ffi: glean_core::database: Database initialized
+05-26 11:46:02.051 3143 3179 D GeckoThread: env var: MOZ_CRASHREPORTER=1
+05-26 11:46:02.051 3143 3179 D GeckoThread: env var: MOZ_CRASHREPORTER_NO_REPORT=1
+05-26 11:46:02.051 3143 3179 D GeckoThread: env var: MOZ_CRASHREPORTER_SHUTDOWN=1
+05-26 11:46:02.054 3143 3143 D GeckoRuntime: Lifecycle: onCreate
+05-26 11:46:02.090 3143 3173 I libglean_ffi: glean_ffi: Glean initialized
+05-26 11:46:02.129 3143 3179 D GeckoThread: State changed to MOZGLUE_READY
+05-26 11:46:02.132 3143 3143 D GleanMetricsService: Enabling Glean.
+05-26 11:46:02.138 3143 3143 I glean/Dispatchers: Task queued for execution and delayed until flushed
+05-26 11:46:02.140 3143 3143 I AdjustMetricsService: No adjust token defined
+05-26 11:46:02.141 3143 3143 D PushConfig: Creating push configuration for autopush.
+05-26 11:46:02.141 3143 3179 W Settings: Setting animator_duration_scale has moved from android.provider.Settings.System to android.provider.Settings.Global, returning read-only global URI.
+05-26 11:46:02.147 3143 3179 E GeckoLibLoad: Load sqlite start
+05-26 11:46:02.158 3143 3143 I App : AutoPushFeature is configured, initializing it...
+05-26 11:46:02.160 3143 3143 I AutoPushFeature: Checking validity of push subscriptions.
+05-26 11:46:02.169 3143 3155 I a.fennec_auror: Background concurrent copying GC freed 14684(2MB) AllocSpace objects, 4(208KB) LOS objects, 50% free, 2047KB/3MB, paused 4.855ms total 125.455ms
+05-26 11:46:02.170 3143 3143 D FennecProfile: profiles.ini: false
+05-26 11:46:02.170 3143 3143 D FennecProfile: No profiles found
+05-26 11:46:02.182 3143 3175 W [WARNING][Leanplum]: [com.leanplum.internal.ActionManager::getLocationManager::8]: Geofencing support requires leanplum-location module and Google Play Services v8.1 and higher.
+05-26 11:46:02.182 3143 3175 W [WARNING][Leanplum]: Add this to your build.gradle file:
+05-26 11:46:02.182 3143 3175 W [WARNING][Leanplum]: implementation 'com.google.android.gms:play-services-location:8.3.0+'
+05-26 11:46:02.182 3143 3175 W [WARNING][Leanplum]: implementation 'com.leanplum:leanplum-location:+'
+05-26 11:46:02.192 3143 3175 I [INFO][Leanplum]: [com.leanplum.monitoring.ExceptionHandler::setContext::6]: LeanplumExceptionHandler could not initialize Exception Reporting.This is expected if you have not included the leanplum-monitoring module
+05-26 11:46:02.205 3143 3179 E GeckoLibLoad: Load sqlite done
+05-26 11:46:02.205 3143 3179 E GeckoLibLoad: Load nss start
+05-26 11:46:02.205 3143 3179 E GeckoLibLoad: Load nss done
+05-26 11:46:02.227 3143 3143 D FennecMigrator: This is not a Fennec installation. No migration needed.
+05-26 11:46:02.247 3143 3185 I [INFO][Leanplum]: [com.leanplum.LeanplumFcmProvider::isManifestSetup::11]: Firebase Messaging is setup correctly.
+05-26 11:46:02.250 3143 3143 W a.fennec_auror: Accessing hidden method Landroid/content/res/Resources$Theme;->rebase()V (dark greylist, reflection)
+05-26 11:46:02.251 3143 3143 I ResourcesCompat: Failed to retrieve rebase() method
+05-26 11:46:02.251 3143 3143 I ResourcesCompat: java.lang.NoSuchMethodException: rebase []
+05-26 11:46:02.251 3143 3143 I ResourcesCompat: at java.lang.Class.getMethod(Class.java:2068)
+05-26 11:46:02.251 3143 3143 I ResourcesCompat: at java.lang.Class.getDeclaredMethod(Class.java:2047)
+05-26 11:46:02.251 3143 3143 I ResourcesCompat: at androidx.core.content.res.ResourcesCompat$ThemeCompat$ImplApi23.rebase(ResourcesCompat.java:3)
+05-26 11:46:02.251 3143 3143 I ResourcesCompat: at androidx.appcompat.app.AppCompatActivity.attachBaseContext(AppCompatActivity.java:80)
+05-26 11:46:02.251 3143 3143 I ResourcesCompat: at mozilla.components.support.locale.LocaleAwareAppCompatActivity.attachBaseContext(LocaleAwareAppCompatActivity.kt:2)
+05-26 11:46:02.251 3143 3143 I ResourcesCompat: at android.app.Activity.attach(Activity.java:7051)
+05-26 11:46:02.251 3143 3143 I ResourcesCompat: at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2873)
+05-26 11:46:02.251 3143 3143 I ResourcesCompat: at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3048)
+05-26 11:46:02.251 3143 3143 I ResourcesCompat: at android.app.servertransaction.LaunchActivityItem.execute(LaunchActivityItem.java:78)
+05-26 11:46:02.251 3143 3143 I ResourcesCompat: at android.app.servertransaction.TransactionExecutor.executeCallbacks(TransactionExecutor.java:108)
+05-26 11:46:02.251 3143 3143 I ResourcesCompat: at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:68)
+05-26 11:46:02.251 3143 3143 I ResourcesCompat: at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1808)
+05-26 11:46:02.251 3143 3143 I ResourcesCompat: at android.os.Handler.dispatchMessage(Handler.java:106)
+05-26 11:46:02.251 3143 3143 I ResourcesCompat: at android.os.Looper.loop(Looper.java:193)
+05-26 11:46:02.251 3143 3143 I ResourcesCompat: at android.app.ActivityThread.main(ActivityThread.java:6669)
+05-26 11:46:02.251 3143 3143 I ResourcesCompat: at java.lang.reflect.Method.invoke(Native Method)
+05-26 11:46:02.251 3143 3143 I ResourcesCompat: at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+05-26 11:46:02.251 3143 3143 I ResourcesCompat: at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+05-26 11:46:02.427 3143 3179 E GeckoLibLoad: Loaded libs in 222.502000ms total, 0ms(340ms) user, 140ms(250ms) system, 9(18) faults
+05-26 11:46:02.429 3143 3179 D GeckoThread: State changed to LIBS_READY
+05-26 11:46:02.431 3143 3179 W GeckoThread: zerdatime 190882628 - runGecko
+05-26 11:46:02.432 3143 3179 D GeckoProfile: Loading profile at: null name: default
+05-26 11:46:02.432 3143 3179 D GeckoProfile: Created new profile dir.
+05-26 11:46:02.433 3143 3179 I GeckoProfile: Enqueuing profile init.
+05-26 11:46:02.434 3143 3179 D GeckoProfile: Found profile dir: /data/user/0/org.mozilla.fennec_aurora/files/mozilla/rvksmdxh.default
+05-26 11:46:02.434 3143 3179 D GeckoProfile: Attempting to write new client ID properties
+05-26 11:46:02.435 3143 3179 D GeckoProfile: Creating profile dir: /data/user/0/org.mozilla.fennec_aurora/files/mozilla/rvksmdxh.default
+05-26 11:46:02.454 3143 3173 I glean/MetricsPingSched: The application just updated. Send metrics ping now.
+05-26 11:46:02.477 3143 3173 I glean/MetricsPingSched: Collecting the 'metrics' ping, now = Tue May 26 11:46:02 EDT 2020, startup = true, reason = upgrade
+05-26 11:46:02.477 3143 3173 I libglean_ffi: glean_core::ping: Collecting metrics
+05-26 11:46:02.477 3143 3173 I libglean_ffi: glean_core::ping: Storage for metrics empty. Bailing out.
+05-26 11:46:02.477 3143 3173 I libglean_ffi: glean_core: No content for ping 'metrics', therefore no ping queued.
+05-26 11:46:02.480 3143 3173 D glean/MetricsPingSched: Scheduling the 'metrics' ping in 58437572ms
+05-26 11:46:02.485 3143 3173 I libglean_ffi: glean_core: Upload enabled: true
+05-26 11:46:02.501 3143 3179 I Gecko:DumpUtils: Fifo watcher disabled via pref.
+05-26 11:46:02.551 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@1b47420)
+05-26 11:46:02.552 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@1b47420)
+05-26 11:46:02.571 3143 3143 I FirefoxAccountStateMachine: Enabling/updating sync with a new SyncConfig: SyncConfig(supportedEngines=[mozilla.components.service.fxa.SyncEngine$History@83c4938, mozilla.components.service.fxa.SyncEngine$Bookmarks@94a6d11, mozilla.components.service.fxa.SyncEngine$Passwords@e11ce76], syncPeriodInMinutes=240)
+05-26 11:46:02.579 3143 3143 I BgSyncManager: Periodic syncing enabled at a 240 interval
+05-26 11:46:02.579 3143 3143 I FirefoxAccountStateMachine: Sync is enabled
+05-26 11:46:02.589 3143 3211 I FirefoxAccountStateMachine: Processing event Event$Init for state Start. Next state is Start
+05-26 11:46:02.594 3143 3179 D GeckoSysInfo: System memory: 1494MB.
+05-26 11:46:02.605 3143 3179 D GeckoThread: State changed to JNI_READY
+05-26 11:46:02.610 3143 3143 E ActivityThread: Failed to find provider info for org.mozilla.fennec_aurora.fxa.auth
+05-26 11:46:02.625 3143 3213 D ServiceAllocator: org.mozilla.gecko.process.GeckoChildProcessServices$tab0 updateBindings: BACKGROUND priority, 0 importance, 2 successful binds, 0 failed binds, 0 successful unbinds, 0 failed unbinds
+05-26 11:46:02.630 3222 3222 W nec_aurora:tab: Unexpected CPU variant for X86 using defaults: x86
+05-26 11:46:02.633 1876 1893 I ActivityManager: Start proc 3222:org.mozilla.fennec_aurora:tab0/u0a92 for service org.mozilla.fennec_aurora/org.mozilla.gecko.process.GeckoChildProcessServices$tab0
+05-26 11:46:02.651 1739 1739 I keystore: del USRPKEY_org.mozilla.fennec_aurora 10092
+05-26 11:46:02.653 1739 1739 I keystore: del USRCERT_org.mozilla.fennec_aurora 10092
+05-26 11:46:02.653 3143 3143 D GeckoRuntime: Lifecycle: onStart
+05-26 11:46:02.655 1739 1739 I keystore: del CACERT_org.mozilla.fennec_aurora 10092
+05-26 11:46:02.657 3143 3143 D GeckoRuntime: Lifecycle: onResume
+05-26 11:46:02.658 3143 3143 D GeckoNetworkManager: Incoming event start for state OffNoListeners -> OnNoListeners
+05-26 11:46:02.660 3143 3143 D GeckoNetworkManager: New network state: UP, WIFI, WIFI
+05-26 11:46:02.661 3143 3143 D OpenGLRenderer: Skia GL Pipeline
+05-26 11:46:02.661 3143 3185 D NetworkSecurityConfig: No Network Security Config specified, using platform default
+05-26 11:46:02.666 3222 3222 I nec_aurora:tab: The ClassLoaderContext is a special shared library.
+05-26 11:46:02.675 3143 3211 I FirefoxAccountStateMachine: Ran 'Event$Init' side-effects for state Start, got successive event Event$AccountNotFound
+05-26 11:46:02.675 3143 3211 I FirefoxAccountStateMachine: Processing event Event$AccountNotFound for state Start. Next state is NotAuthenticated
+05-26 11:46:02.676 1733 2119 E SurfaceFlinger: ro.sf.lcd_density must be defined as a build property
+05-26 11:46:02.676 3143 3211 D RustNativeSupport: findMegazordLibraryName(fxaclient, 0.58.1
+05-26 11:46:02.676 3143 3211 D RustNativeSupport: lib in use: none
+05-26 11:46:02.676 3143 3211 D RustNativeSupport: lib configured: megazord
+05-26 11:46:02.676 3143 3211 D RustNativeSupport: lib version configured: 0.58.1
+05-26 11:46:02.676 3143 3211 D RustNativeSupport: settled on megazord
+05-26 11:46:02.679 3143 3211 D fxaclient_ffi: fxa_new
+05-26 11:46:02.742 3222 3222 D GeckoThread: State changed to LAUNCHED
+05-26 11:46:02.743 3222 3251 I GeckoThread: preparing to run Gecko
+05-26 11:46:02.755 3143 3173 I libglean_ffi: glean_core::ping: Collecting baseline
+05-26 11:46:02.764 3143 3173 D libglean_ffi: glean_core::ping: Storing ping '8ce65411-ef87-4223-8242-6b48e5d710d4' at '/data/user/0/org.mozilla.fennec_aurora/glean_data/pending_pings/8ce65411-ef87-4223-8242-6b48e5d710d4'
+05-26 11:46:02.765 3143 3173 I libglean_ffi: glean_core: The ping 'baseline' was submitted and will be sent as soon as possible
+05-26 11:46:02.782 3143 3143 D GeckoNetworkManager: Incoming event receivedUpdate for state OnNoListeners -> OnNoListeners
+05-26 11:46:02.785 3143 3143 D GeckoNetworkManager: New network state: UP, WIFI, WIFI
+05-26 11:46:02.787 3143 3211 W FirefoxAccountStateMachine: Got invalid event Event$Init for state NotAuthenticated.
+05-26 11:46:02.820 1876 8003 D ConnectivityService: requestNetwork for uid/pid:10092/3143 NetworkRequest [ TRACK_DEFAULT id=238, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10092] ]
+05-26 11:46:02.823 1876 1975 D WIFI : got request NetworkRequest [ TRACK_DEFAULT id=238, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10092] ] with score 60
+05-26 11:46:02.823 1876 1975 D WIFI_UT : got request NetworkRequest [ TRACK_DEFAULT id=238, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10092] ] with score 60
+05-26 11:46:02.824 2131 2131 D PhoneSwitcherNetworkRequstListener: got request NetworkRequest [ TRACK_DEFAULT id=238, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10092] ] with score 60
+05-26 11:46:02.853 3143 3155 I a.fennec_auror: Background concurrent copying GC freed 8545(1150KB) AllocSpace objects, 24(1120KB) LOS objects, 49% free, 3MB/7MB, paused 2.483ms total 123.447ms
+05-26 11:46:02.862 1623 5774 D gralloc_ranchu: gralloc_alloc: Creating ashmem region of size 9334784
+05-26 11:46:02.872 1733 1967 D : HostConnection::get() New Host Connection established 0xe9a2f580, tid 1967
+05-26 11:46:02.873 1623 5774 D gralloc_ranchu: gralloc_alloc: Creating ashmem region of size 9334784
+05-26 11:46:02.880 1623 5774 D gralloc_ranchu: gralloc_alloc: Creating ashmem region of size 9334784
+05-26 11:46:02.897 3143 3241 D : HostConnection::get() New Host Connection established 0xd1ca43c0, tid 3241
+05-26 11:46:02.911 3143 3241 I ConfigStore: android::hardware::configstore::V1_0::ISurfaceFlingerConfigs::hasWideColorDisplay retrieved: 0
+05-26 11:46:02.911 3143 3241 I ConfigStore: android::hardware::configstore::V1_0::ISurfaceFlingerConfigs::hasHDRDisplay retrieved: 0
+05-26 11:46:02.911 3143 3241 I OpenGLRenderer: Initialized EGL, version 1.4
+05-26 11:46:02.911 3143 3241 D OpenGLRenderer: Swap behavior 1
+05-26 11:46:02.911 3143 3241 W OpenGLRenderer: Failed to choose config with EGL_SWAP_BEHAVIOR_PRESERVED, retrying without...
+05-26 11:46:02.911 3143 3241 D OpenGLRenderer: Swap behavior 0
+05-26 11:46:02.912 3143 3241 D EGL_emulation: eglCreateContext: 0xd0089240: maj 3 min 0 rcv 3
+05-26 11:46:02.912 3143 3143 D MigrationPushRenewer: Migration state: NONE
+05-26 11:46:02.913 3143 3143 D MigrationTelemetryListener: Migration state: NONE
+05-26 11:46:02.914 3143 3241 D EGL_emulation: eglMakeCurrent: 0xd0089240: ver 3 0 (tinfo 0xd00f6050)
+05-26 11:46:02.919 1733 1967 E SurfaceFlinger: ro.sf.lcd_density must be defined as a build property
+05-26 11:46:02.999 3143 3241 D EGL_emulation: eglMakeCurrent: 0xd0089240: ver 3 0 (tinfo 0xd00f6050)
+05-26 11:46:03.029 3143 3260 D glean/PingUploadWorker: Processing persisted pings at /data/user/0/org.mozilla.fennec_aurora/glean_data/pending_pings
+05-26 11:46:03.030 3143 3260 D glean/PingUploadWorker: Processing ping: 8ce65411-ef87-4223-8242-6b48e5d710d4
+05-26 11:46:03.036 1876 1899 I ActivityManager: Displayed org.mozilla.fennec_aurora/org.mozilla.fenix.HomeActivity: +1s462ms
+05-26 11:46:03.043 2000 2000 I GoogleInputMethod: onFinishInput() : Dummy InputConnection bound
+05-26 11:46:03.045 3143 3260 D glean/ConceptFetchHttpUploader: Submitting ping to: https://incoming.telemetry.mozilla.org/submit/org-mozilla-fennec-aurora/baseline/1/8ce65411-ef87-4223-8242-6b48e5d710d4
+05-26 11:46:03.047 2000 2000 I GoogleInputMethod: onStartInput() : Dummy InputConnection bound
+05-26 11:46:03.051 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:46:03.052 3143 3143 E ActivityThread: Failed to find provider info for org.mozilla.fennec_aurora.fxa.auth
+05-26 11:46:03.158 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:46:03.218 1876 1940 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:46:03.219 1876 1940 D : HostConnection::get() New Host Connection established 0xc8655580, tid 1940
+05-26 11:46:03.219 1876 1940 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:46:03.220 1733 2205 D : HostConnection::get() New Host Connection established 0xede987c0, tid 2205
+05-26 11:46:03.220 1733 2205 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:46:03.220 1733 2205 D : HostConnection::get() New Host Connection established 0xede987c0, tid 2205
+05-26 11:46:03.222 1733 2205 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:46:03.225 1876 1940 D : HostConnection::get() New Host Connection established 0xc8655580, tid 1940
+05-26 11:46:03.225 1876 1940 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:46:03.226 1904 1911 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:46:03.227 1904 1911 D : HostConnection::get() New Host Connection established 0xe6d43140, tid 1911
+05-26 11:46:03.228 1904 1911 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:46:03.228 1876 1940 D : HostConnection::get() New Host Connection established 0xc8655580, tid 1940
+05-26 11:46:03.230 1876 1940 D gralloc_ranchu: gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+05-26 11:46:03.236 1904 1911 D : HostConnection::get() New Host Connection established 0xe6d43140, tid 1911
+05-26 11:46:03.262 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:46:03.372 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:46:03.387 3143 3143 D GeckoNetworkManager: Incoming event enableNotifications for state OnNoListeners -> OnWithListeners
+05-26 11:46:03.391 3143 3143 D GeckoNetworkManager: New network state: UP, WIFI, WIFI
+05-26 11:46:03.392 1876 3809 W ActivityManager: Receiver with filter android.content.IntentFilter@3574dd6 already registered for pid 3143, callerPackage is org.mozilla.fennec_aurora
+05-26 11:46:03.403 3143 3143 D GeckoNetworkManager: Incoming event receivedUpdate for state OnWithListeners -> OnWithListeners
+05-26 11:46:03.414 3143 3143 D GeckoNetworkManager: New network state: UP, WIFI, WIFI
+05-26 11:46:03.489 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:46:03.502 3143 3143 I [INFO][Leanplum]: [com.leanplum.LeanplumCloudMessagingProvider::onRegistrationIdReceived::3]: Device registered for push notifications with registration token, dYV3t03voH0:APA91bHDV2GKf7W7v6eMdfzj7Yie1YfP1FaV5KX7ltgDKK4ju7E0B8qjmeNhUlTdiSHvgmTQZ18T0vUsERF1cl60KsD32HbsZMWYVKehmKpWFPZiGFd0OUPM47XynN0qn4bnZYyqCS1X
+05-26 11:46:03.551 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@cf252f3)
+05-26 11:46:03.552 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@cf252f3)
+05-26 11:46:03.599 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:46:03.705 3143 3276 I [INFO][Leanplum]: [com.leanplum.LeanplumCloudMessagingProvider::onRegistrationIdReceived::3]: Device registered for push notifications with registration token, dYV3t03voH0:APA91bHDV2GKf7W7v6eMdfzj7Yie1YfP1FaV5KX7ltgDKK4ju7E0B8qjmeNhUlTdiSHvgmTQZ18T0vUsERF1cl60KsD32HbsZMWYVKehmKpWFPZiGFd0OUPM47XynN0qn4bnZYyqCS1X
+05-26 11:46:03.723 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:46:03.728 3143 3277 I AutoPushFeature: Received a new registration token from push service.
+05-26 11:46:03.753 1733 1733 W SurfaceFlinger: couldn't log to binary event log: overflow.
+05-26 11:46:03.756 3143 3277 D RustNativeSupport: findMegazordLibraryName(push, 0.58.1
+05-26 11:46:03.756 3143 3277 D RustNativeSupport: lib in use: none
+05-26 11:46:03.757 3143 3277 D RustNativeSupport: lib configured: megazord
+05-26 11:46:03.757 3143 3277 D RustNativeSupport: lib version configured: 0.58.1
+05-26 11:46:03.757 3143 3277 D RustNativeSupport: settled on megazord
+05-26 11:46:03.829 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:46:03.873 2534 2005 I PBSessionCacheImpl: Deleted sessionId[290782986460] from persistence.
+05-26 11:46:03.882 2534 2534 W SearchService: Abort, client detached.
+05-26 11:46:03.905 2534 2034 I WorkController: WorkProxy is not enqueued because WorkController is disposed: WorkProxy{Name=context::j, WorkerId=context, id=bcd7568}
+05-26 11:46:03.905 2534 2034 I WorkController: WorkProxy is not enqueued because WorkController is disposed: WorkProxy{Name=context::m, WorkerId=context, id=6b0dd81}
+05-26 11:46:03.906 2534 2034 I WorkController: WorkProxy is not enqueued because WorkController is disposed: WorkProxy{Name=context::n, WorkerId=context, id=f91ce26}
+05-26 11:46:03.908 2534 2034 I WorkController: WorkProxy is not enqueued because WorkController is disposed: WorkProxy{Name=context::p, WorkerId=context, id=5488a67}
+05-26 11:46:03.908 1832 1832 E netmgr : Failed to open QEMU pipe 'qemud:network': Invalid argument
+05-26 11:46:03.908 1832 1832 E netmgr : WifiForwarder unable to open QEMU pipe: Invalid argument
+05-26 11:46:03.928 2012 2265 D EGL_emulation: eglMakeCurrent: 0xe8b05420: ver 3 0 (tinfo 0xe8b03980)
+05-26 11:46:03.935 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:46:03.976 3143 3179 D GeckoThread: State changed to PROFILE_READY
+05-26 11:46:04.004 3143 3179 D GeckoThread: State changed to RUNNING
+05-26 11:46:04.010 3143 3179 I Gecko : -*- nsDNSServiceDiscovery.js : nsDNSServiceDiscovery
+05-26 11:46:04.028 3143 3179 I Gecko : 1590507964028 Marionette TRACE Marionette enabled
+05-26 11:46:04.040 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:46:04.142 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:46:04.193 3143 3143 I DefaultSupportedAddonsChecker: Register check for new supported add-ons
+05-26 11:46:04.206 3143 3179 I Gecko : 1590507964206 Marionette TRACE Received observer notification marionette-startup-requested
+05-26 11:46:04.207 3143 3179 I Gecko : 1590507964207 Marionette TRACE Waiting until startup recorder finished recording startup scripts...
+05-26 11:46:04.239 3143 3187 I SupportedAddonsWorker: Trying to check for new supported add-ons
+05-26 11:46:04.248 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:46:04.349 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:46:04.421 3143 3143 D App : Installed browser-icons extension
+05-26 11:46:04.451 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:46:04.459 3143 3179 D : HostConnection::get() New Host Connection established 0xd1ca4f80, tid 3179
+05-26 11:46:04.461 3143 3179 E EGL_emulation: tid 3179: eglBindAPI(1259): error 0x300c (EGL_BAD_PARAMETER)
+05-26 11:46:04.463 3143 3179 D EGL_emulation: eglCreateContext: 0xe5e99640: maj 3 min 0 rcv 3
+05-26 11:46:04.465 3143 3179 D EGL_emulation: eglMakeCurrent: 0xe5e99640: ver 3 0 (tinfo 0xe71136e0)
+05-26 11:46:04.552 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@2bcd886)
+05-26 11:46:04.554 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@2bcd886)
+05-26 11:46:04.557 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:46:04.662 3143 3260 D glean/ConceptFetchHttpUploader: Ping successfully sent (200)
+05-26 11:46:04.662 3143 3260 D glean/PingUploadWorker: 8ce65411-ef87-4223-8242-6b48e5d710d4 was deleted: true
+05-26 11:46:04.662 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:46:04.663 3143 3177 I WM-WorkerWrapper: Worker result SUCCESS for Work [ id=30b8d5b3-35f6-406c-87ae-45da79372bef, tags={ mozilla.telemetry.glean.scheduler.PingUploadWorker, mozac_service_glean_ping_upload_worker } ]
+05-26 11:46:04.669 1876 1978 D ConnectivityService: releasing NetworkRequest [ TRACK_DEFAULT id=238, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10092] ] (release request)
+05-26 11:46:04.712 1876 1884 I system_server: Background concurrent copying GC freed 70960(3MB) AllocSpace objects, 55(2MB) LOS objects, 19% free, 25MB/31MB, paused 2.462ms total 125.489ms
+05-26 11:46:04.768 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:46:04.819 3143 3143 D mozac-webcompat: Installed WebCompat webextension: webcompat@mozilla.com
+05-26 11:46:04.865 3143 3179 I Gecko : 1590507964865 Marionette TRACE All scripts recorded.
+05-26 11:46:04.865 3143 3179 I Gecko : 1590507964865 Marionette DEBUG Setting recommended pref apz.content_response_timeout to 60000
+05-26 11:46:04.866 3143 3179 I Gecko : 1590507964866 Marionette DEBUG Setting recommended pref browser.contentblocking.introCount to 99
+05-26 11:46:04.866 3143 3179 I Gecko : 1590507964866 Marionette DEBUG Setting recommended pref browser.download.panel.shown to true
+05-26 11:46:04.866 3143 3179 I Gecko : 1590507964866 Marionette DEBUG Setting recommended pref browser.newtabpage.enabled to false
+05-26 11:46:04.866 3143 3179 I Gecko : 1590507964866 Marionette DEBUG Setting recommended pref browser.safebrowsing.malware.enabled to false
+05-26 11:46:04.870 3143 3179 I Gecko : 1590507964870 Marionette DEBUG Setting recommended pref browser.safebrowsing.phishing.enabled to false
+05-26 11:46:04.872 3143 3179 I Gecko : 1590507964872 Marionette DEBUG Setting recommended pref browser.search.update to false
+05-26 11:46:04.872 3143 3179 I Gecko : 1590507964872 Marionette DEBUG Setting recommended pref browser.tabs.disableBackgroundZombification to false
+05-26 11:46:04.872 3143 3179 I Gecko : 1590507964872 Marionette DEBUG Setting recommended pref browser.tabs.remote.separatePrivilegedContentProcess to false
+05-26 11:46:04.873 3143 3179 I Gecko : 1590507964873 Marionette DEBUG Setting recommended pref browser.tabs.unloadOnLowMemory to false
+05-26 11:46:04.873 3143 3179 I Gecko : 1590507964873 Marionette DEBUG Setting recommended pref browser.tabs.warnOnCloseOtherTabs to false
+05-26 11:46:04.873 3143 3179 I Gecko : 1590507964873 Marionette DEBUG Setting recommended pref browser.tabs.warnOnOpen to false
+05-26 11:46:04.873 3143 3179 I Gecko : 1590507964873 Marionette DEBUG Setting recommended pref browser.usedOnWindows10.introURL to
+05-26 11:46:04.874 3143 3179 I Gecko : 1590507964873 Marionette DEBUG Setting recommended pref browser.urlbar.suggest.searches to false
+05-26 11:46:04.874 3143 3179 I Gecko : 1590507964874 Marionette DEBUG Setting recommended pref dom.disable_beforeunload to true
+05-26 11:46:04.874 3143 3179 I Gecko : 1590507964874 Marionette DEBUG Setting recommended pref dom.file.createInChild to true
+05-26 11:46:04.874 3143 3179 I Gecko : 1590507964874 Marionette DEBUG Setting recommended pref extensions.getAddons.cache.enabled to false
+05-26 11:46:04.874 4313 4313 E adbd : failed to connect to socket 'tcp:2829': Connection refused
+05-26 11:46:04.875 3143 3179 I Gecko : 1590507964875 Marionette DEBUG Setting recommended pref network.http.prompt-temp-redirect to false
+05-26 11:46:04.875 3143 3179 I Gecko : 1590507964875 Marionette DEBUG Setting recommended pref security.notification_enable_delay to 0
+05-26 11:46:04.875 3143 3179 I Gecko : 1590507964875 Marionette DEBUG Setting recommended pref signon.autofillForms to false
+05-26 11:46:04.876 3143 3179 I Gecko : 1590507964876 Marionette DEBUG Setting recommended pref signon.rememberSignons to false
+05-26 11:46:04.876 3143 3179 I Gecko : 1590507964876 Marionette DEBUG Setting recommended pref toolkit.cosmeticAnimations.enabled to false
+05-26 11:46:04.943 3143 3179 I Gecko : 1590507964943 Marionette INFO Listening on port 2829
+05-26 11:46:04.944 3143 3179 I Gecko : 1590507964944 Marionette DEBUG Marionette is listening
+05-26 11:46:04.988 3143 3179 I Gecko : 1590507964988 Marionette DEBUG Accepted connection 0 from 127.0.0.1:44851
+05-26 11:46:05.011 3143 3179 I Gecko : 1590507965011 Marionette DEBUG 0 -> [0,1,"WebDriver:NewSession",{"browserName":"firefox","pageLoadStrategy":"none"}]
+05-26 11:46:05.558 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@2053547)
+05-26 11:46:05.558 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@2053547)
+05-26 11:46:05.562 2482 11975 W ctxmgr : [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):IndoorOutdoorProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2482). Was: 3 for 57, account#-517948760#
+05-26 11:46:05.659 3143 3177 I WM-WorkerWrapper: Worker result SUCCESS for Work [ id=7de11057-1656-49b2-aec1-1df4fd79ebc2, tags={ mozilla.components.feature.addons.migration.DefaultSupportedAddonsChecker.periodicWork, mozilla.components.feature.addons.migration.SupportedAddonsWorker } ]
+05-26 11:46:06.561 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@ef9a69d)
+05-26 11:46:06.563 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@ef9a69d)
+05-26 11:46:07.562 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@b7df712)
+05-26 11:46:07.565 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@b7df712)
+05-26 11:46:07.793 1876 1890 E memtrack: Couldn't load memtrack module
+05-26 11:46:07.793 1876 1890 W android.os.Debug: failed to get memory consumption info: -1
+05-26 11:46:07.900 3143 3143 I glean/Glean: Registering pings for org.mozilla.fenix.GleanMetrics.Pings
+05-26 11:46:07.928 3143 3188 I FenixApplication: Running post-visual completeness tasks...
+05-26 11:46:07.928 3143 3188 I FenixApplication: Storage initialization...
+05-26 11:46:07.928 3143 3143 I FenixApplication: Kicking-off account manager...
+05-26 11:46:07.929 3143 3143 I FenixApplication: 'Kicking-off account manager' took 0 ms
+05-26 11:46:07.930 3143 3188 I PlacesHistoryStorage: Warming up places storage...
+05-26 11:46:07.931 3143 3188 D RustNativeSupport: findMegazordLibraryName(places, 0.58.1
+05-26 11:46:07.931 3143 3188 D RustNativeSupport: lib in use: none
+05-26 11:46:07.931 3143 3188 D RustNativeSupport: lib configured: megazord
+05-26 11:46:07.931 3143 3188 D RustNativeSupport: lib version configured: 0.58.1
+05-26 11:46:07.931 3143 3188 D RustNativeSupport: settled on megazord
+05-26 11:46:07.932 3143 3188 D places_ffi: places_api_new
+05-26 11:46:07.955 3143 3188 D places::db::schema: Creating schema
+05-26 11:46:07.967 3143 3187 I App : ActivationPing - generating ping with the hashed id
+05-26 11:46:07.971 3143 3187 I App : ActivationPing - generating ping (has `identifier`: true)
+05-26 11:46:07.983 3143 3188 D sql_support::conn_ext: Transaction commited after 27.736ms
+05-26 11:46:07.983 3143 3188 D places_ffi: places_connection_new
+05-26 11:46:07.986 3143 3188 D places_ffi: places_connection_new
+05-26 11:46:07.987 3143 3188 I PlacesHistoryStorage: 'Warming up places storage' took 57 ms
+05-26 11:46:07.987 3143 3188 I PlacesBookmarksStorage: Warming up places storage...
+05-26 11:46:07.988 3143 3188 D places_ffi: places_connection_new
+05-26 11:46:07.988 3143 3173 I libglean_ffi: glean_core::ping: Collecting activation
+05-26 11:46:07.989 3143 3188 I PlacesBookmarksStorage: 'Warming up places storage' took 1 ms
+05-26 11:46:08.003 3143 3173 D libglean_ffi: glean_core::ping: Storing ping 'dbb2e9d4-aeca-440b-be38-c6d3f7a29037' at '/data/user/0/org.mozilla.fennec_aurora/glean_data/pending_pings/dbb2e9d4-aeca-440b-be38-c6d3f7a29037'
+05-26 11:46:08.003 3143 3173 I libglean_ffi: glean_core: The ping 'activation' was submitted and will be sent as soon as possible
+05-26 11:46:08.003 1739 1739 I keystore: 1 0
+05-26 11:46:08.011 3143 3188 I SyncableLoginsStorage: Warming up storage...
+05-26 11:46:08.015 3143 3188 D RustNativeSupport: findMegazordLibraryName(logins, 0.58.1
+05-26 11:46:08.015 3143 3188 D RustNativeSupport: lib in use: none
+05-26 11:46:08.015 3143 3188 D RustNativeSupport: lib configured: megazord
+05-26 11:46:08.015 3143 3188 D RustNativeSupport: lib version configured: 0.58.1
+05-26 11:46:08.015 3143 3188 D RustNativeSupport: settled on megazord
+05-26 11:46:08.016 3143 3188 D logins_ffi: sync15_passwords_state_new
+05-26 11:46:08.027 3143 3188 D logins::schema: Creating schema
+05-26 11:46:08.028 1876 2445 D ConnectivityService: requestNetwork for uid/pid:10092/3143 NetworkRequest [ TRACK_DEFAULT id=239, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10092] ]
+05-26 11:46:08.028 1876 1975 D WIFI : got request NetworkRequest [ TRACK_DEFAULT id=239, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10092] ] with score 60
+05-26 11:46:08.029 1876 1975 D WIFI_UT : got request NetworkRequest [ TRACK_DEFAULT id=239, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10092] ] with score 60
+05-26 11:46:08.029 2131 2131 D PhoneSwitcherNetworkRequstListener: got request NetworkRequest [ TRACK_DEFAULT id=239, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10092] ] with score 60
+05-26 11:46:08.041 3143 3325 D glean/PingUploadWorker: Processing persisted pings at /data/user/0/org.mozilla.fennec_aurora/glean_data/pending_pings
+05-26 11:46:08.042 3143 3325 D glean/PingUploadWorker: Processing ping: dbb2e9d4-aeca-440b-be38-c6d3f7a29037
+05-26 11:46:08.044 3143 3325 D glean/ConceptFetchHttpUploader: Submitting ping to: https://incoming.telemetry.mozilla.org/submit/org-mozilla-fennec-aurora/activation/1/dbb2e9d4-aeca-440b-be38-c6d3f7a29037
+05-26 11:46:08.133 3143 3188 I SyncableLoginsStorage: 'Warming up storage' took 121 ms
+05-26 11:46:08.133 3143 3188 I FenixApplication: 'Storage initialization' took 205 ms
+05-26 11:46:08.567 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@6d5b4c5)
+05-26 11:46:08.569 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@6d5b4c5)
+05-26 11:46:08.589 3143 3325 D glean/ConceptFetchHttpUploader: Ping successfully sent (200)
+05-26 11:46:08.589 3143 3325 D glean/PingUploadWorker: dbb2e9d4-aeca-440b-be38-c6d3f7a29037 was deleted: true
+05-26 11:46:08.592 3143 3207 I WM-WorkerWrapper: Worker result SUCCESS for Work [ id=6620b187-5b00-46cd-a050-e01293de4e5f, tags={ mozilla.telemetry.glean.scheduler.PingUploadWorker, mozac_service_glean_ping_upload_worker } ]
+05-26 11:46:08.595 1876 1978 D ConnectivityService: releasing NetworkRequest [ TRACK_DEFAULT id=239, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10092] ] (release request)
+05-26 11:46:09.571 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@b702e1a)
+05-26 11:46:09.573 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@b702e1a)
+05-26 11:46:10.574 1876 2204 I GnssLocationProvider: WakeLock acquired by sendMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@eebbd4b)
+05-26 11:46:10.576 1876 1890 I GnssLocationProvider: WakeLock released by handleMessage(REPORT_SV_STATUS, 0, com.android.server.location.GnssLocationProvider$SvStatusInfo@eebbd4b)
+05-26 11:46:10.581 2482 11975 W ctxmgr : [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):IndoorOutdoorProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2482). Was: 3 for 57, account#-517948760#
+05-26 11:46:10.877 3143 3179 I Gecko : 1590507970877 Marionette DEBUG Closed connection 0
diff --git a/python/mozperftest/mozperftest/tests/data/hook.py b/python/mozperftest/mozperftest/tests/data/hook.py
new file mode 100644
index 0000000000..a49406e150
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/data/hook.py
@@ -0,0 +1,7 @@
+def doit(env):
+ return "OK"
+
+
+def on_exception(env, layer, exc):
+ # swallow the error and abort the run
+ return False
diff --git a/python/mozperftest/mozperftest/tests/data/hook_raises.py b/python/mozperftest/mozperftest/tests/data/hook_raises.py
new file mode 100644
index 0000000000..aaba445e81
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/data/hook_raises.py
@@ -0,0 +1,3 @@
+def on_exception(env, layer, exc):
+ # re-raise
+ raise exc
diff --git a/python/mozperftest/mozperftest/tests/data/hook_resume.py b/python/mozperftest/mozperftest/tests/data/hook_resume.py
new file mode 100644
index 0000000000..5460b2e770
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/data/hook_resume.py
@@ -0,0 +1,3 @@
+def on_exception(env, layer, exc):
+ # swallow the error and resume
+ return True
diff --git a/python/mozperftest/mozperftest/tests/data/hooks_iteration.py b/python/mozperftest/mozperftest/tests/data/hooks_iteration.py
new file mode 100644
index 0000000000..f7a30ad817
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/data/hooks_iteration.py
@@ -0,0 +1,2 @@
+def before_iterations(kwargs):
+ kwargs["test_iterations"] = 5
diff --git a/python/mozperftest/mozperftest/tests/data/hooks_state.py b/python/mozperftest/mozperftest/tests/data/hooks_state.py
new file mode 100644
index 0000000000..4847e3b650
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/data/hooks_state.py
@@ -0,0 +1,11 @@
+_GLOBAL = False
+
+
+def before_iterations(kw):
+ global _GLOBAL
+ _GLOBAL = True
+
+
+def before_runs(env, **kw):
+ if not _GLOBAL:
+ raise Exception("oops")
diff --git a/python/mozperftest/mozperftest/tests/data/logcat b/python/mozperftest/mozperftest/tests/data/logcat
new file mode 100644
index 0000000000..a7a5a0d56f
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/data/logcat
@@ -0,0 +1,5511 @@
+--------- beginning of main
+06-02 16:37:53.390 E/memtrack( 1869): Couldn't load memtrack module
+06-02 16:37:53.390 W/android.os.Debug( 1869): failed to get memory consumption info: -1
+06-02 16:37:54.156 W/ctxmgr ( 2473): [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):IndoorOutdoorProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2473). Was: 3 for 57, account#-517948760#
+06-02 16:37:54.725 I/log ( 8804): logcat cleared
+06-02 16:37:59.165 W/ctxmgr ( 2473): [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):IndoorOutdoorProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2473). Was: 3 for 57, account#-517948760#
+06-02 16:38:00.114 W/system_server( 1869): Long monitor contention with owner PackageInstaller (1929) at boolean com.android.server.pm.PackageInstallerSession$3.handleMessage(android.os.Message)(PackageInstallerSession.java:292) waiters=0 in android.content.pm.PackageInstaller$SessionInfo com.android.server.pm.PackageInstallerSession.generateInfo(boolean) for 1.379s
+06-02 16:38:00.117 D/hwcomposer( 1897): hw_composer sent 6 syncs in 60s
+--------- beginning of system
+06-02 16:38:00.336 I/ActivityManager( 1869): Force stopping org.mozilla.fenix.debug appid=10099 user=-1: installPackageLI
+06-02 16:38:00.336 I/ActivityManager( 1869): Killing 8565:org.mozilla.fenix.debug/u0a99 (adj 0): stop org.mozilla.fenix.debug
+06-02 16:38:00.339 W/libprocessgroup( 1869): kill(-8565, 9) failed: No such process
+06-02 16:38:00.341 W/ActivityManager( 1869): Force removing ActivityRecord{8c0a4c0 u0 org.mozilla.fenix.debug/.App t387}: app died, no saved state
+06-02 16:38:00.343 I/ServiceChildProcess( 8615): Service has been unbound. Stopping.
+06-02 16:38:00.378 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:00.379 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c3080, tid 1897
+06-02 16:38:00.380 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:00.380 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c3080, tid 1897
+06-02 16:38:00.380 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:00.381 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c3080, tid 1897
+06-02 16:38:00.381 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:00.381 W/libprocessgroup( 1869): kill(-8565, 9) failed: No such process
+06-02 16:38:00.382 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c3080, tid 1897
+06-02 16:38:00.382 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:00.382 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c3080, tid 1897
+06-02 16:38:00.382 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:00.385 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c3080, tid 1897
+06-02 16:38:00.388 I/PackageManager( 1869): Update package org.mozilla.fenix.debug code path from /data/app/org.mozilla.fenix.debug-0JI_FY2TAEvhwr-Ly_nbEQ== to /data/app/org.mozilla.fenix.debug-l0coAwl06p55SDu8S60N3w==; Retain data and using new
+06-02 16:38:00.388 I/PackageManager( 1869): Update package org.mozilla.fenix.debug resource path from /data/app/org.mozilla.fenix.debug-0JI_FY2TAEvhwr-Ly_nbEQ== to /data/app/org.mozilla.fenix.debug-l0coAwl06p55SDu8S60N3w==; Retain data and using new
+06-02 16:38:00.388 I/ActivityManager( 1869): Killing 8615:org.mozilla.fenix.debug:tab0/u0a99 (adj 0): stop org.mozilla.fenix.debug
+06-02 16:38:00.418 W/libprocessgroup( 1869): kill(-8565, 9) failed: No such process
+06-02 16:38:00.422 W/InputDispatcher( 1869): channel 'ce4529b org.mozilla.fenix.debug/org.mozilla.fenix.debug.App (server)' ~ Consumer closed input channel or an error occurred. events=0x9
+06-02 16:38:00.422 E/InputDispatcher( 1869): channel 'ce4529b org.mozilla.fenix.debug/org.mozilla.fenix.debug.App (server)' ~ Channel is unrecoverably broken and will be disposed!
+06-02 16:38:00.423 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:00.424 D/ ( 1728): HostConnection::get() New Host Connection established 0xe50f1280, tid 2157
+06-02 16:38:00.425 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:00.425 D/ ( 1728): HostConnection::get() New Host Connection established 0xe90c90c0, tid 2157
+06-02 16:38:00.425 I/Zygote ( 1729): Process 8565 exited due to signal (9)
+06-02 16:38:00.425 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:00.425 D/ ( 1728): HostConnection::get() New Host Connection established 0xe90c90c0, tid 2157
+06-02 16:38:00.426 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:00.432 I/WindowManager( 1869): WIN DEATH: Window{ce4529b u0 org.mozilla.fenix.debug/org.mozilla.fenix.debug.App}
+06-02 16:38:00.432 W/InputDispatcher( 1869): Attempted to unregister already unregistered input channel 'ce4529b org.mozilla.fenix.debug/org.mozilla.fenix.debug.App (server)'
+06-02 16:38:00.433 W/ActivityManager( 1869): setHasOverlayUi called on unknown pid: 8565
+06-02 16:38:00.435 W/SurfaceFlinger( 1728): Attempting to destroy on removed layer: Task=387#0
+06-02 16:38:00.435 W/SurfaceFlinger( 1728): Attempting to destroy on removed layer: AppWindowToken{f7b293e token=Token{8b8d5f9 ActivityRecord{8c0a4c0 u0 org.mozilla.fenix.debug/.App t387}}}#0
+06-02 16:38:00.450 D/installd( 1733): Detected label change from u:object_r:app_data_file:s0 to u:object_r:app_data_file:s0:c99,c256,c512,c768 at /data/data/org.mozilla.fenix.debug/code_cache; running recursive restorecon
+06-02 16:38:00.451 D/installd( 1733): Detected label change from u:object_r:app_data_file:s0 to u:object_r:app_data_file:s0:c99,c256,c512,c768 at /data/user_de/0/org.mozilla.fenix.debug/cache; running recursive restorecon
+06-02 16:38:00.452 D/PackageManager( 1869): Instant App installer not found with android.intent.action.INSTALL_INSTANT_APP_PACKAGE
+06-02 16:38:00.452 D/PackageManager( 1869): Clear ephemeral installer activity
+06-02 16:38:00.459 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:00.460 W/libprocessgroup( 1869): kill(-8565, 9) failed: No such process
+06-02 16:38:00.460 I/libprocessgroup( 1869): Successfully killed process cgroup uid 10099 pid 8565 in 121ms
+06-02 16:38:00.460 W/libprocessgroup( 1869): kill(-8615, 9) failed: No such process
+06-02 16:38:00.460 I/Zygote ( 1729): Process 8615 exited due to signal (9)
+06-02 16:38:00.471 D/ ( 1728): HostConnection::get() New Host Connection established 0xe90dda40, tid 1952
+06-02 16:38:00.471 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:00.480 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:00.500 W/libprocessgroup( 1869): kill(-8615, 9) failed: No such process
+06-02 16:38:00.500 I/libprocessgroup( 1869): Successfully killed process cgroup uid 10099 pid 8615 in 40ms
+06-02 16:38:00.507 D/SurfaceFlinger( 1728): duplicate layer name: changing com.google.android.apps.nexuslauncher/com.google.android.apps.nexuslauncher.NexusLauncherActivity to com.google.android.apps.nexuslauncher/com.google.android.apps.nexuslauncher.NexusLauncherActivity#1
+06-02 16:38:00.510 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:00.523 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:00.533 D/EGL_emulation( 2402): eglMakeCurrent: 0xe1911c80: ver 3 0 (tinfo 0xc8cbe260)
+06-02 16:38:00.534 D/ ( 1728): HostConnection::get() New Host Connection established 0xe90c90c0, tid 2157
+06-02 16:38:00.535 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:00.556 D/EGL_emulation( 2488): eglMakeCurrent: 0xe3385ae0: ver 3 0 (tinfo 0xe33838f0)
+06-02 16:38:00.560 I/GoogleInputMethod( 1996): onFinishInput() : Dummy InputConnection bound
+06-02 16:38:00.572 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:00.579 I/GoogleInputMethod( 1996): onStartInput() : Dummy InputConnection bound
+06-02 16:38:00.593 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:00.624 I/chatty ( 2002): uid=10024(com.android.systemui) RenderThread identical 4 lines
+06-02 16:38:00.630 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:00.634 I/system_server( 1869): Explicit concurrent copying GC freed 46216(2MB) AllocSpace objects, 14(664KB) LOS objects, 38% free, 9MB/15MB, paused 63us total 180.949ms
+06-02 16:38:00.641 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:00.710 I/chatty ( 2002): uid=10024(com.android.systemui) RenderThread identical 8 lines
+06-02 16:38:00.716 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:00.719 E/ ( 1733): Couldn't opendir /data/app/vmdl1830460407.tmp: No such file or directory
+06-02 16:38:00.719 E/installd( 1733): Failed to delete /data/app/vmdl1830460407.tmp: No such file or directory
+06-02 16:38:00.722 I/ActivityManager( 1869): Force stopping org.mozilla.fenix.debug appid=10099 user=0: pkg removed
+06-02 16:38:00.729 W/BroadcastQueue( 1869): Background execution not allowed: receiving Intent { act=android.intent.action.PACKAGE_REMOVED dat=package:org.mozilla.fenix.debug flg=0x4000010 (has extras) } to com.android.musicfx/.Compatibility$Receiver
+06-02 16:38:00.753 W/SessionLifecycleManager( 2402): Handover failed. Creating new session controller.
+06-02 16:38:00.787 W/BroadcastQueue( 1869): Background execution not allowed: receiving Intent { act=android.intent.action.PACKAGE_REMOVED dat=package:org.mozilla.fenix.debug flg=0x4000010 (has extras) } to com.google.android.googlequicksearchbox/com.google.android.apps.gsa.googlequicksearchbox.GelStubAppWatcher
+06-02 16:38:00.788 W/BroadcastQueue( 1869): Background execution not allowed: receiving Intent { act=android.intent.action.PACKAGE_ADDED dat=package:org.mozilla.fenix.debug flg=0x4000010 (has extras) } to com.android.musicfx/.Compatibility$Receiver
+06-02 16:38:00.797 W/SurfaceFlinger( 1728): Attempting to set client state on removed layer: Surface(name=AppWindowToken{f0e79dc token=Token{f06ae4f ActivityRecord{533f0ae u0 com.google.android.apps.nexuslauncher/.NexusLauncherActivity t2}}})/@0xc6dc2b9 - animation-leash#0
+06-02 16:38:00.797 W/SurfaceFlinger( 1728): Attempting to destroy on removed layer: Surface(name=AppWindowToken{f0e79dc token=Token{f06ae4f ActivityRecord{533f0ae u0 com.google.android.apps.nexuslauncher/.NexusLauncherActivity t2}}})/@0xc6dc2b9 - animation-leash#0
+06-02 16:38:00.804 I/gle.android.gm( 2660): Waiting for a blocking GC ProfileSaver
+06-02 16:38:00.812 W/BroadcastQueue( 1869): Background execution not allowed: receiving Intent { act=android.intent.action.PACKAGE_ADDED dat=package:org.mozilla.fenix.debug flg=0x4000010 (has extras) } to com.google.android.googlequicksearchbox/com.google.android.apps.gsa.googlequicksearchbox.GelStubAppWatcher
+06-02 16:38:00.812 W/BroadcastQueue( 1869): Background execution not allowed: receiving Intent { act=android.intent.action.PACKAGE_REPLACED dat=package:org.mozilla.fenix.debug flg=0x4000010 (has extras) } to com.android.musicfx/.Compatibility$Receiver
+06-02 16:38:00.812 W/BroadcastQueue( 1869): Background execution not allowed: receiving Intent { act=android.intent.action.PACKAGE_REPLACED dat=package:org.mozilla.fenix.debug flg=0x4000010 (has extras) } to com.google.android.apps.photos/.account.full.FetchAccountPropertiesAppUpgradeBroadcastReceiver
+06-02 16:38:00.812 W/BroadcastQueue( 1869): Background execution not allowed: receiving Intent { act=android.intent.action.PACKAGE_REPLACED dat=package:org.mozilla.fenix.debug flg=0x4000010 (has extras) } to com.google.android.apps.photos/.account.full.SyncAccountsForLoginBroadcastReceiver
+06-02 16:38:00.812 W/BroadcastQueue( 1869): Background execution not allowed: receiving Intent { act=android.intent.action.PACKAGE_REPLACED dat=package:org.mozilla.fenix.debug flg=0x4000010 (has extras) } to com.google.android.apps.photos/.experiments.phenotype.full.PhenotypeAppUpgradeBroadcastReceiver
+06-02 16:38:00.812 W/BroadcastQueue( 1869): Background execution not allowed: receiving Intent { act=android.intent.action.PACKAGE_REPLACED dat=package:org.mozilla.fenix.debug flg=0x4000010 (has extras) } to com.google.android.apps.photos/.notificationchannels.AppUpdateBroadcastReceiver
+06-02 16:38:00.834 W/BroadcastQueue( 1869): Background execution not allowed: receiving Intent { act=android.intent.action.PACKAGE_REPLACED dat=package:org.mozilla.fenix.debug flg=0x4000010 (has extras) } to com.google.android.googlequicksearchbox/com.google.android.apps.gsa.googlequicksearchbox.GelStubAppWatcher
+06-02 16:38:00.841 I/s.nexuslaunche( 2488): Background concurrent copying GC freed 13104(735KB) AllocSpace objects, 9(504KB) LOS objects, 41% free, 8MB/14MB, paused 502us total 117.306ms
+06-02 16:38:00.849 D/CarrierSvcBindHelper( 2121): No carrier app for: 0
+06-02 16:38:00.849 I/Auth ( 2660): [SupervisedAccountIntentOperation] onHandleIntent(): android.intent.action.PACKAGE_ADDED
+06-02 16:38:00.850 I/Auth ( 2660): [SupervisedAccountIntentOperation] This operation is disabled
+06-02 16:38:00.850 D/CarrierSvcBindHelper( 2121): No carrier app for: 0
+06-02 16:38:00.852 W/droid.apps.doc( 8852): Unexpected CPU variant for X86 using defaults: x86
+06-02 16:38:00.857 I/ActivityManager( 1869): Start proc 8852:com.google.android.apps.docs/u0a69 for content provider com.google.android.apps.docs/.storagebackend.StorageBackendContentProvider
+06-02 16:38:00.871 E/system_server( 1869): No package ID 7f found for ID 0x7f0801a6.
+06-02 16:38:00.871 E/system_server( 1869): No package ID 7f found for ID 0x7f13011d.
+06-02 16:38:00.871 E/system_server( 1869): No package ID 7f found for ID 0x7f13011d.
+06-02 16:38:00.871 E/system_server( 1869): No package ID 7f found for ID 0x7f0801a4.
+06-02 16:38:00.871 E/system_server( 1869): No package ID 7f found for ID 0x7f13011c.
+06-02 16:38:00.871 E/system_server( 1869): No package ID 7f found for ID 0x7f13011c.
+06-02 16:38:00.871 I/ChromeSync( 2660): [Sync,SyncIntentOperation] Handling the intent: Intent { act=android.intent.action.PACKAGE_ADDED dat=package:org.mozilla.fenix.debug flg=0x4000010 cmp=com.google.android.gms/.chimera.GmsIntentOperationService (has extras) }.
+06-02 16:38:00.871 D/ImsResolver( 2121): maybeAddedImsService, packageName: org.mozilla.fenix.debug
+06-02 16:38:00.873 D/CarrierConfigLoader( 2121): mHandler: 9 phoneId: 0
+06-02 16:38:00.877 I/InputReader( 1869): Reconfiguring input devices. changes=0x00000010
+06-02 16:38:00.901 I/chatty ( 1869): uid=1000(system) InputReader identical 1 line
+06-02 16:38:00.903 I/InputReader( 1869): Reconfiguring input devices. changes=0x00000010
+06-02 16:38:00.921 I/gle.android.gm( 2660): WaitForGcToComplete blocked ProfileSaver on HeapTrim for 116.746ms
+06-02 16:38:00.927 W/PeopleContactsSync( 2660): CP2 sync disabled by gservices.
+06-02 16:38:00.936 I/droid.apps.doc( 8852): The ClassLoaderContext is a special shared library.
+06-02 16:38:00.941 I/droid.apps.doc( 8852): The ClassLoaderContext is a special shared library.
+06-02 16:38:00.976 W/LocationOracle( 2402): No location history returned by ContextManager
+06-02 16:38:00.984 W/Looper ( 1869): Slow delivery took 222ms main h=android.app.ActivityThread$H c=android.app.-$$Lambda$LoadedApk$ReceiverDispatcher$Args$_BumDX2UKsnxLVrE6UJsJZkotuA@fad9455 m=0
+06-02 16:38:00.986 I/Telecom ( 1869): DefaultDialerCache: Refreshing default dialer for user 0: now com.google.android.dialer: DDC.oR@ALo
+06-02 16:38:00.999 D/AutofillUI( 1869): destroySaveUiUiThread(): already destroyed
+06-02 16:38:00.999 D/AutofillManagerServiceImpl( 1869): Set component for user 0 as AutofillServiceInfo[ServiceInfo{f14f90d com.google.android.gms.autofill.service.AutofillService}, settings:com.google.android.gms.autofill.ui.AutofillSettingsActivity, hasCompatPckgs:false]
+06-02 16:38:01.060 I/Icing ( 2660): IndexChimeraService.getServiceInterface callingPackage=com.google.android.gms componentName=AppsCorpus serviceId=32
+06-02 16:38:01.064 I/Icing ( 2660): IndexChimeraService.getServiceInterface callingPackage=com.google.android.gms componentName=AppsCorpus serviceId=36
+06-02 16:38:01.087 I/MicroDetectionWorker( 2402): #startMicroDetector [speakerMode: 0]
+06-02 16:38:01.087 I/AudioController( 2402): Using mInputStreamFactoryBuilder
+06-02 16:38:01.087 I/AudioController( 2402): Created new AudioSource
+06-02 16:38:01.089 I/MicroDetectionWorker( 2402): onReady
+06-02 16:38:01.114 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:01.137 W/ctxmgr ( 2473): [AclManager]No 3 for (accnt=account#-517948760#, com.google.android.gms(10008):UserVelocityProducer, vrsn=13280022, 0, 3pPkg = null , 3pMdlId = null , pid = 2473). Was: 3 for 1, account#-517948760#
+06-02 16:38:01.150 I/Places ( 2473): Converted 0 out of 1 WiFi scans
+06-02 16:38:01.176 I/GAv4 ( 8852): Google Analytics 10.2.98 is starting up. To enable debug logging on a device run:
+06-02 16:38:01.176 I/GAv4 ( 8852): adb shell setprop log.tag.GAv4 DEBUG
+06-02 16:38:01.176 I/GAv4 ( 8852): adb logcat -s GAv4
+06-02 16:38:01.187 I/PlaceInferenceEngine( 2473): No beacon scan available - ignoring candidates.
+06-02 16:38:01.203 D/ ( 2002): HostConnection::get() New Host Connection established 0xe075c080, tid 2023
+06-02 16:38:01.216 D/gralloc_ranchu( 2002): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:01.217 D/ ( 2002): HostConnection::get() New Host Connection established 0xe075c080, tid 2023
+06-02 16:38:01.217 D/gralloc_ranchu( 2002): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:01.218 D/ ( 2002): HostConnection::get() New Host Connection established 0xe075cb40, tid 2023
+06-02 16:38:01.227 D/gralloc_ranchu( 2002): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:01.228 D/ ( 2002): HostConnection::get() New Host Connection established 0xe075cb40, tid 2023
+06-02 16:38:01.228 W/GAv4 ( 8852): AnalyticsReceiver is not registered or is disabled. Register the receiver for reliable dispatching on non-Google Play devices. See http://goo.gl/8Rd3yj for instructions.
+06-02 16:38:01.234 D/gralloc_ranchu( 2002): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:01.234 D/ ( 2002): HostConnection::get() New Host Connection established 0xe075cb40, tid 2023
+06-02 16:38:01.234 D/gralloc_ranchu( 2002): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:01.235 D/ ( 2002): HostConnection::get() New Host Connection established 0xe075cb40, tid 2023
+06-02 16:38:01.235 D/gralloc_ranchu( 2002): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:01.248 W/ctxmgr ( 2473): [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):UserLocationProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2473). Was: 3 for 1, account#-517948760#
+06-02 16:38:01.250 W/GAv4 ( 8852): CampaignTrackingReceiver is not registered, not exported or is disabled. Installation campaign tracking is not possible. See http://goo.gl/8Rd3yj for instructions.
+06-02 16:38:01.253 W/FieldDefinition( 8852): Ignoring isIndexed constraint as field also has uniqueness constraint (on just this field, and therefore SQLite will have to create an index on that. For field: com.google.android.apps.docs.database.common.FieldDefinition$a@2f0c143
+06-02 16:38:01.256 W/GAv4 ( 8852): AnalyticsService not registered in the app manifest. Hits might not be delivered reliably. See http://goo.gl/8Rd3yj for instructions.
+06-02 16:38:01.275 I/Icing ( 2660): Usage reports ok 2, Failed Usage reports 0, indexed 0, rejected 0, imm upload false
+06-02 16:38:01.301 I/MicroRecognitionRunner( 2402): Starting detection.
+06-02 16:38:01.301 I/MicrophoneInputStream( 2402): mic_starting SR : 16000 CC : 16 SO : 6
+06-02 16:38:01.303 I/earchbox:searc( 2402): Background concurrent copying GC freed 16338(953KB) AllocSpace objects, 1(92KB) LOS objects, 44% free, 7MB/13MB, paused 50us total 209.099ms
+06-02 16:38:01.307 I/Icing ( 2660): Usage reports ok 0, Failed Usage reports 0, indexed 0, rejected 0, imm upload false
+06-02 16:38:01.307 E/ ( 1627): Request requires android.permission.RECORD_AUDIO
+06-02 16:38:01.307 E/AudioPolicyIntefaceImpl( 1627): getInputForAttr permission denied: recording not allowed for uid 10039 pid 2402
+06-02 16:38:01.307 E/AudioFlinger( 1627): createRecord() checkRecordThread_l failed
+06-02 16:38:01.308 E/IAudioFlinger( 2402): createRecord returned error -22
+06-02 16:38:01.308 E/AudioRecord( 2402): AudioFlinger could not create record track, status: -22
+06-02 16:38:01.310 E/AudioRecord-JNI( 2402): Error creating AudioRecord instance: initialization check failed with status -22.
+06-02 16:38:01.313 E/android.media.AudioRecord( 2402): Error code -20 when initializing native AudioRecord object.
+06-02 16:38:01.313 I/MicrophoneInputStream( 2402): mic_started SR : 16000 CC : 16 SO : 6
+06-02 16:38:01.313 E/ActivityThread( 2402): Failed to find provider info for com.google.android.apps.gsa.testing.ui.audio.recorded
+06-02 16:38:01.314 I/MicroDetectionWorker( 2402): onReady
+06-02 16:38:01.314 W/SpeechLevelGenerator( 2402): Really low audio levels detected. The audio input may have issues.
+06-02 16:38:01.319 I/MicrophoneInputStream( 2402): mic_close SR : 16000 CC : 16 SO : 6
+06-02 16:38:01.321 I/MicroRecognitionRunner( 2402): Detection finished
+06-02 16:38:01.321 W/ErrorReporter( 2402): reportError [type: 211, code: 524300]: Error reading from input stream
+06-02 16:38:01.323 I/MicroRecognitionRunner( 2402): Stopping hotword detection.
+06-02 16:38:01.338 W/ErrorProcessor( 2402): onFatalError, processing error from engine(4)
+06-02 16:38:01.338 W/ErrorProcessor( 2402): com.google.android.apps.gsa.shared.speech.b.g: Error reading from input stream
+06-02 16:38:01.338 W/ErrorProcessor( 2402): at com.google.android.apps.gsa.staticplugins.microdetection.d.k.a(SourceFile:91)
+06-02 16:38:01.338 W/ErrorProcessor( 2402): at com.google.android.apps.gsa.staticplugins.microdetection.d.l.run(Unknown Source:14)
+06-02 16:38:01.338 W/ErrorProcessor( 2402): at com.google.android.libraries.gsa.runner.a.a.b(SourceFile:32)
+06-02 16:38:01.338 W/ErrorProcessor( 2402): at com.google.android.libraries.gsa.runner.a.c.call(Unknown Source:4)
+06-02 16:38:01.338 W/ErrorProcessor( 2402): at java.util.concurrent.FutureTask.run(FutureTask.java:266)
+06-02 16:38:01.338 W/ErrorProcessor( 2402): at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:458)
+06-02 16:38:01.338 W/ErrorProcessor( 2402): at java.util.concurrent.FutureTask.run(FutureTask.java:266)
+06-02 16:38:01.338 W/ErrorProcessor( 2402): at com.google.android.apps.gsa.shared.util.concurrent.b.g.run(Unknown Source:4)
+06-02 16:38:01.338 W/ErrorProcessor( 2402): at com.google.android.apps.gsa.shared.util.concurrent.b.aw.run(SourceFile:4)
+06-02 16:38:01.338 W/ErrorProcessor( 2402): at com.google.android.apps.gsa.shared.util.concurrent.b.aw.run(SourceFile:4)
+06-02 16:38:01.338 W/ErrorProcessor( 2402): at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1167)
+06-02 16:38:01.338 W/ErrorProcessor( 2402): at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:641)
+06-02 16:38:01.338 W/ErrorProcessor( 2402): at java.lang.Thread.run(Thread.java:764)
+06-02 16:38:01.338 W/ErrorProcessor( 2402): at com.google.android.apps.gsa.shared.util.concurrent.b.i.run(SourceFile:6)
+06-02 16:38:01.338 W/ErrorProcessor( 2402): Caused by: com.google.android.apps.gsa.shared.exception.GsaIOException: Error code: 393238 | Buffer overflow, no available space.
+06-02 16:38:01.338 W/ErrorProcessor( 2402): at com.google.android.apps.gsa.speech.audio.Tee.j(SourceFile:103)
+06-02 16:38:01.338 W/ErrorProcessor( 2402): at com.google.android.apps.gsa.speech.audio.au.read(SourceFile:2)
+06-02 16:38:01.338 W/ErrorProcessor( 2402): at java.io.InputStream.read(InputStream.java:101)
+06-02 16:38:01.338 W/ErrorProcessor( 2402): at com.google.android.apps.gsa.speech.audio.ao.run(SourceFile:17)
+06-02 16:38:01.338 W/ErrorProcessor( 2402): at com.google.android.apps.gsa.speech.audio.an.run(SourceFile:2)
+06-02 16:38:01.338 W/ErrorProcessor( 2402): at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:458)
+06-02 16:38:01.338 W/ErrorProcessor( 2402): ... 10 more
+06-02 16:38:01.339 I/AudioController( 2402): internalShutdown
+06-02 16:38:01.340 I/MicroDetector( 2402): Keeping mic open: false
+06-02 16:38:01.340 I/MicroDetectionWorker( 2402): #onError(false)
+06-02 16:38:01.341 I/DeviceStateChecker( 2402): DeviceStateChecker cancelled
+06-02 16:38:01.346 I/Places ( 2473): ?: PlacesBleScanner start() with priority 2
+06-02 16:38:01.365 I/Places ( 2473): ?: PlacesBleScanner start() with priority 2
+06-02 16:38:01.365 I/PlaceInferenceEngine( 2473): [anon] Changed inference mode: 1
+06-02 16:38:01.369 I/Places ( 2473): Converted 0 out of 1 WiFi scans
+06-02 16:38:01.391 I/Places ( 2473): ?: PlacesBleScanner start() with priority 2
+06-02 16:38:01.398 I/Places ( 2473): ?: PlacesBleScanner start() with priority 2
+06-02 16:38:01.399 I/PlaceInferenceEngine( 2473): [anon] Changed inference mode: 1
+06-02 16:38:01.400 I/PlaceInferenceEngine( 2473): No beacon scan available - ignoring candidates.
+06-02 16:38:01.422 W/droid.apps.doc( 8852): Long monitor contention with owner Binder:8852_2 (8868) at void java.lang.Object.wait(long, int)(Object.java:-2) waiters=0 in com.google.android.apps.docs.storagebackend.StorageBackendContentProvider$a com.google.android.apps.docs.storagebackend.StorageBackendContentProvider.b() for 353ms
+06-02 16:38:01.423 I/Icing ( 2660): IndexChimeraService.getServiceInterface callingPackage=com.google.android.gms componentName=AppsCorpus serviceId=32
+06-02 16:38:01.423 I/Icing ( 2660): IndexChimeraService.getServiceInterface callingPackage=com.google.android.gms componentName=AppsCorpus serviceId=36
+06-02 16:38:01.435 I/ProvidersCache( 4535): Provider returned no roots. Possibly naughty: com.google.android.apps.docs.storage
+06-02 16:38:01.441 I/Places ( 2473): Converted 0 out of 1 WiFi scans
+06-02 16:38:01.454 I/Icing ( 2660): Usage reports ok 0, Failed Usage reports 0, indexed 0, rejected 0, imm upload false
+06-02 16:38:01.456 I/ProvidersCache( 4535): Provider returned no roots. Possibly naughty: com.google.android.apps.docs.storage
+06-02 16:38:01.458 W/Looper ( 1869): Drained
+06-02 16:38:01.465 I/PlaceInferenceEngine( 2473): No beacon scan available - ignoring candidates.
+06-02 16:38:01.467 I/Icing ( 2660): Usage reports ok 0, Failed Usage reports 0, indexed 0, rejected 0, imm upload false
+06-02 16:38:01.482 W/ctxmgr ( 2473): [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):PlacesProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2473). Was: 3 for 18, account#-517948760#
+06-02 16:38:02.182 W/ctxmgr ( 2473): [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):UserLocationProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2473). Was: 3 for 1, account#-517948760#
+06-02 16:38:02.360 I/Icing ( 2660): Indexing com.google.android.gms-apps from com.google.android.gms
+06-02 16:38:02.417 I/Icing ( 2660): Indexing com.google.android.gms-internal.3p:MobileApplication from com.google.android.gms
+06-02 16:38:02.425 I/Icing ( 2660): Indexing done com.google.android.gms-apps
+06-02 16:38:02.427 I/Icing ( 2660): Indexing done com.google.android.gms-internal.3p:MobileApplication
+06-02 16:38:02.510 I/Icing ( 2660): Indexing com.google.android.gms-apps from com.google.android.gms
+06-02 16:38:02.511 I/Icing ( 2660): Indexing done com.google.android.gms-apps
+06-02 16:38:02.975 I/ActivityManager( 1869): Force stopping org.mozilla.fenix.debug appid=10099 user=0: clear data
+06-02 16:38:02.976 I/ActivityManager( 1869): Force stopping org.mozilla.fenix.debug appid=10099 user=-1: clearApplicationUserData
+06-02 16:38:02.977 D/ZenLog ( 1869): config: removeAutomaticZenRules,ZenModeConfig[user=0,allowAlarms=true,allowMedia=true,allowSystem=false,allowReminders=false,allowEvents=false,allowCalls=true,allowRepeatCallers=true,allowMessages=false,allowCallsFrom=stars,allowMessagesFrom=contacts,suppressedVisualEffects=511,areChannelsBypassingDnd=false,automaticRules={EVENTS_DEFAULT_RULE=ZenRule[enabled=false,snoozing=false,name=Event,zenMode=ZEN_MODE_IMPORTANT_INTERRUPTIONS,conditionId=condition://android/event?userId=-10000&calendar=&reply=1,condition=Condition[id=condition://android/event?userId=-10000&calendar=&reply=1,summary=...,line1=...,line2=...,icon=0,state=STATE_FALSE,flags=2],component=ComponentInfo{android/com.android.server.notification.EventConditionProvider},id=EVENTS_DEFAULT_RULE,creationTime=1587308662810,enabler=null], EVERY_NIGHT_DEFAULT_RULE=ZenRule[enabled=false,snoozing=false,name=Sleeping,zenMode=ZEN_MODE_IMPORTANT_INTERRUPTIONS,conditionId=condition://android/schedule?days=1.2.3.4.5.6.7&start=22.0&end=7.0&exitAtAlarm=true,condition=Condition[id=condition://android/schedule?days=1.2.3.4.5.6.7&start=22.0&end=7.0&exitAtAlarm=true,summary=...,line1=...,line2=...,icon=0,state=STATE_FALSE,flags=2],component=ComponentInfo{android/com.android.server.notification.ScheduleConditionProvider},id=EVERY_NIGHT_DEFAULT_RULE,creationTime=1587308662810,enabler=null]},manualRule=null],Diff[]
+06-02 16:38:02.977 I/ConditionProviders( 1869): Disallowing condition provider org.mozilla.fenix.debug
+06-02 16:38:02.977 D/ZenLog ( 1869): set_zen_mode: off,removeAutomaticZenRules
+06-02 16:38:02.985 I/keystore( 1734): clear_uid 10099
+06-02 16:38:02.992 E/system_server( 1869): No package ID 7f found for ID 0x7f0801a6.
+06-02 16:38:02.992 E/system_server( 1869): No package ID 7f found for ID 0x7f13011d.
+06-02 16:38:02.992 E/system_server( 1869): No package ID 7f found for ID 0x7f13011d.
+06-02 16:38:02.992 E/system_server( 1869): No package ID 7f found for ID 0x7f0801a4.
+06-02 16:38:02.992 E/system_server( 1869): No package ID 7f found for ID 0x7f13011c.
+06-02 16:38:02.992 E/system_server( 1869): No package ID 7f found for ID 0x7f13011c.
+06-02 16:38:02.993 I/GeofencerStateMachine( 2473): removeGeofences: removeRequest=RemoveGeofencingRequest[REMOVE_ALL packageName=org.mozilla.fenix.debug]
+06-02 16:38:02.995 D/CarrierSvcBindHelper( 2121): No carrier app for: 0
+06-02 16:38:03.007 I/ProvidersCache( 4535): Provider returned no roots. Possibly naughty: com.google.android.apps.docs.storage
+06-02 16:38:03.013 D/vold ( 1558): Remounting 10099 as mode read
+06-02 16:38:03.014 I/LocationSettingsChecker( 2660): Removing dialog suppression flag for package org.mozilla.fenix.debug
+06-02 16:38:03.018 I/Icing ( 2660): doRemovePackageData org.mozilla.fenix.debug
+06-02 16:38:03.048 D/vold ( 1558): Remounting 10099 as mode write
+06-02 16:38:03.055 I/IcingNotification( 1996): Received intent: Intent { act=com.google.android.gms.icing.IME_NOTIFICATION flg=0x10 pkg=com.google.android.inputmethod.latin (has extras) }
+06-02 16:38:03.104 I/ActivityManager( 1869): Force stopping org.mozilla.fenix.debug appid=10099 user=-1: set debug app
+06-02 16:38:03.133 I/ActivityManager( 1869): START u0 {flg=0x10000000 cmp=org.mozilla.fenix.debug/.App (has extras)} from uid 0
+06-02 16:38:03.058 I/IcingNotification( 1996): Received intent: Intent { act=com.google.android.gms.icing.IME_NOTIFICATION flg=0x10 pkg=com.google.android.inputmethod.latin (has extras) }
+06-02 16:38:03.175 I/lla.fenix.debu( 8940): Not late-enabling -Xcheck:jni (already on)
+06-02 16:38:03.178 W/ctxmgr ( 2473): [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):UserLocationProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2473). Was: 3 for 1, account#-517948760#
+06-02 16:38:03.184 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:03.185 I/ActivityManager( 1869): Start proc 8940:org.mozilla.fenix.debug/u0a99 for activity org.mozilla.fenix.debug/.App
+06-02 16:38:03.204 D/ ( 1869): HostConnection::get() New Host Connection established 0xc1e7f900, tid 1930
+06-02 16:38:03.246 W/lla.fenix.debu( 8940): Unexpected CPU variant for X86 using defaults: x86
+06-02 16:38:03.345 W/ActivityThread( 8940): Application org.mozilla.fenix.debug can be debugged on port 8100...
+06-02 16:38:03.353 I/lla.fenix.debu( 8940): The ClassLoaderContext is a special shared library.
+06-02 16:38:03.365 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:03.366 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c3080, tid 1897
+06-02 16:38:03.366 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:03.367 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c3080, tid 1897
+06-02 16:38:03.367 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:03.367 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c3080, tid 1897
+06-02 16:38:03.367 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:03.370 D/EGL_emulation( 2402): eglMakeCurrent: 0xe1911c80: ver 3 0 (tinfo 0xc8cbe260)
+06-02 16:38:03.375 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c3080, tid 1897
+06-02 16:38:03.378 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:03.381 D/ ( 1728): HostConnection::get() New Host Connection established 0xe90dd200, tid 2107
+06-02 16:38:03.381 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:03.383 D/ ( 1728): HostConnection::get() New Host Connection established 0xe90dd200, tid 2107
+06-02 16:38:03.383 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:03.385 D/ ( 1728): HostConnection::get() New Host Connection established 0xe90dd200, tid 2107
+06-02 16:38:03.385 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:03.386 D/ ( 1728): HostConnection::get() New Host Connection established 0xe90dd200, tid 2107
+06-02 16:38:03.387 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:03.392 D/EGL_emulation( 2488): eglMakeCurrent: 0xe3385ae0: ver 3 0 (tinfo 0xe33838f0)
+06-02 16:38:03.392 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:03.395 D/ ( 1728): HostConnection::get() New Host Connection established 0xe90dda00, tid 2107
+06-02 16:38:03.395 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:03.406 D/ ( 1728): HostConnection::get() New Host Connection established 0xe90dda00, tid 2107
+06-02 16:38:03.407 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:03.408 D/ ( 1728): HostConnection::get() New Host Connection established 0xe90dda00, tid 2107
+06-02 16:38:03.408 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:03.412 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:03.414 D/ ( 1728): HostConnection::get() New Host Connection established 0xe90dda00, tid 2107
+06-02 16:38:03.415 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:03.416 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:03.504 I/chatty ( 2002): uid=10024(com.android.systemui) RenderThread identical 9 lines
+06-02 16:38:03.518 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:04.180 W/ctxmgr ( 2473): [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):IndoorOutdoorProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2473). Was: 3 for 57, account#-517948760#
+06-02 16:38:04.214 I/droid.apps.doc( 8852): The ClassLoaderContext is a special shared library.
+06-02 16:38:04.261 I/chatty ( 8852): uid=10069(com.google.android.apps.docs) TaskSchedulerLo identical 2 lines
+06-02 16:38:04.271 I/droid.apps.doc( 8852): The ClassLoaderContext is a special shared library.
+06-02 16:38:04.298 V/NativeCrypto( 8852): Registering com/google/android/gms/org/conscrypt/NativeCrypto's 284 native methods...
+06-02 16:38:04.340 D/NetworkSecurityConfig( 8852): No Network Security Config specified, using platform default
+06-02 16:38:04.345 I/ProviderInstaller( 8852): Installed default security provider GmsCore_OpenSSL
+06-02 16:38:04.534 D/FirebaseApp( 8940): Default FirebaseApp failed to initialize because no default options were found. This usually means that com.google.gms:google-services was not applied to your gradle project.
+06-02 16:38:04.534 I/FirebaseInitProvider( 8940): FirebaseApp initialization unsuccessful
+06-02 16:38:04.675 D/FenixApplication( 8940): Initializing Glean (uploadEnabled=true, isFennec=false)
+06-02 16:38:04.706 D/RustNativeSupport( 8940): findMegazordLibraryName(viaduct, 0.59.0
+06-02 16:38:04.706 D/RustNativeSupport( 8940): lib in use: none
+06-02 16:38:04.706 D/RustNativeSupport( 8940): lib configured: megazord
+06-02 16:38:04.707 D/RustNativeSupport( 8940): lib version configured: 0.59.0
+06-02 16:38:04.707 D/RustNativeSupport( 8940): settled on megazord
+06-02 16:38:04.794 D/libglean_ffi( 8940): glean_ffi: Android logging should be hooked up!
+06-02 16:38:04.799 I/glean/Glean( 8940): Registering pings for mozilla.telemetry.glean.GleanMetrics.Pings
+06-02 16:38:04.804 I/libglean_ffi( 8940): glean_core: Creating new Glean
+06-02 16:38:04.804 D/libglean_ffi( 8940): glean_core::database: Database path: "/data/user/0/org.mozilla.fenix.debug/glean_data/db"
+06-02 16:38:04.804 D/RustNativeSupport( 8940): findMegazordLibraryName(rustlog, 0.59.0
+06-02 16:38:04.804 D/RustNativeSupport( 8940): lib in use: none
+06-02 16:38:04.805 D/RustNativeSupport( 8940): lib configured: megazord
+06-02 16:38:04.805 D/RustNativeSupport( 8940): lib version configured: 0.59.0
+06-02 16:38:04.805 D/RustNativeSupport( 8940): settled on megazord
+06-02 16:38:04.805 I/libglean_ffi( 8940): glean_core::database: Database initialized
+06-02 16:38:04.812 I/rc_log_ffi::ios( 8940): rc_log adapter initialized!
+06-02 16:38:04.840 I/libglean_ffi( 8940): glean_ffi: Glean initialized
+06-02 16:38:04.875 I/GeckoRuntime( 8940): Adding debug configuration from: /data/local/tmp/org.mozilla.fenix.debug-geckoview-config.yaml
+06-02 16:38:04.876 D/GeckoDebugConfig( 8940): Adding environment variables from debug config: {MOZ_CRASHREPORTER=1, MOZ_CRASHREPORTER_NO_REPORT=1, MOZ_CRASHREPORTER_SHUTDOWN=1}
+06-02 16:38:04.876 D/GeckoDebugConfig( 8940): Adding arguments from debug config: [-marionette, -profile, /mnt/sdcard/org.mozilla.fenix.debug-geckodriver-profile]
+06-02 16:38:04.877 D/GeckoThread( 8940): State changed to LAUNCHED
+06-02 16:38:04.878 I/GeckoThread( 8940): preparing to run Gecko
+06-02 16:38:04.879 D/GeckoThread( 8940): env var: MOZ_CRASHREPORTER=1
+06-02 16:38:04.879 D/GeckoThread( 8940): env var: MOZ_CRASHREPORTER_NO_REPORT=1
+06-02 16:38:04.879 D/GeckoThread( 8940): env var: MOZ_CRASHREPORTER_SHUTDOWN=1
+06-02 16:38:04.911 D/GeckoThread( 8940): State changed to MOZGLUE_READY
+06-02 16:38:04.914 D/GeckoRuntime( 8940): Lifecycle: onCreate
+06-02 16:38:04.936 I/glean/MetricsPingSched( 8940): The application just updated. Send metrics ping now.
+06-02 16:38:04.947 W/Settings( 8940): Setting animator_duration_scale has moved from android.provider.Settings.System to android.provider.Settings.Global, returning read-only global URI.
+06-02 16:38:04.952 E/GeckoLibLoad( 8940): Load sqlite start
+06-02 16:38:04.983 E/GeckoLibLoad( 8940): Load sqlite done
+06-02 16:38:04.983 E/GeckoLibLoad( 8940): Load nss start
+06-02 16:38:04.983 E/GeckoLibLoad( 8940): Load nss done
+06-02 16:38:04.996 I/glean/MetricsPingSched( 8940): Collecting the 'metrics' ping, now = Tue Jun 02 16:38:04 EDT 2020, startup = true, reason = upgrade
+06-02 16:38:05.031 E/GeckoLibLoad( 8940): Loaded libs in 47.602025ms total, 10ms(70ms) user, 30ms(40ms) system, 0(0) faults
+06-02 16:38:05.031 D/GeckoThread( 8940): State changed to LIBS_READY
+06-02 16:38:05.031 I/libglean_ffi( 8940): glean_core::ping: Collecting metrics
+06-02 16:38:05.031 I/libglean_ffi( 8940): glean_core::ping: Storage for metrics empty. Bailing out.
+06-02 16:38:05.031 I/libglean_ffi( 8940): glean_core: No content for ping 'metrics', therefore no ping queued.
+06-02 16:38:05.040 D/glean/MetricsPingSched( 8940): Scheduling the 'metrics' ping in 40915085ms
+06-02 16:38:05.043 W/GeckoThread( 8940): zerdatime 4649175 - runGecko
+06-02 16:38:05.049 D/GeckoProfile( 8940): Loading profile at: null name: default
+06-02 16:38:05.050 D/GeckoProfile( 8940): Created new profile dir.
+06-02 16:38:05.051 I/GeckoProfile( 8940): Enqueuing profile init.
+06-02 16:38:05.054 D/GeckoProfile( 8940): Found profile dir: /data/user/0/org.mozilla.fenix.debug/files/mozilla/qy7f9y6n.default
+06-02 16:38:05.054 D/GeckoProfile( 8940): Attempting to write new client ID properties
+06-02 16:38:05.057 D/GeckoProfile( 8940): Creating profile dir: /data/user/0/org.mozilla.fenix.debug/files/mozilla/qy7f9y6n.default
+06-02 16:38:05.077 D/LeakCanary( 8940): Updated AppWatcher.config: Config(no changes)
+06-02 16:38:05.088 I/Gecko:DumpUtils( 8940): Fifo watcher disabled via pref.
+06-02 16:38:05.113 D/GeckoSysInfo( 8940): System memory: 1494MB.
+06-02 16:38:05.113 W/lla.fenix.debu( 8940): Accessing hidden method Landroid/os/MessageQueue;->next()Landroid/os/Message; (light greylist, JNI)
+06-02 16:38:05.113 D/LeakCanary( 8940): Updated LeakCanary.config: Config(no changes)
+06-02 16:38:05.114 D/StrictMode( 8940): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/os/MessageQueue;->next()Landroid/os/Message;
+06-02 16:38:05.114 D/StrictMode( 8940): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:05.114 D/StrictMode( 8940): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:05.114 D/StrictMode( 8940): at org.mozilla.gecko.mozglue.GeckoLoader.nativeRun(Native Method)
+06-02 16:38:05.114 D/StrictMode( 8940): at org.mozilla.gecko.GeckoThread.run(GeckoThread.java:449)
+06-02 16:38:05.114 W/lla.fenix.debu( 8940): Accessing hidden field Landroid/os/MessageQueue;->mMessages:Landroid/os/Message; (light greylist, JNI)
+06-02 16:38:05.115 D/StrictMode( 8940): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/os/MessageQueue;->mMessages:Landroid/os/Message;
+06-02 16:38:05.115 D/StrictMode( 8940): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:05.115 D/StrictMode( 8940): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:05.115 D/StrictMode( 8940): at org.mozilla.gecko.mozglue.GeckoLoader.nativeRun(Native Method)
+06-02 16:38:05.115 D/StrictMode( 8940): at org.mozilla.gecko.GeckoThread.run(GeckoThread.java:449)
+06-02 16:38:05.117 W/lla.fenix.debu( 8940): Accessing hidden field Ljava/lang/Boolean;->value:Z (light greylist, JNI)
+06-02 16:38:05.117 D/StrictMode( 8940): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Ljava/lang/Boolean;->value:Z
+06-02 16:38:05.117 D/StrictMode( 8940): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:05.117 D/StrictMode( 8940): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:05.117 D/StrictMode( 8940): at org.mozilla.gecko.mozglue.GeckoLoader.nativeRun(Native Method)
+06-02 16:38:05.117 D/StrictMode( 8940): at org.mozilla.gecko.GeckoThread.run(GeckoThread.java:449)
+06-02 16:38:05.117 W/lla.fenix.debu( 8940): Accessing hidden field Ljava/lang/Integer;->value:I (light greylist, JNI)
+06-02 16:38:05.118 D/StrictMode( 8940): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Ljava/lang/Integer;->value:I
+06-02 16:38:05.118 D/StrictMode( 8940): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:05.118 D/StrictMode( 8940): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:05.118 D/StrictMode( 8940): at org.mozilla.gecko.mozglue.GeckoLoader.nativeRun(Native Method)
+06-02 16:38:05.118 D/StrictMode( 8940): at org.mozilla.gecko.GeckoThread.run(GeckoThread.java:449)
+06-02 16:38:05.118 W/lla.fenix.debu( 8940): Accessing hidden field Ljava/lang/Double;->value:D (light greylist, JNI)
+06-02 16:38:05.119 D/StrictMode( 8940): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Ljava/lang/Double;->value:D
+06-02 16:38:05.119 D/StrictMode( 8940): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:05.119 D/StrictMode( 8940): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:05.119 D/StrictMode( 8940): at org.mozilla.gecko.mozglue.GeckoLoader.nativeRun(Native Method)
+06-02 16:38:05.119 D/StrictMode( 8940): at org.mozilla.gecko.GeckoThread.run(GeckoThread.java:449)
+06-02 16:38:05.120 D/GeckoThread( 8940): State changed to JNI_READY
+06-02 16:38:05.120 D/App ( 8940): DebugMetricController: start
+06-02 16:38:05.120 D/App ( 8940): DebugMetricController: start
+06-02 16:38:05.122 W/PushConfig( 8940): No firebase configuration found; cannot support push service.
+06-02 16:38:05.174 D/ServiceAllocator( 8940): org.mozilla.gecko.process.GeckoChildProcessServices$tab0 updateBindings: BACKGROUND priority, 0 importance, 2 successful binds, 0 failed binds, 0 successful unbinds, 0 failed unbinds
+06-02 16:38:05.189 D/StrictMode( 8940): StrictMode policy violation; ~duration=351 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:05.189 D/StrictMode( 8940): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:05.189 D/StrictMode( 8940): at java.io.FileInputStream.<init>(FileInputStream.java:163)
+06-02 16:38:05.189 D/StrictMode( 8940): at org.mozilla.gecko.util.DebugConfig.fromFile(DebugConfig.java:49)
+06-02 16:38:05.189 D/StrictMode( 8940): at org.mozilla.geckoview.GeckoRuntime.init(GeckoRuntime.java:363)
+06-02 16:38:05.189 D/StrictMode( 8940): at org.mozilla.geckoview.GeckoRuntime.create(GeckoRuntime.java:574)
+06-02 16:38:05.189 D/StrictMode( 8940): at GeckoProvider.createRuntime(GeckoProvider.kt:58)
+06-02 16:38:05.189 D/StrictMode( 8940): at GeckoProvider.getOrCreateRuntime(GeckoProvider.kt:28)
+06-02 16:38:05.189 D/StrictMode( 8940): at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:79)
+06-02 16:38:05.189 D/StrictMode( 8940): at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:57)
+06-02 16:38:05.189 D/StrictMode( 8940): at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+06-02 16:38:05.189 D/StrictMode( 8940): at org.mozilla.fenix.components.Core.getEngine(Unknown Source:7)
+06-02 16:38:05.189 D/StrictMode( 8940): at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:128)
+06-02 16:38:05.189 D/StrictMode( 8940): at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+06-02 16:38:05.189 D/StrictMode( 8940): at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+06-02 16:38:05.189 D/StrictMode( 8940): at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+06-02 16:38:05.189 D/StrictMode( 8940): at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+06-02 16:38:05.189 D/StrictMode( 8940): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+06-02 16:38:05.189 D/StrictMode( 8940): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:05.189 D/StrictMode( 8940): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:05.189 D/StrictMode( 8940): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:05.189 D/StrictMode( 8940): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:05.189 D/StrictMode( 8940): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:05.189 D/StrictMode( 8940): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:05.194 D/StrictMode( 8940): StrictMode policy violation; ~duration=336 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:05.194 D/StrictMode( 8940): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:05.194 D/StrictMode( 8940): at libcore.io.BlockGuardOs.read(BlockGuardOs.java:253)
+06-02 16:38:05.194 D/StrictMode( 8940): at libcore.io.IoBridge.read(IoBridge.java:501)
+06-02 16:38:05.194 D/StrictMode( 8940): at java.io.FileInputStream.read(FileInputStream.java:307)
+06-02 16:38:05.194 D/StrictMode( 8940): at java.io.FilterInputStream.read(FilterInputStream.java:133)
+06-02 16:38:05.194 D/StrictMode( 8940): at java.io.PushbackInputStream.read(PushbackInputStream.java:186)
+06-02 16:38:05.194 D/StrictMode( 8940): at org.yaml.snakeyaml.reader.UnicodeReader.init(UnicodeReader.java:92)
+06-02 16:38:05.194 D/StrictMode( 8940): at org.yaml.snakeyaml.reader.UnicodeReader.read(UnicodeReader.java:124)
+06-02 16:38:05.194 D/StrictMode( 8940): at org.yaml.snakeyaml.reader.StreamReader.update(StreamReader.java:183)
+06-02 16:38:05.194 D/StrictMode( 8940): at org.yaml.snakeyaml.reader.StreamReader.ensureEnoughData(StreamReader.java:176)
+06-02 16:38:05.194 D/StrictMode( 8940): at org.yaml.snakeyaml.reader.StreamReader.ensureEnoughData(StreamReader.java:171)
+06-02 16:38:05.194 D/StrictMode( 8940): at org.yaml.snakeyaml.reader.StreamReader.peek(StreamReader.java:126)
+06-02 16:38:05.194 D/StrictMode( 8940): at org.yaml.snakeyaml.scanner.ScannerImpl.scanToNextToken(ScannerImpl.java:1177)
+06-02 16:38:05.194 D/StrictMode( 8940): at org.yaml.snakeyaml.scanner.ScannerImpl.fetchMoreTokens(ScannerImpl.java:287)
+06-02 16:38:05.194 D/StrictMode( 8940): at org.yaml.snakeyaml.scanner.ScannerImpl.checkToken(ScannerImpl.java:227)
+06-02 16:38:05.194 D/StrictMode( 8940): at org.yaml.snakeyaml.parser.ParserImpl$ParseImplicitDocumentStart.produce(ParserImpl.java:195)
+06-02 16:38:05.194 D/StrictMode( 8940): at org.yaml.snakeyaml.parser.ParserImpl.peekEvent(ParserImpl.java:158)
+06-02 16:38:05.194 D/StrictMode( 8940): at org.yaml.snakeyaml.parser.ParserImpl.checkEvent(ParserImpl.java:148)
+06-02 16:38:05.194 D/StrictMode( 8940): at org.yaml.snakeyaml.composer.Composer.getSingleNode(Composer.java:107)
+06-02 16:38:05.194 D/StrictMode( 8940): at org.yaml.snakeyaml.constructor.BaseConstructor.getSingleData(BaseConstructor.java:141)
+06-02 16:38:05.194 D/StrictMode( 8940): at org.yaml.snakeyaml.Yaml.loadFromReader(Yaml.java:525)
+06-02 16:38:05.194 D/StrictMode( 8940): at org.yaml.snakeyaml.Yaml.load(Yaml.java:453)
+06-02 16:38:05.194 D/StrictMode( 8940): at org.mozilla.gecko.util.DebugConfig.fromFile(DebugConfig.java:51)
+06-02 16:38:05.194 D/StrictMode( 8940): at org.mozilla.geckoview.GeckoRuntime.init(GeckoRuntime.java:363)
+06-02 16:38:05.194 D/StrictMode( 8940): at org.mozilla.geckoview.GeckoRuntime.create(GeckoRuntime.java:574)
+06-02 16:38:05.194 D/StrictMode( 8940): at GeckoProvider.createRuntime(GeckoProvider.kt:58)
+06-02 16:38:05.194 D/StrictMode( 8940): at GeckoProvider.getOrCreateRuntime(GeckoProvider.kt:28)
+06-02 16:38:05.194 D/StrictMode( 8940): at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:79)
+06-02 16:38:05.194 D/StrictMode( 8940): at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:57)
+06-02 16:38:05.194 D/StrictMode( 8940): at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+06-02 16:38:05.194 D/StrictMode( 8940): at org.mozilla.fenix.components.Core.getEngine(Unknown Source:7)
+06-02 16:38:05.194 D/StrictMode( 8940): at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:128)
+06-02 16:38:05.194 D/StrictMode( 8940): at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+06-02 16:38:05.194 D/StrictMode( 8940): at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+06-02 16:38:05.194 D/StrictMode( 8940): at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+06-02 16:38:05.194 D/StrictMode( 8940): at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+06-02 16:38:05.194 D/StrictMode( 8940): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+06-02 16:38:05.194 D/StrictMode( 8940): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:05.194 D/StrictMode( 8940): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:05.194 D/StrictMode( 8940): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:05.194 D/StrictMode( 8940): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:05.194 D/StrictMode( 8940): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:05.194 D/StrictMode( 8940): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:05.200 D/StrictMode( 8940): StrictMode policy violation; ~duration=336 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:05.200 D/StrictMode( 8940): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:05.200 D/StrictMode( 8940): at libcore.io.BlockGuardOs.read(BlockGuardOs.java:253)
+06-02 16:38:05.200 D/StrictMode( 8940): at libcore.io.IoBridge.read(IoBridge.java:501)
+06-02 16:38:05.200 D/StrictMode( 8940): at java.io.FileInputStream.read(FileInputStream.java:307)
+06-02 16:38:05.200 D/StrictMode( 8940): at java.io.FilterInputStream.read(FilterInputStream.java:133)
+06-02 16:38:05.200 D/StrictMode( 8940): at java.io.PushbackInputStream.read(PushbackInputStream.java:186)
+06-02 16:38:05.200 D/StrictMode( 8940): at sun.nio.cs.StreamDecoder.readBytes(StreamDecoder.java:288)
+06-02 16:38:05.200 D/StrictMode( 8940): at sun.nio.cs.StreamDecoder.implRead(StreamDecoder.java:351)
+06-02 16:38:05.200 D/StrictMode( 8940): at sun.nio.cs.StreamDecoder.read(StreamDecoder.java:180)
+06-02 16:38:05.200 D/StrictMode( 8940): at java.io.InputStreamReader.read(InputStreamReader.java:184)
+06-02 16:38:05.200 D/StrictMode( 8940): at org.yaml.snakeyaml.reader.UnicodeReader.read(UnicodeReader.java:125)
+06-02 16:38:05.200 D/StrictMode( 8940): at org.yaml.snakeyaml.reader.StreamReader.update(StreamReader.java:183)
+06-02 16:38:05.200 D/StrictMode( 8940): at org.yaml.snakeyaml.reader.StreamReader.ensureEnoughData(StreamReader.java:176)
+06-02 16:38:05.200 D/StrictMode( 8940): at org.yaml.snakeyaml.reader.StreamReader.ensureEnoughData(StreamReader.java:171)
+06-02 16:38:05.200 D/StrictMode( 8940): at org.yaml.snakeyaml.reader.StreamReader.peek(StreamReader.java:126)
+06-02 16:38:05.200 D/StrictMode( 8940): at org.yaml.snakeyaml.scanner.ScannerImpl.scanToNextToken(ScannerImpl.java:1177)
+06-02 16:38:05.200 D/StrictMode( 8940): at org.yaml.snakeyaml.scanner.ScannerImpl.fetchMoreTokens(ScannerImpl.java:287)
+06-02 16:38:05.200 D/StrictMode( 8940): at org.yaml.snakeyaml.scanner.ScannerImpl.checkToken(ScannerImpl.java:227)
+06-02 16:38:05.200 D/StrictMode( 8940): at org.yaml.snakeyaml.parser.ParserImpl$ParseImplicitDocumentStart.produce(ParserImpl.java:195)
+06-02 16:38:05.200 D/StrictMode( 8940): at org.yaml.snakeyaml.parser.ParserImpl.peekEvent(ParserImpl.java:158)
+06-02 16:38:05.200 D/StrictMode( 8940): at org.yaml.snakeyaml.parser.ParserImpl.checkEvent(ParserImpl.java:148)
+06-02 16:38:05.200 D/StrictMode( 8940): at org.yaml.snakeyaml.composer.Composer.getSingleNode(Composer.java:107)
+06-02 16:38:05.200 D/StrictMode( 8940): at org.yaml.snakeyaml.constructor.BaseConstructor.getSingleData(BaseConstructor.java:141)
+06-02 16:38:05.200 D/StrictMode( 8940): at org.yaml.snakeyaml.Yaml.loadFromReader(Yaml.java:525)
+06-02 16:38:05.200 D/StrictMode( 8940): at org.yaml.snakeyaml.Yaml.load(Yaml.java:453)
+06-02 16:38:05.200 D/StrictMode( 8940): at org.mozilla.gecko.util.DebugConfig.fromFile(DebugConfig.java:51)
+06-02 16:38:05.200 D/StrictMode( 8940): at org.mozilla.geckoview.GeckoRuntime.init(GeckoRuntime.java:363)
+06-02 16:38:05.200 D/StrictMode( 8940): at org.mozilla.geckoview.GeckoRuntime.create(GeckoRuntime.java:574)
+06-02 16:38:05.200 D/StrictMode( 8940): at GeckoProvider.createRuntime(GeckoProvider.kt:58)
+06-02 16:38:05.200 D/StrictMode( 8940): at GeckoProvider.getOrCreateRuntime(GeckoProvider.kt:28)
+06-02 16:38:05.200 D/StrictMode( 8940): at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:79)
+06-02 16:38:05.200 D/StrictMode( 8940): at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:57)
+06-02 16:38:05.200 D/StrictMode( 8940): at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+06-02 16:38:05.200 D/StrictMode( 8940): at org.mozilla.fenix.components.Core.getEngine(Unknown Source:7)
+06-02 16:38:05.200 D/StrictMode( 8940): at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:128)
+06-02 16:38:05.200 D/StrictMode( 8940): at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+06-02 16:38:05.200 D/StrictMode( 8940): at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+06-02 16:38:05.200 D/StrictMode( 8940): at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+06-02 16:38:05.200 D/StrictMode( 8940): at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+06-02 16:38:05.200 D/StrictMode( 8940): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+06-02 16:38:05.200 D/StrictMode( 8940): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:05.200 D/StrictMode( 8940): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:05.200 D/StrictMode( 8940): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:05.200 D/StrictMode( 8940): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:05.200 D/StrictMode( 8940): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:05.200 D/StrictMode( 8940): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:05.202 I/enix.debug:tab( 8988): Not late-enabling -Xcheck:jni (already on)
+06-02 16:38:05.203 D/StrictMode( 8940): StrictMode policy violation; ~duration=320 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:05.203 D/StrictMode( 8940): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:05.203 D/StrictMode( 8940): at libcore.io.BlockGuardOs.read(BlockGuardOs.java:253)
+06-02 16:38:05.203 D/StrictMode( 8940): at libcore.io.IoBridge.read(IoBridge.java:501)
+06-02 16:38:05.203 D/StrictMode( 8940): at java.io.FileInputStream.read(FileInputStream.java:307)
+06-02 16:38:05.203 D/StrictMode( 8940): at java.io.FilterInputStream.read(FilterInputStream.java:133)
+06-02 16:38:05.203 D/StrictMode( 8940): at java.io.PushbackInputStream.read(PushbackInputStream.java:186)
+06-02 16:38:05.203 D/StrictMode( 8940): at sun.nio.cs.StreamDecoder.readBytes(StreamDecoder.java:288)
+06-02 16:38:05.203 D/StrictMode( 8940): at sun.nio.cs.StreamDecoder.implRead(StreamDecoder.java:351)
+06-02 16:38:05.203 D/StrictMode( 8940): at sun.nio.cs.StreamDecoder.read(StreamDecoder.java:180)
+06-02 16:38:05.203 D/StrictMode( 8940): at java.io.InputStreamReader.read(InputStreamReader.java:184)
+06-02 16:38:05.203 D/StrictMode( 8940): at org.yaml.snakeyaml.reader.UnicodeReader.read(UnicodeReader.java:125)
+06-02 16:38:05.203 D/StrictMode( 8940): at org.yaml.snakeyaml.reader.StreamReader.update(StreamReader.java:183)
+06-02 16:38:05.203 D/StrictMode( 8940): at org.yaml.snakeyaml.reader.StreamReader.ensureEnoughData(StreamReader.java:176)
+06-02 16:38:05.203 D/StrictMode( 8940): at org.yaml.snakeyaml.reader.StreamReader.peek(StreamReader.java:136)
+06-02 16:38:05.203 D/StrictMode( 8940): at org.yaml.snakeyaml.scanner.ScannerImpl.scanPlain(ScannerImpl.java:1999)
+06-02 16:38:05.203 D/StrictMode( 8940): at org.yaml.snakeyaml.scanner.ScannerImpl.fetchPlain(ScannerImpl.java:1044)
+06-02 16:38:05.203 D/StrictMode( 8940): at org.yaml.snakeyaml.scanner.ScannerImpl.fetchMoreTokens(ScannerImpl.java:399)
+06-02 16:38:05.203 D/StrictMode( 8940): at org.yaml.snakeyaml.scanner.ScannerImpl.checkToken(ScannerImpl.java:227)
+06-02 16:38:05.203 D/StrictMode( 8940): at org.yaml.snakeyaml.parser.ParserImpl$ParseBlockSequenceEntry.produce(ParserImpl.java:504)
+06-02 16:38:05.203 D/StrictMode( 8940): at org.yaml.snakeyaml.parser.ParserImpl.peekEvent(ParserImpl.java:158)
+06-02 16:38:05.203 D/StrictMode( 8940): at org.yaml.snakeyaml.parser.ParserImpl.checkEvent(ParserImpl.java:148)
+06-02 16:38:05.203 D/StrictMode( 8940): at org.yaml.snakeyaml.composer.Composer.composeSequenceNode(Composer.java:188)
+06-02 16:38:05.203 D/StrictMode( 8940): at org.yaml.snakeyaml.composer.Composer.composeNode(Composer.java:142)
+06-02 16:38:05.203 D/StrictMode( 8940): at org.yaml.snakeyaml.composer.Composer.composeValueNode(Composer.java:236)
+06-02 16:38:05.203 D/StrictMode( 8940): at org.yaml.snakeyaml.composer.Composer.composeMappingChildren(Composer.java:227)
+06-02 16:38:05.203 D/StrictMode( 8940): at org.yaml.snakeyaml.composer.Composer.composeMappingNode(Composer.java:215)
+06-02 16:38:05.203 D/StrictMode( 8940): at org.yaml.snakeyaml.composer.Composer.composeNode(Composer.java:144)
+06-02 16:38:05.203 D/StrictMode( 8940): at org.yaml.snakeyaml.composer.Composer.getNode(Composer.java:85)
+06-02 16:38:05.203 D/StrictMode( 8940): at org.yaml.snakeyaml.composer.Composer.getSingleNode(Composer.java:108)
+06-02 16:38:05.203 D/StrictMode( 8940): at org.yaml.snakeyaml.constructor.BaseConstructor.getSingleData(BaseConstructor.java:141)
+06-02 16:38:05.203 D/StrictMode( 8940): at org.yaml.snakeyaml.Yaml.loadFromReader(Yaml.java:525)
+06-02 16:38:05.203 D/StrictMode( 8940): at org.yaml.snakeyaml.Yaml.load(Yaml.java:453)
+06-02 16:38:05.203 D/StrictMode( 8940): at org.mozilla.gecko.util.DebugConfig.fromFile(DebugConfig.java:51)
+06-02 16:38:05.203 D/StrictMode( 8940): at org.mozilla.geckoview.GeckoRuntime.init(GeckoRuntime.java:363)
+06-02 16:38:05.203 D/StrictMode( 8940): at org.mozilla.geckoview.GeckoRuntime.create(GeckoRuntime.java:574)
+06-02 16:38:05.203 D/StrictMode( 8940): at GeckoProvider.createRuntime(GeckoProvider.kt:58)
+06-02 16:38:05.203 D/StrictMode( 8940): at GeckoProvider.getOrCreateRuntime(GeckoProvider.kt:28)
+06-02 16:38:05.203 D/StrictMode( 8940): at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:79)
+06-02 16:38:05.203 D/StrictMode( 8940): at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:57)
+06-02 16:38:05.203 D/StrictMode( 8940): at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+06-02 16:38:05.203 D/StrictMode( 8940): at org.mozilla.fenix.components.Core.getEngine(Unknown Source:7)
+06-02 16:38:05.203 D/StrictMode( 8940): at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:128)
+06-02 16:38:05.203 D/StrictMode( 8940): at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+06-02 16:38:05.203 D/StrictMode( 8940): at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+06-02 16:38:05.203 D/StrictMode( 8940): at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+06-02 16:38:05.203 D/StrictMode( 8940): at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+06-02 16:38:05.203 D/StrictMode( 8940): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+06-02 16:38:05.203 D/StrictMode( 8940): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:05.203 D/StrictMode( 8940): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:05.203 D/StrictMode( 8940): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:05.203 D/StrictMode( 8940): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:05.203 D/StrictMode( 8940): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:05.203 D/StrictMode( 8940): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:05.205 D/StrictMode( 8940): StrictMode policy violation; ~duration=266 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:05.205 D/StrictMode( 8940): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:05.205 D/StrictMode( 8940): at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+06-02 16:38:05.205 D/StrictMode( 8940): at java.io.File.exists(File.java:815)
+06-02 16:38:05.205 D/StrictMode( 8940): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:605)
+06-02 16:38:05.205 D/StrictMode( 8940): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:596)
+06-02 16:38:05.205 D/StrictMode( 8940): at android.app.ContextImpl.getPreferencesDir(ContextImpl.java:552)
+06-02 16:38:05.205 D/StrictMode( 8940): at android.app.ContextImpl.getSharedPreferencesPath(ContextImpl.java:747)
+06-02 16:38:05.205 D/StrictMode( 8940): at android.app.ContextImpl.getSharedPreferences(ContextImpl.java:400)
+06-02 16:38:05.205 D/StrictMode( 8940): at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+06-02 16:38:05.205 D/StrictMode( 8940): at mozilla.components.browser.engine.gecko.GeckoEngine.<init>(GeckoEngine.kt:68)
+06-02 16:38:05.205 D/StrictMode( 8940): at mozilla.components.browser.engine.gecko.GeckoEngine.<init>(GeckoEngine.kt:63)
+06-02 16:38:05.205 D/StrictMode( 8940): at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:76)
+06-02 16:38:05.205 D/StrictMode( 8940): at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:57)
+06-02 16:38:05.205 D/StrictMode( 8940): at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+06-02 16:38:05.205 D/StrictMode( 8940): at org.mozilla.fenix.components.Core.getEngine(Unknown Source:7)
+06-02 16:38:05.205 D/StrictMode( 8940): at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:128)
+06-02 16:38:05.205 D/StrictMode( 8940): at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+06-02 16:38:05.205 D/StrictMode( 8940): at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+06-02 16:38:05.205 D/StrictMode( 8940): at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+06-02 16:38:05.205 D/StrictMode( 8940): at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+06-02 16:38:05.205 D/StrictMode( 8940): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+06-02 16:38:05.205 D/StrictMode( 8940): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:05.205 D/StrictMode( 8940): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:05.205 D/StrictMode( 8940): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:05.205 D/StrictMode( 8940): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:05.205 D/StrictMode( 8940): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:05.205 D/StrictMode( 8940): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:05.205 I/ActivityManager( 1869): Start proc 8988:org.mozilla.fenix.debug:tab0/u0a99 for service org.mozilla.fenix.debug/org.mozilla.gecko.process.GeckoChildProcessServices$tab0
+06-02 16:38:05.206 D/StrictMode( 8940): StrictMode policy violation; ~duration=112 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:05.206 D/StrictMode( 8940): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:05.206 D/StrictMode( 8940): at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+06-02 16:38:05.206 D/StrictMode( 8940): at java.io.File.exists(File.java:815)
+06-02 16:38:05.206 D/StrictMode( 8940): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:605)
+06-02 16:38:05.206 D/StrictMode( 8940): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:596)
+06-02 16:38:05.206 D/StrictMode( 8940): at android.app.ContextImpl.getPreferencesDir(ContextImpl.java:552)
+06-02 16:38:05.206 D/StrictMode( 8940): at android.app.ContextImpl.getSharedPreferencesPath(ContextImpl.java:747)
+06-02 16:38:05.206 D/StrictMode( 8940): at android.app.ContextImpl.getSharedPreferences(ContextImpl.java:400)
+06-02 16:38:05.206 D/StrictMode( 8940): at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+06-02 16:38:05.206 D/StrictMode( 8940): at androidx.preference.PreferenceManager.getDefaultSharedPreferences(PreferenceManager.java:119)
+06-02 16:38:05.206 D/StrictMode( 8940): at org.mozilla.fenix.DebugFenixApplication.setupLeakCanary(DebugFenixApplication.kt:15)
+06-02 16:38:05.206 D/StrictMode( 8940): at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:140)
+06-02 16:38:05.206 D/StrictMode( 8940): at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+06-02 16:38:05.206 D/StrictMode( 8940): at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+06-02 16:38:05.206 D/StrictMode( 8940): at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+06-02 16:38:05.206 D/StrictMode( 8940): at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+06-02 16:38:05.206 D/StrictMode( 8940): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+06-02 16:38:05.206 D/StrictMode( 8940): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:05.206 D/StrictMode( 8940): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:05.206 D/StrictMode( 8940): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:05.206 D/StrictMode( 8940): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:05.206 D/StrictMode( 8940): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:05.206 D/StrictMode( 8940): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:05.207 D/StrictMode( 8940): StrictMode policy violation; ~duration=110 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:05.207 D/StrictMode( 8940): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:05.207 D/StrictMode( 8940): at android.app.SharedPreferencesImpl.awaitLoadedLocked(SharedPreferencesImpl.java:256)
+06-02 16:38:05.207 D/StrictMode( 8940): at android.app.SharedPreferencesImpl.getBoolean(SharedPreferencesImpl.java:325)
+06-02 16:38:05.207 D/StrictMode( 8940): at org.mozilla.fenix.DebugFenixApplication.setupLeakCanary(DebugFenixApplication.kt:16)
+06-02 16:38:05.207 D/StrictMode( 8940): at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:140)
+06-02 16:38:05.207 D/StrictMode( 8940): at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+06-02 16:38:05.207 D/StrictMode( 8940): at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+06-02 16:38:05.207 D/StrictMode( 8940): at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+06-02 16:38:05.207 D/StrictMode( 8940): at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+06-02 16:38:05.207 D/StrictMode( 8940): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+06-02 16:38:05.207 D/StrictMode( 8940): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:05.207 D/StrictMode( 8940): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:05.207 D/StrictMode( 8940): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:05.207 D/StrictMode( 8940): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:05.207 D/StrictMode( 8940): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:05.207 D/StrictMode( 8940): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:05.262 W/enix.debug:tab( 8988): Unexpected CPU variant for X86 using defaults: x86
+06-02 16:38:05.304 W/lla.fenix.debu( 8940): Accessing hidden method Landroid/content/res/Resources$Theme;->rebase()V (dark greylist, reflection)
+06-02 16:38:05.309 D/StrictMode( 8940): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/content/res/Resources$Theme;->rebase()V
+06-02 16:38:05.309 D/StrictMode( 8940): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:05.309 D/StrictMode( 8940): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:05.309 D/StrictMode( 8940): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:05.309 D/StrictMode( 8940): at java.lang.Class.getMethod(Class.java:2064)
+06-02 16:38:05.309 D/StrictMode( 8940): at java.lang.Class.getDeclaredMethod(Class.java:2047)
+06-02 16:38:05.309 D/StrictMode( 8940): at androidx.core.content.res.ResourcesCompat$ThemeCompat$ImplApi23.rebase(ResourcesCompat.java:501)
+06-02 16:38:05.309 D/StrictMode( 8940): at androidx.core.content.res.ResourcesCompat$ThemeCompat.rebase(ResourcesCompat.java:477)
+06-02 16:38:05.309 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatDelegateImpl.attachBaseContext2(AppCompatDelegateImpl.java:465)
+06-02 16:38:05.309 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatActivity.attachBaseContext(AppCompatActivity.java:107)
+06-02 16:38:05.309 D/StrictMode( 8940): at mozilla.components.support.locale.LocaleAwareAppCompatActivity.attachBaseContext(LocaleAwareAppCompatActivity.kt:19)
+06-02 16:38:05.309 D/StrictMode( 8940): at android.app.Activity.attach(Activity.java:7051)
+06-02 16:38:05.309 D/StrictMode( 8940): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2873)
+06-02 16:38:05.309 D/StrictMode( 8940): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3048)
+06-02 16:38:05.309 D/StrictMode( 8940): at android.app.servertransaction.LaunchActivityItem.execute(LaunchActivityItem.java:78)
+06-02 16:38:05.309 D/StrictMode( 8940): at android.app.servertransaction.TransactionExecutor.executeCallbacks(TransactionExecutor.java:108)
+06-02 16:38:05.309 D/StrictMode( 8940): at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:68)
+06-02 16:38:05.309 D/StrictMode( 8940): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1808)
+06-02 16:38:05.309 D/StrictMode( 8940): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:05.309 D/StrictMode( 8940): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:05.309 D/StrictMode( 8940): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:05.309 D/StrictMode( 8940): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:05.309 D/StrictMode( 8940): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:05.309 D/StrictMode( 8940): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:05.314 I/ResourcesCompat( 8940): Failed to retrieve rebase() method
+06-02 16:38:05.314 I/ResourcesCompat( 8940): java.lang.NoSuchMethodException: rebase []
+06-02 16:38:05.314 I/ResourcesCompat( 8940): at java.lang.Class.getMethod(Class.java:2068)
+06-02 16:38:05.314 I/ResourcesCompat( 8940): at java.lang.Class.getDeclaredMethod(Class.java:2047)
+06-02 16:38:05.314 I/ResourcesCompat( 8940): at androidx.core.content.res.ResourcesCompat$ThemeCompat$ImplApi23.rebase(ResourcesCompat.java:501)
+06-02 16:38:05.314 I/ResourcesCompat( 8940): at androidx.core.content.res.ResourcesCompat$ThemeCompat.rebase(ResourcesCompat.java:477)
+06-02 16:38:05.314 I/ResourcesCompat( 8940): at androidx.appcompat.app.AppCompatDelegateImpl.attachBaseContext2(AppCompatDelegateImpl.java:465)
+06-02 16:38:05.314 I/ResourcesCompat( 8940): at androidx.appcompat.app.AppCompatActivity.attachBaseContext(AppCompatActivity.java:107)
+06-02 16:38:05.314 I/ResourcesCompat( 8940): at mozilla.components.support.locale.LocaleAwareAppCompatActivity.attachBaseContext(LocaleAwareAppCompatActivity.kt:19)
+06-02 16:38:05.314 I/ResourcesCompat( 8940): at android.app.Activity.attach(Activity.java:7051)
+06-02 16:38:05.314 I/ResourcesCompat( 8940): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2873)
+06-02 16:38:05.314 I/ResourcesCompat( 8940): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3048)
+06-02 16:38:05.314 I/ResourcesCompat( 8940): at android.app.servertransaction.LaunchActivityItem.execute(LaunchActivityItem.java:78)
+06-02 16:38:05.314 I/ResourcesCompat( 8940): at android.app.servertransaction.TransactionExecutor.executeCallbacks(TransactionExecutor.java:108)
+06-02 16:38:05.314 I/ResourcesCompat( 8940): at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:68)
+06-02 16:38:05.314 I/ResourcesCompat( 8940): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1808)
+06-02 16:38:05.314 I/ResourcesCompat( 8940): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:05.314 I/ResourcesCompat( 8940): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:05.314 I/ResourcesCompat( 8940): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:05.314 I/ResourcesCompat( 8940): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:05.314 I/ResourcesCompat( 8940): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:05.314 I/ResourcesCompat( 8940): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:05.335 I/enix.debug:tab( 8988): The ClassLoaderContext is a special shared library.
+06-02 16:38:05.356 W/lla.fenix.debu( 8940): Accessing hidden method Landroid/graphics/drawable/Drawable;->getOpticalInsets()Landroid/graphics/Insets; (light greylist, linking)
+06-02 16:38:05.356 W/lla.fenix.debu( 8940): Accessing hidden field Landroid/graphics/Insets;->left:I (light greylist, linking)
+06-02 16:38:05.357 W/lla.fenix.debu( 8940): Accessing hidden field Landroid/graphics/Insets;->right:I (light greylist, linking)
+06-02 16:38:05.357 W/lla.fenix.debu( 8940): Accessing hidden field Landroid/graphics/Insets;->top:I (light greylist, linking)
+06-02 16:38:05.357 W/lla.fenix.debu( 8940): Accessing hidden field Landroid/graphics/Insets;->bottom:I (light greylist, linking)
+06-02 16:38:05.361 D/GeckoViewStartup( 8940): observe: app-startup
+06-02 16:38:05.391 D/GeckoViewConsole( 8940): enabled = true
+06-02 16:38:05.453 W/lla.fenix.debu( 8940): Accessing hidden method Landroid/view/View;->getAccessibilityDelegate()Landroid/view/View$AccessibilityDelegate; (light greylist, linking)
+06-02 16:38:05.461 W/lla.fenix.debu( 8940): Accessing hidden method Landroid/view/View;->computeFitSystemWindows(Landroid/graphics/Rect;Landroid/graphics/Rect;)Z (light greylist, reflection)
+06-02 16:38:05.466 D/StrictMode( 8940): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/view/View;->computeFitSystemWindows(Landroid/graphics/Rect;Landroid/graphics/Rect;)Z
+06-02 16:38:05.466 D/StrictMode( 8940): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:05.466 D/StrictMode( 8940): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:05.466 D/StrictMode( 8940): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:05.466 D/StrictMode( 8940): at java.lang.Class.getMethod(Class.java:2064)
+06-02 16:38:05.466 D/StrictMode( 8940): at java.lang.Class.getDeclaredMethod(Class.java:2047)
+06-02 16:38:05.466 D/StrictMode( 8940): at androidx.appcompat.widget.ViewUtils.<clinit>(ViewUtils.java:44)
+06-02 16:38:05.466 D/StrictMode( 8940): at androidx.appcompat.widget.ViewUtils.makeOptionalFitsSystemWindows(ViewUtils.java:80)
+06-02 16:38:05.466 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatDelegateImpl.createSubDecor(AppCompatDelegateImpl.java:970)
+06-02 16:38:05.466 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatDelegateImpl.ensureSubDecor(AppCompatDelegateImpl.java:803)
+06-02 16:38:05.466 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatDelegateImpl.setContentView(AppCompatDelegateImpl.java:692)
+06-02 16:38:05.466 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatActivity.setContentView(AppCompatActivity.java:170)
+06-02 16:38:05.466 D/StrictMode( 8940): at org.mozilla.fenix.HomeActivity.onCreate(HomeActivity.kt:130)
+06-02 16:38:05.466 D/StrictMode( 8940): at android.app.Activity.performCreate(Activity.java:7136)
+06-02 16:38:05.466 D/StrictMode( 8940): at android.app.Activity.performCreate(Activity.java:7127)
+06-02 16:38:05.466 D/StrictMode( 8940): at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1271)
+06-02 16:38:05.466 D/StrictMode( 8940): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2893)
+06-02 16:38:05.466 D/StrictMode( 8940): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3048)
+06-02 16:38:05.466 D/StrictMode( 8940): at android.app.servertransaction.LaunchActivityItem.execute(LaunchActivityItem.java:78)
+06-02 16:38:05.466 D/StrictMode( 8940): at android.app.servertransaction.TransactionExecutor.executeCallbacks(TransactionExecutor.java:108)
+06-02 16:38:05.466 D/StrictMode( 8940): at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:68)
+06-02 16:38:05.466 D/StrictMode( 8940): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1808)
+06-02 16:38:05.466 D/StrictMode( 8940): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:05.466 D/StrictMode( 8940): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:05.466 D/StrictMode( 8940): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:05.466 D/StrictMode( 8940): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:05.466 D/StrictMode( 8940): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:05.466 D/StrictMode( 8940): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:05.468 W/lla.fenix.debu( 8940): Accessing hidden method Landroid/view/ViewGroup;->makeOptionalFitsSystemWindows()V (light greylist, reflection)
+06-02 16:38:05.470 D/StrictMode( 8940): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/view/ViewGroup;->makeOptionalFitsSystemWindows()V
+06-02 16:38:05.470 D/StrictMode( 8940): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:05.470 D/StrictMode( 8940): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:05.470 D/StrictMode( 8940): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:05.470 D/StrictMode( 8940): at java.lang.Class.getPublicMethodRecursive(Class.java:2075)
+06-02 16:38:05.470 D/StrictMode( 8940): at java.lang.Class.getMethod(Class.java:2063)
+06-02 16:38:05.470 D/StrictMode( 8940): at java.lang.Class.getMethod(Class.java:1690)
+06-02 16:38:05.470 D/StrictMode( 8940): at androidx.appcompat.widget.ViewUtils.makeOptionalFitsSystemWindows(ViewUtils.java:84)
+06-02 16:38:05.470 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatDelegateImpl.createSubDecor(AppCompatDelegateImpl.java:970)
+06-02 16:38:05.470 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatDelegateImpl.ensureSubDecor(AppCompatDelegateImpl.java:803)
+06-02 16:38:05.470 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatDelegateImpl.setContentView(AppCompatDelegateImpl.java:692)
+06-02 16:38:05.470 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatActivity.setContentView(AppCompatActivity.java:170)
+06-02 16:38:05.470 D/StrictMode( 8940): at org.mozilla.fenix.HomeActivity.onCreate(HomeActivity.kt:130)
+06-02 16:38:05.470 D/StrictMode( 8940): at android.app.Activity.performCreate(Activity.java:7136)
+06-02 16:38:05.470 D/StrictMode( 8940): at android.app.Activity.performCreate(Activity.java:7127)
+06-02 16:38:05.470 D/StrictMode( 8940): at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1271)
+06-02 16:38:05.470 D/StrictMode( 8940): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2893)
+06-02 16:38:05.470 D/StrictMode( 8940): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3048)
+06-02 16:38:05.470 D/StrictMode( 8940): at android.app.servertransaction.LaunchActivityItem.execute(LaunchActivityItem.java:78)
+06-02 16:38:05.470 D/StrictMode( 8940): at android.app.servertransaction.TransactionExecutor.executeCallbacks(TransactionExecutor.java:108)
+06-02 16:38:05.470 D/StrictMode( 8940): at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:68)
+06-02 16:38:05.470 D/StrictMode( 8940): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1808)
+06-02 16:38:05.470 D/StrictMode( 8940): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:05.470 D/StrictMode( 8940): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:05.470 D/StrictMode( 8940): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:05.470 D/StrictMode( 8940): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:05.470 D/StrictMode( 8940): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:05.470 D/StrictMode( 8940): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:05.546 I/GeckoConsole( 8940): No chrome package registered for chrome://browser/content/built_in_addons.json
+06-02 16:38:05.598 I/AJC ( 8940): isPerformanceTest
+06-02 16:38:05.601 I/AJC ( 8940): isPerformanceTest : isPhonePlugged: true
+06-02 16:38:05.601 I/AJC ( 8940): isPerformanceTest : isAdbEnabled: true
+06-02 16:38:05.606 D/App ( 8940): DebugMetricController: track event: org.mozilla.fenix.components.metrics.Event$DismissedOnboarding@6354493
+06-02 16:38:05.893 W/lla.fenix.debu( 8940): Accessing hidden method Landroid/graphics/FontFamily;-><init>()V (light greylist, reflection)
+06-02 16:38:05.900 D/StrictMode( 8940): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/graphics/FontFamily;-><init>()V
+06-02 16:38:05.900 D/StrictMode( 8940): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:05.900 D/StrictMode( 8940): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:05.900 D/StrictMode( 8940): at java.lang.Class.getDeclaredConstructorInternal(Native Method)
+06-02 16:38:05.900 D/StrictMode( 8940): at java.lang.Class.getConstructor0(Class.java:2325)
+06-02 16:38:05.900 D/StrictMode( 8940): at java.lang.Class.getConstructor(Class.java:1725)
+06-02 16:38:05.900 D/StrictMode( 8940): at androidx.core.graphics.TypefaceCompatApi26Impl.obtainFontFamilyCtor(TypefaceCompatApi26Impl.java:321)
+06-02 16:38:05.900 D/StrictMode( 8940): at androidx.core.graphics.TypefaceCompatApi26Impl.<init>(TypefaceCompatApi26Impl.java:84)
+06-02 16:38:05.900 D/StrictMode( 8940): at androidx.core.graphics.TypefaceCompatApi28Impl.<init>(TypefaceCompatApi28Impl.java:36)
+06-02 16:38:05.900 D/StrictMode( 8940): at androidx.core.graphics.TypefaceCompat.<clinit>(TypefaceCompat.java:51)
+06-02 16:38:05.900 D/StrictMode( 8940): at androidx.core.graphics.TypefaceCompat.create(TypefaceCompat.java:194)
+06-02 16:38:05.900 D/StrictMode( 8940): at androidx.appcompat.widget.AppCompatTextView.setTypeface(AppCompatTextView.java:708)
+06-02 16:38:05.900 D/StrictMode( 8940): at android.widget.TextView.resolveStyleAndSetTypeface(TextView.java:2037)
+06-02 16:38:05.900 D/StrictMode( 8940): at android.widget.TextView.setTypefaceFromAttrs(TextView.java:2008)
+06-02 16:38:05.900 D/StrictMode( 8940): at android.widget.TextView.applyTextAppearance(TextView.java:3640)
+06-02 16:38:05.900 D/StrictMode( 8940): at android.widget.TextView.<init>(TextView.java:1498)
+06-02 16:38:05.900 D/StrictMode( 8940): at android.widget.TextView.<init>(TextView.java:869)
+06-02 16:38:05.900 D/StrictMode( 8940): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:100)
+06-02 16:38:05.900 D/StrictMode( 8940): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:95)
+06-02 16:38:05.900 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatViewInflater.createTextView(AppCompatViewInflater.java:194)
+06-02 16:38:05.900 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatViewInflater.createView(AppCompatViewInflater.java:115)
+06-02 16:38:05.900 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatDelegateImpl.createView(AppCompatDelegateImpl.java:1548)
+06-02 16:38:05.900 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatDelegateImpl.onCreateView(AppCompatDelegateImpl.java:1599)
+06-02 16:38:05.900 D/StrictMode( 8940): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:772)
+06-02 16:38:05.900 D/StrictMode( 8940): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:05.900 D/StrictMode( 8940): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:05.900 D/StrictMode( 8940): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:05.900 D/StrictMode( 8940): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:05.900 D/StrictMode( 8940): at android.view.LayoutInflater.inflate(LayoutInflater.java:489)
+06-02 16:38:05.900 D/StrictMode( 8940): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:05.900 D/StrictMode( 8940): at android.view.LayoutInflater.inflate(LayoutInflater.java:374)
+06-02 16:38:05.900 D/StrictMode( 8940): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:30)
+06-02 16:38:05.900 D/StrictMode( 8940): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:22)
+06-02 16:38:05.900 D/StrictMode( 8940): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(Unknown Source:6)
+06-02 16:38:05.900 D/StrictMode( 8940): at java.lang.reflect.Constructor.newInstance0(Native Method)
+06-02 16:38:05.900 D/StrictMode( 8940): at java.lang.reflect.Constructor.newInstance(Constructor.java:343)
+06-02 16:38:05.900 D/StrictMode( 8940): at android.view.LayoutInflater.createView(LayoutInflater.java:647)
+06-02 16:38:05.900 D/StrictMode( 8940): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:790)
+06-02 16:38:05.900 D/StrictMode( 8940): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:05.900 D/StrictMode( 8940): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:05.900 D/StrictMode( 8940): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:05.900 D/StrictMode( 8940): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:05.900 D/StrictMode( 8940): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:05.900 D/StrictMode( 8940): at android.view.LayoutInflater.inflate(LayoutInflater.java:515)
+06-02 16:38:05.900 D/StrictMode( 8940): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:05.900 D/StrictMode( 8940): at org.mozilla.fenix.home.HomeFragment.onCreateView(HomeFragment.kt:183)
+06-02 16:38:05.900 D/StrictMode( 8940): at androidx.fragment.app.Fragment.performCreateView(Fragment.java:2698)
+06-02 16:38:05.900 D/StrictMode( 8940): at androidx.fragment.app.FragmentStateManager.createView(FragmentStateManager.java:320)
+06-02 16:38:05.900 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1187)
+06-02 16:38:05.900 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.addAddedFragments(FragmentManager.java:2224)
+06-02 16:38:05.900 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.executeOpsTogether(FragmentManager.java:1997)
+06-02 16:38:05.900 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.removeRedundantOperationsAndExecute(FragmentManager.java:1953)
+06-02 16:38:05.900 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.execPendingActions(FragmentManager.java:1849)
+06-02 16:38:05.900 D/StrictMode( 8940): at
+06-02 16:38:05.901 W/lla.fenix.debu( 8940): Accessing hidden method Landroid/graphics/FontFamily;->addFontFromAssetManager(Landroid/content/res/AssetManager;Ljava/lang/String;IZIII[Landroid/graphics/fonts/FontVariationAxis;)Z (light greylist, reflection)
+06-02 16:38:05.914 D/StrictMode( 8940): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/graphics/FontFamily;->addFontFromAssetManager(Landroid/content/res/AssetManager;Ljava/lang/String;IZIII[Landroid/graphics/fonts/FontVariationAxis;)Z
+06-02 16:38:05.914 D/StrictMode( 8940): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:05.914 D/StrictMode( 8940): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:05.914 D/StrictMode( 8940): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:05.914 D/StrictMode( 8940): at java.lang.Class.getPublicMethodRecursive(Class.java:2075)
+06-02 16:38:05.914 D/StrictMode( 8940): at java.lang.Class.getMethod(Class.java:2063)
+06-02 16:38:05.914 D/StrictMode( 8940): at java.lang.Class.getMethod(Class.java:1690)
+06-02 16:38:05.914 D/StrictMode( 8940): at androidx.core.graphics.TypefaceCompatApi26Impl.obtainAddFontFromAssetManagerMethod(TypefaceCompatApi26Impl.java:326)
+06-02 16:38:05.914 D/StrictMode( 8940): at androidx.core.graphics.TypefaceCompatApi26Impl.<init>(TypefaceCompatApi26Impl.java:85)
+06-02 16:38:05.914 D/StrictMode( 8940): at androidx.core.graphics.TypefaceCompatApi28Impl.<init>(TypefaceCompatApi28Impl.java:36)
+06-02 16:38:05.914 D/StrictMode( 8940): at androidx.core.graphics.TypefaceCompat.<clinit>(TypefaceCompat.java:51)
+06-02 16:38:05.914 D/StrictMode( 8940): at androidx.core.graphics.TypefaceCompat.create(TypefaceCompat.java:194)
+06-02 16:38:05.914 D/StrictMode( 8940): at androidx.appcompat.widget.AppCompatTextView.setTypeface(AppCompatTextView.java:708)
+06-02 16:38:05.914 D/StrictMode( 8940): at android.widget.TextView.resolveStyleAndSetTypeface(TextView.java:2037)
+06-02 16:38:05.914 D/StrictMode( 8940): at android.widget.TextView.setTypefaceFromAttrs(TextView.java:2008)
+06-02 16:38:05.914 D/StrictMode( 8940): at android.widget.TextView.applyTextAppearance(TextView.java:3640)
+06-02 16:38:05.914 D/StrictMode( 8940): at android.widget.TextView.<init>(TextView.java:1498)
+06-02 16:38:05.914 D/StrictMode( 8940): at android.widget.TextView.<init>(TextView.java:869)
+06-02 16:38:05.914 D/StrictMode( 8940): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:100)
+06-02 16:38:05.914 D/StrictMode( 8940): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:95)
+06-02 16:38:05.914 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatViewInflater.createTextView(AppCompatViewInflater.java:194)
+06-02 16:38:05.914 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatViewInflater.createView(AppCompatViewInflater.java:115)
+06-02 16:38:05.914 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatDelegateImpl.createView(AppCompatDelegateImpl.java:1548)
+06-02 16:38:05.914 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatDelegateImpl.onCreateView(AppCompatDelegateImpl.java:1599)
+06-02 16:38:05.914 D/StrictMode( 8940): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:772)
+06-02 16:38:05.914 D/StrictMode( 8940): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:05.914 D/StrictMode( 8940): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:05.914 D/StrictMode( 8940): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:05.914 D/StrictMode( 8940): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:05.914 D/StrictMode( 8940): at android.view.LayoutInflater.inflate(LayoutInflater.java:489)
+06-02 16:38:05.914 D/StrictMode( 8940): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:05.914 D/StrictMode( 8940): at android.view.LayoutInflater.inflate(LayoutInflater.java:374)
+06-02 16:38:05.914 D/StrictMode( 8940): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:30)
+06-02 16:38:05.914 D/StrictMode( 8940): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:22)
+06-02 16:38:05.914 D/StrictMode( 8940): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(Unknown Source:6)
+06-02 16:38:05.914 D/StrictMode( 8940): at java.lang.reflect.Constructor.newInstance0(Native Method)
+06-02 16:38:05.914 D/StrictMode( 8940): at java.lang.reflect.Constructor.newInstance(Constructor.java:343)
+06-02 16:38:05.914 D/StrictMode( 8940): at android.view.LayoutInflater.createView(LayoutInflater.java:647)
+06-02 16:38:05.914 D/StrictMode( 8940): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:790)
+06-02 16:38:05.914 D/StrictMode( 8940): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:05.914 D/StrictMode( 8940): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:05.914 D/StrictMode( 8940): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:05.914 D/StrictMode( 8940): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:05.914 D/StrictMode( 8940): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:05.914 D/StrictMode( 8940): at android.view.LayoutInflater.inflate(LayoutInflater.java:515)
+06-02 16:38:05.914 D/StrictMode( 8940): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:05.914 D/StrictMode( 8940): at org.mozilla.fenix.home.HomeFragment.onCreateView(HomeFragment.kt:183)
+06-02 16:38:05.914 D/StrictMode( 8940): at androidx.fragment.app.Fragment.performCreateView(Fragment.java:2698)
+06-02 16:38:05.914 D/StrictMode( 8940): at androidx.fragment.app.FragmentStateManager.createView(FragmentStateManager.java:320)
+06-02 16:38:05.914 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1187)
+06-02 16:38:05.914 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.addAddedFragments(FragmentManager.java:2224)
+06-02 16:38:05.914 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.executeOpsTogether(FragmentManager.java:1997)
+06-02 16:38:05.914 D/StrictMode( 8940): at androidx.fragme
+06-02 16:38:05.915 W/lla.fenix.debu( 8940): Accessing hidden method Landroid/graphics/FontFamily;->addFontFromBuffer(Ljava/nio/ByteBuffer;I[Landroid/graphics/fonts/FontVariationAxis;II)Z (light greylist, reflection)
+06-02 16:38:05.924 D/StrictMode( 8940): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/graphics/FontFamily;->addFontFromBuffer(Ljava/nio/ByteBuffer;I[Landroid/graphics/fonts/FontVariationAxis;II)Z
+06-02 16:38:05.924 D/StrictMode( 8940): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:05.924 D/StrictMode( 8940): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:05.924 D/StrictMode( 8940): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:05.924 D/StrictMode( 8940): at java.lang.Class.getPublicMethodRecursive(Class.java:2075)
+06-02 16:38:05.924 D/StrictMode( 8940): at java.lang.Class.getMethod(Class.java:2063)
+06-02 16:38:05.924 D/StrictMode( 8940): at java.lang.Class.getMethod(Class.java:1690)
+06-02 16:38:05.924 D/StrictMode( 8940): at androidx.core.graphics.TypefaceCompatApi26Impl.obtainAddFontFromBufferMethod(TypefaceCompatApi26Impl.java:333)
+06-02 16:38:05.924 D/StrictMode( 8940): at androidx.core.graphics.TypefaceCompatApi26Impl.<init>(TypefaceCompatApi26Impl.java:86)
+06-02 16:38:05.924 D/StrictMode( 8940): at androidx.core.graphics.TypefaceCompatApi28Impl.<init>(TypefaceCompatApi28Impl.java:36)
+06-02 16:38:05.924 D/StrictMode( 8940): at androidx.core.graphics.TypefaceCompat.<clinit>(TypefaceCompat.java:51)
+06-02 16:38:05.924 D/StrictMode( 8940): at androidx.core.graphics.TypefaceCompat.create(TypefaceCompat.java:194)
+06-02 16:38:05.924 D/StrictMode( 8940): at androidx.appcompat.widget.AppCompatTextView.setTypeface(AppCompatTextView.java:708)
+06-02 16:38:05.924 D/StrictMode( 8940): at android.widget.TextView.resolveStyleAndSetTypeface(TextView.java:2037)
+06-02 16:38:05.924 D/StrictMode( 8940): at android.widget.TextView.setTypefaceFromAttrs(TextView.java:2008)
+06-02 16:38:05.924 D/StrictMode( 8940): at android.widget.TextView.applyTextAppearance(TextView.java:3640)
+06-02 16:38:05.924 D/StrictMode( 8940): at android.widget.TextView.<init>(TextView.java:1498)
+06-02 16:38:05.924 D/StrictMode( 8940): at android.widget.TextView.<init>(TextView.java:869)
+06-02 16:38:05.924 D/StrictMode( 8940): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:100)
+06-02 16:38:05.924 D/StrictMode( 8940): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:95)
+06-02 16:38:05.924 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatViewInflater.createTextView(AppCompatViewInflater.java:194)
+06-02 16:38:05.924 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatViewInflater.createView(AppCompatViewInflater.java:115)
+06-02 16:38:05.924 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatDelegateImpl.createView(AppCompatDelegateImpl.java:1548)
+06-02 16:38:05.924 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatDelegateImpl.onCreateView(AppCompatDelegateImpl.java:1599)
+06-02 16:38:05.924 D/StrictMode( 8940): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:772)
+06-02 16:38:05.924 D/StrictMode( 8940): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:05.924 D/StrictMode( 8940): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:05.924 D/StrictMode( 8940): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:05.924 D/StrictMode( 8940): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:05.924 D/StrictMode( 8940): at android.view.LayoutInflater.inflate(LayoutInflater.java:489)
+06-02 16:38:05.924 D/StrictMode( 8940): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:05.924 D/StrictMode( 8940): at android.view.LayoutInflater.inflate(LayoutInflater.java:374)
+06-02 16:38:05.924 D/StrictMode( 8940): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:30)
+06-02 16:38:05.924 D/StrictMode( 8940): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:22)
+06-02 16:38:05.924 D/StrictMode( 8940): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(Unknown Source:6)
+06-02 16:38:05.924 D/StrictMode( 8940): at java.lang.reflect.Constructor.newInstance0(Native Method)
+06-02 16:38:05.924 D/StrictMode( 8940): at java.lang.reflect.Constructor.newInstance(Constructor.java:343)
+06-02 16:38:05.924 D/StrictMode( 8940): at android.view.LayoutInflater.createView(LayoutInflater.java:647)
+06-02 16:38:05.924 D/StrictMode( 8940): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:790)
+06-02 16:38:05.924 D/StrictMode( 8940): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:05.924 D/StrictMode( 8940): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:05.924 D/StrictMode( 8940): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:05.924 D/StrictMode( 8940): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:05.924 D/StrictMode( 8940): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:05.924 D/StrictMode( 8940): at android.view.LayoutInflater.inflate(LayoutInflater.java:515)
+06-02 16:38:05.924 D/StrictMode( 8940): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:05.924 D/StrictMode( 8940): at org.mozilla.fenix.home.HomeFragment.onCreateView(HomeFragment.kt:183)
+06-02 16:38:05.924 D/StrictMode( 8940): at androidx.fragment.app.Fragment.performCreateView(Fragment.java:2698)
+06-02 16:38:05.924 D/StrictMode( 8940): at androidx.fragment.app.FragmentStateManager.createView(FragmentStateManager.java:320)
+06-02 16:38:05.924 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1187)
+06-02 16:38:05.924 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.addAddedFragments(FragmentManager.java:2224)
+06-02 16:38:05.924 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.executeOpsTogether(FragmentManager.java:1997)
+06-02 16:38:05.924 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.removeRedundantOperati
+06-02 16:38:05.924 W/lla.fenix.debu( 8940): Accessing hidden method Landroid/graphics/FontFamily;->freeze()Z (light greylist, reflection)
+06-02 16:38:05.931 D/StrictMode( 8940): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/graphics/FontFamily;->freeze()Z
+06-02 16:38:05.931 D/StrictMode( 8940): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:05.931 D/StrictMode( 8940): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:05.931 D/StrictMode( 8940): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:05.931 D/StrictMode( 8940): at java.lang.Class.getPublicMethodRecursive(Class.java:2075)
+06-02 16:38:05.931 D/StrictMode( 8940): at java.lang.Class.getMethod(Class.java:2063)
+06-02 16:38:05.931 D/StrictMode( 8940): at java.lang.Class.getMethod(Class.java:1690)
+06-02 16:38:05.931 D/StrictMode( 8940): at androidx.core.graphics.TypefaceCompatApi26Impl.obtainFreezeMethod(TypefaceCompatApi26Impl.java:339)
+06-02 16:38:05.931 D/StrictMode( 8940): at androidx.core.graphics.TypefaceCompatApi26Impl.<init>(TypefaceCompatApi26Impl.java:87)
+06-02 16:38:05.931 D/StrictMode( 8940): at androidx.core.graphics.TypefaceCompatApi28Impl.<init>(TypefaceCompatApi28Impl.java:36)
+06-02 16:38:05.931 D/StrictMode( 8940): at androidx.core.graphics.TypefaceCompat.<clinit>(TypefaceCompat.java:51)
+06-02 16:38:05.931 D/StrictMode( 8940): at androidx.core.graphics.TypefaceCompat.create(TypefaceCompat.java:194)
+06-02 16:38:05.931 D/StrictMode( 8940): at androidx.appcompat.widget.AppCompatTextView.setTypeface(AppCompatTextView.java:708)
+06-02 16:38:05.931 D/StrictMode( 8940): at android.widget.TextView.resolveStyleAndSetTypeface(TextView.java:2037)
+06-02 16:38:05.931 D/StrictMode( 8940): at android.widget.TextView.setTypefaceFromAttrs(TextView.java:2008)
+06-02 16:38:05.931 D/StrictMode( 8940): at android.widget.TextView.applyTextAppearance(TextView.java:3640)
+06-02 16:38:05.931 D/StrictMode( 8940): at android.widget.TextView.<init>(TextView.java:1498)
+06-02 16:38:05.931 D/StrictMode( 8940): at android.widget.TextView.<init>(TextView.java:869)
+06-02 16:38:05.931 D/StrictMode( 8940): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:100)
+06-02 16:38:05.931 D/StrictMode( 8940): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:95)
+06-02 16:38:05.931 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatViewInflater.createTextView(AppCompatViewInflater.java:194)
+06-02 16:38:05.931 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatViewInflater.createView(AppCompatViewInflater.java:115)
+06-02 16:38:05.931 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatDelegateImpl.createView(AppCompatDelegateImpl.java:1548)
+06-02 16:38:05.931 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatDelegateImpl.onCreateView(AppCompatDelegateImpl.java:1599)
+06-02 16:38:05.931 D/StrictMode( 8940): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:772)
+06-02 16:38:05.931 D/StrictMode( 8940): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:05.931 D/StrictMode( 8940): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:05.931 D/StrictMode( 8940): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:05.931 D/StrictMode( 8940): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:05.931 D/StrictMode( 8940): at android.view.LayoutInflater.inflate(LayoutInflater.java:489)
+06-02 16:38:05.931 D/StrictMode( 8940): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:05.931 D/StrictMode( 8940): at android.view.LayoutInflater.inflate(LayoutInflater.java:374)
+06-02 16:38:05.931 D/StrictMode( 8940): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:30)
+06-02 16:38:05.931 D/StrictMode( 8940): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:22)
+06-02 16:38:05.931 D/StrictMode( 8940): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(Unknown Source:6)
+06-02 16:38:05.931 D/StrictMode( 8940): at java.lang.reflect.Constructor.newInstance0(Native Method)
+06-02 16:38:05.931 D/StrictMode( 8940): at java.lang.reflect.Constructor.newInstance(Constructor.java:343)
+06-02 16:38:05.931 D/StrictMode( 8940): at android.view.LayoutInflater.createView(LayoutInflater.java:647)
+06-02 16:38:05.931 D/StrictMode( 8940): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:790)
+06-02 16:38:05.931 D/StrictMode( 8940): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:05.931 D/StrictMode( 8940): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:05.931 D/StrictMode( 8940): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:05.931 D/StrictMode( 8940): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:05.931 D/StrictMode( 8940): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:05.931 D/StrictMode( 8940): at android.view.LayoutInflater.inflate(LayoutInflater.java:515)
+06-02 16:38:05.931 D/StrictMode( 8940): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:05.931 D/StrictMode( 8940): at org.mozilla.fenix.home.HomeFragment.onCreateView(HomeFragment.kt:183)
+06-02 16:38:05.931 D/StrictMode( 8940): at androidx.fragment.app.Fragment.performCreateView(Fragment.java:2698)
+06-02 16:38:05.931 D/StrictMode( 8940): at androidx.fragment.app.FragmentStateManager.createView(FragmentStateManager.java:320)
+06-02 16:38:05.931 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1187)
+06-02 16:38:05.931 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.addAddedFragments(FragmentManager.java:2224)
+06-02 16:38:05.931 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.executeOpsTogether(FragmentManager.java:1997)
+06-02 16:38:05.931 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.removeRedundantOperationsAndExecute(FragmentManager.java:1953)
+06-02 16:38:05.931 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.execPe
+06-02 16:38:05.931 W/lla.fenix.debu( 8940): Accessing hidden method Landroid/graphics/FontFamily;->abortCreation()V (light greylist, reflection)
+06-02 16:38:05.938 D/StrictMode( 8940): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/graphics/FontFamily;->abortCreation()V
+06-02 16:38:05.938 D/StrictMode( 8940): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:05.938 D/StrictMode( 8940): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:05.938 D/StrictMode( 8940): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:05.938 D/StrictMode( 8940): at java.lang.Class.getPublicMethodRecursive(Class.java:2075)
+06-02 16:38:05.938 D/StrictMode( 8940): at java.lang.Class.getMethod(Class.java:2063)
+06-02 16:38:05.938 D/StrictMode( 8940): at java.lang.Class.getMethod(Class.java:1690)
+06-02 16:38:05.938 D/StrictMode( 8940): at androidx.core.graphics.TypefaceCompatApi26Impl.obtainAbortCreationMethod(TypefaceCompatApi26Impl.java:343)
+06-02 16:38:05.938 D/StrictMode( 8940): at androidx.core.graphics.TypefaceCompatApi26Impl.<init>(TypefaceCompatApi26Impl.java:88)
+06-02 16:38:05.938 D/StrictMode( 8940): at androidx.core.graphics.TypefaceCompatApi28Impl.<init>(TypefaceCompatApi28Impl.java:36)
+06-02 16:38:05.938 D/StrictMode( 8940): at androidx.core.graphics.TypefaceCompat.<clinit>(TypefaceCompat.java:51)
+06-02 16:38:05.938 D/StrictMode( 8940): at androidx.core.graphics.TypefaceCompat.create(TypefaceCompat.java:194)
+06-02 16:38:05.938 D/StrictMode( 8940): at androidx.appcompat.widget.AppCompatTextView.setTypeface(AppCompatTextView.java:708)
+06-02 16:38:05.938 D/StrictMode( 8940): at android.widget.TextView.resolveStyleAndSetTypeface(TextView.java:2037)
+06-02 16:38:05.938 D/StrictMode( 8940): at android.widget.TextView.setTypefaceFromAttrs(TextView.java:2008)
+06-02 16:38:05.938 D/StrictMode( 8940): at android.widget.TextView.applyTextAppearance(TextView.java:3640)
+06-02 16:38:05.938 D/StrictMode( 8940): at android.widget.TextView.<init>(TextView.java:1498)
+06-02 16:38:05.938 D/StrictMode( 8940): at android.widget.TextView.<init>(TextView.java:869)
+06-02 16:38:05.938 D/StrictMode( 8940): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:100)
+06-02 16:38:05.938 D/StrictMode( 8940): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:95)
+06-02 16:38:05.938 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatViewInflater.createTextView(AppCompatViewInflater.java:194)
+06-02 16:38:05.938 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatViewInflater.createView(AppCompatViewInflater.java:115)
+06-02 16:38:05.938 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatDelegateImpl.createView(AppCompatDelegateImpl.java:1548)
+06-02 16:38:05.938 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatDelegateImpl.onCreateView(AppCompatDelegateImpl.java:1599)
+06-02 16:38:05.938 D/StrictMode( 8940): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:772)
+06-02 16:38:05.938 D/StrictMode( 8940): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:05.938 D/StrictMode( 8940): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:05.938 D/StrictMode( 8940): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:05.938 D/StrictMode( 8940): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:05.938 D/StrictMode( 8940): at android.view.LayoutInflater.inflate(LayoutInflater.java:489)
+06-02 16:38:05.938 D/StrictMode( 8940): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:05.938 D/StrictMode( 8940): at android.view.LayoutInflater.inflate(LayoutInflater.java:374)
+06-02 16:38:05.938 D/StrictMode( 8940): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:30)
+06-02 16:38:05.938 D/StrictMode( 8940): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:22)
+06-02 16:38:05.938 D/StrictMode( 8940): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(Unknown Source:6)
+06-02 16:38:05.938 D/StrictMode( 8940): at java.lang.reflect.Constructor.newInstance0(Native Method)
+06-02 16:38:05.938 D/StrictMode( 8940): at java.lang.reflect.Constructor.newInstance(Constructor.java:343)
+06-02 16:38:05.938 D/StrictMode( 8940): at android.view.LayoutInflater.createView(LayoutInflater.java:647)
+06-02 16:38:05.938 D/StrictMode( 8940): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:790)
+06-02 16:38:05.938 D/StrictMode( 8940): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:05.938 D/StrictMode( 8940): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:05.938 D/StrictMode( 8940): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:05.938 D/StrictMode( 8940): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:05.938 D/StrictMode( 8940): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:05.938 D/StrictMode( 8940): at android.view.LayoutInflater.inflate(LayoutInflater.java:515)
+06-02 16:38:05.938 D/StrictMode( 8940): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:05.938 D/StrictMode( 8940): at org.mozilla.fenix.home.HomeFragment.onCreateView(HomeFragment.kt:183)
+06-02 16:38:05.938 D/StrictMode( 8940): at androidx.fragment.app.Fragment.performCreateView(Fragment.java:2698)
+06-02 16:38:05.938 D/StrictMode( 8940): at androidx.fragment.app.FragmentStateManager.createView(FragmentStateManager.java:320)
+06-02 16:38:05.938 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1187)
+06-02 16:38:05.938 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.addAddedFragments(FragmentManager.java:2224)
+06-02 16:38:05.938 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.executeOpsTogether(FragmentManager.java:1997)
+06-02 16:38:05.938 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.removeRedundantOperationsAndExecute(FragmentManager.java:1953)
+06-02 16:38:05.938 D/StrictMode( 8940): at androidx.fragment.app.Fragment
+06-02 16:38:05.943 W/lla.fenix.debu( 8940): Accessing hidden method Landroid/graphics/Typeface;->createFromFamiliesWithDefault([Landroid/graphics/FontFamily;Ljava/lang/String;II)Landroid/graphics/Typeface; (light greylist, reflection)
+06-02 16:38:05.948 D/StrictMode( 8940): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/graphics/Typeface;->createFromFamiliesWithDefault([Landroid/graphics/FontFamily;Ljava/lang/String;II)Landroid/graphics/Typeface;
+06-02 16:38:05.948 D/StrictMode( 8940): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:05.948 D/StrictMode( 8940): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:05.948 D/StrictMode( 8940): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:05.948 D/StrictMode( 8940): at java.lang.Class.getMethod(Class.java:2064)
+06-02 16:38:05.948 D/StrictMode( 8940): at java.lang.Class.getDeclaredMethod(Class.java:2047)
+06-02 16:38:05.948 D/StrictMode( 8940): at androidx.core.graphics.TypefaceCompatApi28Impl.obtainCreateFromFamiliesWithDefaultMethod(TypefaceCompatApi28Impl.java:62)
+06-02 16:38:05.948 D/StrictMode( 8940): at androidx.core.graphics.TypefaceCompatApi26Impl.<init>(TypefaceCompatApi26Impl.java:89)
+06-02 16:38:05.948 D/StrictMode( 8940): at androidx.core.graphics.TypefaceCompatApi28Impl.<init>(TypefaceCompatApi28Impl.java:36)
+06-02 16:38:05.948 D/StrictMode( 8940): at androidx.core.graphics.TypefaceCompat.<clinit>(TypefaceCompat.java:51)
+06-02 16:38:05.948 D/StrictMode( 8940): at androidx.core.graphics.TypefaceCompat.create(TypefaceCompat.java:194)
+06-02 16:38:05.948 D/StrictMode( 8940): at androidx.appcompat.widget.AppCompatTextView.setTypeface(AppCompatTextView.java:708)
+06-02 16:38:05.948 D/StrictMode( 8940): at android.widget.TextView.resolveStyleAndSetTypeface(TextView.java:2037)
+06-02 16:38:05.948 D/StrictMode( 8940): at android.widget.TextView.setTypefaceFromAttrs(TextView.java:2008)
+06-02 16:38:05.948 D/StrictMode( 8940): at android.widget.TextView.applyTextAppearance(TextView.java:3640)
+06-02 16:38:05.948 D/StrictMode( 8940): at android.widget.TextView.<init>(TextView.java:1498)
+06-02 16:38:05.948 D/StrictMode( 8940): at android.widget.TextView.<init>(TextView.java:869)
+06-02 16:38:05.948 D/StrictMode( 8940): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:100)
+06-02 16:38:05.948 D/StrictMode( 8940): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:95)
+06-02 16:38:05.948 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatViewInflater.createTextView(AppCompatViewInflater.java:194)
+06-02 16:38:05.948 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatViewInflater.createView(AppCompatViewInflater.java:115)
+06-02 16:38:05.948 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatDelegateImpl.createView(AppCompatDelegateImpl.java:1548)
+06-02 16:38:05.948 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatDelegateImpl.onCreateView(AppCompatDelegateImpl.java:1599)
+06-02 16:38:05.948 D/StrictMode( 8940): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:772)
+06-02 16:38:05.948 D/StrictMode( 8940): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:05.948 D/StrictMode( 8940): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:05.948 D/StrictMode( 8940): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:05.948 D/StrictMode( 8940): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:05.948 D/StrictMode( 8940): at android.view.LayoutInflater.inflate(LayoutInflater.java:489)
+06-02 16:38:05.948 D/StrictMode( 8940): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:05.948 D/StrictMode( 8940): at android.view.LayoutInflater.inflate(LayoutInflater.java:374)
+06-02 16:38:05.948 D/StrictMode( 8940): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:30)
+06-02 16:38:05.948 D/StrictMode( 8940): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:22)
+06-02 16:38:05.948 D/StrictMode( 8940): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(Unknown Source:6)
+06-02 16:38:05.948 D/StrictMode( 8940): at java.lang.reflect.Constructor.newInstance0(Native Method)
+06-02 16:38:05.948 D/StrictMode( 8940): at java.lang.reflect.Constructor.newInstance(Constructor.java:343)
+06-02 16:38:05.948 D/StrictMode( 8940): at android.view.LayoutInflater.createView(LayoutInflater.java:647)
+06-02 16:38:05.948 D/StrictMode( 8940): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:790)
+06-02 16:38:05.948 D/StrictMode( 8940): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:05.948 D/StrictMode( 8940): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:05.948 D/StrictMode( 8940): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:05.948 D/StrictMode( 8940): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:05.948 D/StrictMode( 8940): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:05.948 D/StrictMode( 8940): at android.view.LayoutInflater.inflate(LayoutInflater.java:515)
+06-02 16:38:05.948 D/StrictMode( 8940): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:05.948 D/StrictMode( 8940): at org.mozilla.fenix.home.HomeFragment.onCreateView(HomeFragment.kt:183)
+06-02 16:38:05.948 D/StrictMode( 8940): at androidx.fragment.app.Fragment.performCreateView(Fragment.java:2698)
+06-02 16:38:05.948 D/StrictMode( 8940): at androidx.fragment.app.FragmentStateManager.createView(FragmentStateManager.java:320)
+06-02 16:38:05.948 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1187)
+06-02 16:38:05.948 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.addAddedFragments(FragmentManager.java:2224)
+06-02 16:38:05.948 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.executeOpsTogether(FragmentManager.java:1997)
+06-02 16:38:05.948 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.removeRedundantOperationsAndExecute(FragmentMa
+06-02 16:38:05.990 I/droid.apps.doc( 8852): Waiting for a blocking GC ProfileSaver
+06-02 16:38:06.011 I/droid.apps.doc( 8852): WaitForGcToComplete blocked ProfileSaver on HeapTrim for 20.559ms
+06-02 16:38:06.052 I/EventLogSendingHelper( 2402): Sending log events.
+06-02 16:38:06.103 D/GeckoThread( 8940): State changed to PROFILE_READY
+06-02 16:38:06.164 D/GeckoViewStartup( 8940): observe: profile-after-change
+06-02 16:38:06.183 D/GeckoViewTelemetryController( 8940): setup - canRecordPrereleaseData true, canRecordReleaseData true
+06-02 16:38:06.247 D/GeckoRuntime( 8940): Lifecycle: onStart
+06-02 16:38:06.250 D/GeckoThread( 8940): State changed to RUNNING
+06-02 16:38:06.252 D/GeckoRuntime( 8940): Lifecycle: onResume
+06-02 16:38:06.256 D/GeckoNetworkManager( 8940): Incoming event start for state OffNoListeners -> OnNoListeners
+06-02 16:38:06.258 D/GeckoNetworkManager( 8940): New network state: UP, WIFI, WIFI
+06-02 16:38:06.264 D/OpenGLRenderer( 8940): Skia GL Pipeline
+06-02 16:38:06.277 I/Gecko ( 8940): -*- nsDNSServiceDiscovery.js : nsDNSServiceDiscovery
+06-02 16:38:06.302 E/SurfaceFlinger( 1728): ro.sf.lcd_density must be defined as a build property
+06-02 16:38:06.327 D/StrictMode( 8940): StrictMode policy violation; ~duration=1066 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:06.327 D/StrictMode( 8940): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:06.327 D/StrictMode( 8940): at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+06-02 16:38:06.327 D/StrictMode( 8940): at java.io.File.exists(File.java:815)
+06-02 16:38:06.327 D/StrictMode( 8940): at android.app.ContextImpl.getDataDir(ContextImpl.java:2253)
+06-02 16:38:06.327 D/StrictMode( 8940): at android.app.ContextImpl.getPreferencesDir(ContextImpl.java:550)
+06-02 16:38:06.327 D/StrictMode( 8940): at android.app.ContextImpl.getSharedPreferencesPath(ContextImpl.java:747)
+06-02 16:38:06.327 D/StrictMode( 8940): at android.app.ContextImpl.getSharedPreferences(ContextImpl.java:400)
+06-02 16:38:06.327 D/StrictMode( 8940): at mozilla.components.support.locale.LocaleManager$Storage.getSharedPreferences(LocaleManager.kt:123)
+06-02 16:38:06.327 D/StrictMode( 8940): at mozilla.components.support.locale.LocaleManager$Storage.getLocale(LocaleManager.kt:99)
+06-02 16:38:06.327 D/StrictMode( 8940): at mozilla.components.support.locale.LocaleManager.getCurrentLocale(LocaleManager.kt:42)
+06-02 16:38:06.327 D/StrictMode( 8940): at mozilla.components.support.locale.LocaleManager.updateResources$support_locale_release(LocaleManager.kt:72)
+06-02 16:38:06.327 D/StrictMode( 8940): at mozilla.components.support.locale.LocaleAwareAppCompatActivity.attachBaseContext(LocaleAwareAppCompatActivity.kt:18)
+06-02 16:38:06.327 D/StrictMode( 8940): at android.app.Activity.attach(Activity.java:7051)
+06-02 16:38:06.327 D/StrictMode( 8940): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2873)
+06-02 16:38:06.327 D/StrictMode( 8940): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3048)
+06-02 16:38:06.327 D/StrictMode( 8940): at android.app.servertransaction.LaunchActivityItem.execute(LaunchActivityItem.java:78)
+06-02 16:38:06.327 D/StrictMode( 8940): at android.app.servertransaction.TransactionExecutor.executeCallbacks(TransactionExecutor.java:108)
+06-02 16:38:06.327 D/StrictMode( 8940): at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:68)
+06-02 16:38:06.327 D/StrictMode( 8940): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1808)
+06-02 16:38:06.327 D/StrictMode( 8940): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:06.327 D/StrictMode( 8940): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:06.327 D/StrictMode( 8940): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:06.327 D/StrictMode( 8940): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:06.327 D/StrictMode( 8940): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:06.327 D/StrictMode( 8940): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:06.328 D/StrictMode( 8940): StrictMode policy violation; ~duration=1066 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:06.328 D/StrictMode( 8940): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:06.328 D/StrictMode( 8940): at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+06-02 16:38:06.328 D/StrictMode( 8940): at java.io.File.exists(File.java:815)
+06-02 16:38:06.328 D/StrictMode( 8940): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:605)
+06-02 16:38:06.328 D/StrictMode( 8940): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:596)
+06-02 16:38:06.328 D/StrictMode( 8940): at android.app.ContextImpl.getPreferencesDir(ContextImpl.java:552)
+06-02 16:38:06.328 D/StrictMode( 8940): at android.app.ContextImpl.getSharedPreferencesPath(ContextImpl.java:747)
+06-02 16:38:06.328 D/StrictMode( 8940): at android.app.ContextImpl.getSharedPreferences(ContextImpl.java:400)
+06-02 16:38:06.328 D/StrictMode( 8940): at mozilla.components.support.locale.LocaleManager$Storage.getSharedPreferences(LocaleManager.kt:123)
+06-02 16:38:06.328 D/StrictMode( 8940): at mozilla.components.support.locale.LocaleManager$Storage.getLocale(LocaleManager.kt:99)
+06-02 16:38:06.328 D/StrictMode( 8940): at mozilla.components.support.locale.LocaleManager.getCurrentLocale(LocaleManager.kt:42)
+06-02 16:38:06.328 D/StrictMode( 8940): at mozilla.components.support.locale.LocaleManager.updateResources$support_locale_release(LocaleManager.kt:72)
+06-02 16:38:06.328 D/StrictMode( 8940): at mozilla.components.support.locale.LocaleAwareAppCompatActivity.attachBaseContext(LocaleAwareAppCompatActivity.kt:18)
+06-02 16:38:06.328 D/StrictMode( 8940): at android.app.Activity.attach(Activity.java:7051)
+06-02 16:38:06.328 D/StrictMode( 8940): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2873)
+06-02 16:38:06.328 D/StrictMode( 8940): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3048)
+06-02 16:38:06.328 D/StrictMode( 8940): at android.app.servertransaction.LaunchActivityItem.execute(LaunchActivityItem.java:78)
+06-02 16:38:06.328 D/StrictMode( 8940): at android.app.servertransaction.TransactionExecutor.executeCallbacks(TransactionExecutor.java:108)
+06-02 16:38:06.328 D/StrictMode( 8940): at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:68)
+06-02 16:38:06.328 D/StrictMode( 8940): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1808)
+06-02 16:38:06.328 D/StrictMode( 8940): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:06.328 D/StrictMode( 8940): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:06.328 D/StrictMode( 8940): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:06.328 D/StrictMode( 8940): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:06.328 D/StrictMode( 8940): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:06.328 D/StrictMode( 8940): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:06.332 D/StrictMode( 8940): StrictMode policy violation; ~duration=725 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:06.332 D/StrictMode( 8940): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:06.332 D/StrictMode( 8940): at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+06-02 16:38:06.332 D/StrictMode( 8940): at java.io.File.exists(File.java:815)
+06-02 16:38:06.332 D/StrictMode( 8940): at android.app.ContextImpl.getDataDir(ContextImpl.java:2253)
+06-02 16:38:06.332 D/StrictMode( 8940): at android.app.ContextImpl.getPreferencesDir(ContextImpl.java:550)
+06-02 16:38:06.332 D/StrictMode( 8940): at android.app.ContextImpl.getSharedPreferencesPath(ContextImpl.java:747)
+06-02 16:38:06.332 D/StrictMode( 8940): at android.app.ContextImpl.getSharedPreferences(ContextImpl.java:400)
+06-02 16:38:06.332 D/StrictMode( 8940): at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+06-02 16:38:06.332 D/StrictMode( 8940): at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+06-02 16:38:06.332 D/StrictMode( 8940): at org.mozilla.fenix.onboarding.FenixOnboarding.<init>(FenixOnboarding.kt:15)
+06-02 16:38:06.332 D/StrictMode( 8940): at org.mozilla.fenix.perf.Performance.disableOnboarding(Performance.kt:72)
+06-02 16:38:06.332 D/StrictMode( 8940): at org.mozilla.fenix.perf.Performance.processIntentIfPerformanceTest(Performance.kt:32)
+06-02 16:38:06.332 D/StrictMode( 8940): at org.mozilla.fenix.HomeActivity.onCreate(HomeActivity.kt:145)
+06-02 16:38:06.332 D/StrictMode( 8940): at android.app.Activity.performCreate(Activity.java:7136)
+06-02 16:38:06.332 D/StrictMode( 8940): at android.app.Activity.performCreate(Activity.java:7127)
+06-02 16:38:06.332 D/StrictMode( 8940): at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1271)
+06-02 16:38:06.332 D/StrictMode( 8940): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2893)
+06-02 16:38:06.332 D/StrictMode( 8940): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3048)
+06-02 16:38:06.332 D/StrictMode( 8940): at android.app.servertransaction.LaunchActivityItem.execute(LaunchActivityItem.java:78)
+06-02 16:38:06.332 D/StrictMode( 8940): at android.app.servertransaction.TransactionExecutor.executeCallbacks(TransactionExecutor.java:108)
+06-02 16:38:06.332 D/StrictMode( 8940): at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:68)
+06-02 16:38:06.332 D/StrictMode( 8940): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1808)
+06-02 16:38:06.332 D/StrictMode( 8940): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:06.332 D/StrictMode( 8940): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:06.332 D/StrictMode( 8940): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:06.332 D/StrictMode( 8940): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:06.332 D/StrictMode( 8940): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:06.332 D/StrictMode( 8940): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:06.342 D/StrictMode( 8940): StrictMode policy violation; ~duration=725 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:06.342 D/StrictMode( 8940): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:06.342 D/StrictMode( 8940): at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+06-02 16:38:06.342 D/StrictMode( 8940): at java.io.File.exists(File.java:815)
+06-02 16:38:06.342 D/StrictMode( 8940): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:605)
+06-02 16:38:06.342 D/StrictMode( 8940): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:596)
+06-02 16:38:06.342 D/StrictMode( 8940): at android.app.ContextImpl.getPreferencesDir(ContextImpl.java:552)
+06-02 16:38:06.342 D/StrictMode( 8940): at android.app.ContextImpl.getSharedPreferencesPath(ContextImpl.java:747)
+06-02 16:38:06.342 D/StrictMode( 8940): at android.app.ContextImpl.getSharedPreferences(ContextImpl.java:400)
+06-02 16:38:06.342 D/StrictMode( 8940): at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+06-02 16:38:06.342 D/StrictMode( 8940): at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+06-02 16:38:06.342 D/StrictMode( 8940): at org.mozilla.fenix.onboarding.FenixOnboarding.<init>(FenixOnboarding.kt:15)
+06-02 16:38:06.342 D/StrictMode( 8940): at org.mozilla.fenix.perf.Performance.disableOnboarding(Performance.kt:72)
+06-02 16:38:06.342 D/StrictMode( 8940): at org.mozilla.fenix.perf.Performance.processIntentIfPerformanceTest(Performance.kt:32)
+06-02 16:38:06.342 D/StrictMode( 8940): at org.mozilla.fenix.HomeActivity.onCreate(HomeActivity.kt:145)
+06-02 16:38:06.342 D/StrictMode( 8940): at android.app.Activity.performCreate(Activity.java:7136)
+06-02 16:38:06.342 D/StrictMode( 8940): at android.app.Activity.performCreate(Activity.java:7127)
+06-02 16:38:06.342 D/StrictMode( 8940): at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1271)
+06-02 16:38:06.342 D/StrictMode( 8940): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2893)
+06-02 16:38:06.342 D/StrictMode( 8940): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3048)
+06-02 16:38:06.342 D/StrictMode( 8940): at android.app.servertransaction.LaunchActivityItem.execute(LaunchActivityItem.java:78)
+06-02 16:38:06.342 D/StrictMode( 8940): at android.app.servertransaction.TransactionExecutor.executeCallbacks(TransactionExecutor.java:108)
+06-02 16:38:06.342 D/StrictMode( 8940): at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:68)
+06-02 16:38:06.342 D/StrictMode( 8940): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1808)
+06-02 16:38:06.342 D/StrictMode( 8940): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:06.342 D/StrictMode( 8940): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:06.342 D/StrictMode( 8940): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:06.342 D/StrictMode( 8940): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:06.342 D/StrictMode( 8940): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:06.342 D/StrictMode( 8940): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:06.348 D/StrictMode( 8940): StrictMode policy violation; ~duration=724 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:06.348 D/StrictMode( 8940): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:06.348 D/StrictMode( 8940): at android.app.SharedPreferencesImpl.awaitLoadedLocked(SharedPreferencesImpl.java:256)
+06-02 16:38:06.348 D/StrictMode( 8940): at android.app.SharedPreferencesImpl.edit(SharedPreferencesImpl.java:349)
+06-02 16:38:06.348 D/StrictMode( 8940): at org.mozilla.fenix.onboarding.FenixOnboarding.setOnboardedVersion(FenixOnboarding.kt:42)
+06-02 16:38:06.348 D/StrictMode( 8940): at org.mozilla.fenix.onboarding.FenixOnboarding.finish(FenixOnboarding.kt:25)
+06-02 16:38:06.348 D/StrictMode( 8940): at org.mozilla.fenix.perf.Performance.disableOnboarding(Performance.kt:72)
+06-02 16:38:06.348 D/StrictMode( 8940): at org.mozilla.fenix.perf.Performance.processIntentIfPerformanceTest(Performance.kt:32)
+06-02 16:38:06.348 D/StrictMode( 8940): at org.mozilla.fenix.HomeActivity.onCreate(HomeActivity.kt:145)
+06-02 16:38:06.348 D/StrictMode( 8940): at android.app.Activity.performCreate(Activity.java:7136)
+06-02 16:38:06.348 D/StrictMode( 8940): at android.app.Activity.performCreate(Activity.java:7127)
+06-02 16:38:06.348 D/StrictMode( 8940): at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1271)
+06-02 16:38:06.348 D/StrictMode( 8940): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2893)
+06-02 16:38:06.348 D/StrictMode( 8940): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3048)
+06-02 16:38:06.348 D/StrictMode( 8940): at android.app.servertransaction.LaunchActivityItem.execute(LaunchActivityItem.java:78)
+06-02 16:38:06.348 D/StrictMode( 8940): at android.app.servertransaction.TransactionExecutor.executeCallbacks(TransactionExecutor.java:108)
+06-02 16:38:06.348 D/StrictMode( 8940): at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:68)
+06-02 16:38:06.348 D/StrictMode( 8940): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1808)
+06-02 16:38:06.348 D/StrictMode( 8940): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:06.348 D/StrictMode( 8940): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:06.348 D/StrictMode( 8940): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:06.348 D/StrictMode( 8940): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:06.348 D/StrictMode( 8940): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:06.348 D/StrictMode( 8940): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:06.352 D/StrictMode( 8940): StrictMode policy violation; ~duration=237 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:06.352 D/StrictMode( 8940): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:06.352 D/StrictMode( 8940): at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+06-02 16:38:06.352 D/StrictMode( 8940): at java.io.File.exists(File.java:815)
+06-02 16:38:06.352 D/StrictMode( 8940): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:605)
+06-02 16:38:06.352 D/StrictMode( 8940): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:596)
+06-02 16:38:06.352 D/StrictMode( 8940): at android.app.ContextImpl.getPreferencesDir(ContextImpl.java:552)
+06-02 16:38:06.352 D/StrictMode( 8940): at android.app.ContextImpl.getSharedPreferencesPath(ContextImpl.java:747)
+06-02 16:38:06.352 D/StrictMode( 8940): at android.app.ContextImpl.getSharedPreferences(ContextImpl.java:400)
+06-02 16:38:06.352 D/StrictMode( 8940): at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+06-02 16:38:06.352 D/StrictMode( 8940): at mozilla.components.support.locale.LocaleManager$Storage.getSharedPreferences(LocaleManager.kt:123)
+06-02 16:38:06.352 D/StrictMode( 8940): at mozilla.components.support.locale.LocaleManager$Storage.getLocale(LocaleManager.kt:99)
+06-02 16:38:06.352 D/StrictMode( 8940): at mozilla.components.support.locale.LocaleManager.getCurrentLocale(LocaleManager.kt:42)
+06-02 16:38:06.352 D/StrictMode( 8940): at org.mozilla.fenix.settings.advanced.LocaleManagerExtensionKt.getSelectedLocale(LocaleManagerExtension.kt:39)
+06-02 16:38:06.352 D/StrictMode( 8940): at org.mozilla.fenix.settings.advanced.LocaleManagerExtensionKt.getSelectedLocale$default(LocaleManagerExtension.kt:37)
+06-02 16:38:06.352 D/StrictMode( 8940): at org.mozilla.fenix.components.TopSiteStorage.addDefaultTopSites(TopSiteStorage.kt:57)
+06-02 16:38:06.352 D/StrictMode( 8940): at org.mozilla.fenix.components.TopSiteStorage.<init>(TopSiteStorage.kt:30)
+06-02 16:38:06.352 D/StrictMode( 8940): at org.mozilla.fenix.components.Core$topSiteStorage$2.invoke(Core.kt:216)
+06-02 16:38:06.352 D/StrictMode( 8940): at org.mozilla.fenix.components.Core$topSiteStorage$2.invoke(Core.kt:57)
+06-02 16:38:06.352 D/StrictMode( 8940): at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+06-02 16:38:06.352 D/StrictMode( 8940): at org.mozilla.fenix.components.Core.getTopSiteStorage(Unknown Source:8)
+06-02 16:38:06.352 D/StrictMode( 8940): at org.mozilla.fenix.home.HomeFragment$onCreateView$2.invoke(HomeFragment.kt:210)
+06-02 16:38:06.352 D/StrictMode( 8940): at org.mozilla.fenix.home.HomeFragment$onCreateView$2.invoke(HomeFragment.kt:114)
+06-02 16:38:06.352 D/StrictMode( 8940): at org.mozilla.fenix.components.StoreProviderFactory.create(StoreProvider.kt:42)
+06-02 16:38:06.352 D/StrictMode( 8940): at androidx.lifecycle.ViewModelProvider.get(ViewModelProvider.java:187)
+06-02 16:38:06.352 D/StrictMode( 8940): at androidx.lifecycle.ViewModelProvider.get(ViewModelProvider.java:150)
+06-02 16:38:06.352 D/StrictMode( 8940): at org.mozilla.fenix.components.StoreProvider$Companion.get(StoreProvider.kt:46)
+06-02 16:38:06.352 D/StrictMode( 8940): at org.mozilla.fenix.home.HomeFragment.onCreateView(HomeFragment.kt:203)
+06-02 16:38:06.352 D/StrictMode( 8940): at androidx.fragment.app.Fragment.performCreateView(Fragment.java:2698)
+06-02 16:38:06.352 D/StrictMode( 8940): at androidx.fragment.app.FragmentStateManager.createView(FragmentStateManager.java:320)
+06-02 16:38:06.352 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1187)
+06-02 16:38:06.352 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.addAddedFragments(FragmentManager.java:2224)
+06-02 16:38:06.352 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.executeOpsTogether(FragmentManager.java:1997)
+06-02 16:38:06.352 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.removeRedundantOperationsAndExecute(FragmentManager.java:1953)
+06-02 16:38:06.352 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.execPendingActions(FragmentManager.java:1849)
+06-02 16:38:06.352 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.dispatchStateChange(FragmentManager.java:2629)
+06-02 16:38:06.352 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.dispatchActivityCreated(FragmentManager.java:2577)
+06-02 16:38:06.352 D/StrictMode( 8940): at androidx.fragment.app.Fragment.performActivityCreated(Fragment.java:2722)
+06-02 16:38:06.352 D/StrictMode( 8940): at androidx.fragment.app.FragmentStateManager.activityCreated(FragmentStateManager.java:346)
+06-02 16:38:06.352 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1188)
+06-02 16:38:06.352 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1356)
+06-02 16:38:06.352 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.moveFragmentToExpectedState(FragmentManager.java:1434)
+06-02 16:38:06.352 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1497)
+06-02 16:38:06.352 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.dispatchStateChange(FragmentManager.java:2625)
+06-02 16:38:06.352 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.dispatchActivityCreated(FragmentManager.java:2577)
+06-02 16:38:06.352 D/StrictMode( 8940): at androidx.fragment.app.FragmentController.dispatchActivityCreated(FragmentController.java:247)
+06-02 16:38:06.352 D/StrictMode( 8940): at androidx.fragment.app.FragmentActivity.onStart(FragmentActivity.java:541)
+06-02 16:38:06.352 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatActivity.onStart(AppCompatActivity.java:210)
+06-02 16:38:06.352 D/StrictMode( 8940): at android.app.Instrumentation.callActivityOnStart(Instrumentation.java:1391)
+06-02 16:38:06.352 D/StrictMode( 8940):
+06-02 16:38:06.368 D/StrictMode( 8940): StrictMode policy violation; ~duration=131 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:06.368 D/StrictMode( 8940): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:06.368 D/StrictMode( 8940): at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+06-02 16:38:06.368 D/StrictMode( 8940): at java.io.File.exists(File.java:815)
+06-02 16:38:06.368 D/StrictMode( 8940): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:605)
+06-02 16:38:06.368 D/StrictMode( 8940): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:596)
+06-02 16:38:06.368 D/StrictMode( 8940): at android.app.ContextImpl.getPreferencesDir(ContextImpl.java:552)
+06-02 16:38:06.368 D/StrictMode( 8940): at android.app.ContextImpl.getSharedPreferencesPath(ContextImpl.java:747)
+06-02 16:38:06.368 D/StrictMode( 8940): at android.app.ContextImpl.getSharedPreferences(ContextImpl.java:400)
+06-02 16:38:06.368 D/StrictMode( 8940): at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+06-02 16:38:06.368 D/StrictMode( 8940): at org.mozilla.fenix.components.AccountAbnormalities.<init>(AccountAbnormalities.kt:78)
+06-02 16:38:06.368 D/StrictMode( 8940): at org.mozilla.fenix.components.AccountAbnormalities.<init>(AccountAbnormalities.kt:60)
+06-02 16:38:06.368 D/StrictMode( 8940): at org.mozilla.fenix.components.BackgroundServices.<init>(BackgroundServices.kt:103)
+06-02 16:38:06.368 D/StrictMode( 8940): at org.mozilla.fenix.components.Components$backgroundServices$2.invoke(Components.kt:34)
+06-02 16:38:06.368 D/StrictMode( 8940): at org.mozilla.fenix.components.Components$backgroundServices$2.invoke(Components.kt:32)
+06-02 16:38:06.368 D/StrictMode( 8940): at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+06-02 16:38:06.368 D/StrictMode( 8940): at org.mozilla.fenix.components.Components.getBackgroundServices(Unknown Source:7)
+06-02 16:38:06.368 D/StrictMode( 8940): at org.mozilla.fenix.home.HomeMenu$coreMenuItems$2.invoke(HomeMenu.kt:131)
+06-02 16:38:06.368 D/StrictMode( 8940): at org.mozilla.fenix.home.HomeMenu$coreMenuItems$2.invoke(HomeMenu.kt:31)
+06-02 16:38:06.368 D/StrictMode( 8940): at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+06-02 16:38:06.368 D/StrictMode( 8940): at org.mozilla.fenix.home.HomeMenu.getCoreMenuItems(Unknown Source:7)
+06-02 16:38:06.368 D/StrictMode( 8940): at org.mozilla.fenix.home.HomeMenu.<init>(HomeMenu.kt:170)
+06-02 16:38:06.368 D/StrictMode( 8940): at org.mozilla.fenix.home.HomeFragment.createHomeMenu(HomeFragment.kt:668)
+06-02 16:38:06.368 D/StrictMode( 8940): at org.mozilla.fenix.home.HomeFragment.onViewCreated(HomeFragment.kt:337)
+06-02 16:38:06.368 D/StrictMode( 8940): at androidx.fragment.app.FragmentStateManager.createView(FragmentStateManager.java:332)
+06-02 16:38:06.368 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1187)
+06-02 16:38:06.368 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.addAddedFragments(FragmentManager.java:2224)
+06-02 16:38:06.368 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.executeOpsTogether(FragmentManager.java:1997)
+06-02 16:38:06.368 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.removeRedundantOperationsAndExecute(FragmentManager.java:1953)
+06-02 16:38:06.368 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.execPendingActions(FragmentManager.java:1849)
+06-02 16:38:06.368 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.dispatchStateChange(FragmentManager.java:2629)
+06-02 16:38:06.368 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.dispatchActivityCreated(FragmentManager.java:2577)
+06-02 16:38:06.368 D/StrictMode( 8940): at androidx.fragment.app.Fragment.performActivityCreated(Fragment.java:2722)
+06-02 16:38:06.368 D/StrictMode( 8940): at androidx.fragment.app.FragmentStateManager.activityCreated(FragmentStateManager.java:346)
+06-02 16:38:06.368 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1188)
+06-02 16:38:06.368 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1356)
+06-02 16:38:06.368 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.moveFragmentToExpectedState(FragmentManager.java:1434)
+06-02 16:38:06.368 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1497)
+06-02 16:38:06.368 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.dispatchStateChange(FragmentManager.java:2625)
+06-02 16:38:06.368 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.dispatchActivityCreated(FragmentManager.java:2577)
+06-02 16:38:06.368 D/StrictMode( 8940): at androidx.fragment.app.FragmentController.dispatchActivityCreated(FragmentController.java:247)
+06-02 16:38:06.368 D/StrictMode( 8940): at androidx.fragment.app.FragmentActivity.onStart(FragmentActivity.java:541)
+06-02 16:38:06.368 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatActivity.onStart(AppCompatActivity.java:210)
+06-02 16:38:06.368 D/StrictMode( 8940): at android.app.Instrumentation.callActivityOnStart(Instrumentation.java:1391)
+06-02 16:38:06.368 D/StrictMode( 8940): at android.app.Activity.performStart(Activity.java:7157)
+06-02 16:38:06.368 D/StrictMode( 8940): at android.app.ActivityThread.handleStartActivity(ActivityThread.java:2937)
+06-02 16:38:06.368 D/StrictMode( 8940): at android.app.servertransaction.TransactionExecutor.performLifecycleSequence(TransactionExecutor.java:180)
+06-02 16:38:06.368 D/StrictMode( 8940): at android.app.servertransaction.TransactionExecutor.cycleToPath(TransactionExecutor.java:165)
+06-02 16:38:06.368 D/StrictMode( 8940): at android.app.servertransaction.TransactionExecutor.executeLifecycleState(TransactionExecutor.java:142)
+06-02 16:38:06.368 D/StrictMode( 8940): at android.app.servertransaction.TransactionExecutor.execute(Transa
+06-02 16:38:06.379 D/StrictMode( 8940): StrictMode policy violation; ~duration=96 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:06.379 D/StrictMode( 8940): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:06.379 D/StrictMode( 8940): at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+06-02 16:38:06.379 D/StrictMode( 8940): at java.io.File.exists(File.java:815)
+06-02 16:38:06.379 D/StrictMode( 8940): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:605)
+06-02 16:38:06.379 D/StrictMode( 8940): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:596)
+06-02 16:38:06.379 D/StrictMode( 8940): at android.app.ContextImpl.getPreferencesDir(ContextImpl.java:552)
+06-02 16:38:06.379 D/StrictMode( 8940): at android.app.ContextImpl.getSharedPreferencesPath(ContextImpl.java:747)
+06-02 16:38:06.379 D/StrictMode( 8940): at android.app.ContextImpl.getSharedPreferences(ContextImpl.java:400)
+06-02 16:38:06.379 D/StrictMode( 8940): at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+06-02 16:38:06.379 D/StrictMode( 8940): at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+06-02 16:38:06.379 D/StrictMode( 8940): at android.preference.PreferenceManager.getDefaultSharedPreferences(PreferenceManager.java:526)
+06-02 16:38:06.379 D/StrictMode( 8940): at org.mozilla.fenix.whatsnew.SharedPreferenceWhatsNewStorage.<init>(WhatsNewStorage.kt:35)
+06-02 16:38:06.379 D/StrictMode( 8940): at org.mozilla.fenix.whatsnew.WhatsNew$Companion.shouldHighlightWhatsNew(WhatsNew.kt:71)
+06-02 16:38:06.379 D/StrictMode( 8940): at org.mozilla.fenix.home.HomeMenu$coreMenuItems$2$whatsNewItem$1.invoke(HomeMenu.kt:92)
+06-02 16:38:06.379 D/StrictMode( 8940): at org.mozilla.fenix.home.HomeMenu$coreMenuItems$2$whatsNewItem$1.invoke(HomeMenu.kt:31)
+06-02 16:38:06.379 D/StrictMode( 8940): at mozilla.components.browser.menu.ext.BrowserMenuItemKt$getHighlight$3.invoke(BrowserMenuItem.kt:18)
+06-02 16:38:06.379 D/StrictMode( 8940): at mozilla.components.browser.menu.ext.BrowserMenuItemKt$getHighlight$3.invoke(Unknown Source:2)
+06-02 16:38:06.379 D/StrictMode( 8940): at kotlin.sequences.FilteringSequence$iterator$1.calcNext(Sequences.kt:133)
+06-02 16:38:06.379 D/StrictMode( 8940): at kotlin.sequences.FilteringSequence$iterator$1.hasNext(Sequences.kt:156)
+06-02 16:38:06.379 D/StrictMode( 8940): at kotlin.sequences.TransformingSequence$iterator$1.hasNext(Sequences.kt:176)
+06-02 16:38:06.379 D/StrictMode( 8940): at kotlin.sequences.FilteringSequence$iterator$1.calcNext(Sequences.kt:131)
+06-02 16:38:06.379 D/StrictMode( 8940): at kotlin.sequences.FilteringSequence$iterator$1.hasNext(Sequences.kt:156)
+06-02 16:38:06.379 D/StrictMode( 8940): at mozilla.components.browser.menu.ext.BrowserMenuItemKt.getHighlight(BrowserMenuItem.kt:31)
+06-02 16:38:06.379 D/StrictMode( 8940): at org.mozilla.fenix.home.HomeMenu$coreMenuItems$2.invoke(HomeMenu.kt:149)
+06-02 16:38:06.379 D/StrictMode( 8940): at org.mozilla.fenix.home.HomeMenu$coreMenuItems$2.invoke(HomeMenu.kt:31)
+06-02 16:38:06.379 D/StrictMode( 8940): at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+06-02 16:38:06.379 D/StrictMode( 8940): at org.mozilla.fenix.home.HomeMenu.getCoreMenuItems(Unknown Source:7)
+06-02 16:38:06.379 D/StrictMode( 8940): at org.mozilla.fenix.home.HomeMenu.<init>(HomeMenu.kt:170)
+06-02 16:38:06.379 D/StrictMode( 8940): at org.mozilla.fenix.home.HomeFragment.createHomeMenu(HomeFragment.kt:668)
+06-02 16:38:06.379 D/StrictMode( 8940): at org.mozilla.fenix.home.HomeFragment.onViewCreated(HomeFragment.kt:337)
+06-02 16:38:06.379 D/StrictMode( 8940): at androidx.fragment.app.FragmentStateManager.createView(FragmentStateManager.java:332)
+06-02 16:38:06.379 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1187)
+06-02 16:38:06.379 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.addAddedFragments(FragmentManager.java:2224)
+06-02 16:38:06.379 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.executeOpsTogether(FragmentManager.java:1997)
+06-02 16:38:06.379 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.removeRedundantOperationsAndExecute(FragmentManager.java:1953)
+06-02 16:38:06.379 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.execPendingActions(FragmentManager.java:1849)
+06-02 16:38:06.379 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.dispatchStateChange(FragmentManager.java:2629)
+06-02 16:38:06.379 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.dispatchActivityCreated(FragmentManager.java:2577)
+06-02 16:38:06.379 D/StrictMode( 8940): at androidx.fragment.app.Fragment.performActivityCreated(Fragment.java:2722)
+06-02 16:38:06.379 D/StrictMode( 8940): at androidx.fragment.app.FragmentStateManager.activityCreated(FragmentStateManager.java:346)
+06-02 16:38:06.379 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1188)
+06-02 16:38:06.379 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1356)
+06-02 16:38:06.379 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.moveFragmentToExpectedState(FragmentManager.java:1434)
+06-02 16:38:06.379 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1497)
+06-02 16:38:06.379 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.dispatchStateChange(FragmentManager.java:2625)
+06-02 16:38:06.379 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.dispatchActivityCreated(FragmentManager.java:2577)
+06-02 16:38:06.379 D/StrictMode( 8940): at androidx.fragment.app.FragmentController.dispatchActivityCreated(FragmentController.java:247)
+06-02 16:38:06.379 D/StrictMode( 8940): at androidx.fragment.app.FragmentActivity.onStart(FragmentActivity.java:541)
+06-02 16:38:06.379 D/StrictMode( 8940): at androidx.appcompat.app.AppCompatAc
+06-02 16:38:06.383 D/LeakCanary( 8940): LeakCanary is running and ready to detect leaks
+06-02 16:38:06.386 I/libglean_ffi( 8940): glean_core::ping: Collecting baseline
+06-02 16:38:06.398 D/libglean_ffi( 8940): glean_core::ping: Storing ping '0f17279d-898f-4274-874d-358ab99b6d4e' at '/data/user/0/org.mozilla.fenix.debug/glean_data/pending_pings/0f17279d-898f-4274-874d-358ab99b6d4e'
+06-02 16:38:06.399 I/libglean_ffi( 8940): glean_core: The ping 'baseline' was submitted and will be sent as soon as possible
+06-02 16:38:06.424 D/GeckoNetworkManager( 8940): Incoming event enableNotifications for state OnNoListeners -> OnWithListeners
+06-02 16:38:06.431 D/GeckoNetworkManager( 8940): New network state: UP, WIFI, WIFI
+06-02 16:38:06.436 W/ActivityManager( 1869): Receiver with filter android.content.IntentFilter@68a603a already registered for pid 8940, callerPackage is org.mozilla.fenix.debug
+06-02 16:38:06.439 D/GeckoNetworkManager( 8940): Incoming event receivedUpdate for state OnWithListeners -> OnWithListeners
+06-02 16:38:06.440 D/GeckoNetworkManager( 8940): New network state: UP, WIFI, WIFI
+06-02 16:38:06.449 D/GeckoViewStartup( 8940): onEvent GeckoView:SetLocale
+06-02 16:38:06.459 D/GeckoViewStartup( 8940): onEvent GeckoView:ResetUserPrefs
+06-02 16:38:06.493 D/GeckoViewRemoteDebugger( 8940): onInit
+06-02 16:38:06.495 D/GeckoViewConsole( 8940): enabled = false
+06-02 16:38:06.540 D/WIFI_UT ( 1869): got request NetworkRequest [ TRACK_DEFAULT id=33, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10099] ] with score 60
+06-02 16:38:06.540 D/WIFI ( 1869): got request NetworkRequest [ TRACK_DEFAULT id=33, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10099] ] with score 60
+06-02 16:38:06.541 D/PhoneSwitcherNetworkRequstListener( 2121): got request NetworkRequest [ TRACK_DEFAULT id=33, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10099] ] with score 60
+06-02 16:38:06.548 I/lla.fenix.debu( 8940): Background concurrent copying GC freed 21002(1731KB) AllocSpace objects, 41(1252KB) LOS objects, 49% free, 4MB/8MB, paused 488us total 183.846ms
+06-02 16:38:06.562 D/GeckoViewStartup( 8940): onEvent GeckoView:SetLocale
+06-02 16:38:06.563 D/GeckoViewStartup( 8940): onEvent GeckoView:SetDefaultPrefs
+06-02 16:38:06.578 I/chatty ( 8940): uid=10099(org.mozilla.fenix.debug) identical 1 line
+06-02 16:38:06.584 D/GeckoViewStartup( 8940): onEvent GeckoView:SetDefaultPrefs
+06-02 16:38:06.587 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:06.596 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:06.602 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:06.607 D/ ( 8940): HostConnection::get() New Host Connection established 0xcd97f0c0, tid 9026
+06-02 16:38:06.614 D/GeckoViewStartup( 8940): onEvent GeckoView:SetDefaultPrefs
+06-02 16:38:06.616 I/ConfigStore( 8940): android::hardware::configstore::V1_0::ISurfaceFlingerConfigs::hasWideColorDisplay retrieved: 0
+06-02 16:38:06.616 D/GeckoThread( 8988): State changed to LAUNCHED
+06-02 16:38:06.616 I/ConfigStore( 8940): android::hardware::configstore::V1_0::ISurfaceFlingerConfigs::hasHDRDisplay retrieved: 0
+06-02 16:38:06.616 I/OpenGLRenderer( 8940): Initialized EGL, version 1.4
+06-02 16:38:06.616 D/OpenGLRenderer( 8940): Swap behavior 1
+06-02 16:38:06.616 W/OpenGLRenderer( 8940): Failed to choose config with EGL_SWAP_BEHAVIOR_PRESERVED, retrying without...
+06-02 16:38:06.616 D/OpenGLRenderer( 8940): Swap behavior 0
+06-02 16:38:06.617 D/EGL_emulation( 8940): eglCreateContext: 0xe33868c0: maj 3 min 0 rcv 3
+06-02 16:38:06.618 D/GeckoViewStartup( 8940): onEvent GeckoView:SetDefaultPrefs
+06-02 16:38:06.619 I/GeckoThread( 8988): preparing to run Gecko
+06-02 16:38:06.619 D/EGL_emulation( 8940): eglMakeCurrent: 0xe33868c0: ver 3 0 (tinfo 0xb3475200)
+06-02 16:38:06.624 E/SurfaceFlinger( 1728): ro.sf.lcd_density must be defined as a build property
+06-02 16:38:06.631 D/GeckoViewStartup( 8940): onEvent GeckoView:SetDefaultPrefs
+06-02 16:38:06.636 W/lla.fenix.debu( 8940): Accessing hidden field Landroid/os/Trace;->TRACE_TAG_APP:J (light greylist, reflection)
+06-02 16:38:06.641 D/StrictMode( 8940): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/os/Trace;->TRACE_TAG_APP:J
+06-02 16:38:06.641 D/StrictMode( 8940): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:06.641 D/StrictMode( 8940): at java.lang.Class.getPublicFieldRecursive(Native Method)
+06-02 16:38:06.641 D/StrictMode( 8940): at java.lang.Class.getField(Class.java:1599)
+06-02 16:38:06.641 D/StrictMode( 8940): at androidx.core.os.TraceCompat.<clinit>(TraceCompat.java:48)
+06-02 16:38:06.641 D/StrictMode( 8940): at androidx.core.os.TraceCompat.beginSection(TraceCompat.java:100)
+06-02 16:38:06.641 D/StrictMode( 8940): at androidx.recyclerview.widget.RecyclerView.onLayout(RecyclerView.java:4403)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.641 D/StrictMode( 8940): at com.google.android.material.appbar.HeaderScrollingViewBehavior.layoutChild(HeaderScrollingViewBehavior.java:148)
+06-02 16:38:06.641 D/StrictMode( 8940): at com.google.android.material.appbar.ViewOffsetBehavior.onLayoutChild(ViewOffsetBehavior.java:43)
+06-02 16:38:06.641 D/StrictMode( 8940): at com.google.android.material.appbar.AppBarLayout$ScrollingViewBehavior.onLayoutChild(AppBarLayout.java:1892)
+06-02 16:38:06.641 D/StrictMode( 8940): at androidx.coordinatorlayout.widget.CoordinatorLayout.onLayout(CoordinatorLayout.java:918)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:06.641 D/StrictMode( 8940): at com.android.internal.policy.DecorView.onLayout(DecorView.java:753)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.view.ViewRootImpl.performLayout(ViewRootImpl.java:2792)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.view.ViewRootImpl.performTraversals(ViewRootImpl.java:2319)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.view.ViewRootImpl.doTraversal(ViewRootImpl.java:1460)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.view.ViewRootImpl$TraversalRunnable.run(ViewRootImpl.java:7183)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.view.Choreographer$CallbackRecord.run(Choreographer.java:949)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.view.Choreographer.doCallbacks(Choreographer.java:761)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.view.Choreographer.doFrame(Choreographer.java:696)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.view.Choreographer$FrameDisplayEventReceiver.run(Choreographer.java:935)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.os.Handler.handleCallback(Handler.java:873)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.os.Handler.dispatchMessage(Handler.java:99)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:06.641 D/StrictMode( 8940): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:06.641 D/StrictMode( 8940): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:06.641 D/StrictMode( 8940): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:06.641 D/StrictMode( 8940): at com.and
+06-02 16:38:06.641 W/lla.fenix.debu( 8940): Accessing hidden method Landroid/os/Trace;->isTagEnabled(J)Z (light greylist, reflection)
+06-02 16:38:06.645 D/StrictMode( 8940): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/os/Trace;->isTagEnabled(J)Z
+06-02 16:38:06.645 D/StrictMode( 8940): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:06.645 D/StrictMode( 8940): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:06.645 D/StrictMode( 8940): at java.lang.Class.getPublicMethodRecursive(Class.java:2075)
+06-02 16:38:06.645 D/StrictMode( 8940): at java.lang.Class.getMethod(Class.java:2063)
+06-02 16:38:06.645 D/StrictMode( 8940): at java.lang.Class.getMethod(Class.java:1690)
+06-02 16:38:06.645 D/StrictMode( 8940): at androidx.core.os.TraceCompat.<clinit>(TraceCompat.java:51)
+06-02 16:38:06.645 D/StrictMode( 8940): at androidx.core.os.TraceCompat.beginSection(TraceCompat.java:100)
+06-02 16:38:06.645 D/StrictMode( 8940): at androidx.recyclerview.widget.RecyclerView.onLayout(RecyclerView.java:4403)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.645 D/StrictMode( 8940): at com.google.android.material.appbar.HeaderScrollingViewBehavior.layoutChild(HeaderScrollingViewBehavior.java:148)
+06-02 16:38:06.645 D/StrictMode( 8940): at com.google.android.material.appbar.ViewOffsetBehavior.onLayoutChild(ViewOffsetBehavior.java:43)
+06-02 16:38:06.645 D/StrictMode( 8940): at com.google.android.material.appbar.AppBarLayout$ScrollingViewBehavior.onLayoutChild(AppBarLayout.java:1892)
+06-02 16:38:06.645 D/StrictMode( 8940): at androidx.coordinatorlayout.widget.CoordinatorLayout.onLayout(CoordinatorLayout.java:918)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:06.645 D/StrictMode( 8940): at com.android.internal.policy.DecorView.onLayout(DecorView.java:753)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.view.ViewRootImpl.performLayout(ViewRootImpl.java:2792)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.view.ViewRootImpl.performTraversals(ViewRootImpl.java:2319)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.view.ViewRootImpl.doTraversal(ViewRootImpl.java:1460)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.view.ViewRootImpl$TraversalRunnable.run(ViewRootImpl.java:7183)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.view.Choreographer$CallbackRecord.run(Choreographer.java:949)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.view.Choreographer.doCallbacks(Choreographer.java:761)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.view.Choreographer.doFrame(Choreographer.java:696)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.view.Choreographer$FrameDisplayEventReceiver.run(Choreographer.java:935)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.os.Handler.handleCallback(Handler.java:873)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.os.Handler.dispatchMessage(Handler.java:99)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:06.645 D/StrictMode( 8940): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:06.645 D/StrictMode( 8940): at java.lang.reflect.Method.invoke
+06-02 16:38:06.645 W/lla.fenix.debu( 8940): Accessing hidden method Landroid/os/Trace;->asyncTraceBegin(JLjava/lang/String;I)V (light greylist, reflection)
+06-02 16:38:06.653 D/StrictMode( 8940): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/os/Trace;->asyncTraceBegin(JLjava/lang/String;I)V
+06-02 16:38:06.653 D/StrictMode( 8940): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:06.653 D/StrictMode( 8940): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:06.653 D/StrictMode( 8940): at java.lang.Class.getPublicMethodRecursive(Class.java:2075)
+06-02 16:38:06.653 D/StrictMode( 8940): at java.lang.Class.getMethod(Class.java:2063)
+06-02 16:38:06.653 D/StrictMode( 8940): at java.lang.Class.getMethod(Class.java:1690)
+06-02 16:38:06.653 D/StrictMode( 8940): at androidx.core.os.TraceCompat.<clinit>(TraceCompat.java:52)
+06-02 16:38:06.653 D/StrictMode( 8940): at androidx.core.os.TraceCompat.beginSection(TraceCompat.java:100)
+06-02 16:38:06.653 D/StrictMode( 8940): at androidx.recyclerview.widget.RecyclerView.onLayout(RecyclerView.java:4403)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.653 D/StrictMode( 8940): at com.google.android.material.appbar.HeaderScrollingViewBehavior.layoutChild(HeaderScrollingViewBehavior.java:148)
+06-02 16:38:06.653 D/StrictMode( 8940): at com.google.android.material.appbar.ViewOffsetBehavior.onLayoutChild(ViewOffsetBehavior.java:43)
+06-02 16:38:06.653 D/StrictMode( 8940): at com.google.android.material.appbar.AppBarLayout$ScrollingViewBehavior.onLayoutChild(AppBarLayout.java:1892)
+06-02 16:38:06.653 D/StrictMode( 8940): at androidx.coordinatorlayout.widget.CoordinatorLayout.onLayout(CoordinatorLayout.java:918)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:06.653 D/StrictMode( 8940): at com.android.internal.policy.DecorView.onLayout(DecorView.java:753)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.view.ViewRootImpl.performLayout(ViewRootImpl.java:2792)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.view.ViewRootImpl.performTraversals(ViewRootImpl.java:2319)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.view.ViewRootImpl.doTraversal(ViewRootImpl.java:1460)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.view.ViewRootImpl$TraversalRunnable.run(ViewRootImpl.java:7183)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.view.Choreographer$CallbackRecord.run(Choreographer.java:949)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.view.Choreographer.doCallbacks(Choreographer.java:761)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.view.Choreographer.doFrame(Choreographer.java:696)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.view.Choreographer$FrameDisplayEventReceiver.run(Choreographer.java:935)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.os.Handler.handleCallback(Handler.java:873)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.os.Handler.dispatchMessage(Handler.java:99)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:06.653 D/StrictMode( 8940): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:06.653 D/StrictMode( 8940): at java.lang
+06-02 16:38:06.653 W/lla.fenix.debu( 8940): Accessing hidden method Landroid/os/Trace;->asyncTraceEnd(JLjava/lang/String;I)V (light greylist, reflection)
+06-02 16:38:06.658 D/GeckoViewStartup( 8940): onEvent GeckoView:SetDefaultPrefs
+06-02 16:38:06.661 D/StrictMode( 8940): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/os/Trace;->asyncTraceEnd(JLjava/lang/String;I)V
+06-02 16:38:06.661 D/StrictMode( 8940): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:06.661 D/StrictMode( 8940): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:06.661 D/StrictMode( 8940): at java.lang.Class.getPublicMethodRecursive(Class.java:2075)
+06-02 16:38:06.661 D/StrictMode( 8940): at java.lang.Class.getMethod(Class.java:2063)
+06-02 16:38:06.661 D/StrictMode( 8940): at java.lang.Class.getMethod(Class.java:1690)
+06-02 16:38:06.661 D/StrictMode( 8940): at androidx.core.os.TraceCompat.<clinit>(TraceCompat.java:54)
+06-02 16:38:06.661 D/StrictMode( 8940): at androidx.core.os.TraceCompat.beginSection(TraceCompat.java:100)
+06-02 16:38:06.661 D/StrictMode( 8940): at androidx.recyclerview.widget.RecyclerView.onLayout(RecyclerView.java:4403)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.661 D/StrictMode( 8940): at com.google.android.material.appbar.HeaderScrollingViewBehavior.layoutChild(HeaderScrollingViewBehavior.java:148)
+06-02 16:38:06.661 D/StrictMode( 8940): at com.google.android.material.appbar.ViewOffsetBehavior.onLayoutChild(ViewOffsetBehavior.java:43)
+06-02 16:38:06.661 D/StrictMode( 8940): at com.google.android.material.appbar.AppBarLayout$ScrollingViewBehavior.onLayoutChild(AppBarLayout.java:1892)
+06-02 16:38:06.661 D/StrictMode( 8940): at androidx.coordinatorlayout.widget.CoordinatorLayout.onLayout(CoordinatorLayout.java:918)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:06.661 D/StrictMode( 8940): at com.android.internal.policy.DecorView.onLayout(DecorView.java:753)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.view.ViewRootImpl.performLayout(ViewRootImpl.java:2792)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.view.ViewRootImpl.performTraversals(ViewRootImpl.java:2319)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.view.ViewRootImpl.doTraversal(ViewRootImpl.java:1460)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.view.ViewRootImpl$TraversalRunnable.run(ViewRootImpl.java:7183)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.view.Choreographer$CallbackRecord.run(Choreographer.java:949)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.view.Choreographer.doCallbacks(Choreographer.java:761)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.view.Choreographer.doFrame(Choreographer.java:696)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.view.Choreographer$FrameDisplayEventReceiver.run(Choreographer.java:935)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.os.Handler.handleCallback(Handler.java:873)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.os.Handler.dispatchMessage(Handler.java:99)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:06.661 D/StrictMode( 8940): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:06.661 D/StrictMode( 8940): at java.lang.r
+06-02 16:38:06.662 W/lla.fenix.debu( 8940): Accessing hidden method Landroid/os/Trace;->traceCounter(JLjava/lang/String;I)V (light greylist, reflection)
+06-02 16:38:06.663 D/GeckoViewStartup( 8940): onEvent GeckoView:SetDefaultPrefs
+06-02 16:38:06.664 D/GeckoViewStartup( 8940): onEvent GeckoView:SetDefaultPrefs
+06-02 16:38:06.670 D/StrictMode( 8940): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/os/Trace;->traceCounter(JLjava/lang/String;I)V
+06-02 16:38:06.670 D/StrictMode( 8940): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:06.670 D/StrictMode( 8940): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:06.670 D/StrictMode( 8940): at java.lang.Class.getPublicMethodRecursive(Class.java:2075)
+06-02 16:38:06.670 D/StrictMode( 8940): at java.lang.Class.getMethod(Class.java:2063)
+06-02 16:38:06.670 D/StrictMode( 8940): at java.lang.Class.getMethod(Class.java:1690)
+06-02 16:38:06.670 D/StrictMode( 8940): at androidx.core.os.TraceCompat.<clinit>(TraceCompat.java:56)
+06-02 16:38:06.670 D/StrictMode( 8940): at androidx.core.os.TraceCompat.beginSection(TraceCompat.java:100)
+06-02 16:38:06.670 D/StrictMode( 8940): at androidx.recyclerview.widget.RecyclerView.onLayout(RecyclerView.java:4403)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.670 D/StrictMode( 8940): at com.google.android.material.appbar.HeaderScrollingViewBehavior.layoutChild(HeaderScrollingViewBehavior.java:148)
+06-02 16:38:06.670 D/StrictMode( 8940): at com.google.android.material.appbar.ViewOffsetBehavior.onLayoutChild(ViewOffsetBehavior.java:43)
+06-02 16:38:06.670 D/StrictMode( 8940): at com.google.android.material.appbar.AppBarLayout$ScrollingViewBehavior.onLayoutChild(AppBarLayout.java:1892)
+06-02 16:38:06.670 D/StrictMode( 8940): at androidx.coordinatorlayout.widget.CoordinatorLayout.onLayout(CoordinatorLayout.java:918)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:06.670 D/StrictMode( 8940): at com.android.internal.policy.DecorView.onLayout(DecorView.java:753)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.view.View.layout(View.java:20672)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.view.ViewRootImpl.performLayout(ViewRootImpl.java:2792)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.view.ViewRootImpl.performTraversals(ViewRootImpl.java:2319)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.view.ViewRootImpl.doTraversal(ViewRootImpl.java:1460)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.view.ViewRootImpl$TraversalRunnable.run(ViewRootImpl.java:7183)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.view.Choreographer$CallbackRecord.run(Choreographer.java:949)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.view.Choreographer.doCallbacks(Choreographer.java:761)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.view.Choreographer.doFrame(Choreographer.java:696)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.view.Choreographer$FrameDisplayEventReceiver.run(Choreographer.java:935)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.os.Handler.handleCallback(Handler.java:873)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.os.Handler.dispatchMessage(Handler.java:99)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:06.670 D/StrictMode( 8940): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:06.670 D/StrictMode( 8940): at java.lang.re
+06-02 16:38:06.673 D/GeckoViewStartup( 8940): onEvent GeckoView:SetDefaultPrefs
+06-02 16:38:06.693 D/GeckoViewStartup( 8940): onEvent GeckoView:SetDefaultPrefs
+06-02 16:38:06.702 D/GeckoNetworkManager( 8940): Incoming event receivedUpdate for state OnWithListeners -> OnWithListeners
+06-02 16:38:06.703 D/GeckoNetworkManager( 8940): New network state: UP, WIFI, WIFI
+06-02 16:38:06.715 D/GeckoViewConsole( 8940): onEvent GeckoView:RegisterWebExtension {"allowContentMessaging":true,"id":"webcompat@mozilla.com","locationUri":"resource://android/assets/extensions/webcompat/"}
+06-02 16:38:06.727 D/GeckoViewConsole( 8940): onEvent GeckoView:WebExtension:List null
+06-02 16:38:06.730 D/GeckoViewConsole( 8940): onEvent GeckoView:RegisterWebExtension {"allowContentMessaging":true,"id":"mozacBrowserIcons","locationUri":"resource://android/assets/extensions/browser-icons/"}
+06-02 16:38:06.735 D/GeckoViewConsole( 8940): onEvent GeckoView:RegisterWebExtension {"allowContentMessaging":true,"id":"mozacBrowserAds","locationUri":"resource://android/assets/extensions/ads/"}
+06-02 16:38:06.736 D/EGL_emulation( 8940): eglMakeCurrent: 0xe33868c0: ver 3 0 (tinfo 0xb3475200)
+06-02 16:38:06.737 D/GeckoViewConsole( 8940): onEvent GeckoView:RegisterWebExtension {"allowContentMessaging":true,"id":"BrowserCookiesExtension","locationUri":"resource://android/assets/extensions/cookies/"}
+06-02 16:38:06.782 D/glean/PingUploadWorker( 8940): Processing persisted pings at /data/user/0/org.mozilla.fenix.debug/glean_data/pending_pings
+06-02 16:38:06.782 D/glean/PingUploadWorker( 8940): Processing ping: 0f17279d-898f-4274-874d-358ab99b6d4e
+06-02 16:38:06.784 I/ActivityManager( 1869): Displayed org.mozilla.fenix.debug/.App: +3s625ms
+06-02 16:38:06.787 I/GoogleInputMethod( 1996): onFinishInput() : Dummy InputConnection bound
+06-02 16:38:06.789 I/GoogleInputMethod( 1996): onStartInput() : Dummy InputConnection bound
+06-02 16:38:06.800 E/adbd ( 4408): failed to connect to socket 'tcp:2829': Connection refused
+06-02 16:38:06.803 I/DefaultSupportedAddonsChecker( 8940): Register check for new supported add-ons
+06-02 16:38:06.808 D/glean/ConceptFetchHttpUploader( 8940): Submitting ping to: https://incoming.telemetry.mozilla.org/submit/org-mozilla-fenix-debug/baseline/1/0f17279d-898f-4274-874d-358ab99b6d4e
+06-02 16:38:06.838 D/StrictMode( 8940): StrictMode policy violation; ~duration=26 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:06.838 D/StrictMode( 8940): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:06.838 D/StrictMode( 8940): at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+06-02 16:38:06.838 D/StrictMode( 8940): at java.io.File.exists(File.java:815)
+06-02 16:38:06.838 D/StrictMode( 8940): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:605)
+06-02 16:38:06.838 D/StrictMode( 8940): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:596)
+06-02 16:38:06.838 D/StrictMode( 8940): at android.app.ContextImpl.getFilesDir(ContextImpl.java:641)
+06-02 16:38:06.838 D/StrictMode( 8940): at android.content.ContextWrapper.getFilesDir(ContextWrapper.java:239)
+06-02 16:38:06.838 D/StrictMode( 8940): at mozilla.components.feature.tab.collections.TabCollectionStorage.<init>(TabCollectionStorage.kt:29)
+06-02 16:38:06.838 D/StrictMode( 8940): at org.mozilla.fenix.components.TabCollectionStorage$collectionStorage$2.invoke(TabCollectionStorage.kt:52)
+06-02 16:38:06.838 D/StrictMode( 8940): at org.mozilla.fenix.components.TabCollectionStorage$collectionStorage$2.invoke(TabCollectionStorage.kt:23)
+06-02 16:38:06.838 D/StrictMode( 8940): at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+06-02 16:38:06.838 D/StrictMode( 8940): at org.mozilla.fenix.components.TabCollectionStorage.getCollectionStorage(Unknown Source:7)
+06-02 16:38:06.838 D/StrictMode( 8940): at org.mozilla.fenix.components.TabCollectionStorage.getCollections(TabCollectionStorage.kt:70)
+06-02 16:38:06.838 D/StrictMode( 8940): at org.mozilla.fenix.components.TabCollectionStorage.getCollections$default(TabCollectionStorage.kt:69)
+06-02 16:38:06.838 D/StrictMode( 8940): at org.mozilla.fenix.home.HomeFragment.subscribeToTabCollections(HomeFragment.kt:750)
+06-02 16:38:06.838 D/StrictMode( 8940): at org.mozilla.fenix.home.HomeFragment.onStart(HomeFragment.kt:404)
+06-02 16:38:06.838 D/StrictMode( 8940): at androidx.fragment.app.Fragment.performStart(Fragment.java:2730)
+06-02 16:38:06.838 D/StrictMode( 8940): at androidx.fragment.app.FragmentStateManager.start(FragmentStateManager.java:365)
+06-02 16:38:06.838 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1194)
+06-02 16:38:06.838 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1356)
+06-02 16:38:06.838 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.moveFragmentToExpectedState(FragmentManager.java:1434)
+06-02 16:38:06.838 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1497)
+06-02 16:38:06.838 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.completeExecute(FragmentManager.java:2125)
+06-02 16:38:06.838 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager$StartEnterTransitionListener.completeTransaction(FragmentManager.java:3022)
+06-02 16:38:06.838 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.executePostponedTransaction(FragmentManager.java:1895)
+06-02 16:38:06.838 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.ensureExecReady(FragmentManager.java:1803)
+06-02 16:38:06.838 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager.execPendingActions(FragmentManager.java:1843)
+06-02 16:38:06.838 D/StrictMode( 8940): at androidx.fragment.app.FragmentManager$4.run(FragmentManager.java:413)
+06-02 16:38:06.838 D/StrictMode( 8940): at android.os.Handler.handleCallback(Handler.java:873)
+06-02 16:38:06.838 D/StrictMode( 8940): at android.os.Handler.dispatchMessage(Handler.java:99)
+06-02 16:38:06.838 D/StrictMode( 8940): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:06.838 D/StrictMode( 8940): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:06.838 D/StrictMode( 8940): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:06.838 D/StrictMode( 8940): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:06.838 D/StrictMode( 8940): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:06.902 E/adbd ( 4408): failed to connect to socket 'tcp:2829': Connection refused
+06-02 16:38:06.956 D/gralloc_ranchu( 1869): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:06.958 D/ ( 1869): HostConnection::get() New Host Connection established 0xc1e7f900, tid 1930
+06-02 16:38:06.961 D/gralloc_ranchu( 1869): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:06.961 D/ ( 1869): HostConnection::get() New Host Connection established 0xc1e7f900, tid 1930
+06-02 16:38:06.961 D/gralloc_ranchu( 1869): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:06.963 D/ ( 1869): HostConnection::get() New Host Connection established 0xc1e7f900, tid 1930
+06-02 16:38:06.967 D/gralloc_ranchu( 1869): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:06.967 W/SurfaceFlinger( 1728): Attempting to set client state on removed layer: Splash Screen org.mozilla.fenix.debug#0
+06-02 16:38:06.967 W/SurfaceFlinger( 1728): Attempting to destroy on removed layer: Splash Screen org.mozilla.fenix.debug#0
+06-02 16:38:06.971 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:06.972 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c3080, tid 1897
+06-02 16:38:06.973 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:06.973 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c3080, tid 1897
+06-02 16:38:06.974 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:06.974 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c3080, tid 1897
+06-02 16:38:06.974 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:06.981 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c3080, tid 1897
+06-02 16:38:07.005 E/adbd ( 4408): failed to connect to socket 'tcp:2829': Connection refused
+06-02 16:38:07.012 I/SupportedAddonsWorker( 8940): Trying to check for new supported add-ons
+06-02 16:38:07.013 E/BatteryExternalStatsWorker( 1869): no controller energy info supplied for wifi
+06-02 16:38:07.059 W/SurfaceFlinger( 1728): couldn't log to binary event log: overflow.
+06-02 16:38:07.106 E/adbd ( 4408): failed to connect to socket 'tcp:2829': Connection refused
+06-02 16:38:07.117 I/ActivityManager( 1869): Fully drawn org.mozilla.fenix.debug/.App: +3s965ms
+06-02 16:38:07.174 I/PBSessionCacheImpl( 2402): Deleted sessionId[359508686488] from persistence.
+06-02 16:38:07.183 W/SearchService( 2402): Abort, client detached.
+06-02 16:38:07.208 E/adbd ( 4408): failed to connect to socket 'tcp:2829': Connection refused
+06-02 16:38:07.302 D/App ( 8940): Installed browser-icons extension
+06-02 16:38:07.314 E/adbd ( 4408): failed to connect to socket 'tcp:2829': Connection refused
+06-02 16:38:07.343 D/ ( 8940): HostConnection::get() New Host Connection established 0xcb89acc0, tid 8968
+06-02 16:38:07.345 E/EGL_emulation( 8940): tid 8968: eglBindAPI(1259): error 0x300c (EGL_BAD_PARAMETER)
+06-02 16:38:07.348 D/EGL_emulation( 8940): eglCreateContext: 0xad7f82a0: maj 3 min 0 rcv 3
+06-02 16:38:07.349 D/EGL_emulation( 8940): eglMakeCurrent: 0xad7f82a0: ver 3 0 (tinfo 0xe3383c90)
+06-02 16:38:07.378 E/GeckoConsole( 8940): [JavaScript Error: "NetworkError when attempting to fetch resource."]
+06-02 16:38:07.378 E/GeckoConsole( 8940): get@resource://services-settings/RemoteSettingsClient.jsm:350:12
+06-02 16:38:07.422 E/adbd ( 4408): failed to connect to socket 'tcp:2829': Connection refused
+06-02 16:38:07.458 D/glean/ConceptFetchHttpUploader( 8940): Ping successfully sent (200)
+06-02 16:38:07.459 D/glean/PingUploadWorker( 8940): 0f17279d-898f-4274-874d-358ab99b6d4e was deleted: true
+06-02 16:38:07.462 I/WM-WorkerWrapper( 8940): Worker result SUCCESS for Work [ id=1bfad7a1-b690-499c-855a-ee2262e5654a, tags={ mozilla.telemetry.glean.scheduler.PingUploadWorker, mozac_service_glean_ping_upload_worker } ]
+06-02 16:38:07.531 E/adbd ( 4408): failed to connect to socket 'tcp:2829': Connection refused
+06-02 16:38:07.549 W/GeckoConsole( 8940): [JavaScript Warning: "Security wrapper denied access to property "ONE_QUARTER" on privileged Javascript object. Support for exposing privileged objects to untrusted content via __exposedProps__ has been removed - use WebIDL bindings or Components.utils.cloneInto instead. Note that only the first denied property access from a given global object will be reported." {file: "moz-extension://427eb36c-07f3-4ee8-9b9b-c2d41f22dafb/data/picture_in_picture_overrides.js" line: 26}]
+06-02 16:38:07.582 D/mozac-webcompat( 8940): Installed WebCompat webextension: webcompat@mozilla.com
+06-02 16:38:07.584 D/BrowserIcons( 8940): Loaded icon (source = DOWNLOAD): https://www.youtube.com/
+06-02 16:38:07.633 E/adbd ( 4408): failed to connect to socket 'tcp:2829': Connection refused
+06-02 16:38:07.640 D/BrowserIcons( 8940): Loaded icon (source = DOWNLOAD): https://www.wikipedia.org/
+06-02 16:38:07.651 E/GeckoConsole( 8940): [JavaScript Error: "can't access property "startupData", state is undefined" {file: "resource://gre/modules/addons/XPIProvider.jsm" line: 3079}]
+06-02 16:38:07.651 E/GeckoConsole( 8940): setStartupData@resource://gre/modules/addons/XPIProvider.jsm:3079:5
+06-02 16:38:07.651 E/GeckoConsole( 8940): saveStartupData@resource://gre/modules/Extension.jsm:2035:17
+06-02 16:38:07.651 E/GeckoConsole( 8940): _writePersistentListeners@resource://gre/modules/ExtensionCommon.jsm:2271:15
+06-02 16:38:07.651 E/GeckoConsole( 8940): savePersistentListener@resource://gre/modules/ExtensionCommon.jsm:2362:18
+06-02 16:38:07.651 E/GeckoConsole( 8940): addListener@resource://gre/modules/ExtensionCommon.jsm:2495:20
+06-02 16:38:07.651 E/GeckoConsole( 8940): addListener@resource://gre/modules/ExtensionCommon.jsm:2550:38
+06-02 16:38:07.651 E/GeckoConsole( 8940): recvAddListener@resource://gre/modules/ExtensionParent.jsm:1079:13
+06-02 16:38:07.693 I/chatty ( 8940): uid=10099(org.mozilla.fenix.debug) identical 24 lines
+06-02 16:38:07.721 E/GeckoConsole( 8940): [JavaScript Error: "can't access property "startupData", state is undefined" {file: "resource://gre/modules/addons/XPIProvider.jsm" line: 3079}]
+06-02 16:38:07.721 E/GeckoConsole( 8940): setStartupData@resource://gre/modules/addons/XPIProvider.jsm:3079:5
+06-02 16:38:07.721 E/GeckoConsole( 8940): saveStartupData@resource://gre/modules/Extension.jsm:2035:17
+06-02 16:38:07.721 E/GeckoConsole( 8940): _writePersistentListeners@resource://gre/modules/ExtensionCommon.jsm:2271:15
+06-02 16:38:07.721 E/GeckoConsole( 8940): savePersistentListener@resource://gre/modules/ExtensionCommon.jsm:2362:18
+06-02 16:38:07.721 E/GeckoConsole( 8940): addListener@resource://gre/modules/ExtensionCommon.jsm:2495:20
+06-02 16:38:07.721 E/GeckoConsole( 8940): addListener@resource://gre/modules/ExtensionCommon.jsm:2550:38
+06-02 16:38:07.721 E/GeckoConsole( 8940): recvAddListener@resource://gre/modules/ExtensionParent.jsm:1079:13
+06-02 16:38:07.738 E/adbd ( 4408): failed to connect to socket 'tcp:2829': Connection refused
+06-02 16:38:07.792 I/Gecko ( 8940): 1591130287792 Marionette INFO Listening on port 2829
+06-02 16:38:08.089 I/WM-WorkerWrapper( 8940): Worker result SUCCESS for Work [ id=d35286d8-b66e-412d-86a5-6db8a316b5b9, tags={ mozilla.components.feature.addons.migration.DefaultSupportedAddonsChecker.periodicWork, mozilla.components.feature.addons.migration.SupportedAddonsWorker } ]
+06-02 16:38:10.179 W/ctxmgr ( 2473): [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):IndoorOutdoorProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2473). Was: 3 for 57, account#-517948760#
+06-02 16:38:11.089 I/EventLogSendingHelper( 2402): Sending log events.
+06-02 16:38:11.106 I/WorkController( 2402): WorkProxy is not enqueued because WorkController is disposed: WorkProxy{Name=context::j, WorkerId=context, id=9ec66b4}
+06-02 16:38:11.106 I/WorkController( 2402): WorkProxy is not enqueued because WorkController is disposed: WorkProxy{Name=context::m, WorkerId=context, id=42ac2dd}
+06-02 16:38:11.106 I/WorkController( 2402): WorkProxy is not enqueued because WorkController is disposed: WorkProxy{Name=context::n, WorkerId=context, id=1827e52}
+06-02 16:38:11.106 I/WorkController( 2402): WorkProxy is not enqueued because WorkController is disposed: WorkProxy{Name=context::p, WorkerId=context, id=c78f523}
+06-02 16:38:11.123 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:11.677 I/FenixApplication( 8940): Kicking-off account manager...
+06-02 16:38:11.677 I/FenixApplication( 8940): Running post-visual completeness tasks...
+06-02 16:38:11.678 I/FenixApplication( 8940): Storage initialization...
+06-02 16:38:11.679 I/PlacesHistoryStorage( 8940): Warming up places storage...
+06-02 16:38:11.681 D/RustNativeSupport( 8940): findMegazordLibraryName(places, 0.59.0
+06-02 16:38:11.681 D/RustNativeSupport( 8940): lib in use: none
+06-02 16:38:11.681 D/RustNativeSupport( 8940): lib configured: megazord
+06-02 16:38:11.681 D/RustNativeSupport( 8940): lib version configured: 0.59.0
+06-02 16:38:11.681 D/RustNativeSupport( 8940): settled on megazord
+06-02 16:38:11.682 I/FirefoxAccountStateMachine( 8940): Enabling/updating sync with a new SyncConfig: SyncConfig(supportedEngines=[mozilla.components.service.fxa.SyncEngine$History@b024ebc, mozilla.components.service.fxa.SyncEngine$Bookmarks@560fe45, mozilla.components.service.fxa.SyncEngine$Passwords@623799a], syncPeriodInMinutes=240)
+06-02 16:38:11.683 D/places_ffi( 8940): places_api_new
+06-02 16:38:11.684 I/BgSyncManager( 8940): Periodic syncing enabled at a 240 interval
+06-02 16:38:11.684 I/FirefoxAccountStateMachine( 8940): Sync is enabled
+06-02 16:38:11.688 I/FenixApplication( 8940): 'Kicking-off account manager' took 10 ms
+06-02 16:38:11.688 I/FirefoxAccountStateMachine( 8940): Processing event Init for state Start. Next state is Start
+06-02 16:38:11.701 D/places::db::schema( 8940): Creating schema
+06-02 16:38:11.701 I/keystore( 1734): del USRPKEY_org.mozilla.fenix.debug 10099
+06-02 16:38:11.702 I/keystore( 1734): del USRCERT_org.mozilla.fenix.debug 10099
+06-02 16:38:11.702 I/keystore( 1734): del CACERT_org.mozilla.fenix.debug 10099
+06-02 16:38:11.726 I/FirefoxAccountStateMachine( 8940): Ran 'Init' side-effects for state Start, got successive event AccountNotFound
+06-02 16:38:11.727 I/FirefoxAccountStateMachine( 8940): Processing event AccountNotFound for state Start. Next state is NotAuthenticated
+06-02 16:38:11.728 D/sql_support::conn_ext( 8940): Transaction commited after 27.615273ms
+06-02 16:38:11.729 D/places_ffi( 8940): places_connection_new
+06-02 16:38:11.731 D/RustNativeSupport( 8940): findMegazordLibraryName(fxaclient, 0.59.0
+06-02 16:38:11.731 D/RustNativeSupport( 8940): lib in use: none
+06-02 16:38:11.731 D/RustNativeSupport( 8940): lib configured: megazord
+06-02 16:38:11.731 D/RustNativeSupport( 8940): lib version configured: 0.59.0
+06-02 16:38:11.731 D/RustNativeSupport( 8940): settled on megazord
+06-02 16:38:11.732 D/places_ffi( 8940): places_connection_new
+06-02 16:38:11.732 D/fxaclient_ffi( 8940): fxa_new
+06-02 16:38:11.734 W/FirefoxAccountStateMachine( 8940): Got invalid event Init for state NotAuthenticated.
+06-02 16:38:11.735 I/PlacesHistoryStorage( 8940): 'Warming up places storage' took 56 ms
+06-02 16:38:11.736 I/PlacesBookmarksStorage( 8940): Warming up places storage...
+06-02 16:38:11.736 D/places_ffi( 8940): places_connection_new
+06-02 16:38:11.739 I/PlacesBookmarksStorage( 8940): 'Warming up places storage' took 2 ms
+06-02 16:38:11.798 I/keystore( 1734): 1 0
+06-02 16:38:11.802 I/SyncableLoginsStorage( 8940): Warming up storage...
+06-02 16:38:11.812 D/RustNativeSupport( 8940): findMegazordLibraryName(logins, 0.59.0
+06-02 16:38:11.813 D/RustNativeSupport( 8940): lib in use: none
+06-02 16:38:11.813 D/RustNativeSupport( 8940): lib configured: megazord
+06-02 16:38:11.813 D/RustNativeSupport( 8940): lib version configured: 0.59.0
+06-02 16:38:11.813 D/RustNativeSupport( 8940): settled on megazord
+06-02 16:38:11.815 D/logins_ffi( 8940): sync15_passwords_state_new
+06-02 16:38:11.823 D/logins::schema( 8940): Creating schema
+06-02 16:38:11.945 I/SyncableLoginsStorage( 8940): 'Warming up storage' took 142 ms
+06-02 16:38:11.945 I/FenixApplication( 8940): 'Storage initialization' took 267 ms
+06-02 16:38:15.194 W/ctxmgr ( 2473): [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):IndoorOutdoorProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2473). Was: 3 for 57, account#-517948760#
+06-02 16:38:16.329 E/memtrack( 1869): Couldn't load memtrack module
+06-02 16:38:16.329 W/android.os.Debug( 1869): failed to get memory consumption info: -1
+06-02 16:38:16.789 E/memtrack( 1869): Couldn't load memtrack module
+06-02 16:38:16.789 W/android.os.Debug( 1869): failed to get memory consumption info: -1
+06-02 16:38:20.216 W/ctxmgr ( 2473): [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):IndoorOutdoorProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2473). Was: 3 for 57, account#-517948760#
+06-02 16:38:22.963 I/ActivityManager( 1869): Force stopping org.mozilla.fenix.debug appid=10099 user=0: clear data
+06-02 16:38:22.964 I/ActivityManager( 1869): Killing 8940:org.mozilla.fenix.debug/u0a99 (adj 0): stop org.mozilla.fenix.debug
+06-02 16:38:22.964 W/libprocessgroup( 1869): kill(-8940, 9) failed: No such process
+06-02 16:38:22.965 W/ActivityManager( 1869): Force removing ActivityRecord{caea9f6 u0 org.mozilla.fenix.debug/.App t388}: app died, no saved state
+06-02 16:38:22.965 I/ServiceChildProcess( 8988): Service has been unbound. Stopping.
+06-02 16:38:22.983 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:22.984 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c33c0, tid 5209
+06-02 16:38:22.984 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:22.984 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c33c0, tid 5209
+06-02 16:38:22.984 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:22.985 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c33c0, tid 5209
+06-02 16:38:22.985 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:22.985 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c33c0, tid 5209
+06-02 16:38:22.985 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:22.985 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c33c0, tid 5209
+06-02 16:38:22.985 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:22.989 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c33c0, tid 5209
+06-02 16:38:22.991 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:22.992 I/ActivityManager( 1869): Killing 8988:org.mozilla.fenix.debug:tab0/u0a99 (adj 0): stop org.mozilla.fenix.debug
+06-02 16:38:22.994 I/ActivityManager( 1869): Force stopping org.mozilla.fenix.debug appid=10099 user=-1: clearApplicationUserData
+06-02 16:38:22.995 D/ZenLog ( 1869): config: removeAutomaticZenRules,ZenModeConfig[user=0,allowAlarms=true,allowMedia=true,allowSystem=false,allowReminders=false,allowEvents=false,allowCalls=true,allowRepeatCallers=true,allowMessages=false,allowCallsFrom=stars,allowMessagesFrom=contacts,suppressedVisualEffects=511,areChannelsBypassingDnd=false,automaticRules={EVENTS_DEFAULT_RULE=ZenRule[enabled=false,snoozing=false,name=Event,zenMode=ZEN_MODE_IMPORTANT_INTERRUPTIONS,conditionId=condition://android/event?userId=-10000&calendar=&reply=1,condition=Condition[id=condition://android/event?userId=-10000&calendar=&reply=1,summary=...,line1=...,line2=...,icon=0,state=STATE_FALSE,flags=2],component=ComponentInfo{android/com.android.server.notification.EventConditionProvider},id=EVENTS_DEFAULT_RULE,creationTime=1587308662810,enabler=null], EVERY_NIGHT_DEFAULT_RULE=ZenRule[enabled=false,snoozing=false,name=Sleeping,zenMode=ZEN_MODE_IMPORTANT_INTERRUPTIONS,conditionId=condition://android/schedule?days=1.2.3.4.5.6.7&start=22.0&end=7.0&exitAtAlarm=true,condition=Condition[id=condition://android/schedule?days=1.2.3.4.5.6.7&start=22.0&end=7.0&exitAtAlarm=true,summary=...,line1=...,line2=...,icon=0,state=STATE_FALSE,flags=2],component=ComponentInfo{android/com.android.server.notification.ScheduleConditionProvider},id=EVERY_NIGHT_DEFAULT_RULE,creationTime=1587308662810,enabler=null]},manualRule=null],Diff[]
+06-02 16:38:22.995 I/ConditionProviders( 1869): Disallowing condition provider org.mozilla.fenix.debug
+06-02 16:38:22.996 E/memtrack( 1869): Couldn't load memtrack module
+06-02 16:38:22.996 W/android.os.Debug( 1869): failed to get memory consumption info: -1
+06-02 16:38:22.996 D/ZenLog ( 1869): set_zen_mode: off,removeAutomaticZenRules
+06-02 16:38:23.002 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:23.009 W/libprocessgroup( 1869): kill(-8940, 9) failed: No such process
+06-02 16:38:23.009 I/keystore( 1734): clear_uid 10099
+06-02 16:38:23.010 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:23.026 D/SurfaceFlinger( 1728): duplicate layer name: changing com.google.android.apps.nexuslauncher/com.google.android.apps.nexuslauncher.NexusLauncherActivity to com.google.android.apps.nexuslauncher/com.google.android.apps.nexuslauncher.NexusLauncherActivity#1
+06-02 16:38:23.028 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:23.030 W/InputDispatcher( 1869): channel 'd4edb65 org.mozilla.fenix.debug/org.mozilla.fenix.debug.App (server)' ~ Consumer closed input channel or an error occurred. events=0x9
+06-02 16:38:23.030 E/InputDispatcher( 1869): channel 'd4edb65 org.mozilla.fenix.debug/org.mozilla.fenix.debug.App (server)' ~ Channel is unrecoverably broken and will be disposed!
+06-02 16:38:23.031 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:23.031 D/ ( 1728): HostConnection::get() New Host Connection established 0xe90dda40, tid 1952
+06-02 16:38:23.032 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:23.032 D/ ( 1728): HostConnection::get() New Host Connection established 0xe7e99140, tid 1952
+06-02 16:38:23.032 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:23.033 D/ ( 1728): HostConnection::get() New Host Connection established 0xe7e99140, tid 1952
+06-02 16:38:23.033 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:23.040 I/WindowManager( 1869): WIN DEATH: Window{d4edb65 u0 org.mozilla.fenix.debug/org.mozilla.fenix.debug.App}
+06-02 16:38:23.040 W/InputDispatcher( 1869): Attempted to unregister already unregistered input channel 'd4edb65 org.mozilla.fenix.debug/org.mozilla.fenix.debug.App (server)'
+06-02 16:38:23.042 D/EGL_emulation( 2402): eglMakeCurrent: 0xe1911c80: ver 3 0 (tinfo 0xc8cbe260)
+06-02 16:38:23.044 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:23.044 W/ActivityManager( 1869): setHasOverlayUi called on unknown pid: 8940
+06-02 16:38:23.049 I/Zygote ( 1729): Process 8940 exited due to signal (9)
+06-02 16:38:23.050 W/libprocessgroup( 1869): kill(-8940, 9) failed: No such process
+06-02 16:38:23.050 I/libprocessgroup( 1869): Successfully killed process cgroup uid 10099 pid 8940 in 85ms
+06-02 16:38:23.050 W/SurfaceFlinger( 1728): Attempting to destroy on removed layer: AppWindowToken{369c864 token=Token{86b81f7 ActivityRecord{caea9f6 u0 org.mozilla.fenix.debug/.App t388}}}#0
+06-02 16:38:23.050 W/SurfaceFlinger( 1728): Attempting to destroy on removed layer: Task=388#0
+06-02 16:38:23.054 W/libprocessgroup( 1869): kill(-8988, 9) failed: No such process
+06-02 16:38:23.054 I/ActivityManager( 1869): Force stopping org.mozilla.fenix.debug appid=10099 user=0: from pid 9106
+06-02 16:38:23.057 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:23.059 I/Zygote ( 1729): Process 8988 exited due to signal (9)
+06-02 16:38:23.072 I/GoogleInputMethod( 1996): onFinishInput() : Dummy InputConnection bound
+06-02 16:38:23.072 I/GoogleInputMethod( 1996): onStartInput() : Dummy InputConnection bound
+06-02 16:38:23.081 D/EGL_emulation( 2488): eglMakeCurrent: 0xe3385ae0: ver 3 0 (tinfo 0xe33838f0)
+06-02 16:38:23.082 D/CarrierSvcBindHelper( 2121): No carrier app for: 0
+06-02 16:38:23.089 W/SessionLifecycleManager( 2402): Handover failed. Creating new session controller.
+06-02 16:38:23.099 W/libprocessgroup( 1869): kill(-8988, 9) failed: No such process
+06-02 16:38:23.099 I/libprocessgroup( 1869): Successfully killed process cgroup uid 10099 pid 8988 in 45ms
+06-02 16:38:23.105 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:23.112 V/SettingsProvider( 1869): Notifying for 0: content://settings/global/debug_app
+06-02 16:38:23.123 I/GeofencerStateMachine( 2473): removeGeofences: removeRequest=RemoveGeofencingRequest[REMOVE_ALL packageName=org.mozilla.fenix.debug]
+06-02 16:38:23.124 D/CarrierSvcBindHelper( 2121): No carrier app for: 0
+06-02 16:38:23.125 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:23.128 E/system_server( 1869): No package ID 7f found for ID 0x7f0801a6.
+06-02 16:38:23.128 E/system_server( 1869): No package ID 7f found for ID 0x7f13011d.
+06-02 16:38:23.128 E/system_server( 1869): No package ID 7f found for ID 0x7f13011d.
+06-02 16:38:23.128 E/system_server( 1869): No package ID 7f found for ID 0x7f0801a4.
+06-02 16:38:23.128 E/system_server( 1869): No package ID 7f found for ID 0x7f13011c.
+06-02 16:38:23.128 E/system_server( 1869): No package ID 7f found for ID 0x7f13011c.
+06-02 16:38:23.129 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:23.147 E/BatteryExternalStatsWorker( 1869): no controller energy info supplied for wifi
+06-02 16:38:23.145 I/chatty ( 2002): uid=10024(com.android.systemui) RenderThread identical 1 line
+06-02 16:38:23.149 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:23.151 I/.gms.persisten( 2473): Background concurrent copying GC freed 150132(6MB) AllocSpace objects, 3(60KB) LOS objects, 45% free, 7MB/13MB, paused 85us total 138.429ms
+06-02 16:38:23.159 D/vold ( 1558): Remounting 10099 as mode read
+06-02 16:38:23.161 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:23.166 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:23.168 I/LocationSettingsChecker( 2660): Removing dialog suppression flag for package org.mozilla.fenix.debug
+06-02 16:38:23.180 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:23.183 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:23.184 W/LocationOracle( 2402): No location history returned by ContextManager
+06-02 16:38:23.193 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:23.210 D/vold ( 1558): Remounting 10099 as mode write
+06-02 16:38:23.198 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:23.219 I/Icing ( 2660): doRemovePackageData org.mozilla.fenix.debug
+06-02 16:38:23.219 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:23.223 W/ctxmgr ( 2473): [AclManager]No 3 for (accnt=account#-517948760#, com.google.android.gms(10008):UserVelocityProducer, vrsn=13280022, 0, 3pPkg = null , 3pMdlId = null , pid = 2473). Was: 3 for 1, account#-517948760#
+06-02 16:38:23.223 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:23.227 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:23.235 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:23.238 I/system_server( 1869): Background concurrent copying GC freed 69934(4MB) AllocSpace objects, 41(1540KB) LOS objects, 36% free, 10MB/16MB, paused 82us total 208.824ms
+06-02 16:38:23.240 I/ProvidersCache( 4535): Provider returned no roots. Possibly naughty: com.google.android.apps.docs.storage
+06-02 16:38:23.248 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:23.251 I/MicroDetectionWorker( 2402): #startMicroDetector [speakerMode: 0]
+06-02 16:38:23.251 I/AudioController( 2402): Using mInputStreamFactoryBuilder
+06-02 16:38:23.252 I/AudioController( 2402): Created new AudioSource
+06-02 16:38:23.253 I/MicroDetectionWorker( 2402): onReady
+06-02 16:38:23.256 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:23.305 I/MicroRecognitionRunner( 2402): Starting detection.
+06-02 16:38:23.306 I/MicrophoneInputStream( 2402): mic_starting SR : 16000 CC : 16 SO : 6
+06-02 16:38:23.307 E/ ( 1627): Request requires android.permission.RECORD_AUDIO
+06-02 16:38:23.307 E/AudioPolicyIntefaceImpl( 1627): getInputForAttr permission denied: recording not allowed for uid 10039 pid 2402
+06-02 16:38:23.307 E/AudioFlinger( 1627): createRecord() checkRecordThread_l failed
+06-02 16:38:23.308 E/IAudioFlinger( 2402): createRecord returned error -22
+06-02 16:38:23.308 E/AudioRecord( 2402): AudioFlinger could not create record track, status: -22
+06-02 16:38:23.308 E/AudioRecord-JNI( 2402): Error creating AudioRecord instance: initialization check failed with status -22.
+06-02 16:38:23.309 E/android.media.AudioRecord( 2402): Error code -20 when initializing native AudioRecord object.
+06-02 16:38:23.309 I/MicrophoneInputStream( 2402): mic_started SR : 16000 CC : 16 SO : 6
+06-02 16:38:23.309 E/ActivityThread( 2402): Failed to find provider info for com.google.android.apps.gsa.testing.ui.audio.recorded
+06-02 16:38:23.309 I/MicroDetectionWorker( 2402): onReady
+06-02 16:38:23.311 I/MicrophoneInputStream( 2402): mic_close SR : 16000 CC : 16 SO : 6
+06-02 16:38:23.311 I/MicroRecognitionRunner( 2402): Detection finished
+06-02 16:38:23.311 W/ErrorReporter( 2402): reportError [type: 211, code: 524300]: Error reading from input stream
+06-02 16:38:23.311 I/MicroRecognitionRunner( 2402): Stopping hotword detection.
+06-02 16:38:23.312 W/ErrorProcessor( 2402): onFatalError, processing error from engine(4)
+06-02 16:38:23.312 W/ErrorProcessor( 2402): com.google.android.apps.gsa.shared.speech.b.g: Error reading from input stream
+06-02 16:38:23.312 W/ErrorProcessor( 2402): at com.google.android.apps.gsa.staticplugins.microdetection.d.k.a(SourceFile:91)
+06-02 16:38:23.312 W/ErrorProcessor( 2402): at com.google.android.apps.gsa.staticplugins.microdetection.d.l.run(Unknown Source:14)
+06-02 16:38:23.312 W/ErrorProcessor( 2402): at com.google.android.libraries.gsa.runner.a.a.b(SourceFile:32)
+06-02 16:38:23.312 W/ErrorProcessor( 2402): at com.google.android.libraries.gsa.runner.a.c.call(Unknown Source:4)
+06-02 16:38:23.312 W/ErrorProcessor( 2402): at java.util.concurrent.FutureTask.run(FutureTask.java:266)
+06-02 16:38:23.312 W/ErrorProcessor( 2402): at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:458)
+06-02 16:38:23.312 W/ErrorProcessor( 2402): at java.util.concurrent.FutureTask.run(FutureTask.java:266)
+06-02 16:38:23.312 W/ErrorProcessor( 2402): at com.google.android.apps.gsa.shared.util.concurrent.b.g.run(Unknown Source:4)
+06-02 16:38:23.312 W/ErrorProcessor( 2402): at com.google.android.apps.gsa.shared.util.concurrent.b.aw.run(SourceFile:4)
+06-02 16:38:23.312 W/ErrorProcessor( 2402): at com.google.android.apps.gsa.shared.util.concurrent.b.aw.run(SourceFile:4)
+06-02 16:38:23.312 W/ErrorProcessor( 2402): at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1167)
+06-02 16:38:23.312 W/ErrorProcessor( 2402): at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:641)
+06-02 16:38:23.312 W/ErrorProcessor( 2402): at java.lang.Thread.run(Thread.java:764)
+06-02 16:38:23.312 W/ErrorProcessor( 2402): at com.google.android.apps.gsa.shared.util.concurrent.b.i.run(SourceFile:6)
+06-02 16:38:23.312 W/ErrorProcessor( 2402): Caused by: com.google.android.apps.gsa.shared.exception.GsaIOException: Error code: 393238 | Buffer overflow, no available space.
+06-02 16:38:23.312 W/ErrorProcessor( 2402): at com.google.android.apps.gsa.speech.audio.Tee.j(SourceFile:103)
+06-02 16:38:23.312 W/ErrorProcessor( 2402): at com.google.android.apps.gsa.speech.audio.au.read(SourceFile:2)
+06-02 16:38:23.312 W/ErrorProcessor( 2402): at java.io.InputStream.read(InputStream.java:101)
+06-02 16:38:23.312 W/ErrorProcessor( 2402): at com.google.android.apps.gsa.speech.audio.ao.run(SourceFile:17)
+06-02 16:38:23.312 W/ErrorProcessor( 2402): at com.google.android.apps.gsa.speech.audio.an.run(SourceFile:2)
+06-02 16:38:23.312 W/ErrorProcessor( 2402): at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:458)
+06-02 16:38:23.312 W/ErrorProcessor( 2402): ... 10 more
+06-02 16:38:23.312 I/AudioController( 2402): internalShutdown
+06-02 16:38:23.313 I/MicroDetector( 2402): Keeping mic open: false
+06-02 16:38:23.313 I/DeviceStateChecker( 2402): DeviceStateChecker cancelled
+06-02 16:38:23.313 I/MicroDetectionWorker( 2402): #onError(false)
+06-02 16:38:23.324 I/ActivityManager( 1869): Force stopping org.mozilla.fenix.debug appid=10099 user=-1: set debug app
+06-02 16:38:23.324 V/SettingsProvider( 1869): Notifying for 0: content://settings/global/debug_app
+06-02 16:38:23.328 I/Places ( 2473): ?: PlacesBleScanner start() with priority 2
+06-02 16:38:23.337 I/Places ( 2473): ?: PlacesBleScanner start() with priority 2
+06-02 16:38:23.337 I/PlaceInferenceEngine( 2473): [anon] Changed inference mode: 1
+06-02 16:38:23.345 I/Places ( 2473): Converted 0 out of 1 WiFi scans
+06-02 16:38:23.357 I/ActivityManager( 1869): START u0 {flg=0x10000000 cmp=org.mozilla.fenix.debug/.App (has extras)} from uid 0
+06-02 16:38:23.364 I/Places ( 2473): ?: PlacesBleScanner start() with priority 2
+06-02 16:38:23.372 I/Places ( 2473): ?: PlacesBleScanner start() with priority 2
+06-02 16:38:23.374 I/PlaceInferenceEngine( 2473): [anon] Changed inference mode: 1
+06-02 16:38:23.380 I/PlaceInferenceEngine( 2473): No beacon scan available - ignoring candidates.
+06-02 16:38:23.381 I/lla.fenix.debu( 9156): Not late-enabling -Xcheck:jni (already on)
+06-02 16:38:23.386 I/ActivityManager( 1869): Start proc 9156:org.mozilla.fenix.debug/u0a99 for activity org.mozilla.fenix.debug/.App
+06-02 16:38:23.390 I/Places ( 2473): Converted 0 out of 1 WiFi scans
+06-02 16:38:23.399 W/lla.fenix.debu( 9156): Unexpected CPU variant for X86 using defaults: x86
+06-02 16:38:23.402 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:23.403 I/PlaceInferenceEngine( 2473): No beacon scan available - ignoring candidates.
+06-02 16:38:23.410 D/ ( 1869): HostConnection::get() New Host Connection established 0xcb8a3340, tid 1930
+06-02 16:38:23.419 W/ctxmgr ( 2473): [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):PlacesProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2473). Was: 3 for 18, account#-517948760#
+06-02 16:38:23.454 W/ActivityThread( 9156): Application org.mozilla.fenix.debug can be debugged on port 8100...
+06-02 16:38:23.457 I/lla.fenix.debu( 9156): The ClassLoaderContext is a special shared library.
+06-02 16:38:23.470 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:23.470 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c33c0, tid 5209
+06-02 16:38:23.471 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:23.471 D/EGL_emulation( 2402): eglMakeCurrent: 0xe1911c80: ver 3 0 (tinfo 0xc8cbe260)
+06-02 16:38:23.472 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c33c0, tid 5209
+06-02 16:38:23.472 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:23.472 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c33c0, tid 5209
+06-02 16:38:23.472 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:23.474 D/EGL_emulation( 2488): eglMakeCurrent: 0xe3385ae0: ver 3 0 (tinfo 0xe33838f0)
+06-02 16:38:23.476 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:23.477 D/ ( 1728): HostConnection::get() New Host Connection established 0xe90ddac0, tid 1952
+06-02 16:38:23.477 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:23.479 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c33c0, tid 5209
+06-02 16:38:23.479 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:23.480 D/ ( 1728): HostConnection::get() New Host Connection established 0xe90ddd80, tid 1756
+06-02 16:38:23.480 D/ ( 1728): HostConnection::get() New Host Connection established 0xe90ddac0, tid 1952
+06-02 16:38:23.480 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:23.480 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:23.481 D/ ( 1728): HostConnection::get() New Host Connection established 0xe90ddd80, tid 1756
+06-02 16:38:23.481 D/ ( 1728): HostConnection::get() New Host Connection established 0xe90ddac0, tid 1952
+06-02 16:38:23.481 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:23.482 D/ ( 1728): HostConnection::get() New Host Connection established 0xe90ddd80, tid 1756
+06-02 16:38:23.482 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:23.482 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:23.482 D/ ( 1728): HostConnection::get() New Host Connection established 0xe90ddac0, tid 1952
+06-02 16:38:23.483 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:23.484 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:23.594 I/chatty ( 2002): uid=10024(com.android.systemui) RenderThread identical 13 lines
+06-02 16:38:23.600 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:24.020 D/FirebaseApp( 9156): Default FirebaseApp failed to initialize because no default options were found. This usually means that com.google.gms:google-services was not applied to your gradle project.
+06-02 16:38:24.020 I/FirebaseInitProvider( 9156): FirebaseApp initialization unsuccessful
+06-02 16:38:24.088 D/FenixApplication( 9156): Initializing Glean (uploadEnabled=true, isFennec=false)
+06-02 16:38:24.100 D/RustNativeSupport( 9156): findMegazordLibraryName(viaduct, 0.59.0
+06-02 16:38:24.100 D/RustNativeSupport( 9156): lib in use: none
+06-02 16:38:24.100 D/RustNativeSupport( 9156): lib configured: megazord
+06-02 16:38:24.100 D/RustNativeSupport( 9156): lib version configured: 0.59.0
+06-02 16:38:24.100 D/RustNativeSupport( 9156): settled on megazord
+06-02 16:38:24.147 D/libglean_ffi( 9156): glean_ffi: Android logging should be hooked up!
+06-02 16:38:24.150 I/glean/Glean( 9156): Registering pings for mozilla.telemetry.glean.GleanMetrics.Pings
+06-02 16:38:24.152 I/libglean_ffi( 9156): glean_core: Creating new Glean
+06-02 16:38:24.152 D/libglean_ffi( 9156): glean_core::database: Database path: "/data/user/0/org.mozilla.fenix.debug/glean_data/db"
+06-02 16:38:24.152 I/libglean_ffi( 9156): glean_core::database: Database initialized
+06-02 16:38:24.153 D/RustNativeSupport( 9156): findMegazordLibraryName(rustlog, 0.59.0
+06-02 16:38:24.153 D/RustNativeSupport( 9156): lib in use: none
+06-02 16:38:24.153 D/RustNativeSupport( 9156): lib configured: megazord
+06-02 16:38:24.153 D/RustNativeSupport( 9156): lib version configured: 0.59.0
+06-02 16:38:24.153 D/RustNativeSupport( 9156): settled on megazord
+06-02 16:38:24.155 I/rc_log_ffi::ios( 9156): rc_log adapter initialized!
+06-02 16:38:24.170 I/libglean_ffi( 9156): glean_ffi: Glean initialized
+06-02 16:38:24.170 I/GeckoRuntime( 9156): Adding debug configuration from: /data/local/tmp/org.mozilla.fenix.debug-geckoview-config.yaml
+06-02 16:38:24.170 D/GeckoDebugConfig( 9156): Adding environment variables from debug config: {MOZ_CRASHREPORTER=1, MOZ_CRASHREPORTER_NO_REPORT=1, MOZ_CRASHREPORTER_SHUTDOWN=1}
+06-02 16:38:24.170 D/GeckoDebugConfig( 9156): Adding arguments from debug config: [-marionette, -profile, /mnt/sdcard/org.mozilla.fenix.debug-geckodriver-profile]
+06-02 16:38:24.171 D/GeckoThread( 9156): State changed to LAUNCHED
+06-02 16:38:24.172 I/GeckoThread( 9156): preparing to run Gecko
+06-02 16:38:24.173 D/GeckoThread( 9156): env var: MOZ_CRASHREPORTER=1
+06-02 16:38:24.173 D/GeckoThread( 9156): env var: MOZ_CRASHREPORTER_NO_REPORT=1
+06-02 16:38:24.173 D/GeckoThread( 9156): env var: MOZ_CRASHREPORTER_SHUTDOWN=1
+06-02 16:38:24.178 D/GeckoRuntime( 9156): Lifecycle: onCreate
+06-02 16:38:24.181 D/GeckoThread( 9156): State changed to MOZGLUE_READY
+06-02 16:38:24.200 W/Settings( 9156): Setting animator_duration_scale has moved from android.provider.Settings.System to android.provider.Settings.Global, returning read-only global URI.
+06-02 16:38:24.204 E/GeckoLibLoad( 9156): Load sqlite start
+06-02 16:38:24.216 E/GeckoLibLoad( 9156): Load sqlite done
+06-02 16:38:24.216 E/GeckoLibLoad( 9156): Load nss start
+06-02 16:38:24.216 E/GeckoLibLoad( 9156): Load nss done
+06-02 16:38:24.227 I/glean/MetricsPingSched( 9156): The application just updated. Send metrics ping now.
+06-02 16:38:24.246 I/glean/MetricsPingSched( 9156): Collecting the 'metrics' ping, now = Tue Jun 02 16:38:24 EDT 2020, startup = true, reason = upgrade
+06-02 16:38:24.262 D/LeakCanary( 9156): Updated AppWatcher.config: Config(no changes)
+06-02 16:38:24.274 E/GeckoLibLoad( 9156): Loaded libs in 58.434504ms total, 0ms(60ms) user, 40ms(70ms) system, 0(0) faults
+06-02 16:38:24.275 D/GeckoThread( 9156): State changed to LIBS_READY
+06-02 16:38:24.275 I/libglean_ffi( 9156): glean_core::ping: Collecting metrics
+06-02 16:38:24.275 I/libglean_ffi( 9156): glean_core::ping: Storage for metrics empty. Bailing out.
+06-02 16:38:24.275 I/libglean_ffi( 9156): glean_core: No content for ping 'metrics', therefore no ping queued.
+06-02 16:38:24.276 D/glean/MetricsPingSched( 9156): Scheduling the 'metrics' ping in 40895781ms
+06-02 16:38:24.278 W/GeckoThread( 9156): zerdatime 4668409 - runGecko
+06-02 16:38:24.280 D/GeckoProfile( 9156): Loading profile at: null name: default
+06-02 16:38:24.281 D/GeckoProfile( 9156): Created new profile dir.
+06-02 16:38:24.282 I/GeckoProfile( 9156): Enqueuing profile init.
+06-02 16:38:24.284 D/GeckoProfile( 9156): Found profile dir: /data/user/0/org.mozilla.fenix.debug/files/mozilla/9w50stya.default
+06-02 16:38:24.284 D/GeckoProfile( 9156): Attempting to write new client ID properties
+06-02 16:38:24.285 D/GeckoProfile( 9156): Creating profile dir: /data/user/0/org.mozilla.fenix.debug/files/mozilla/9w50stya.default
+06-02 16:38:24.286 D/LeakCanary( 9156): Updated LeakCanary.config: Config(no changes)
+06-02 16:38:24.289 D/App ( 9156): DebugMetricController: start
+06-02 16:38:24.289 D/App ( 9156): DebugMetricController: start
+06-02 16:38:24.291 W/PushConfig( 9156): No firebase configuration found; cannot support push service.
+06-02 16:38:24.299 I/Gecko:DumpUtils( 9156): Fifo watcher disabled via pref.
+06-02 16:38:24.302 D/StrictMode( 9156): StrictMode policy violation; ~duration=144 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:24.302 D/StrictMode( 9156): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:24.302 D/StrictMode( 9156): at java.io.FileInputStream.<init>(FileInputStream.java:163)
+06-02 16:38:24.302 D/StrictMode( 9156): at org.mozilla.gecko.util.DebugConfig.fromFile(DebugConfig.java:49)
+06-02 16:38:24.302 D/StrictMode( 9156): at org.mozilla.geckoview.GeckoRuntime.init(GeckoRuntime.java:363)
+06-02 16:38:24.302 D/StrictMode( 9156): at org.mozilla.geckoview.GeckoRuntime.create(GeckoRuntime.java:574)
+06-02 16:38:24.302 D/StrictMode( 9156): at GeckoProvider.createRuntime(GeckoProvider.kt:58)
+06-02 16:38:24.302 D/StrictMode( 9156): at GeckoProvider.getOrCreateRuntime(GeckoProvider.kt:28)
+06-02 16:38:24.302 D/StrictMode( 9156): at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:79)
+06-02 16:38:24.302 D/StrictMode( 9156): at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:57)
+06-02 16:38:24.302 D/StrictMode( 9156): at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+06-02 16:38:24.302 D/StrictMode( 9156): at org.mozilla.fenix.components.Core.getEngine(Unknown Source:7)
+06-02 16:38:24.302 D/StrictMode( 9156): at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:128)
+06-02 16:38:24.302 D/StrictMode( 9156): at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+06-02 16:38:24.302 D/StrictMode( 9156): at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+06-02 16:38:24.302 D/StrictMode( 9156): at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+06-02 16:38:24.302 D/StrictMode( 9156): at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+06-02 16:38:24.302 D/StrictMode( 9156): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+06-02 16:38:24.302 D/StrictMode( 9156): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:24.302 D/StrictMode( 9156): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:24.302 D/StrictMode( 9156): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:24.302 D/StrictMode( 9156): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:24.302 D/StrictMode( 9156): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:24.302 D/StrictMode( 9156): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:24.303 D/StrictMode( 9156): StrictMode policy violation; ~duration=140 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:24.303 D/StrictMode( 9156): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:24.303 D/StrictMode( 9156): at libcore.io.BlockGuardOs.read(BlockGuardOs.java:253)
+06-02 16:38:24.303 D/StrictMode( 9156): at libcore.io.IoBridge.read(IoBridge.java:501)
+06-02 16:38:24.303 D/StrictMode( 9156): at java.io.FileInputStream.read(FileInputStream.java:307)
+06-02 16:38:24.303 D/StrictMode( 9156): at java.io.FilterInputStream.read(FilterInputStream.java:133)
+06-02 16:38:24.303 D/StrictMode( 9156): at java.io.PushbackInputStream.read(PushbackInputStream.java:186)
+06-02 16:38:24.303 D/StrictMode( 9156): at org.yaml.snakeyaml.reader.UnicodeReader.init(UnicodeReader.java:92)
+06-02 16:38:24.303 D/StrictMode( 9156): at org.yaml.snakeyaml.reader.UnicodeReader.read(UnicodeReader.java:124)
+06-02 16:38:24.303 D/StrictMode( 9156): at org.yaml.snakeyaml.reader.StreamReader.update(StreamReader.java:183)
+06-02 16:38:24.303 D/StrictMode( 9156): at org.yaml.snakeyaml.reader.StreamReader.ensureEnoughData(StreamReader.java:176)
+06-02 16:38:24.303 D/StrictMode( 9156): at org.yaml.snakeyaml.reader.StreamReader.ensureEnoughData(StreamReader.java:171)
+06-02 16:38:24.303 D/StrictMode( 9156): at org.yaml.snakeyaml.reader.StreamReader.peek(StreamReader.java:126)
+06-02 16:38:24.303 D/StrictMode( 9156): at org.yaml.snakeyaml.scanner.ScannerImpl.scanToNextToken(ScannerImpl.java:1177)
+06-02 16:38:24.303 D/StrictMode( 9156): at org.yaml.snakeyaml.scanner.ScannerImpl.fetchMoreTokens(ScannerImpl.java:287)
+06-02 16:38:24.303 D/StrictMode( 9156): at org.yaml.snakeyaml.scanner.ScannerImpl.checkToken(ScannerImpl.java:227)
+06-02 16:38:24.303 D/StrictMode( 9156): at org.yaml.snakeyaml.parser.ParserImpl$ParseImplicitDocumentStart.produce(ParserImpl.java:195)
+06-02 16:38:24.303 D/StrictMode( 9156): at org.yaml.snakeyaml.parser.ParserImpl.peekEvent(ParserImpl.java:158)
+06-02 16:38:24.303 D/StrictMode( 9156): at org.yaml.snakeyaml.parser.ParserImpl.checkEvent(ParserImpl.java:148)
+06-02 16:38:24.303 D/StrictMode( 9156): at org.yaml.snakeyaml.composer.Composer.getSingleNode(Composer.java:107)
+06-02 16:38:24.303 D/StrictMode( 9156): at org.yaml.snakeyaml.constructor.BaseConstructor.getSingleData(BaseConstructor.java:141)
+06-02 16:38:24.303 D/StrictMode( 9156): at org.yaml.snakeyaml.Yaml.loadFromReader(Yaml.java:525)
+06-02 16:38:24.303 D/StrictMode( 9156): at org.yaml.snakeyaml.Yaml.load(Yaml.java:453)
+06-02 16:38:24.303 D/StrictMode( 9156): at org.mozilla.gecko.util.DebugConfig.fromFile(DebugConfig.java:51)
+06-02 16:38:24.303 D/StrictMode( 9156): at org.mozilla.geckoview.GeckoRuntime.init(GeckoRuntime.java:363)
+06-02 16:38:24.303 D/StrictMode( 9156): at org.mozilla.geckoview.GeckoRuntime.create(GeckoRuntime.java:574)
+06-02 16:38:24.303 D/StrictMode( 9156): at GeckoProvider.createRuntime(GeckoProvider.kt:58)
+06-02 16:38:24.303 D/StrictMode( 9156): at GeckoProvider.getOrCreateRuntime(GeckoProvider.kt:28)
+06-02 16:38:24.303 D/StrictMode( 9156): at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:79)
+06-02 16:38:24.303 D/StrictMode( 9156): at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:57)
+06-02 16:38:24.303 D/StrictMode( 9156): at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+06-02 16:38:24.303 D/StrictMode( 9156): at org.mozilla.fenix.components.Core.getEngine(Unknown Source:7)
+06-02 16:38:24.303 D/StrictMode( 9156): at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:128)
+06-02 16:38:24.303 D/StrictMode( 9156): at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+06-02 16:38:24.303 D/StrictMode( 9156): at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+06-02 16:38:24.303 D/StrictMode( 9156): at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+06-02 16:38:24.303 D/StrictMode( 9156): at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+06-02 16:38:24.303 D/StrictMode( 9156): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+06-02 16:38:24.303 D/StrictMode( 9156): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:24.303 D/StrictMode( 9156): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:24.303 D/StrictMode( 9156): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:24.303 D/StrictMode( 9156): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:24.303 D/StrictMode( 9156): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:24.303 D/StrictMode( 9156): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:24.305 D/StrictMode( 9156): StrictMode policy violation; ~duration=140 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:24.305 D/StrictMode( 9156): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:24.305 D/StrictMode( 9156): at libcore.io.BlockGuardOs.read(BlockGuardOs.java:253)
+06-02 16:38:24.305 D/StrictMode( 9156): at libcore.io.IoBridge.read(IoBridge.java:501)
+06-02 16:38:24.305 D/StrictMode( 9156): at java.io.FileInputStream.read(FileInputStream.java:307)
+06-02 16:38:24.305 D/StrictMode( 9156): at java.io.FilterInputStream.read(FilterInputStream.java:133)
+06-02 16:38:24.305 D/StrictMode( 9156): at java.io.PushbackInputStream.read(PushbackInputStream.java:186)
+06-02 16:38:24.305 D/StrictMode( 9156): at sun.nio.cs.StreamDecoder.readBytes(StreamDecoder.java:288)
+06-02 16:38:24.305 D/StrictMode( 9156): at sun.nio.cs.StreamDecoder.implRead(StreamDecoder.java:351)
+06-02 16:38:24.305 D/StrictMode( 9156): at sun.nio.cs.StreamDecoder.read(StreamDecoder.java:180)
+06-02 16:38:24.305 D/StrictMode( 9156): at java.io.InputStreamReader.read(InputStreamReader.java:184)
+06-02 16:38:24.305 D/StrictMode( 9156): at org.yaml.snakeyaml.reader.UnicodeReader.read(UnicodeReader.java:125)
+06-02 16:38:24.305 D/StrictMode( 9156): at org.yaml.snakeyaml.reader.StreamReader.update(StreamReader.java:183)
+06-02 16:38:24.305 D/StrictMode( 9156): at org.yaml.snakeyaml.reader.StreamReader.ensureEnoughData(StreamReader.java:176)
+06-02 16:38:24.305 D/StrictMode( 9156): at org.yaml.snakeyaml.reader.StreamReader.ensureEnoughData(StreamReader.java:171)
+06-02 16:38:24.305 D/StrictMode( 9156): at org.yaml.snakeyaml.reader.StreamReader.peek(StreamReader.java:126)
+06-02 16:38:24.305 D/StrictMode( 9156): at org.yaml.snakeyaml.scanner.ScannerImpl.scanToNextToken(ScannerImpl.java:1177)
+06-02 16:38:24.305 D/StrictMode( 9156): at org.yaml.snakeyaml.scanner.ScannerImpl.fetchMoreTokens(ScannerImpl.java:287)
+06-02 16:38:24.305 D/StrictMode( 9156): at org.yaml.snakeyaml.scanner.ScannerImpl.checkToken(ScannerImpl.java:227)
+06-02 16:38:24.305 D/StrictMode( 9156): at org.yaml.snakeyaml.parser.ParserImpl$ParseImplicitDocumentStart.produce(ParserImpl.java:195)
+06-02 16:38:24.305 D/StrictMode( 9156): at org.yaml.snakeyaml.parser.ParserImpl.peekEvent(ParserImpl.java:158)
+06-02 16:38:24.305 D/StrictMode( 9156): at org.yaml.snakeyaml.parser.ParserImpl.checkEvent(ParserImpl.java:148)
+06-02 16:38:24.305 D/StrictMode( 9156): at org.yaml.snakeyaml.composer.Composer.getSingleNode(Composer.java:107)
+06-02 16:38:24.305 D/StrictMode( 9156): at org.yaml.snakeyaml.constructor.BaseConstructor.getSingleData(BaseConstructor.java:141)
+06-02 16:38:24.305 D/StrictMode( 9156): at org.yaml.snakeyaml.Yaml.loadFromReader(Yaml.java:525)
+06-02 16:38:24.305 D/StrictMode( 9156): at org.yaml.snakeyaml.Yaml.load(Yaml.java:453)
+06-02 16:38:24.305 D/StrictMode( 9156): at org.mozilla.gecko.util.DebugConfig.fromFile(DebugConfig.java:51)
+06-02 16:38:24.305 D/StrictMode( 9156): at org.mozilla.geckoview.GeckoRuntime.init(GeckoRuntime.java:363)
+06-02 16:38:24.305 D/StrictMode( 9156): at org.mozilla.geckoview.GeckoRuntime.create(GeckoRuntime.java:574)
+06-02 16:38:24.305 D/StrictMode( 9156): at GeckoProvider.createRuntime(GeckoProvider.kt:58)
+06-02 16:38:24.305 D/StrictMode( 9156): at GeckoProvider.getOrCreateRuntime(GeckoProvider.kt:28)
+06-02 16:38:24.305 D/StrictMode( 9156): at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:79)
+06-02 16:38:24.305 D/StrictMode( 9156): at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:57)
+06-02 16:38:24.305 D/StrictMode( 9156): at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+06-02 16:38:24.305 D/StrictMode( 9156): at org.mozilla.fenix.components.Core.getEngine(Unknown Source:7)
+06-02 16:38:24.305 D/StrictMode( 9156): at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:128)
+06-02 16:38:24.305 D/StrictMode( 9156): at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+06-02 16:38:24.305 D/StrictMode( 9156): at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+06-02 16:38:24.305 D/StrictMode( 9156): at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+06-02 16:38:24.305 D/StrictMode( 9156): at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+06-02 16:38:24.305 D/StrictMode( 9156): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+06-02 16:38:24.305 D/StrictMode( 9156): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:24.305 D/StrictMode( 9156): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:24.305 D/StrictMode( 9156): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:24.305 D/StrictMode( 9156): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:24.305 D/StrictMode( 9156): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:24.305 D/StrictMode( 9156): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:24.308 D/StrictMode( 9156): StrictMode policy violation; ~duration=133 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:24.308 D/StrictMode( 9156): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:24.308 D/StrictMode( 9156): at libcore.io.BlockGuardOs.read(BlockGuardOs.java:253)
+06-02 16:38:24.308 D/StrictMode( 9156): at libcore.io.IoBridge.read(IoBridge.java:501)
+06-02 16:38:24.308 D/StrictMode( 9156): at java.io.FileInputStream.read(FileInputStream.java:307)
+06-02 16:38:24.308 D/StrictMode( 9156): at java.io.FilterInputStream.read(FilterInputStream.java:133)
+06-02 16:38:24.308 D/StrictMode( 9156): at java.io.PushbackInputStream.read(PushbackInputStream.java:186)
+06-02 16:38:24.308 D/StrictMode( 9156): at sun.nio.cs.StreamDecoder.readBytes(StreamDecoder.java:288)
+06-02 16:38:24.308 D/StrictMode( 9156): at sun.nio.cs.StreamDecoder.implRead(StreamDecoder.java:351)
+06-02 16:38:24.308 D/StrictMode( 9156): at sun.nio.cs.StreamDecoder.read(StreamDecoder.java:180)
+06-02 16:38:24.308 D/StrictMode( 9156): at java.io.InputStreamReader.read(InputStreamReader.java:184)
+06-02 16:38:24.308 D/StrictMode( 9156): at org.yaml.snakeyaml.reader.UnicodeReader.read(UnicodeReader.java:125)
+06-02 16:38:24.308 D/StrictMode( 9156): at org.yaml.snakeyaml.reader.StreamReader.update(StreamReader.java:183)
+06-02 16:38:24.308 D/StrictMode( 9156): at org.yaml.snakeyaml.reader.StreamReader.ensureEnoughData(StreamReader.java:176)
+06-02 16:38:24.308 D/StrictMode( 9156): at org.yaml.snakeyaml.reader.StreamReader.peek(StreamReader.java:136)
+06-02 16:38:24.308 D/StrictMode( 9156): at org.yaml.snakeyaml.scanner.ScannerImpl.scanPlain(ScannerImpl.java:1999)
+06-02 16:38:24.308 D/StrictMode( 9156): at org.yaml.snakeyaml.scanner.ScannerImpl.fetchPlain(ScannerImpl.java:1044)
+06-02 16:38:24.308 D/StrictMode( 9156): at org.yaml.snakeyaml.scanner.ScannerImpl.fetchMoreTokens(ScannerImpl.java:399)
+06-02 16:38:24.308 D/StrictMode( 9156): at org.yaml.snakeyaml.scanner.ScannerImpl.checkToken(ScannerImpl.java:227)
+06-02 16:38:24.308 D/StrictMode( 9156): at org.yaml.snakeyaml.parser.ParserImpl$ParseBlockSequenceEntry.produce(ParserImpl.java:504)
+06-02 16:38:24.308 D/StrictMode( 9156): at org.yaml.snakeyaml.parser.ParserImpl.peekEvent(ParserImpl.java:158)
+06-02 16:38:24.308 D/StrictMode( 9156): at org.yaml.snakeyaml.parser.ParserImpl.checkEvent(ParserImpl.java:148)
+06-02 16:38:24.308 D/StrictMode( 9156): at org.yaml.snakeyaml.composer.Composer.composeSequenceNode(Composer.java:188)
+06-02 16:38:24.308 D/StrictMode( 9156): at org.yaml.snakeyaml.composer.Composer.composeNode(Composer.java:142)
+06-02 16:38:24.308 D/StrictMode( 9156): at org.yaml.snakeyaml.composer.Composer.composeValueNode(Composer.java:236)
+06-02 16:38:24.308 D/StrictMode( 9156): at org.yaml.snakeyaml.composer.Composer.composeMappingChildren(Composer.java:227)
+06-02 16:38:24.308 D/StrictMode( 9156): at org.yaml.snakeyaml.composer.Composer.composeMappingNode(Composer.java:215)
+06-02 16:38:24.308 D/StrictMode( 9156): at org.yaml.snakeyaml.composer.Composer.composeNode(Composer.java:144)
+06-02 16:38:24.308 D/StrictMode( 9156): at org.yaml.snakeyaml.composer.Composer.getNode(Composer.java:85)
+06-02 16:38:24.308 D/StrictMode( 9156): at org.yaml.snakeyaml.composer.Composer.getSingleNode(Composer.java:108)
+06-02 16:38:24.308 D/StrictMode( 9156): at org.yaml.snakeyaml.constructor.BaseConstructor.getSingleData(BaseConstructor.java:141)
+06-02 16:38:24.308 D/StrictMode( 9156): at org.yaml.snakeyaml.Yaml.loadFromReader(Yaml.java:525)
+06-02 16:38:24.308 D/StrictMode( 9156): at org.yaml.snakeyaml.Yaml.load(Yaml.java:453)
+06-02 16:38:24.308 D/StrictMode( 9156): at org.mozilla.gecko.util.DebugConfig.fromFile(DebugConfig.java:51)
+06-02 16:38:24.308 D/StrictMode( 9156): at org.mozilla.geckoview.GeckoRuntime.init(GeckoRuntime.java:363)
+06-02 16:38:24.308 D/StrictMode( 9156): at org.mozilla.geckoview.GeckoRuntime.create(GeckoRuntime.java:574)
+06-02 16:38:24.308 D/StrictMode( 9156): at GeckoProvider.createRuntime(GeckoProvider.kt:58)
+06-02 16:38:24.308 D/StrictMode( 9156): at GeckoProvider.getOrCreateRuntime(GeckoProvider.kt:28)
+06-02 16:38:24.308 D/StrictMode( 9156): at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:79)
+06-02 16:38:24.308 D/StrictMode( 9156): at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:57)
+06-02 16:38:24.308 D/StrictMode( 9156): at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+06-02 16:38:24.308 D/StrictMode( 9156): at org.mozilla.fenix.components.Core.getEngine(Unknown Source:7)
+06-02 16:38:24.308 D/StrictMode( 9156): at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:128)
+06-02 16:38:24.308 D/StrictMode( 9156): at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+06-02 16:38:24.308 D/StrictMode( 9156): at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+06-02 16:38:24.308 D/StrictMode( 9156): at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+06-02 16:38:24.308 D/StrictMode( 9156): at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+06-02 16:38:24.308 D/StrictMode( 9156): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+06-02 16:38:24.308 D/StrictMode( 9156): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:24.308 D/StrictMode( 9156): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:24.308 D/StrictMode( 9156): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:24.308 D/StrictMode( 9156): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:24.308 D/StrictMode( 9156): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:24.308 D/StrictMode( 9156): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:24.311 D/StrictMode( 9156): StrictMode policy violation; ~duration=120 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:24.311 D/StrictMode( 9156): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:24.311 D/StrictMode( 9156): at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+06-02 16:38:24.311 D/StrictMode( 9156): at java.io.File.exists(File.java:815)
+06-02 16:38:24.311 D/StrictMode( 9156): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:605)
+06-02 16:38:24.311 D/StrictMode( 9156): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:596)
+06-02 16:38:24.311 D/StrictMode( 9156): at android.app.ContextImpl.getPreferencesDir(ContextImpl.java:552)
+06-02 16:38:24.311 D/StrictMode( 9156): at android.app.ContextImpl.getSharedPreferencesPath(ContextImpl.java:747)
+06-02 16:38:24.311 D/StrictMode( 9156): at android.app.ContextImpl.getSharedPreferences(ContextImpl.java:400)
+06-02 16:38:24.311 D/StrictMode( 9156): at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+06-02 16:38:24.311 D/StrictMode( 9156): at mozilla.components.browser.engine.gecko.GeckoEngine.<init>(GeckoEngine.kt:68)
+06-02 16:38:24.311 D/StrictMode( 9156): at mozilla.components.browser.engine.gecko.GeckoEngine.<init>(GeckoEngine.kt:63)
+06-02 16:38:24.311 D/StrictMode( 9156): at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:76)
+06-02 16:38:24.311 D/StrictMode( 9156): at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:57)
+06-02 16:38:24.311 D/StrictMode( 9156): at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+06-02 16:38:24.311 D/StrictMode( 9156): at org.mozilla.fenix.components.Core.getEngine(Unknown Source:7)
+06-02 16:38:24.311 D/StrictMode( 9156): at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:128)
+06-02 16:38:24.311 D/StrictMode( 9156): at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+06-02 16:38:24.311 D/StrictMode( 9156): at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+06-02 16:38:24.311 D/StrictMode( 9156): at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+06-02 16:38:24.311 D/StrictMode( 9156): at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+06-02 16:38:24.311 D/StrictMode( 9156): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+06-02 16:38:24.311 D/StrictMode( 9156): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:24.311 D/StrictMode( 9156): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:24.311 D/StrictMode( 9156): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:24.311 D/StrictMode( 9156): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:24.311 D/StrictMode( 9156): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:24.311 D/StrictMode( 9156): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:24.312 D/StrictMode( 9156): StrictMode policy violation; ~duration=43 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:24.312 D/StrictMode( 9156): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:24.312 D/StrictMode( 9156): at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+06-02 16:38:24.312 D/StrictMode( 9156): at java.io.File.exists(File.java:815)
+06-02 16:38:24.312 D/StrictMode( 9156): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:605)
+06-02 16:38:24.312 D/StrictMode( 9156): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:596)
+06-02 16:38:24.312 D/StrictMode( 9156): at android.app.ContextImpl.getPreferencesDir(ContextImpl.java:552)
+06-02 16:38:24.312 D/StrictMode( 9156): at android.app.ContextImpl.getSharedPreferencesPath(ContextImpl.java:747)
+06-02 16:38:24.312 D/StrictMode( 9156): at android.app.ContextImpl.getSharedPreferences(ContextImpl.java:400)
+06-02 16:38:24.312 D/StrictMode( 9156): at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+06-02 16:38:24.312 D/StrictMode( 9156): at androidx.preference.PreferenceManager.getDefaultSharedPreferences(PreferenceManager.java:119)
+06-02 16:38:24.312 D/StrictMode( 9156): at org.mozilla.fenix.DebugFenixApplication.setupLeakCanary(DebugFenixApplication.kt:15)
+06-02 16:38:24.312 D/StrictMode( 9156): at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:140)
+06-02 16:38:24.312 D/StrictMode( 9156): at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+06-02 16:38:24.312 D/StrictMode( 9156): at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+06-02 16:38:24.312 D/StrictMode( 9156): at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+06-02 16:38:24.312 D/StrictMode( 9156): at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+06-02 16:38:24.312 D/StrictMode( 9156): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+06-02 16:38:24.312 D/StrictMode( 9156): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:24.312 D/StrictMode( 9156): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:24.312 D/StrictMode( 9156): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:24.312 D/StrictMode( 9156): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:24.312 D/StrictMode( 9156): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:24.312 D/StrictMode( 9156): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:24.313 D/StrictMode( 9156): StrictMode policy violation; ~duration=41 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:24.313 D/StrictMode( 9156): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:24.313 D/StrictMode( 9156): at android.app.SharedPreferencesImpl.awaitLoadedLocked(SharedPreferencesImpl.java:256)
+06-02 16:38:24.313 D/StrictMode( 9156): at android.app.SharedPreferencesImpl.getBoolean(SharedPreferencesImpl.java:325)
+06-02 16:38:24.313 D/StrictMode( 9156): at org.mozilla.fenix.DebugFenixApplication.setupLeakCanary(DebugFenixApplication.kt:16)
+06-02 16:38:24.313 D/StrictMode( 9156): at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:140)
+06-02 16:38:24.313 D/StrictMode( 9156): at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+06-02 16:38:24.313 D/StrictMode( 9156): at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+06-02 16:38:24.313 D/StrictMode( 9156): at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+06-02 16:38:24.313 D/StrictMode( 9156): at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+06-02 16:38:24.313 D/StrictMode( 9156): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+06-02 16:38:24.313 D/StrictMode( 9156): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:24.313 D/StrictMode( 9156): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:24.313 D/StrictMode( 9156): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:24.313 D/StrictMode( 9156): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:24.313 D/StrictMode( 9156): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:24.313 D/StrictMode( 9156): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:24.317 D/GeckoSysInfo( 9156): System memory: 1494MB.
+06-02 16:38:24.317 W/lla.fenix.debu( 9156): Accessing hidden method Landroid/os/MessageQueue;->next()Landroid/os/Message; (light greylist, JNI)
+06-02 16:38:24.318 D/StrictMode( 9156): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/os/MessageQueue;->next()Landroid/os/Message;
+06-02 16:38:24.318 D/StrictMode( 9156): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:24.318 D/StrictMode( 9156): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:24.318 D/StrictMode( 9156): at org.mozilla.gecko.mozglue.GeckoLoader.nativeRun(Native Method)
+06-02 16:38:24.318 D/StrictMode( 9156): at org.mozilla.gecko.GeckoThread.run(GeckoThread.java:449)
+06-02 16:38:24.318 W/lla.fenix.debu( 9156): Accessing hidden field Landroid/os/MessageQueue;->mMessages:Landroid/os/Message; (light greylist, JNI)
+06-02 16:38:24.318 D/StrictMode( 9156): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/os/MessageQueue;->mMessages:Landroid/os/Message;
+06-02 16:38:24.318 D/StrictMode( 9156): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:24.318 D/StrictMode( 9156): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:24.318 D/StrictMode( 9156): at org.mozilla.gecko.mozglue.GeckoLoader.nativeRun(Native Method)
+06-02 16:38:24.318 D/StrictMode( 9156): at org.mozilla.gecko.GeckoThread.run(GeckoThread.java:449)
+06-02 16:38:24.319 W/lla.fenix.debu( 9156): Accessing hidden field Ljava/lang/Boolean;->value:Z (light greylist, JNI)
+06-02 16:38:24.319 D/StrictMode( 9156): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Ljava/lang/Boolean;->value:Z
+06-02 16:38:24.319 D/StrictMode( 9156): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:24.319 D/StrictMode( 9156): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:24.319 D/StrictMode( 9156): at org.mozilla.gecko.mozglue.GeckoLoader.nativeRun(Native Method)
+06-02 16:38:24.319 D/StrictMode( 9156): at org.mozilla.gecko.GeckoThread.run(GeckoThread.java:449)
+06-02 16:38:24.319 W/lla.fenix.debu( 9156): Accessing hidden field Ljava/lang/Integer;->value:I (light greylist, JNI)
+06-02 16:38:24.320 D/StrictMode( 9156): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Ljava/lang/Integer;->value:I
+06-02 16:38:24.320 D/StrictMode( 9156): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:24.320 D/StrictMode( 9156): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:24.320 D/StrictMode( 9156): at org.mozilla.gecko.mozglue.GeckoLoader.nativeRun(Native Method)
+06-02 16:38:24.320 D/StrictMode( 9156): at org.mozilla.gecko.GeckoThread.run(GeckoThread.java:449)
+06-02 16:38:24.320 W/lla.fenix.debu( 9156): Accessing hidden field Ljava/lang/Double;->value:D (light greylist, JNI)
+06-02 16:38:24.320 D/StrictMode( 9156): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Ljava/lang/Double;->value:D
+06-02 16:38:24.320 D/StrictMode( 9156): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:24.320 D/StrictMode( 9156): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:24.320 D/StrictMode( 9156): at org.mozilla.gecko.mozglue.GeckoLoader.nativeRun(Native Method)
+06-02 16:38:24.320 D/StrictMode( 9156): at org.mozilla.gecko.GeckoThread.run(GeckoThread.java:449)
+06-02 16:38:24.321 D/GeckoThread( 9156): State changed to JNI_READY
+06-02 16:38:24.325 D/ServiceAllocator( 9156): org.mozilla.gecko.process.GeckoChildProcessServices$tab0 updateBindings: BACKGROUND priority, 0 importance, 2 successful binds, 0 failed binds, 0 successful unbinds, 0 failed unbinds
+06-02 16:38:24.331 I/enix.debug:tab( 9201): Not late-enabling -Xcheck:jni (already on)
+06-02 16:38:24.337 I/ActivityManager( 1869): Start proc 9201:org.mozilla.fenix.debug:tab0/u0a99 for service org.mozilla.fenix.debug/org.mozilla.gecko.process.GeckoChildProcessServices$tab0
+06-02 16:38:24.341 W/enix.debug:tab( 9201): Unexpected CPU variant for X86 using defaults: x86
+06-02 16:38:24.370 I/enix.debug:tab( 9201): The ClassLoaderContext is a special shared library.
+06-02 16:38:24.380 W/lla.fenix.debu( 9156): Accessing hidden method Landroid/content/res/Resources$Theme;->rebase()V (dark greylist, reflection)
+06-02 16:38:24.381 D/StrictMode( 9156): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/content/res/Resources$Theme;->rebase()V
+06-02 16:38:24.381 D/StrictMode( 9156): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:24.381 D/StrictMode( 9156): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:24.381 D/StrictMode( 9156): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:24.381 D/StrictMode( 9156): at java.lang.Class.getMethod(Class.java:2064)
+06-02 16:38:24.381 D/StrictMode( 9156): at java.lang.Class.getDeclaredMethod(Class.java:2047)
+06-02 16:38:24.381 D/StrictMode( 9156): at androidx.core.content.res.ResourcesCompat$ThemeCompat$ImplApi23.rebase(ResourcesCompat.java:501)
+06-02 16:38:24.381 D/StrictMode( 9156): at androidx.core.content.res.ResourcesCompat$ThemeCompat.rebase(ResourcesCompat.java:477)
+06-02 16:38:24.381 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatDelegateImpl.attachBaseContext2(AppCompatDelegateImpl.java:465)
+06-02 16:38:24.381 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatActivity.attachBaseContext(AppCompatActivity.java:107)
+06-02 16:38:24.381 D/StrictMode( 9156): at mozilla.components.support.locale.LocaleAwareAppCompatActivity.attachBaseContext(LocaleAwareAppCompatActivity.kt:19)
+06-02 16:38:24.381 D/StrictMode( 9156): at android.app.Activity.attach(Activity.java:7051)
+06-02 16:38:24.381 D/StrictMode( 9156): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2873)
+06-02 16:38:24.381 D/StrictMode( 9156): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3048)
+06-02 16:38:24.381 D/StrictMode( 9156): at android.app.servertransaction.LaunchActivityItem.execute(LaunchActivityItem.java:78)
+06-02 16:38:24.381 D/StrictMode( 9156): at android.app.servertransaction.TransactionExecutor.executeCallbacks(TransactionExecutor.java:108)
+06-02 16:38:24.381 D/StrictMode( 9156): at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:68)
+06-02 16:38:24.381 D/StrictMode( 9156): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1808)
+06-02 16:38:24.381 D/StrictMode( 9156): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:24.381 D/StrictMode( 9156): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:24.381 D/StrictMode( 9156): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:24.381 D/StrictMode( 9156): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:24.381 D/StrictMode( 9156): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:24.381 D/StrictMode( 9156): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:24.383 I/ResourcesCompat( 9156): Failed to retrieve rebase() method
+06-02 16:38:24.383 I/ResourcesCompat( 9156): java.lang.NoSuchMethodException: rebase []
+06-02 16:38:24.383 I/ResourcesCompat( 9156): at java.lang.Class.getMethod(Class.java:2068)
+06-02 16:38:24.383 I/ResourcesCompat( 9156): at java.lang.Class.getDeclaredMethod(Class.java:2047)
+06-02 16:38:24.383 I/ResourcesCompat( 9156): at androidx.core.content.res.ResourcesCompat$ThemeCompat$ImplApi23.rebase(ResourcesCompat.java:501)
+06-02 16:38:24.383 I/ResourcesCompat( 9156): at androidx.core.content.res.ResourcesCompat$ThemeCompat.rebase(ResourcesCompat.java:477)
+06-02 16:38:24.383 I/ResourcesCompat( 9156): at androidx.appcompat.app.AppCompatDelegateImpl.attachBaseContext2(AppCompatDelegateImpl.java:465)
+06-02 16:38:24.383 I/ResourcesCompat( 9156): at androidx.appcompat.app.AppCompatActivity.attachBaseContext(AppCompatActivity.java:107)
+06-02 16:38:24.383 I/ResourcesCompat( 9156): at mozilla.components.support.locale.LocaleAwareAppCompatActivity.attachBaseContext(LocaleAwareAppCompatActivity.kt:19)
+06-02 16:38:24.383 I/ResourcesCompat( 9156): at android.app.Activity.attach(Activity.java:7051)
+06-02 16:38:24.383 I/ResourcesCompat( 9156): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2873)
+06-02 16:38:24.383 I/ResourcesCompat( 9156): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3048)
+06-02 16:38:24.383 I/ResourcesCompat( 9156): at android.app.servertransaction.LaunchActivityItem.execute(LaunchActivityItem.java:78)
+06-02 16:38:24.383 I/ResourcesCompat( 9156): at android.app.servertransaction.TransactionExecutor.executeCallbacks(TransactionExecutor.java:108)
+06-02 16:38:24.383 I/ResourcesCompat( 9156): at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:68)
+06-02 16:38:24.383 I/ResourcesCompat( 9156): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1808)
+06-02 16:38:24.383 I/ResourcesCompat( 9156): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:24.383 I/ResourcesCompat( 9156): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:24.383 I/ResourcesCompat( 9156): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:24.383 I/ResourcesCompat( 9156): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:24.383 I/ResourcesCompat( 9156): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:24.383 I/ResourcesCompat( 9156): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:24.391 W/lla.fenix.debu( 9156): Accessing hidden method Landroid/graphics/drawable/Drawable;->getOpticalInsets()Landroid/graphics/Insets; (light greylist, linking)
+06-02 16:38:24.391 W/lla.fenix.debu( 9156): Accessing hidden field Landroid/graphics/Insets;->left:I (light greylist, linking)
+06-02 16:38:24.391 W/lla.fenix.debu( 9156): Accessing hidden field Landroid/graphics/Insets;->right:I (light greylist, linking)
+06-02 16:38:24.391 W/lla.fenix.debu( 9156): Accessing hidden field Landroid/graphics/Insets;->top:I (light greylist, linking)
+06-02 16:38:24.391 W/lla.fenix.debu( 9156): Accessing hidden field Landroid/graphics/Insets;->bottom:I (light greylist, linking)
+06-02 16:38:24.428 W/lla.fenix.debu( 9156): Accessing hidden method Landroid/view/View;->getAccessibilityDelegate()Landroid/view/View$AccessibilityDelegate; (light greylist, linking)
+06-02 16:38:24.434 D/GeckoViewStartup( 9156): observe: app-startup
+06-02 16:38:24.434 W/lla.fenix.debu( 9156): Accessing hidden method Landroid/view/View;->computeFitSystemWindows(Landroid/graphics/Rect;Landroid/graphics/Rect;)Z (light greylist, reflection)
+06-02 16:38:24.435 D/StrictMode( 9156): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/view/View;->computeFitSystemWindows(Landroid/graphics/Rect;Landroid/graphics/Rect;)Z
+06-02 16:38:24.435 D/StrictMode( 9156): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:24.435 D/StrictMode( 9156): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:24.435 D/StrictMode( 9156): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:24.435 D/StrictMode( 9156): at java.lang.Class.getMethod(Class.java:2064)
+06-02 16:38:24.435 D/StrictMode( 9156): at java.lang.Class.getDeclaredMethod(Class.java:2047)
+06-02 16:38:24.435 D/StrictMode( 9156): at androidx.appcompat.widget.ViewUtils.<clinit>(ViewUtils.java:44)
+06-02 16:38:24.435 D/StrictMode( 9156): at androidx.appcompat.widget.ViewUtils.makeOptionalFitsSystemWindows(ViewUtils.java:80)
+06-02 16:38:24.435 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatDelegateImpl.createSubDecor(AppCompatDelegateImpl.java:970)
+06-02 16:38:24.435 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatDelegateImpl.ensureSubDecor(AppCompatDelegateImpl.java:803)
+06-02 16:38:24.435 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatDelegateImpl.setContentView(AppCompatDelegateImpl.java:692)
+06-02 16:38:24.435 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatActivity.setContentView(AppCompatActivity.java:170)
+06-02 16:38:24.435 D/StrictMode( 9156): at org.mozilla.fenix.HomeActivity.onCreate(HomeActivity.kt:130)
+06-02 16:38:24.435 D/StrictMode( 9156): at android.app.Activity.performCreate(Activity.java:7136)
+06-02 16:38:24.435 D/StrictMode( 9156): at android.app.Activity.performCreate(Activity.java:7127)
+06-02 16:38:24.435 D/StrictMode( 9156): at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1271)
+06-02 16:38:24.435 D/StrictMode( 9156): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2893)
+06-02 16:38:24.435 D/StrictMode( 9156): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3048)
+06-02 16:38:24.435 D/StrictMode( 9156): at android.app.servertransaction.LaunchActivityItem.execute(LaunchActivityItem.java:78)
+06-02 16:38:24.435 D/StrictMode( 9156): at android.app.servertransaction.TransactionExecutor.executeCallbacks(TransactionExecutor.java:108)
+06-02 16:38:24.435 D/StrictMode( 9156): at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:68)
+06-02 16:38:24.435 D/StrictMode( 9156): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1808)
+06-02 16:38:24.435 D/StrictMode( 9156): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:24.435 D/StrictMode( 9156): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:24.435 D/StrictMode( 9156): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:24.435 D/StrictMode( 9156): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:24.435 D/StrictMode( 9156): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:24.435 D/StrictMode( 9156): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:24.435 W/lla.fenix.debu( 9156): Accessing hidden method Landroid/view/ViewGroup;->makeOptionalFitsSystemWindows()V (light greylist, reflection)
+06-02 16:38:24.436 D/StrictMode( 9156): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/view/ViewGroup;->makeOptionalFitsSystemWindows()V
+06-02 16:38:24.436 D/StrictMode( 9156): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:24.436 D/StrictMode( 9156): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:24.436 D/StrictMode( 9156): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:24.436 D/StrictMode( 9156): at java.lang.Class.getPublicMethodRecursive(Class.java:2075)
+06-02 16:38:24.436 D/StrictMode( 9156): at java.lang.Class.getMethod(Class.java:2063)
+06-02 16:38:24.436 D/StrictMode( 9156): at java.lang.Class.getMethod(Class.java:1690)
+06-02 16:38:24.436 D/StrictMode( 9156): at androidx.appcompat.widget.ViewUtils.makeOptionalFitsSystemWindows(ViewUtils.java:84)
+06-02 16:38:24.436 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatDelegateImpl.createSubDecor(AppCompatDelegateImpl.java:970)
+06-02 16:38:24.436 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatDelegateImpl.ensureSubDecor(AppCompatDelegateImpl.java:803)
+06-02 16:38:24.436 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatDelegateImpl.setContentView(AppCompatDelegateImpl.java:692)
+06-02 16:38:24.436 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatActivity.setContentView(AppCompatActivity.java:170)
+06-02 16:38:24.436 D/StrictMode( 9156): at org.mozilla.fenix.HomeActivity.onCreate(HomeActivity.kt:130)
+06-02 16:38:24.436 D/StrictMode( 9156): at android.app.Activity.performCreate(Activity.java:7136)
+06-02 16:38:24.436 D/StrictMode( 9156): at android.app.Activity.performCreate(Activity.java:7127)
+06-02 16:38:24.436 D/StrictMode( 9156): at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1271)
+06-02 16:38:24.436 D/StrictMode( 9156): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2893)
+06-02 16:38:24.436 D/StrictMode( 9156): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3048)
+06-02 16:38:24.436 D/StrictMode( 9156): at android.app.servertransaction.LaunchActivityItem.execute(LaunchActivityItem.java:78)
+06-02 16:38:24.436 D/StrictMode( 9156): at android.app.servertransaction.TransactionExecutor.executeCallbacks(TransactionExecutor.java:108)
+06-02 16:38:24.436 D/StrictMode( 9156): at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:68)
+06-02 16:38:24.436 D/StrictMode( 9156): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1808)
+06-02 16:38:24.436 D/StrictMode( 9156): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:24.436 D/StrictMode( 9156): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:24.436 D/StrictMode( 9156): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:24.436 D/StrictMode( 9156): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:24.436 D/StrictMode( 9156): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:24.436 D/StrictMode( 9156): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:24.442 D/GeckoViewConsole( 9156): enabled = true
+06-02 16:38:24.482 I/AJC ( 9156): isPerformanceTest
+06-02 16:38:24.483 I/AJC ( 9156): isPerformanceTest : isPhonePlugged: true
+06-02 16:38:24.483 I/AJC ( 9156): isPerformanceTest : isAdbEnabled: true
+06-02 16:38:24.487 D/App ( 9156): DebugMetricController: track event: org.mozilla.fenix.components.metrics.Event$DismissedOnboarding@6354493
+06-02 16:38:24.505 I/GeckoConsole( 9156): No chrome package registered for chrome://browser/content/built_in_addons.json
+06-02 16:38:24.582 W/lla.fenix.debu( 9156): Accessing hidden method Landroid/graphics/FontFamily;-><init>()V (light greylist, reflection)
+06-02 16:38:24.585 D/StrictMode( 9156): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/graphics/FontFamily;-><init>()V
+06-02 16:38:24.585 D/StrictMode( 9156): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:24.585 D/StrictMode( 9156): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:24.585 D/StrictMode( 9156): at java.lang.Class.getDeclaredConstructorInternal(Native Method)
+06-02 16:38:24.585 D/StrictMode( 9156): at java.lang.Class.getConstructor0(Class.java:2325)
+06-02 16:38:24.585 D/StrictMode( 9156): at java.lang.Class.getConstructor(Class.java:1725)
+06-02 16:38:24.585 D/StrictMode( 9156): at androidx.core.graphics.TypefaceCompatApi26Impl.obtainFontFamilyCtor(TypefaceCompatApi26Impl.java:321)
+06-02 16:38:24.585 D/StrictMode( 9156): at androidx.core.graphics.TypefaceCompatApi26Impl.<init>(TypefaceCompatApi26Impl.java:84)
+06-02 16:38:24.585 D/StrictMode( 9156): at androidx.core.graphics.TypefaceCompatApi28Impl.<init>(TypefaceCompatApi28Impl.java:36)
+06-02 16:38:24.585 D/StrictMode( 9156): at androidx.core.graphics.TypefaceCompat.<clinit>(TypefaceCompat.java:51)
+06-02 16:38:24.585 D/StrictMode( 9156): at androidx.core.graphics.TypefaceCompat.create(TypefaceCompat.java:194)
+06-02 16:38:24.585 D/StrictMode( 9156): at androidx.appcompat.widget.AppCompatTextView.setTypeface(AppCompatTextView.java:708)
+06-02 16:38:24.585 D/StrictMode( 9156): at android.widget.TextView.resolveStyleAndSetTypeface(TextView.java:2037)
+06-02 16:38:24.585 D/StrictMode( 9156): at android.widget.TextView.setTypefaceFromAttrs(TextView.java:2008)
+06-02 16:38:24.585 D/StrictMode( 9156): at android.widget.TextView.applyTextAppearance(TextView.java:3640)
+06-02 16:38:24.585 D/StrictMode( 9156): at android.widget.TextView.<init>(TextView.java:1498)
+06-02 16:38:24.585 D/StrictMode( 9156): at android.widget.TextView.<init>(TextView.java:869)
+06-02 16:38:24.585 D/StrictMode( 9156): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:100)
+06-02 16:38:24.585 D/StrictMode( 9156): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:95)
+06-02 16:38:24.585 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatViewInflater.createTextView(AppCompatViewInflater.java:194)
+06-02 16:38:24.585 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatViewInflater.createView(AppCompatViewInflater.java:115)
+06-02 16:38:24.585 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatDelegateImpl.createView(AppCompatDelegateImpl.java:1548)
+06-02 16:38:24.585 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatDelegateImpl.onCreateView(AppCompatDelegateImpl.java:1599)
+06-02 16:38:24.585 D/StrictMode( 9156): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:772)
+06-02 16:38:24.585 D/StrictMode( 9156): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:24.585 D/StrictMode( 9156): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:24.585 D/StrictMode( 9156): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:24.585 D/StrictMode( 9156): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:24.585 D/StrictMode( 9156): at android.view.LayoutInflater.inflate(LayoutInflater.java:489)
+06-02 16:38:24.585 D/StrictMode( 9156): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:24.585 D/StrictMode( 9156): at android.view.LayoutInflater.inflate(LayoutInflater.java:374)
+06-02 16:38:24.585 D/StrictMode( 9156): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:30)
+06-02 16:38:24.585 D/StrictMode( 9156): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:22)
+06-02 16:38:24.585 D/StrictMode( 9156): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(Unknown Source:6)
+06-02 16:38:24.585 D/StrictMode( 9156): at java.lang.reflect.Constructor.newInstance0(Native Method)
+06-02 16:38:24.585 D/StrictMode( 9156): at java.lang.reflect.Constructor.newInstance(Constructor.java:343)
+06-02 16:38:24.585 D/StrictMode( 9156): at android.view.LayoutInflater.createView(LayoutInflater.java:647)
+06-02 16:38:24.585 D/StrictMode( 9156): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:790)
+06-02 16:38:24.585 D/StrictMode( 9156): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:24.585 D/StrictMode( 9156): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:24.585 D/StrictMode( 9156): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:24.585 D/StrictMode( 9156): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:24.585 D/StrictMode( 9156): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:24.585 D/StrictMode( 9156): at android.view.LayoutInflater.inflate(LayoutInflater.java:515)
+06-02 16:38:24.585 D/StrictMode( 9156): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:24.585 D/StrictMode( 9156): at org.mozilla.fenix.home.HomeFragment.onCreateView(HomeFragment.kt:183)
+06-02 16:38:24.585 D/StrictMode( 9156): at androidx.fragment.app.Fragment.performCreateView(Fragment.java:2698)
+06-02 16:38:24.585 D/StrictMode( 9156): at androidx.fragment.app.FragmentStateManager.createView(FragmentStateManager.java:320)
+06-02 16:38:24.585 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1187)
+06-02 16:38:24.585 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.addAddedFragments(FragmentManager.java:2224)
+06-02 16:38:24.585 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.executeOpsTogether(FragmentManager.java:1997)
+06-02 16:38:24.585 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.removeRedundantOperationsAndExecute(FragmentManager.java:1953)
+06-02 16:38:24.585 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.execPendingActions(FragmentManager.java:1849)
+06-02 16:38:24.585 D/StrictMode( 9156): at
+06-02 16:38:24.585 W/lla.fenix.debu( 9156): Accessing hidden method Landroid/graphics/FontFamily;->addFontFromAssetManager(Landroid/content/res/AssetManager;Ljava/lang/String;IZIII[Landroid/graphics/fonts/FontVariationAxis;)Z (light greylist, reflection)
+06-02 16:38:24.588 D/StrictMode( 9156): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/graphics/FontFamily;->addFontFromAssetManager(Landroid/content/res/AssetManager;Ljava/lang/String;IZIII[Landroid/graphics/fonts/FontVariationAxis;)Z
+06-02 16:38:24.588 D/StrictMode( 9156): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:24.588 D/StrictMode( 9156): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:24.588 D/StrictMode( 9156): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:24.588 D/StrictMode( 9156): at java.lang.Class.getPublicMethodRecursive(Class.java:2075)
+06-02 16:38:24.588 D/StrictMode( 9156): at java.lang.Class.getMethod(Class.java:2063)
+06-02 16:38:24.588 D/StrictMode( 9156): at java.lang.Class.getMethod(Class.java:1690)
+06-02 16:38:24.588 D/StrictMode( 9156): at androidx.core.graphics.TypefaceCompatApi26Impl.obtainAddFontFromAssetManagerMethod(TypefaceCompatApi26Impl.java:326)
+06-02 16:38:24.588 D/StrictMode( 9156): at androidx.core.graphics.TypefaceCompatApi26Impl.<init>(TypefaceCompatApi26Impl.java:85)
+06-02 16:38:24.588 D/StrictMode( 9156): at androidx.core.graphics.TypefaceCompatApi28Impl.<init>(TypefaceCompatApi28Impl.java:36)
+06-02 16:38:24.588 D/StrictMode( 9156): at androidx.core.graphics.TypefaceCompat.<clinit>(TypefaceCompat.java:51)
+06-02 16:38:24.588 D/StrictMode( 9156): at androidx.core.graphics.TypefaceCompat.create(TypefaceCompat.java:194)
+06-02 16:38:24.588 D/StrictMode( 9156): at androidx.appcompat.widget.AppCompatTextView.setTypeface(AppCompatTextView.java:708)
+06-02 16:38:24.588 D/StrictMode( 9156): at android.widget.TextView.resolveStyleAndSetTypeface(TextView.java:2037)
+06-02 16:38:24.588 D/StrictMode( 9156): at android.widget.TextView.setTypefaceFromAttrs(TextView.java:2008)
+06-02 16:38:24.588 D/StrictMode( 9156): at android.widget.TextView.applyTextAppearance(TextView.java:3640)
+06-02 16:38:24.588 D/StrictMode( 9156): at android.widget.TextView.<init>(TextView.java:1498)
+06-02 16:38:24.588 D/StrictMode( 9156): at android.widget.TextView.<init>(TextView.java:869)
+06-02 16:38:24.588 D/StrictMode( 9156): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:100)
+06-02 16:38:24.588 D/StrictMode( 9156): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:95)
+06-02 16:38:24.588 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatViewInflater.createTextView(AppCompatViewInflater.java:194)
+06-02 16:38:24.588 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatViewInflater.createView(AppCompatViewInflater.java:115)
+06-02 16:38:24.588 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatDelegateImpl.createView(AppCompatDelegateImpl.java:1548)
+06-02 16:38:24.588 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatDelegateImpl.onCreateView(AppCompatDelegateImpl.java:1599)
+06-02 16:38:24.588 D/StrictMode( 9156): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:772)
+06-02 16:38:24.588 D/StrictMode( 9156): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:24.588 D/StrictMode( 9156): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:24.588 D/StrictMode( 9156): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:24.588 D/StrictMode( 9156): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:24.588 D/StrictMode( 9156): at android.view.LayoutInflater.inflate(LayoutInflater.java:489)
+06-02 16:38:24.588 D/StrictMode( 9156): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:24.588 D/StrictMode( 9156): at android.view.LayoutInflater.inflate(LayoutInflater.java:374)
+06-02 16:38:24.588 D/StrictMode( 9156): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:30)
+06-02 16:38:24.588 D/StrictMode( 9156): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:22)
+06-02 16:38:24.588 D/StrictMode( 9156): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(Unknown Source:6)
+06-02 16:38:24.588 D/StrictMode( 9156): at java.lang.reflect.Constructor.newInstance0(Native Method)
+06-02 16:38:24.588 D/StrictMode( 9156): at java.lang.reflect.Constructor.newInstance(Constructor.java:343)
+06-02 16:38:24.588 D/StrictMode( 9156): at android.view.LayoutInflater.createView(LayoutInflater.java:647)
+06-02 16:38:24.588 D/StrictMode( 9156): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:790)
+06-02 16:38:24.588 D/StrictMode( 9156): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:24.588 D/StrictMode( 9156): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:24.588 D/StrictMode( 9156): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:24.588 D/StrictMode( 9156): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:24.588 D/StrictMode( 9156): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:24.588 D/StrictMode( 9156): at android.view.LayoutInflater.inflate(LayoutInflater.java:515)
+06-02 16:38:24.588 D/StrictMode( 9156): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:24.588 D/StrictMode( 9156): at org.mozilla.fenix.home.HomeFragment.onCreateView(HomeFragment.kt:183)
+06-02 16:38:24.588 D/StrictMode( 9156): at androidx.fragment.app.Fragment.performCreateView(Fragment.java:2698)
+06-02 16:38:24.588 D/StrictMode( 9156): at androidx.fragment.app.FragmentStateManager.createView(FragmentStateManager.java:320)
+06-02 16:38:24.588 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1187)
+06-02 16:38:24.588 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.addAddedFragments(FragmentManager.java:2224)
+06-02 16:38:24.588 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.executeOpsTogether(FragmentManager.java:1997)
+06-02 16:38:24.588 D/StrictMode( 9156): at androidx.fragme
+06-02 16:38:24.588 W/lla.fenix.debu( 9156): Accessing hidden method Landroid/graphics/FontFamily;->addFontFromBuffer(Ljava/nio/ByteBuffer;I[Landroid/graphics/fonts/FontVariationAxis;II)Z (light greylist, reflection)
+06-02 16:38:24.590 D/StrictMode( 9156): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/graphics/FontFamily;->addFontFromBuffer(Ljava/nio/ByteBuffer;I[Landroid/graphics/fonts/FontVariationAxis;II)Z
+06-02 16:38:24.590 D/StrictMode( 9156): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:24.590 D/StrictMode( 9156): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:24.590 D/StrictMode( 9156): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:24.590 D/StrictMode( 9156): at java.lang.Class.getPublicMethodRecursive(Class.java:2075)
+06-02 16:38:24.590 D/StrictMode( 9156): at java.lang.Class.getMethod(Class.java:2063)
+06-02 16:38:24.590 D/StrictMode( 9156): at java.lang.Class.getMethod(Class.java:1690)
+06-02 16:38:24.590 D/StrictMode( 9156): at androidx.core.graphics.TypefaceCompatApi26Impl.obtainAddFontFromBufferMethod(TypefaceCompatApi26Impl.java:333)
+06-02 16:38:24.590 D/StrictMode( 9156): at androidx.core.graphics.TypefaceCompatApi26Impl.<init>(TypefaceCompatApi26Impl.java:86)
+06-02 16:38:24.590 D/StrictMode( 9156): at androidx.core.graphics.TypefaceCompatApi28Impl.<init>(TypefaceCompatApi28Impl.java:36)
+06-02 16:38:24.590 D/StrictMode( 9156): at androidx.core.graphics.TypefaceCompat.<clinit>(TypefaceCompat.java:51)
+06-02 16:38:24.590 D/StrictMode( 9156): at androidx.core.graphics.TypefaceCompat.create(TypefaceCompat.java:194)
+06-02 16:38:24.590 D/StrictMode( 9156): at androidx.appcompat.widget.AppCompatTextView.setTypeface(AppCompatTextView.java:708)
+06-02 16:38:24.590 D/StrictMode( 9156): at android.widget.TextView.resolveStyleAndSetTypeface(TextView.java:2037)
+06-02 16:38:24.590 D/StrictMode( 9156): at android.widget.TextView.setTypefaceFromAttrs(TextView.java:2008)
+06-02 16:38:24.590 D/StrictMode( 9156): at android.widget.TextView.applyTextAppearance(TextView.java:3640)
+06-02 16:38:24.590 D/StrictMode( 9156): at android.widget.TextView.<init>(TextView.java:1498)
+06-02 16:38:24.590 D/StrictMode( 9156): at android.widget.TextView.<init>(TextView.java:869)
+06-02 16:38:24.590 D/StrictMode( 9156): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:100)
+06-02 16:38:24.590 D/StrictMode( 9156): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:95)
+06-02 16:38:24.590 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatViewInflater.createTextView(AppCompatViewInflater.java:194)
+06-02 16:38:24.590 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatViewInflater.createView(AppCompatViewInflater.java:115)
+06-02 16:38:24.590 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatDelegateImpl.createView(AppCompatDelegateImpl.java:1548)
+06-02 16:38:24.590 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatDelegateImpl.onCreateView(AppCompatDelegateImpl.java:1599)
+06-02 16:38:24.590 D/StrictMode( 9156): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:772)
+06-02 16:38:24.590 D/StrictMode( 9156): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:24.590 D/StrictMode( 9156): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:24.590 D/StrictMode( 9156): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:24.590 D/StrictMode( 9156): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:24.590 D/StrictMode( 9156): at android.view.LayoutInflater.inflate(LayoutInflater.java:489)
+06-02 16:38:24.590 D/StrictMode( 9156): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:24.590 D/StrictMode( 9156): at android.view.LayoutInflater.inflate(LayoutInflater.java:374)
+06-02 16:38:24.590 D/StrictMode( 9156): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:30)
+06-02 16:38:24.590 D/StrictMode( 9156): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:22)
+06-02 16:38:24.590 D/StrictMode( 9156): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(Unknown Source:6)
+06-02 16:38:24.590 D/StrictMode( 9156): at java.lang.reflect.Constructor.newInstance0(Native Method)
+06-02 16:38:24.590 D/StrictMode( 9156): at java.lang.reflect.Constructor.newInstance(Constructor.java:343)
+06-02 16:38:24.590 D/StrictMode( 9156): at android.view.LayoutInflater.createView(LayoutInflater.java:647)
+06-02 16:38:24.590 D/StrictMode( 9156): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:790)
+06-02 16:38:24.590 D/StrictMode( 9156): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:24.590 D/StrictMode( 9156): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:24.590 D/StrictMode( 9156): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:24.590 D/StrictMode( 9156): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:24.590 D/StrictMode( 9156): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:24.590 D/StrictMode( 9156): at android.view.LayoutInflater.inflate(LayoutInflater.java:515)
+06-02 16:38:24.590 D/StrictMode( 9156): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:24.590 D/StrictMode( 9156): at org.mozilla.fenix.home.HomeFragment.onCreateView(HomeFragment.kt:183)
+06-02 16:38:24.590 D/StrictMode( 9156): at androidx.fragment.app.Fragment.performCreateView(Fragment.java:2698)
+06-02 16:38:24.590 D/StrictMode( 9156): at androidx.fragment.app.FragmentStateManager.createView(FragmentStateManager.java:320)
+06-02 16:38:24.590 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1187)
+06-02 16:38:24.590 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.addAddedFragments(FragmentManager.java:2224)
+06-02 16:38:24.590 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.executeOpsTogether(FragmentManager.java:1997)
+06-02 16:38:24.590 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.removeRedundantOperati
+06-02 16:38:24.591 W/lla.fenix.debu( 9156): Accessing hidden method Landroid/graphics/FontFamily;->freeze()Z (light greylist, reflection)
+06-02 16:38:24.591 D/StrictMode( 9156): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/graphics/FontFamily;->freeze()Z
+06-02 16:38:24.591 D/StrictMode( 9156): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:24.591 D/StrictMode( 9156): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:24.591 D/StrictMode( 9156): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:24.591 D/StrictMode( 9156): at java.lang.Class.getPublicMethodRecursive(Class.java:2075)
+06-02 16:38:24.591 D/StrictMode( 9156): at java.lang.Class.getMethod(Class.java:2063)
+06-02 16:38:24.591 D/StrictMode( 9156): at java.lang.Class.getMethod(Class.java:1690)
+06-02 16:38:24.591 D/StrictMode( 9156): at androidx.core.graphics.TypefaceCompatApi26Impl.obtainFreezeMethod(TypefaceCompatApi26Impl.java:339)
+06-02 16:38:24.591 D/StrictMode( 9156): at androidx.core.graphics.TypefaceCompatApi26Impl.<init>(TypefaceCompatApi26Impl.java:87)
+06-02 16:38:24.591 D/StrictMode( 9156): at androidx.core.graphics.TypefaceCompatApi28Impl.<init>(TypefaceCompatApi28Impl.java:36)
+06-02 16:38:24.591 D/StrictMode( 9156): at androidx.core.graphics.TypefaceCompat.<clinit>(TypefaceCompat.java:51)
+06-02 16:38:24.591 D/StrictMode( 9156): at androidx.core.graphics.TypefaceCompat.create(TypefaceCompat.java:194)
+06-02 16:38:24.591 D/StrictMode( 9156): at androidx.appcompat.widget.AppCompatTextView.setTypeface(AppCompatTextView.java:708)
+06-02 16:38:24.591 D/StrictMode( 9156): at android.widget.TextView.resolveStyleAndSetTypeface(TextView.java:2037)
+06-02 16:38:24.591 D/StrictMode( 9156): at android.widget.TextView.setTypefaceFromAttrs(TextView.java:2008)
+06-02 16:38:24.591 D/StrictMode( 9156): at android.widget.TextView.applyTextAppearance(TextView.java:3640)
+06-02 16:38:24.591 D/StrictMode( 9156): at android.widget.TextView.<init>(TextView.java:1498)
+06-02 16:38:24.591 D/StrictMode( 9156): at android.widget.TextView.<init>(TextView.java:869)
+06-02 16:38:24.591 D/StrictMode( 9156): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:100)
+06-02 16:38:24.591 D/StrictMode( 9156): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:95)
+06-02 16:38:24.591 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatViewInflater.createTextView(AppCompatViewInflater.java:194)
+06-02 16:38:24.591 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatViewInflater.createView(AppCompatViewInflater.java:115)
+06-02 16:38:24.591 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatDelegateImpl.createView(AppCompatDelegateImpl.java:1548)
+06-02 16:38:24.591 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatDelegateImpl.onCreateView(AppCompatDelegateImpl.java:1599)
+06-02 16:38:24.591 D/StrictMode( 9156): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:772)
+06-02 16:38:24.591 D/StrictMode( 9156): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:24.591 D/StrictMode( 9156): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:24.591 D/StrictMode( 9156): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:24.591 D/StrictMode( 9156): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:24.591 D/StrictMode( 9156): at android.view.LayoutInflater.inflate(LayoutInflater.java:489)
+06-02 16:38:24.591 D/StrictMode( 9156): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:24.591 D/StrictMode( 9156): at android.view.LayoutInflater.inflate(LayoutInflater.java:374)
+06-02 16:38:24.591 D/StrictMode( 9156): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:30)
+06-02 16:38:24.591 D/StrictMode( 9156): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:22)
+06-02 16:38:24.591 D/StrictMode( 9156): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(Unknown Source:6)
+06-02 16:38:24.591 D/StrictMode( 9156): at java.lang.reflect.Constructor.newInstance0(Native Method)
+06-02 16:38:24.591 D/StrictMode( 9156): at java.lang.reflect.Constructor.newInstance(Constructor.java:343)
+06-02 16:38:24.591 D/StrictMode( 9156): at android.view.LayoutInflater.createView(LayoutInflater.java:647)
+06-02 16:38:24.591 D/StrictMode( 9156): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:790)
+06-02 16:38:24.591 D/StrictMode( 9156): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:24.591 D/StrictMode( 9156): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:24.591 D/StrictMode( 9156): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:24.591 D/StrictMode( 9156): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:24.591 D/StrictMode( 9156): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:24.591 D/StrictMode( 9156): at android.view.LayoutInflater.inflate(LayoutInflater.java:515)
+06-02 16:38:24.591 D/StrictMode( 9156): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:24.591 D/StrictMode( 9156): at org.mozilla.fenix.home.HomeFragment.onCreateView(HomeFragment.kt:183)
+06-02 16:38:24.591 D/StrictMode( 9156): at androidx.fragment.app.Fragment.performCreateView(Fragment.java:2698)
+06-02 16:38:24.591 D/StrictMode( 9156): at androidx.fragment.app.FragmentStateManager.createView(FragmentStateManager.java:320)
+06-02 16:38:24.591 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1187)
+06-02 16:38:24.591 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.addAddedFragments(FragmentManager.java:2224)
+06-02 16:38:24.591 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.executeOpsTogether(FragmentManager.java:1997)
+06-02 16:38:24.591 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.removeRedundantOperationsAndExecute(FragmentManager.java:1953)
+06-02 16:38:24.591 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.execPe
+06-02 16:38:24.591 W/lla.fenix.debu( 9156): Accessing hidden method Landroid/graphics/FontFamily;->abortCreation()V (light greylist, reflection)
+06-02 16:38:24.592 D/StrictMode( 9156): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/graphics/FontFamily;->abortCreation()V
+06-02 16:38:24.592 D/StrictMode( 9156): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:24.592 D/StrictMode( 9156): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:24.592 D/StrictMode( 9156): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:24.592 D/StrictMode( 9156): at java.lang.Class.getPublicMethodRecursive(Class.java:2075)
+06-02 16:38:24.592 D/StrictMode( 9156): at java.lang.Class.getMethod(Class.java:2063)
+06-02 16:38:24.592 D/StrictMode( 9156): at java.lang.Class.getMethod(Class.java:1690)
+06-02 16:38:24.592 D/StrictMode( 9156): at androidx.core.graphics.TypefaceCompatApi26Impl.obtainAbortCreationMethod(TypefaceCompatApi26Impl.java:343)
+06-02 16:38:24.592 D/StrictMode( 9156): at androidx.core.graphics.TypefaceCompatApi26Impl.<init>(TypefaceCompatApi26Impl.java:88)
+06-02 16:38:24.592 D/StrictMode( 9156): at androidx.core.graphics.TypefaceCompatApi28Impl.<init>(TypefaceCompatApi28Impl.java:36)
+06-02 16:38:24.592 D/StrictMode( 9156): at androidx.core.graphics.TypefaceCompat.<clinit>(TypefaceCompat.java:51)
+06-02 16:38:24.592 D/StrictMode( 9156): at androidx.core.graphics.TypefaceCompat.create(TypefaceCompat.java:194)
+06-02 16:38:24.592 D/StrictMode( 9156): at androidx.appcompat.widget.AppCompatTextView.setTypeface(AppCompatTextView.java:708)
+06-02 16:38:24.592 D/StrictMode( 9156): at android.widget.TextView.resolveStyleAndSetTypeface(TextView.java:2037)
+06-02 16:38:24.592 D/StrictMode( 9156): at android.widget.TextView.setTypefaceFromAttrs(TextView.java:2008)
+06-02 16:38:24.592 D/StrictMode( 9156): at android.widget.TextView.applyTextAppearance(TextView.java:3640)
+06-02 16:38:24.592 D/StrictMode( 9156): at android.widget.TextView.<init>(TextView.java:1498)
+06-02 16:38:24.592 D/StrictMode( 9156): at android.widget.TextView.<init>(TextView.java:869)
+06-02 16:38:24.592 D/StrictMode( 9156): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:100)
+06-02 16:38:24.592 D/StrictMode( 9156): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:95)
+06-02 16:38:24.592 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatViewInflater.createTextView(AppCompatViewInflater.java:194)
+06-02 16:38:24.592 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatViewInflater.createView(AppCompatViewInflater.java:115)
+06-02 16:38:24.592 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatDelegateImpl.createView(AppCompatDelegateImpl.java:1548)
+06-02 16:38:24.592 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatDelegateImpl.onCreateView(AppCompatDelegateImpl.java:1599)
+06-02 16:38:24.592 D/StrictMode( 9156): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:772)
+06-02 16:38:24.592 D/StrictMode( 9156): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:24.592 D/StrictMode( 9156): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:24.592 D/StrictMode( 9156): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:24.592 D/StrictMode( 9156): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:24.592 D/StrictMode( 9156): at android.view.LayoutInflater.inflate(LayoutInflater.java:489)
+06-02 16:38:24.592 D/StrictMode( 9156): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:24.592 D/StrictMode( 9156): at android.view.LayoutInflater.inflate(LayoutInflater.java:374)
+06-02 16:38:24.592 D/StrictMode( 9156): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:30)
+06-02 16:38:24.592 D/StrictMode( 9156): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:22)
+06-02 16:38:24.592 D/StrictMode( 9156): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(Unknown Source:6)
+06-02 16:38:24.592 D/StrictMode( 9156): at java.lang.reflect.Constructor.newInstance0(Native Method)
+06-02 16:38:24.592 D/StrictMode( 9156): at java.lang.reflect.Constructor.newInstance(Constructor.java:343)
+06-02 16:38:24.592 D/StrictMode( 9156): at android.view.LayoutInflater.createView(LayoutInflater.java:647)
+06-02 16:38:24.592 D/StrictMode( 9156): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:790)
+06-02 16:38:24.592 D/StrictMode( 9156): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:24.592 D/StrictMode( 9156): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:24.592 D/StrictMode( 9156): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:24.592 D/StrictMode( 9156): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:24.592 D/StrictMode( 9156): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:24.592 D/StrictMode( 9156): at android.view.LayoutInflater.inflate(LayoutInflater.java:515)
+06-02 16:38:24.592 D/StrictMode( 9156): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:24.592 D/StrictMode( 9156): at org.mozilla.fenix.home.HomeFragment.onCreateView(HomeFragment.kt:183)
+06-02 16:38:24.592 D/StrictMode( 9156): at androidx.fragment.app.Fragment.performCreateView(Fragment.java:2698)
+06-02 16:38:24.592 D/StrictMode( 9156): at androidx.fragment.app.FragmentStateManager.createView(FragmentStateManager.java:320)
+06-02 16:38:24.592 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1187)
+06-02 16:38:24.592 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.addAddedFragments(FragmentManager.java:2224)
+06-02 16:38:24.592 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.executeOpsTogether(FragmentManager.java:1997)
+06-02 16:38:24.592 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.removeRedundantOperationsAndExecute(FragmentManager.java:1953)
+06-02 16:38:24.592 D/StrictMode( 9156): at androidx.fragment.app.Fragment
+06-02 16:38:24.592 W/lla.fenix.debu( 9156): Accessing hidden method Landroid/graphics/Typeface;->createFromFamiliesWithDefault([Landroid/graphics/FontFamily;Ljava/lang/String;II)Landroid/graphics/Typeface; (light greylist, reflection)
+06-02 16:38:24.594 D/StrictMode( 9156): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/graphics/Typeface;->createFromFamiliesWithDefault([Landroid/graphics/FontFamily;Ljava/lang/String;II)Landroid/graphics/Typeface;
+06-02 16:38:24.594 D/StrictMode( 9156): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:24.594 D/StrictMode( 9156): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:24.594 D/StrictMode( 9156): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:24.594 D/StrictMode( 9156): at java.lang.Class.getMethod(Class.java:2064)
+06-02 16:38:24.594 D/StrictMode( 9156): at java.lang.Class.getDeclaredMethod(Class.java:2047)
+06-02 16:38:24.594 D/StrictMode( 9156): at androidx.core.graphics.TypefaceCompatApi28Impl.obtainCreateFromFamiliesWithDefaultMethod(TypefaceCompatApi28Impl.java:62)
+06-02 16:38:24.594 D/StrictMode( 9156): at androidx.core.graphics.TypefaceCompatApi26Impl.<init>(TypefaceCompatApi26Impl.java:89)
+06-02 16:38:24.594 D/StrictMode( 9156): at androidx.core.graphics.TypefaceCompatApi28Impl.<init>(TypefaceCompatApi28Impl.java:36)
+06-02 16:38:24.594 D/StrictMode( 9156): at androidx.core.graphics.TypefaceCompat.<clinit>(TypefaceCompat.java:51)
+06-02 16:38:24.594 D/StrictMode( 9156): at androidx.core.graphics.TypefaceCompat.create(TypefaceCompat.java:194)
+06-02 16:38:24.594 D/StrictMode( 9156): at androidx.appcompat.widget.AppCompatTextView.setTypeface(AppCompatTextView.java:708)
+06-02 16:38:24.594 D/StrictMode( 9156): at android.widget.TextView.resolveStyleAndSetTypeface(TextView.java:2037)
+06-02 16:38:24.594 D/StrictMode( 9156): at android.widget.TextView.setTypefaceFromAttrs(TextView.java:2008)
+06-02 16:38:24.594 D/StrictMode( 9156): at android.widget.TextView.applyTextAppearance(TextView.java:3640)
+06-02 16:38:24.594 D/StrictMode( 9156): at android.widget.TextView.<init>(TextView.java:1498)
+06-02 16:38:24.594 D/StrictMode( 9156): at android.widget.TextView.<init>(TextView.java:869)
+06-02 16:38:24.594 D/StrictMode( 9156): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:100)
+06-02 16:38:24.594 D/StrictMode( 9156): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:95)
+06-02 16:38:24.594 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatViewInflater.createTextView(AppCompatViewInflater.java:194)
+06-02 16:38:24.594 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatViewInflater.createView(AppCompatViewInflater.java:115)
+06-02 16:38:24.594 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatDelegateImpl.createView(AppCompatDelegateImpl.java:1548)
+06-02 16:38:24.594 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatDelegateImpl.onCreateView(AppCompatDelegateImpl.java:1599)
+06-02 16:38:24.594 D/StrictMode( 9156): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:772)
+06-02 16:38:24.594 D/StrictMode( 9156): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:24.594 D/StrictMode( 9156): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:24.594 D/StrictMode( 9156): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:24.594 D/StrictMode( 9156): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:24.594 D/StrictMode( 9156): at android.view.LayoutInflater.inflate(LayoutInflater.java:489)
+06-02 16:38:24.594 D/StrictMode( 9156): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:24.594 D/StrictMode( 9156): at android.view.LayoutInflater.inflate(LayoutInflater.java:374)
+06-02 16:38:24.594 D/StrictMode( 9156): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:30)
+06-02 16:38:24.594 D/StrictMode( 9156): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:22)
+06-02 16:38:24.594 D/StrictMode( 9156): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(Unknown Source:6)
+06-02 16:38:24.594 D/StrictMode( 9156): at java.lang.reflect.Constructor.newInstance0(Native Method)
+06-02 16:38:24.594 D/StrictMode( 9156): at java.lang.reflect.Constructor.newInstance(Constructor.java:343)
+06-02 16:38:24.594 D/StrictMode( 9156): at android.view.LayoutInflater.createView(LayoutInflater.java:647)
+06-02 16:38:24.594 D/StrictMode( 9156): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:790)
+06-02 16:38:24.594 D/StrictMode( 9156): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:24.594 D/StrictMode( 9156): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:24.594 D/StrictMode( 9156): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:24.594 D/StrictMode( 9156): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:24.594 D/StrictMode( 9156): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:24.594 D/StrictMode( 9156): at android.view.LayoutInflater.inflate(LayoutInflater.java:515)
+06-02 16:38:24.594 D/StrictMode( 9156): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:24.594 D/StrictMode( 9156): at org.mozilla.fenix.home.HomeFragment.onCreateView(HomeFragment.kt:183)
+06-02 16:38:24.594 D/StrictMode( 9156): at androidx.fragment.app.Fragment.performCreateView(Fragment.java:2698)
+06-02 16:38:24.594 D/StrictMode( 9156): at androidx.fragment.app.FragmentStateManager.createView(FragmentStateManager.java:320)
+06-02 16:38:24.594 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1187)
+06-02 16:38:24.594 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.addAddedFragments(FragmentManager.java:2224)
+06-02 16:38:24.594 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.executeOpsTogether(FragmentManager.java:1997)
+06-02 16:38:24.594 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.removeRedundantOperationsAndExecute(FragmentMa
+06-02 16:38:24.706 D/GeckoRuntime( 9156): Lifecycle: onStart
+06-02 16:38:24.710 D/GeckoRuntime( 9156): Lifecycle: onResume
+06-02 16:38:24.713 D/GeckoNetworkManager( 9156): Incoming event start for state OffNoListeners -> OnNoListeners
+06-02 16:38:24.714 D/GeckoNetworkManager( 9156): New network state: UP, WIFI, WIFI
+06-02 16:38:24.716 D/OpenGLRenderer( 9156): Skia GL Pipeline
+06-02 16:38:24.733 E/SurfaceFlinger( 1728): ro.sf.lcd_density must be defined as a build property
+06-02 16:38:24.756 D/StrictMode( 9156): StrictMode policy violation; ~duration=372 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:24.756 D/StrictMode( 9156): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:24.756 D/StrictMode( 9156): at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+06-02 16:38:24.756 D/StrictMode( 9156): at java.io.File.exists(File.java:815)
+06-02 16:38:24.756 D/StrictMode( 9156): at android.app.ContextImpl.getDataDir(ContextImpl.java:2253)
+06-02 16:38:24.756 D/StrictMode( 9156): at android.app.ContextImpl.getPreferencesDir(ContextImpl.java:550)
+06-02 16:38:24.756 D/StrictMode( 9156): at android.app.ContextImpl.getSharedPreferencesPath(ContextImpl.java:747)
+06-02 16:38:24.756 D/StrictMode( 9156): at android.app.ContextImpl.getSharedPreferences(ContextImpl.java:400)
+06-02 16:38:24.756 D/StrictMode( 9156): at mozilla.components.support.locale.LocaleManager$Storage.getSharedPreferences(LocaleManager.kt:123)
+06-02 16:38:24.756 D/StrictMode( 9156): at mozilla.components.support.locale.LocaleManager$Storage.getLocale(LocaleManager.kt:99)
+06-02 16:38:24.756 D/StrictMode( 9156): at mozilla.components.support.locale.LocaleManager.getCurrentLocale(LocaleManager.kt:42)
+06-02 16:38:24.756 D/StrictMode( 9156): at mozilla.components.support.locale.LocaleManager.updateResources$support_locale_release(LocaleManager.kt:72)
+06-02 16:38:24.756 D/StrictMode( 9156): at mozilla.components.support.locale.LocaleAwareAppCompatActivity.attachBaseContext(LocaleAwareAppCompatActivity.kt:18)
+06-02 16:38:24.756 D/StrictMode( 9156): at android.app.Activity.attach(Activity.java:7051)
+06-02 16:38:24.756 D/StrictMode( 9156): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2873)
+06-02 16:38:24.756 D/StrictMode( 9156): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3048)
+06-02 16:38:24.756 D/StrictMode( 9156): at android.app.servertransaction.LaunchActivityItem.execute(LaunchActivityItem.java:78)
+06-02 16:38:24.756 D/StrictMode( 9156): at android.app.servertransaction.TransactionExecutor.executeCallbacks(TransactionExecutor.java:108)
+06-02 16:38:24.756 D/StrictMode( 9156): at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:68)
+06-02 16:38:24.756 D/StrictMode( 9156): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1808)
+06-02 16:38:24.756 D/StrictMode( 9156): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:24.756 D/StrictMode( 9156): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:24.756 D/StrictMode( 9156): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:24.756 D/StrictMode( 9156): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:24.756 D/StrictMode( 9156): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:24.756 D/StrictMode( 9156): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:24.762 D/StrictMode( 9156): StrictMode policy violation; ~duration=372 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:24.762 D/StrictMode( 9156): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:24.762 D/StrictMode( 9156): at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+06-02 16:38:24.762 D/StrictMode( 9156): at java.io.File.exists(File.java:815)
+06-02 16:38:24.762 D/StrictMode( 9156): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:605)
+06-02 16:38:24.762 D/StrictMode( 9156): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:596)
+06-02 16:38:24.762 D/StrictMode( 9156): at android.app.ContextImpl.getPreferencesDir(ContextImpl.java:552)
+06-02 16:38:24.762 D/StrictMode( 9156): at android.app.ContextImpl.getSharedPreferencesPath(ContextImpl.java:747)
+06-02 16:38:24.762 D/StrictMode( 9156): at android.app.ContextImpl.getSharedPreferences(ContextImpl.java:400)
+06-02 16:38:24.762 D/StrictMode( 9156): at mozilla.components.support.locale.LocaleManager$Storage.getSharedPreferences(LocaleManager.kt:123)
+06-02 16:38:24.762 D/StrictMode( 9156): at mozilla.components.support.locale.LocaleManager$Storage.getLocale(LocaleManager.kt:99)
+06-02 16:38:24.762 D/StrictMode( 9156): at mozilla.components.support.locale.LocaleManager.getCurrentLocale(LocaleManager.kt:42)
+06-02 16:38:24.762 D/StrictMode( 9156): at mozilla.components.support.locale.LocaleManager.updateResources$support_locale_release(LocaleManager.kt:72)
+06-02 16:38:24.762 D/StrictMode( 9156): at mozilla.components.support.locale.LocaleAwareAppCompatActivity.attachBaseContext(LocaleAwareAppCompatActivity.kt:18)
+06-02 16:38:24.762 D/StrictMode( 9156): at android.app.Activity.attach(Activity.java:7051)
+06-02 16:38:24.762 D/StrictMode( 9156): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2873)
+06-02 16:38:24.762 D/StrictMode( 9156): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3048)
+06-02 16:38:24.762 D/StrictMode( 9156): at android.app.servertransaction.LaunchActivityItem.execute(LaunchActivityItem.java:78)
+06-02 16:38:24.762 D/StrictMode( 9156): at android.app.servertransaction.TransactionExecutor.executeCallbacks(TransactionExecutor.java:108)
+06-02 16:38:24.762 D/StrictMode( 9156): at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:68)
+06-02 16:38:24.762 D/StrictMode( 9156): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1808)
+06-02 16:38:24.762 D/StrictMode( 9156): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:24.762 D/StrictMode( 9156): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:24.762 D/StrictMode( 9156): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:24.762 D/StrictMode( 9156): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:24.762 D/StrictMode( 9156): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:24.762 D/StrictMode( 9156): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:24.765 D/StrictMode( 9156): StrictMode policy violation; ~duration=259 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:24.765 D/StrictMode( 9156): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:24.765 D/StrictMode( 9156): at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+06-02 16:38:24.765 D/StrictMode( 9156): at java.io.File.exists(File.java:815)
+06-02 16:38:24.765 D/StrictMode( 9156): at android.app.ContextImpl.getDataDir(ContextImpl.java:2253)
+06-02 16:38:24.765 D/StrictMode( 9156): at android.app.ContextImpl.getPreferencesDir(ContextImpl.java:550)
+06-02 16:38:24.765 D/StrictMode( 9156): at android.app.ContextImpl.getSharedPreferencesPath(ContextImpl.java:747)
+06-02 16:38:24.765 D/StrictMode( 9156): at android.app.ContextImpl.getSharedPreferences(ContextImpl.java:400)
+06-02 16:38:24.765 D/StrictMode( 9156): at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+06-02 16:38:24.765 D/StrictMode( 9156): at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+06-02 16:38:24.765 D/StrictMode( 9156): at org.mozilla.fenix.onboarding.FenixOnboarding.<init>(FenixOnboarding.kt:15)
+06-02 16:38:24.765 D/StrictMode( 9156): at org.mozilla.fenix.perf.Performance.disableOnboarding(Performance.kt:72)
+06-02 16:38:24.765 D/StrictMode( 9156): at org.mozilla.fenix.perf.Performance.processIntentIfPerformanceTest(Performance.kt:32)
+06-02 16:38:24.765 D/StrictMode( 9156): at org.mozilla.fenix.HomeActivity.onCreate(HomeActivity.kt:145)
+06-02 16:38:24.765 D/StrictMode( 9156): at android.app.Activity.performCreate(Activity.java:7136)
+06-02 16:38:24.765 D/StrictMode( 9156): at android.app.Activity.performCreate(Activity.java:7127)
+06-02 16:38:24.765 D/StrictMode( 9156): at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1271)
+06-02 16:38:24.765 D/StrictMode( 9156): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2893)
+06-02 16:38:24.765 D/StrictMode( 9156): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3048)
+06-02 16:38:24.765 D/StrictMode( 9156): at android.app.servertransaction.LaunchActivityItem.execute(LaunchActivityItem.java:78)
+06-02 16:38:24.765 D/StrictMode( 9156): at android.app.servertransaction.TransactionExecutor.executeCallbacks(TransactionExecutor.java:108)
+06-02 16:38:24.765 D/StrictMode( 9156): at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:68)
+06-02 16:38:24.765 D/StrictMode( 9156): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1808)
+06-02 16:38:24.765 D/StrictMode( 9156): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:24.765 D/StrictMode( 9156): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:24.765 D/StrictMode( 9156): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:24.765 D/StrictMode( 9156): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:24.765 D/StrictMode( 9156): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:24.765 D/StrictMode( 9156): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:24.772 D/StrictMode( 9156): StrictMode policy violation; ~duration=259 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:24.772 D/StrictMode( 9156): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:24.772 D/StrictMode( 9156): at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+06-02 16:38:24.772 D/StrictMode( 9156): at java.io.File.exists(File.java:815)
+06-02 16:38:24.772 D/StrictMode( 9156): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:605)
+06-02 16:38:24.772 D/StrictMode( 9156): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:596)
+06-02 16:38:24.772 D/StrictMode( 9156): at android.app.ContextImpl.getPreferencesDir(ContextImpl.java:552)
+06-02 16:38:24.772 D/StrictMode( 9156): at android.app.ContextImpl.getSharedPreferencesPath(ContextImpl.java:747)
+06-02 16:38:24.772 D/StrictMode( 9156): at android.app.ContextImpl.getSharedPreferences(ContextImpl.java:400)
+06-02 16:38:24.772 D/StrictMode( 9156): at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+06-02 16:38:24.772 D/StrictMode( 9156): at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+06-02 16:38:24.772 D/StrictMode( 9156): at org.mozilla.fenix.onboarding.FenixOnboarding.<init>(FenixOnboarding.kt:15)
+06-02 16:38:24.772 D/StrictMode( 9156): at org.mozilla.fenix.perf.Performance.disableOnboarding(Performance.kt:72)
+06-02 16:38:24.772 D/StrictMode( 9156): at org.mozilla.fenix.perf.Performance.processIntentIfPerformanceTest(Performance.kt:32)
+06-02 16:38:24.772 D/StrictMode( 9156): at org.mozilla.fenix.HomeActivity.onCreate(HomeActivity.kt:145)
+06-02 16:38:24.772 D/StrictMode( 9156): at android.app.Activity.performCreate(Activity.java:7136)
+06-02 16:38:24.772 D/StrictMode( 9156): at android.app.Activity.performCreate(Activity.java:7127)
+06-02 16:38:24.772 D/StrictMode( 9156): at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1271)
+06-02 16:38:24.772 D/StrictMode( 9156): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2893)
+06-02 16:38:24.772 D/StrictMode( 9156): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3048)
+06-02 16:38:24.772 D/StrictMode( 9156): at android.app.servertransaction.LaunchActivityItem.execute(LaunchActivityItem.java:78)
+06-02 16:38:24.772 D/StrictMode( 9156): at android.app.servertransaction.TransactionExecutor.executeCallbacks(TransactionExecutor.java:108)
+06-02 16:38:24.772 D/StrictMode( 9156): at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:68)
+06-02 16:38:24.772 D/StrictMode( 9156): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1808)
+06-02 16:38:24.772 D/StrictMode( 9156): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:24.772 D/StrictMode( 9156): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:24.772 D/StrictMode( 9156): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:24.772 D/StrictMode( 9156): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:24.772 D/StrictMode( 9156): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:24.772 D/StrictMode( 9156): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:24.774 D/StrictMode( 9156): StrictMode policy violation; ~duration=257 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:24.774 D/StrictMode( 9156): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:24.774 D/StrictMode( 9156): at android.app.SharedPreferencesImpl.awaitLoadedLocked(SharedPreferencesImpl.java:256)
+06-02 16:38:24.774 D/StrictMode( 9156): at android.app.SharedPreferencesImpl.edit(SharedPreferencesImpl.java:349)
+06-02 16:38:24.774 D/StrictMode( 9156): at org.mozilla.fenix.onboarding.FenixOnboarding.setOnboardedVersion(FenixOnboarding.kt:42)
+06-02 16:38:24.774 D/StrictMode( 9156): at org.mozilla.fenix.onboarding.FenixOnboarding.finish(FenixOnboarding.kt:25)
+06-02 16:38:24.774 D/StrictMode( 9156): at org.mozilla.fenix.perf.Performance.disableOnboarding(Performance.kt:72)
+06-02 16:38:24.774 D/StrictMode( 9156): at org.mozilla.fenix.perf.Performance.processIntentIfPerformanceTest(Performance.kt:32)
+06-02 16:38:24.774 D/StrictMode( 9156): at org.mozilla.fenix.HomeActivity.onCreate(HomeActivity.kt:145)
+06-02 16:38:24.774 D/StrictMode( 9156): at android.app.Activity.performCreate(Activity.java:7136)
+06-02 16:38:24.774 D/StrictMode( 9156): at android.app.Activity.performCreate(Activity.java:7127)
+06-02 16:38:24.774 D/StrictMode( 9156): at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1271)
+06-02 16:38:24.774 D/StrictMode( 9156): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2893)
+06-02 16:38:24.774 D/StrictMode( 9156): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3048)
+06-02 16:38:24.774 D/StrictMode( 9156): at android.app.servertransaction.LaunchActivityItem.execute(LaunchActivityItem.java:78)
+06-02 16:38:24.774 D/StrictMode( 9156): at android.app.servertransaction.TransactionExecutor.executeCallbacks(TransactionExecutor.java:108)
+06-02 16:38:24.774 D/StrictMode( 9156): at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:68)
+06-02 16:38:24.774 D/StrictMode( 9156): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1808)
+06-02 16:38:24.774 D/StrictMode( 9156): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:24.774 D/StrictMode( 9156): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:24.774 D/StrictMode( 9156): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:24.774 D/StrictMode( 9156): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:24.774 D/StrictMode( 9156): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:24.774 D/StrictMode( 9156): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:24.775 D/StrictMode( 9156): StrictMode policy violation; ~duration=100 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:24.775 D/StrictMode( 9156): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:24.775 D/StrictMode( 9156): at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+06-02 16:38:24.775 D/StrictMode( 9156): at java.io.File.exists(File.java:815)
+06-02 16:38:24.775 D/StrictMode( 9156): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:605)
+06-02 16:38:24.775 D/StrictMode( 9156): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:596)
+06-02 16:38:24.775 D/StrictMode( 9156): at android.app.ContextImpl.getPreferencesDir(ContextImpl.java:552)
+06-02 16:38:24.775 D/StrictMode( 9156): at android.app.ContextImpl.getSharedPreferencesPath(ContextImpl.java:747)
+06-02 16:38:24.775 D/StrictMode( 9156): at android.app.ContextImpl.getSharedPreferences(ContextImpl.java:400)
+06-02 16:38:24.775 D/StrictMode( 9156): at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+06-02 16:38:24.775 D/StrictMode( 9156): at mozilla.components.support.locale.LocaleManager$Storage.getSharedPreferences(LocaleManager.kt:123)
+06-02 16:38:24.775 D/StrictMode( 9156): at mozilla.components.support.locale.LocaleManager$Storage.getLocale(LocaleManager.kt:99)
+06-02 16:38:24.775 D/StrictMode( 9156): at mozilla.components.support.locale.LocaleManager.getCurrentLocale(LocaleManager.kt:42)
+06-02 16:38:24.775 D/StrictMode( 9156): at org.mozilla.fenix.settings.advanced.LocaleManagerExtensionKt.getSelectedLocale(LocaleManagerExtension.kt:39)
+06-02 16:38:24.775 D/StrictMode( 9156): at org.mozilla.fenix.settings.advanced.LocaleManagerExtensionKt.getSelectedLocale$default(LocaleManagerExtension.kt:37)
+06-02 16:38:24.775 D/StrictMode( 9156): at org.mozilla.fenix.components.TopSiteStorage.addDefaultTopSites(TopSiteStorage.kt:57)
+06-02 16:38:24.775 D/StrictMode( 9156): at org.mozilla.fenix.components.TopSiteStorage.<init>(TopSiteStorage.kt:30)
+06-02 16:38:24.775 D/StrictMode( 9156): at org.mozilla.fenix.components.Core$topSiteStorage$2.invoke(Core.kt:216)
+06-02 16:38:24.775 D/StrictMode( 9156): at org.mozilla.fenix.components.Core$topSiteStorage$2.invoke(Core.kt:57)
+06-02 16:38:24.775 D/StrictMode( 9156): at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+06-02 16:38:24.775 D/StrictMode( 9156): at org.mozilla.fenix.components.Core.getTopSiteStorage(Unknown Source:8)
+06-02 16:38:24.775 D/StrictMode( 9156): at org.mozilla.fenix.home.HomeFragment$onCreateView$2.invoke(HomeFragment.kt:210)
+06-02 16:38:24.775 D/StrictMode( 9156): at org.mozilla.fenix.home.HomeFragment$onCreateView$2.invoke(HomeFragment.kt:114)
+06-02 16:38:24.775 D/StrictMode( 9156): at org.mozilla.fenix.components.StoreProviderFactory.create(StoreProvider.kt:42)
+06-02 16:38:24.775 D/StrictMode( 9156): at androidx.lifecycle.ViewModelProvider.get(ViewModelProvider.java:187)
+06-02 16:38:24.775 D/StrictMode( 9156): at androidx.lifecycle.ViewModelProvider.get(ViewModelProvider.java:150)
+06-02 16:38:24.775 D/StrictMode( 9156): at org.mozilla.fenix.components.StoreProvider$Companion.get(StoreProvider.kt:46)
+06-02 16:38:24.775 D/StrictMode( 9156): at org.mozilla.fenix.home.HomeFragment.onCreateView(HomeFragment.kt:203)
+06-02 16:38:24.775 D/StrictMode( 9156): at androidx.fragment.app.Fragment.performCreateView(Fragment.java:2698)
+06-02 16:38:24.775 D/StrictMode( 9156): at androidx.fragment.app.FragmentStateManager.createView(FragmentStateManager.java:320)
+06-02 16:38:24.775 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1187)
+06-02 16:38:24.775 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.addAddedFragments(FragmentManager.java:2224)
+06-02 16:38:24.775 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.executeOpsTogether(FragmentManager.java:1997)
+06-02 16:38:24.775 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.removeRedundantOperationsAndExecute(FragmentManager.java:1953)
+06-02 16:38:24.775 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.execPendingActions(FragmentManager.java:1849)
+06-02 16:38:24.775 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.dispatchStateChange(FragmentManager.java:2629)
+06-02 16:38:24.775 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.dispatchActivityCreated(FragmentManager.java:2577)
+06-02 16:38:24.775 D/StrictMode( 9156): at androidx.fragment.app.Fragment.performActivityCreated(Fragment.java:2722)
+06-02 16:38:24.775 D/StrictMode( 9156): at androidx.fragment.app.FragmentStateManager.activityCreated(FragmentStateManager.java:346)
+06-02 16:38:24.775 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1188)
+06-02 16:38:24.775 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1356)
+06-02 16:38:24.775 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.moveFragmentToExpectedState(FragmentManager.java:1434)
+06-02 16:38:24.775 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1497)
+06-02 16:38:24.775 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.dispatchStateChange(FragmentManager.java:2625)
+06-02 16:38:24.775 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.dispatchActivityCreated(FragmentManager.java:2577)
+06-02 16:38:24.775 D/StrictMode( 9156): at androidx.fragment.app.FragmentController.dispatchActivityCreated(FragmentController.java:247)
+06-02 16:38:24.775 D/StrictMode( 9156): at androidx.fragment.app.FragmentActivity.onStart(FragmentActivity.java:541)
+06-02 16:38:24.775 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatActivity.onStart(AppCompatActivity.java:210)
+06-02 16:38:24.775 D/StrictMode( 9156): at android.app.Instrumentation.callActivityOnStart(Instrumentation.java:1391)
+06-02 16:38:24.775 D/StrictMode( 9156):
+06-02 16:38:24.779 D/StrictMode( 9156): StrictMode policy violation; ~duration=58 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:24.779 D/StrictMode( 9156): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:24.779 D/StrictMode( 9156): at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+06-02 16:38:24.779 D/StrictMode( 9156): at java.io.File.exists(File.java:815)
+06-02 16:38:24.779 D/StrictMode( 9156): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:605)
+06-02 16:38:24.779 D/StrictMode( 9156): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:596)
+06-02 16:38:24.779 D/StrictMode( 9156): at android.app.ContextImpl.getPreferencesDir(ContextImpl.java:552)
+06-02 16:38:24.779 D/StrictMode( 9156): at android.app.ContextImpl.getSharedPreferencesPath(ContextImpl.java:747)
+06-02 16:38:24.779 D/StrictMode( 9156): at android.app.ContextImpl.getSharedPreferences(ContextImpl.java:400)
+06-02 16:38:24.779 D/StrictMode( 9156): at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+06-02 16:38:24.779 D/StrictMode( 9156): at org.mozilla.fenix.components.AccountAbnormalities.<init>(AccountAbnormalities.kt:78)
+06-02 16:38:24.779 D/StrictMode( 9156): at org.mozilla.fenix.components.AccountAbnormalities.<init>(AccountAbnormalities.kt:60)
+06-02 16:38:24.779 D/StrictMode( 9156): at org.mozilla.fenix.components.BackgroundServices.<init>(BackgroundServices.kt:103)
+06-02 16:38:24.779 D/StrictMode( 9156): at org.mozilla.fenix.components.Components$backgroundServices$2.invoke(Components.kt:34)
+06-02 16:38:24.779 D/StrictMode( 9156): at org.mozilla.fenix.components.Components$backgroundServices$2.invoke(Components.kt:32)
+06-02 16:38:24.779 D/StrictMode( 9156): at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+06-02 16:38:24.779 D/StrictMode( 9156): at org.mozilla.fenix.components.Components.getBackgroundServices(Unknown Source:7)
+06-02 16:38:24.779 D/StrictMode( 9156): at org.mozilla.fenix.home.HomeMenu$coreMenuItems$2.invoke(HomeMenu.kt:131)
+06-02 16:38:24.779 D/StrictMode( 9156): at org.mozilla.fenix.home.HomeMenu$coreMenuItems$2.invoke(HomeMenu.kt:31)
+06-02 16:38:24.779 D/StrictMode( 9156): at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+06-02 16:38:24.779 D/StrictMode( 9156): at org.mozilla.fenix.home.HomeMenu.getCoreMenuItems(Unknown Source:7)
+06-02 16:38:24.779 D/StrictMode( 9156): at org.mozilla.fenix.home.HomeMenu.<init>(HomeMenu.kt:170)
+06-02 16:38:24.779 D/StrictMode( 9156): at org.mozilla.fenix.home.HomeFragment.createHomeMenu(HomeFragment.kt:668)
+06-02 16:38:24.779 D/StrictMode( 9156): at org.mozilla.fenix.home.HomeFragment.onViewCreated(HomeFragment.kt:337)
+06-02 16:38:24.779 D/StrictMode( 9156): at androidx.fragment.app.FragmentStateManager.createView(FragmentStateManager.java:332)
+06-02 16:38:24.779 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1187)
+06-02 16:38:24.779 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.addAddedFragments(FragmentManager.java:2224)
+06-02 16:38:24.779 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.executeOpsTogether(FragmentManager.java:1997)
+06-02 16:38:24.779 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.removeRedundantOperationsAndExecute(FragmentManager.java:1953)
+06-02 16:38:24.779 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.execPendingActions(FragmentManager.java:1849)
+06-02 16:38:24.779 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.dispatchStateChange(FragmentManager.java:2629)
+06-02 16:38:24.779 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.dispatchActivityCreated(FragmentManager.java:2577)
+06-02 16:38:24.779 D/StrictMode( 9156): at androidx.fragment.app.Fragment.performActivityCreated(Fragment.java:2722)
+06-02 16:38:24.779 D/StrictMode( 9156): at androidx.fragment.app.FragmentStateManager.activityCreated(FragmentStateManager.java:346)
+06-02 16:38:24.779 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1188)
+06-02 16:38:24.779 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1356)
+06-02 16:38:24.779 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.moveFragmentToExpectedState(FragmentManager.java:1434)
+06-02 16:38:24.779 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1497)
+06-02 16:38:24.779 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.dispatchStateChange(FragmentManager.java:2625)
+06-02 16:38:24.779 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.dispatchActivityCreated(FragmentManager.java:2577)
+06-02 16:38:24.779 D/StrictMode( 9156): at androidx.fragment.app.FragmentController.dispatchActivityCreated(FragmentController.java:247)
+06-02 16:38:24.779 D/StrictMode( 9156): at androidx.fragment.app.FragmentActivity.onStart(FragmentActivity.java:541)
+06-02 16:38:24.779 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatActivity.onStart(AppCompatActivity.java:210)
+06-02 16:38:24.779 D/StrictMode( 9156): at android.app.Instrumentation.callActivityOnStart(Instrumentation.java:1391)
+06-02 16:38:24.779 D/StrictMode( 9156): at android.app.Activity.performStart(Activity.java:7157)
+06-02 16:38:24.779 D/StrictMode( 9156): at android.app.ActivityThread.handleStartActivity(ActivityThread.java:2937)
+06-02 16:38:24.779 D/StrictMode( 9156): at android.app.servertransaction.TransactionExecutor.performLifecycleSequence(TransactionExecutor.java:180)
+06-02 16:38:24.779 D/StrictMode( 9156): at android.app.servertransaction.TransactionExecutor.cycleToPath(TransactionExecutor.java:165)
+06-02 16:38:24.779 D/StrictMode( 9156): at android.app.servertransaction.TransactionExecutor.executeLifecycleState(TransactionExecutor.java:142)
+06-02 16:38:24.779 D/StrictMode( 9156): at android.app.servertransaction.TransactionExecutor.execute(Transac
+06-02 16:38:24.782 D/StrictMode( 9156): StrictMode policy violation; ~duration=47 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:24.782 D/StrictMode( 9156): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:24.782 D/StrictMode( 9156): at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+06-02 16:38:24.782 D/StrictMode( 9156): at java.io.File.exists(File.java:815)
+06-02 16:38:24.782 D/StrictMode( 9156): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:605)
+06-02 16:38:24.782 D/StrictMode( 9156): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:596)
+06-02 16:38:24.782 D/StrictMode( 9156): at android.app.ContextImpl.getPreferencesDir(ContextImpl.java:552)
+06-02 16:38:24.782 D/StrictMode( 9156): at android.app.ContextImpl.getSharedPreferencesPath(ContextImpl.java:747)
+06-02 16:38:24.782 D/StrictMode( 9156): at android.app.ContextImpl.getSharedPreferences(ContextImpl.java:400)
+06-02 16:38:24.782 D/StrictMode( 9156): at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+06-02 16:38:24.782 D/StrictMode( 9156): at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+06-02 16:38:24.782 D/StrictMode( 9156): at android.preference.PreferenceManager.getDefaultSharedPreferences(PreferenceManager.java:526)
+06-02 16:38:24.782 D/StrictMode( 9156): at org.mozilla.fenix.whatsnew.SharedPreferenceWhatsNewStorage.<init>(WhatsNewStorage.kt:35)
+06-02 16:38:24.782 D/StrictMode( 9156): at org.mozilla.fenix.whatsnew.WhatsNew$Companion.shouldHighlightWhatsNew(WhatsNew.kt:71)
+06-02 16:38:24.782 D/StrictMode( 9156): at org.mozilla.fenix.home.HomeMenu$coreMenuItems$2$whatsNewItem$1.invoke(HomeMenu.kt:92)
+06-02 16:38:24.782 D/StrictMode( 9156): at org.mozilla.fenix.home.HomeMenu$coreMenuItems$2$whatsNewItem$1.invoke(HomeMenu.kt:31)
+06-02 16:38:24.782 D/StrictMode( 9156): at mozilla.components.browser.menu.ext.BrowserMenuItemKt$getHighlight$3.invoke(BrowserMenuItem.kt:18)
+06-02 16:38:24.782 D/StrictMode( 9156): at mozilla.components.browser.menu.ext.BrowserMenuItemKt$getHighlight$3.invoke(Unknown Source:2)
+06-02 16:38:24.782 D/StrictMode( 9156): at kotlin.sequences.FilteringSequence$iterator$1.calcNext(Sequences.kt:133)
+06-02 16:38:24.782 D/StrictMode( 9156): at kotlin.sequences.FilteringSequence$iterator$1.hasNext(Sequences.kt:156)
+06-02 16:38:24.782 D/StrictMode( 9156): at kotlin.sequences.TransformingSequence$iterator$1.hasNext(Sequences.kt:176)
+06-02 16:38:24.782 D/StrictMode( 9156): at kotlin.sequences.FilteringSequence$iterator$1.calcNext(Sequences.kt:131)
+06-02 16:38:24.782 D/StrictMode( 9156): at kotlin.sequences.FilteringSequence$iterator$1.hasNext(Sequences.kt:156)
+06-02 16:38:24.782 D/StrictMode( 9156): at mozilla.components.browser.menu.ext.BrowserMenuItemKt.getHighlight(BrowserMenuItem.kt:31)
+06-02 16:38:24.782 D/StrictMode( 9156): at org.mozilla.fenix.home.HomeMenu$coreMenuItems$2.invoke(HomeMenu.kt:149)
+06-02 16:38:24.782 D/StrictMode( 9156): at org.mozilla.fenix.home.HomeMenu$coreMenuItems$2.invoke(HomeMenu.kt:31)
+06-02 16:38:24.782 D/StrictMode( 9156): at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+06-02 16:38:24.782 D/StrictMode( 9156): at org.mozilla.fenix.home.HomeMenu.getCoreMenuItems(Unknown Source:7)
+06-02 16:38:24.782 D/StrictMode( 9156): at org.mozilla.fenix.home.HomeMenu.<init>(HomeMenu.kt:170)
+06-02 16:38:24.782 D/StrictMode( 9156): at org.mozilla.fenix.home.HomeFragment.createHomeMenu(HomeFragment.kt:668)
+06-02 16:38:24.782 D/StrictMode( 9156): at org.mozilla.fenix.home.HomeFragment.onViewCreated(HomeFragment.kt:337)
+06-02 16:38:24.782 D/StrictMode( 9156): at androidx.fragment.app.FragmentStateManager.createView(FragmentStateManager.java:332)
+06-02 16:38:24.782 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1187)
+06-02 16:38:24.782 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.addAddedFragments(FragmentManager.java:2224)
+06-02 16:38:24.782 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.executeOpsTogether(FragmentManager.java:1997)
+06-02 16:38:24.782 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.removeRedundantOperationsAndExecute(FragmentManager.java:1953)
+06-02 16:38:24.782 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.execPendingActions(FragmentManager.java:1849)
+06-02 16:38:24.782 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.dispatchStateChange(FragmentManager.java:2629)
+06-02 16:38:24.782 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.dispatchActivityCreated(FragmentManager.java:2577)
+06-02 16:38:24.782 D/StrictMode( 9156): at androidx.fragment.app.Fragment.performActivityCreated(Fragment.java:2722)
+06-02 16:38:24.782 D/StrictMode( 9156): at androidx.fragment.app.FragmentStateManager.activityCreated(FragmentStateManager.java:346)
+06-02 16:38:24.782 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1188)
+06-02 16:38:24.782 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1356)
+06-02 16:38:24.782 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.moveFragmentToExpectedState(FragmentManager.java:1434)
+06-02 16:38:24.782 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1497)
+06-02 16:38:24.782 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.dispatchStateChange(FragmentManager.java:2625)
+06-02 16:38:24.782 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.dispatchActivityCreated(FragmentManager.java:2577)
+06-02 16:38:24.782 D/StrictMode( 9156): at androidx.fragment.app.FragmentController.dispatchActivityCreated(FragmentController.java:247)
+06-02 16:38:24.782 D/StrictMode( 9156): at androidx.fragment.app.FragmentActivity.onStart(FragmentActivity.java:541)
+06-02 16:38:24.782 D/StrictMode( 9156): at androidx.appcompat.app.AppCompatAc
+06-02 16:38:24.782 D/LeakCanary( 9156): LeakCanary is running and ready to detect leaks
+06-02 16:38:24.787 I/libglean_ffi( 9156): glean_core::ping: Collecting baseline
+06-02 16:38:24.796 D/libglean_ffi( 9156): glean_core::ping: Storing ping 'df517eba-b482-412e-a056-0d6679710e3c' at '/data/user/0/org.mozilla.fenix.debug/glean_data/pending_pings/df517eba-b482-412e-a056-0d6679710e3c'
+06-02 16:38:24.796 I/libglean_ffi( 9156): glean_core: The ping 'baseline' was submitted and will be sent as soon as possible
+06-02 16:38:24.819 D/GeckoNetworkManager( 9156): Incoming event enableNotifications for state OnNoListeners -> OnWithListeners
+06-02 16:38:24.820 D/GeckoNetworkManager( 9156): New network state: UP, WIFI, WIFI
+06-02 16:38:24.820 W/ActivityManager( 1869): Receiver with filter android.content.IntentFilter@d1608ed already registered for pid 9156, callerPackage is org.mozilla.fenix.debug
+06-02 16:38:24.827 D/GeckoNetworkManager( 9156): Incoming event receivedUpdate for state OnWithListeners -> OnWithListeners
+06-02 16:38:24.828 D/GeckoNetworkManager( 9156): New network state: UP, WIFI, WIFI
+06-02 16:38:24.898 D/GeckoThread( 9156): State changed to PROFILE_READY
+06-02 16:38:24.904 D/GeckoThread( 9201): State changed to LAUNCHED
+06-02 16:38:24.904 I/GeckoThread( 9201): preparing to run Gecko
+06-02 16:38:24.915 D/WIFI_UT ( 1869): got request NetworkRequest [ TRACK_DEFAULT id=34, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10099] ] with score 60
+06-02 16:38:24.915 D/WIFI ( 1869): got request NetworkRequest [ TRACK_DEFAULT id=34, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10099] ] with score 60
+06-02 16:38:24.915 D/PhoneSwitcherNetworkRequstListener( 2121): got request NetworkRequest [ TRACK_DEFAULT id=34, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10099] ] with score 60
+06-02 16:38:24.933 I/lla.fenix.debu( 9156): Background concurrent copying GC freed 16527(1436KB) AllocSpace objects, 30(1032KB) LOS objects, 49% free, 4MB/9MB, paused 264us total 150.694ms
+06-02 16:38:24.952 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:24.952 D/GeckoViewStartup( 9156): observe: profile-after-change
+06-02 16:38:24.957 D/GeckoViewTelemetryController( 9156): setup - canRecordPrereleaseData true, canRecordReleaseData true
+06-02 16:38:24.962 D/ ( 1728): HostConnection::get() New Host Connection established 0xe50f1400, tid 1952
+06-02 16:38:24.963 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:24.970 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:24.987 D/ ( 9156): HostConnection::get() New Host Connection established 0xe33a0f40, tid 9245
+06-02 16:38:24.991 I/ConfigStore( 9156): android::hardware::configstore::V1_0::ISurfaceFlingerConfigs::hasWideColorDisplay retrieved: 0
+06-02 16:38:24.991 I/ConfigStore( 9156): android::hardware::configstore::V1_0::ISurfaceFlingerConfigs::hasHDRDisplay retrieved: 0
+06-02 16:38:24.991 I/OpenGLRenderer( 9156): Initialized EGL, version 1.4
+06-02 16:38:24.991 D/OpenGLRenderer( 9156): Swap behavior 1
+06-02 16:38:24.992 W/OpenGLRenderer( 9156): Failed to choose config with EGL_SWAP_BEHAVIOR_PRESERVED, retrying without...
+06-02 16:38:24.992 D/OpenGLRenderer( 9156): Swap behavior 0
+06-02 16:38:24.993 D/EGL_emulation( 9156): eglCreateContext: 0xe33868c0: maj 3 min 0 rcv 3
+06-02 16:38:24.994 D/EGL_emulation( 9156): eglMakeCurrent: 0xe33868c0: ver 3 0 (tinfo 0xb35fe0f0)
+06-02 16:38:24.996 E/SurfaceFlinger( 1728): ro.sf.lcd_density must be defined as a build property
+06-02 16:38:24.999 D/GeckoThread( 9156): State changed to RUNNING
+06-02 16:38:25.001 W/lla.fenix.debu( 9156): Accessing hidden field Landroid/os/Trace;->TRACE_TAG_APP:J (light greylist, reflection)
+06-02 16:38:25.003 D/StrictMode( 9156): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/os/Trace;->TRACE_TAG_APP:J
+06-02 16:38:25.003 D/StrictMode( 9156): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:25.003 D/StrictMode( 9156): at java.lang.Class.getPublicFieldRecursive(Native Method)
+06-02 16:38:25.003 D/StrictMode( 9156): at java.lang.Class.getField(Class.java:1599)
+06-02 16:38:25.003 D/StrictMode( 9156): at androidx.core.os.TraceCompat.<clinit>(TraceCompat.java:48)
+06-02 16:38:25.003 D/StrictMode( 9156): at androidx.core.os.TraceCompat.beginSection(TraceCompat.java:100)
+06-02 16:38:25.003 D/StrictMode( 9156): at androidx.recyclerview.widget.RecyclerView.onLayout(RecyclerView.java:4403)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.003 D/StrictMode( 9156): at com.google.android.material.appbar.HeaderScrollingViewBehavior.layoutChild(HeaderScrollingViewBehavior.java:148)
+06-02 16:38:25.003 D/StrictMode( 9156): at com.google.android.material.appbar.ViewOffsetBehavior.onLayoutChild(ViewOffsetBehavior.java:43)
+06-02 16:38:25.003 D/StrictMode( 9156): at com.google.android.material.appbar.AppBarLayout$ScrollingViewBehavior.onLayoutChild(AppBarLayout.java:1892)
+06-02 16:38:25.003 D/StrictMode( 9156): at androidx.coordinatorlayout.widget.CoordinatorLayout.onLayout(CoordinatorLayout.java:918)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:25.003 D/StrictMode( 9156): at com.android.internal.policy.DecorView.onLayout(DecorView.java:753)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.view.ViewRootImpl.performLayout(ViewRootImpl.java:2792)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.view.ViewRootImpl.performTraversals(ViewRootImpl.java:2319)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.view.ViewRootImpl.doTraversal(ViewRootImpl.java:1460)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.view.ViewRootImpl$TraversalRunnable.run(ViewRootImpl.java:7183)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.view.Choreographer$CallbackRecord.run(Choreographer.java:949)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.view.Choreographer.doCallbacks(Choreographer.java:761)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.view.Choreographer.doFrame(Choreographer.java:696)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.view.Choreographer$FrameDisplayEventReceiver.run(Choreographer.java:935)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.os.Handler.handleCallback(Handler.java:873)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.os.Handler.dispatchMessage(Handler.java:99)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:25.003 D/StrictMode( 9156): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:25.003 D/StrictMode( 9156): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:25.003 D/StrictMode( 9156): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:25.003 D/StrictMode( 9156): at com.and
+06-02 16:38:25.003 W/lla.fenix.debu( 9156): Accessing hidden method Landroid/os/Trace;->isTagEnabled(J)Z (light greylist, reflection)
+06-02 16:38:25.006 D/StrictMode( 9156): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/os/Trace;->isTagEnabled(J)Z
+06-02 16:38:25.006 D/StrictMode( 9156): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:25.006 D/StrictMode( 9156): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:25.006 D/StrictMode( 9156): at java.lang.Class.getPublicMethodRecursive(Class.java:2075)
+06-02 16:38:25.006 D/StrictMode( 9156): at java.lang.Class.getMethod(Class.java:2063)
+06-02 16:38:25.006 D/StrictMode( 9156): at java.lang.Class.getMethod(Class.java:1690)
+06-02 16:38:25.006 D/StrictMode( 9156): at androidx.core.os.TraceCompat.<clinit>(TraceCompat.java:51)
+06-02 16:38:25.006 D/StrictMode( 9156): at androidx.core.os.TraceCompat.beginSection(TraceCompat.java:100)
+06-02 16:38:25.006 D/StrictMode( 9156): at androidx.recyclerview.widget.RecyclerView.onLayout(RecyclerView.java:4403)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.006 D/StrictMode( 9156): at com.google.android.material.appbar.HeaderScrollingViewBehavior.layoutChild(HeaderScrollingViewBehavior.java:148)
+06-02 16:38:25.006 D/StrictMode( 9156): at com.google.android.material.appbar.ViewOffsetBehavior.onLayoutChild(ViewOffsetBehavior.java:43)
+06-02 16:38:25.006 D/StrictMode( 9156): at com.google.android.material.appbar.AppBarLayout$ScrollingViewBehavior.onLayoutChild(AppBarLayout.java:1892)
+06-02 16:38:25.006 D/StrictMode( 9156): at androidx.coordinatorlayout.widget.CoordinatorLayout.onLayout(CoordinatorLayout.java:918)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:25.006 D/StrictMode( 9156): at com.android.internal.policy.DecorView.onLayout(DecorView.java:753)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.view.ViewRootImpl.performLayout(ViewRootImpl.java:2792)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.view.ViewRootImpl.performTraversals(ViewRootImpl.java:2319)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.view.ViewRootImpl.doTraversal(ViewRootImpl.java:1460)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.view.ViewRootImpl$TraversalRunnable.run(ViewRootImpl.java:7183)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.view.Choreographer$CallbackRecord.run(Choreographer.java:949)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.view.Choreographer.doCallbacks(Choreographer.java:761)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.view.Choreographer.doFrame(Choreographer.java:696)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.view.Choreographer$FrameDisplayEventReceiver.run(Choreographer.java:935)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.os.Handler.handleCallback(Handler.java:873)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.os.Handler.dispatchMessage(Handler.java:99)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:25.006 D/StrictMode( 9156): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:25.006 D/StrictMode( 9156): at java.lang.reflect.Method.invoke
+06-02 16:38:25.006 W/lla.fenix.debu( 9156): Accessing hidden method Landroid/os/Trace;->asyncTraceBegin(JLjava/lang/String;I)V (light greylist, reflection)
+06-02 16:38:25.008 D/StrictMode( 9156): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/os/Trace;->asyncTraceBegin(JLjava/lang/String;I)V
+06-02 16:38:25.008 D/StrictMode( 9156): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:25.008 D/StrictMode( 9156): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:25.008 D/StrictMode( 9156): at java.lang.Class.getPublicMethodRecursive(Class.java:2075)
+06-02 16:38:25.008 D/StrictMode( 9156): at java.lang.Class.getMethod(Class.java:2063)
+06-02 16:38:25.008 D/StrictMode( 9156): at java.lang.Class.getMethod(Class.java:1690)
+06-02 16:38:25.008 D/StrictMode( 9156): at androidx.core.os.TraceCompat.<clinit>(TraceCompat.java:52)
+06-02 16:38:25.008 D/StrictMode( 9156): at androidx.core.os.TraceCompat.beginSection(TraceCompat.java:100)
+06-02 16:38:25.008 D/StrictMode( 9156): at androidx.recyclerview.widget.RecyclerView.onLayout(RecyclerView.java:4403)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.008 D/StrictMode( 9156): at com.google.android.material.appbar.HeaderScrollingViewBehavior.layoutChild(HeaderScrollingViewBehavior.java:148)
+06-02 16:38:25.008 D/StrictMode( 9156): at com.google.android.material.appbar.ViewOffsetBehavior.onLayoutChild(ViewOffsetBehavior.java:43)
+06-02 16:38:25.008 D/StrictMode( 9156): at com.google.android.material.appbar.AppBarLayout$ScrollingViewBehavior.onLayoutChild(AppBarLayout.java:1892)
+06-02 16:38:25.008 D/StrictMode( 9156): at androidx.coordinatorlayout.widget.CoordinatorLayout.onLayout(CoordinatorLayout.java:918)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:25.008 D/StrictMode( 9156): at com.android.internal.policy.DecorView.onLayout(DecorView.java:753)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.view.ViewRootImpl.performLayout(ViewRootImpl.java:2792)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.view.ViewRootImpl.performTraversals(ViewRootImpl.java:2319)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.view.ViewRootImpl.doTraversal(ViewRootImpl.java:1460)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.view.ViewRootImpl$TraversalRunnable.run(ViewRootImpl.java:7183)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.view.Choreographer$CallbackRecord.run(Choreographer.java:949)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.view.Choreographer.doCallbacks(Choreographer.java:761)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.view.Choreographer.doFrame(Choreographer.java:696)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.view.Choreographer$FrameDisplayEventReceiver.run(Choreographer.java:935)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.os.Handler.handleCallback(Handler.java:873)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.os.Handler.dispatchMessage(Handler.java:99)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:25.008 D/StrictMode( 9156): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:25.008 D/StrictMode( 9156): at java.lang
+06-02 16:38:25.008 W/lla.fenix.debu( 9156): Accessing hidden method Landroid/os/Trace;->asyncTraceEnd(JLjava/lang/String;I)V (light greylist, reflection)
+06-02 16:38:25.013 I/Gecko ( 9156): -*- nsDNSServiceDiscovery.js : nsDNSServiceDiscovery
+06-02 16:38:25.013 D/StrictMode( 9156): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/os/Trace;->asyncTraceEnd(JLjava/lang/String;I)V
+06-02 16:38:25.013 D/StrictMode( 9156): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:25.013 D/StrictMode( 9156): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:25.013 D/StrictMode( 9156): at java.lang.Class.getPublicMethodRecursive(Class.java:2075)
+06-02 16:38:25.013 D/StrictMode( 9156): at java.lang.Class.getMethod(Class.java:2063)
+06-02 16:38:25.013 D/StrictMode( 9156): at java.lang.Class.getMethod(Class.java:1690)
+06-02 16:38:25.013 D/StrictMode( 9156): at androidx.core.os.TraceCompat.<clinit>(TraceCompat.java:54)
+06-02 16:38:25.013 D/StrictMode( 9156): at androidx.core.os.TraceCompat.beginSection(TraceCompat.java:100)
+06-02 16:38:25.013 D/StrictMode( 9156): at androidx.recyclerview.widget.RecyclerView.onLayout(RecyclerView.java:4403)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.013 D/StrictMode( 9156): at com.google.android.material.appbar.HeaderScrollingViewBehavior.layoutChild(HeaderScrollingViewBehavior.java:148)
+06-02 16:38:25.013 D/StrictMode( 9156): at com.google.android.material.appbar.ViewOffsetBehavior.onLayoutChild(ViewOffsetBehavior.java:43)
+06-02 16:38:25.013 D/StrictMode( 9156): at com.google.android.material.appbar.AppBarLayout$ScrollingViewBehavior.onLayoutChild(AppBarLayout.java:1892)
+06-02 16:38:25.013 D/StrictMode( 9156): at androidx.coordinatorlayout.widget.CoordinatorLayout.onLayout(CoordinatorLayout.java:918)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:25.013 D/StrictMode( 9156): at com.android.internal.policy.DecorView.onLayout(DecorView.java:753)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.view.ViewRootImpl.performLayout(ViewRootImpl.java:2792)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.view.ViewRootImpl.performTraversals(ViewRootImpl.java:2319)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.view.ViewRootImpl.doTraversal(ViewRootImpl.java:1460)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.view.ViewRootImpl$TraversalRunnable.run(ViewRootImpl.java:7183)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.view.Choreographer$CallbackRecord.run(Choreographer.java:949)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.view.Choreographer.doCallbacks(Choreographer.java:761)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.view.Choreographer.doFrame(Choreographer.java:696)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.view.Choreographer$FrameDisplayEventReceiver.run(Choreographer.java:935)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.os.Handler.handleCallback(Handler.java:873)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.os.Handler.dispatchMessage(Handler.java:99)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:25.013 D/StrictMode( 9156): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:25.013 D/StrictMode( 9156): at java.lang.r
+06-02 16:38:25.013 W/lla.fenix.debu( 9156): Accessing hidden method Landroid/os/Trace;->traceCounter(JLjava/lang/String;I)V (light greylist, reflection)
+06-02 16:38:25.016 D/StrictMode( 9156): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/os/Trace;->traceCounter(JLjava/lang/String;I)V
+06-02 16:38:25.016 D/StrictMode( 9156): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:25.016 D/StrictMode( 9156): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:25.016 D/StrictMode( 9156): at java.lang.Class.getPublicMethodRecursive(Class.java:2075)
+06-02 16:38:25.016 D/StrictMode( 9156): at java.lang.Class.getMethod(Class.java:2063)
+06-02 16:38:25.016 D/StrictMode( 9156): at java.lang.Class.getMethod(Class.java:1690)
+06-02 16:38:25.016 D/StrictMode( 9156): at androidx.core.os.TraceCompat.<clinit>(TraceCompat.java:56)
+06-02 16:38:25.016 D/StrictMode( 9156): at androidx.core.os.TraceCompat.beginSection(TraceCompat.java:100)
+06-02 16:38:25.016 D/StrictMode( 9156): at androidx.recyclerview.widget.RecyclerView.onLayout(RecyclerView.java:4403)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.016 D/StrictMode( 9156): at com.google.android.material.appbar.HeaderScrollingViewBehavior.layoutChild(HeaderScrollingViewBehavior.java:148)
+06-02 16:38:25.016 D/StrictMode( 9156): at com.google.android.material.appbar.ViewOffsetBehavior.onLayoutChild(ViewOffsetBehavior.java:43)
+06-02 16:38:25.016 D/StrictMode( 9156): at com.google.android.material.appbar.AppBarLayout$ScrollingViewBehavior.onLayoutChild(AppBarLayout.java:1892)
+06-02 16:38:25.016 D/StrictMode( 9156): at androidx.coordinatorlayout.widget.CoordinatorLayout.onLayout(CoordinatorLayout.java:918)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:25.016 D/StrictMode( 9156): at com.android.internal.policy.DecorView.onLayout(DecorView.java:753)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.view.View.layout(View.java:20672)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.view.ViewRootImpl.performLayout(ViewRootImpl.java:2792)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.view.ViewRootImpl.performTraversals(ViewRootImpl.java:2319)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.view.ViewRootImpl.doTraversal(ViewRootImpl.java:1460)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.view.ViewRootImpl$TraversalRunnable.run(ViewRootImpl.java:7183)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.view.Choreographer$CallbackRecord.run(Choreographer.java:949)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.view.Choreographer.doCallbacks(Choreographer.java:761)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.view.Choreographer.doFrame(Choreographer.java:696)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.view.Choreographer$FrameDisplayEventReceiver.run(Choreographer.java:935)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.os.Handler.handleCallback(Handler.java:873)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.os.Handler.dispatchMessage(Handler.java:99)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:25.016 D/StrictMode( 9156): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:25.016 D/StrictMode( 9156): at java.lang.re
+06-02 16:38:25.028 D/GeckoNetworkManager( 9156): Incoming event receivedUpdate for state OnWithListeners -> OnWithListeners
+06-02 16:38:25.029 D/GeckoNetworkManager( 9156): New network state: UP, WIFI, WIFI
+06-02 16:38:25.051 D/EGL_emulation( 9156): eglMakeCurrent: 0xe33868c0: ver 3 0 (tinfo 0xb35fe0f0)
+06-02 16:38:25.069 D/GeckoViewStartup( 9156): onEvent GeckoView:SetLocale
+06-02 16:38:25.070 D/GeckoViewStartup( 9156): onEvent GeckoView:ResetUserPrefs
+06-02 16:38:25.080 I/ActivityManager( 1869): Displayed org.mozilla.fenix.debug/.App: +1s708ms
+06-02 16:38:25.082 I/GoogleInputMethod( 1996): onFinishInput() : Dummy InputConnection bound
+06-02 16:38:25.082 I/GoogleInputMethod( 1996): onStartInput() : Dummy InputConnection bound
+06-02 16:38:25.082 D/glean/PingUploadWorker( 9156): Processing persisted pings at /data/user/0/org.mozilla.fenix.debug/glean_data/pending_pings
+06-02 16:38:25.083 D/glean/PingUploadWorker( 9156): Processing ping: df517eba-b482-412e-a056-0d6679710e3c
+06-02 16:38:25.088 E/adbd ( 4408): failed to connect to socket 'tcp:2829': Connection refused
+06-02 16:38:25.091 D/GeckoViewRemoteDebugger( 9156): onInit
+06-02 16:38:25.092 D/GeckoViewConsole( 9156): enabled = false
+06-02 16:38:25.092 D/glean/ConceptFetchHttpUploader( 9156): Submitting ping to: https://incoming.telemetry.mozilla.org/submit/org-mozilla-fenix-debug/baseline/1/df517eba-b482-412e-a056-0d6679710e3c
+06-02 16:38:25.134 D/GeckoViewStartup( 9156): onEvent GeckoView:SetLocale
+06-02 16:38:25.135 D/GeckoViewStartup( 9156): onEvent GeckoView:SetDefaultPrefs
+06-02 16:38:25.138 W/SurfaceFlinger( 1728): couldn't log to binary event log: overflow.
+06-02 16:38:25.149 D/GeckoViewStartup( 9156): onEvent GeckoView:SetDefaultPrefs
+06-02 16:38:25.153 D/StrictMode( 9156): StrictMode policy violation; ~duration=24 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:25.153 D/StrictMode( 9156): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:25.153 D/StrictMode( 9156): at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+06-02 16:38:25.153 D/StrictMode( 9156): at java.io.File.exists(File.java:815)
+06-02 16:38:25.153 D/StrictMode( 9156): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:605)
+06-02 16:38:25.153 D/StrictMode( 9156): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:596)
+06-02 16:38:25.153 D/StrictMode( 9156): at android.app.ContextImpl.getFilesDir(ContextImpl.java:641)
+06-02 16:38:25.153 D/StrictMode( 9156): at android.content.ContextWrapper.getFilesDir(ContextWrapper.java:239)
+06-02 16:38:25.153 D/StrictMode( 9156): at mozilla.components.feature.tab.collections.TabCollectionStorage.<init>(TabCollectionStorage.kt:29)
+06-02 16:38:25.153 D/StrictMode( 9156): at org.mozilla.fenix.components.TabCollectionStorage$collectionStorage$2.invoke(TabCollectionStorage.kt:52)
+06-02 16:38:25.153 D/StrictMode( 9156): at org.mozilla.fenix.components.TabCollectionStorage$collectionStorage$2.invoke(TabCollectionStorage.kt:23)
+06-02 16:38:25.153 D/StrictMode( 9156): at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+06-02 16:38:25.153 D/StrictMode( 9156): at org.mozilla.fenix.components.TabCollectionStorage.getCollectionStorage(Unknown Source:7)
+06-02 16:38:25.153 D/StrictMode( 9156): at org.mozilla.fenix.components.TabCollectionStorage.getCollections(TabCollectionStorage.kt:70)
+06-02 16:38:25.153 D/StrictMode( 9156): at org.mozilla.fenix.components.TabCollectionStorage.getCollections$default(TabCollectionStorage.kt:69)
+06-02 16:38:25.153 D/StrictMode( 9156): at org.mozilla.fenix.home.HomeFragment.subscribeToTabCollections(HomeFragment.kt:750)
+06-02 16:38:25.153 D/StrictMode( 9156): at org.mozilla.fenix.home.HomeFragment.onStart(HomeFragment.kt:404)
+06-02 16:38:25.153 D/StrictMode( 9156): at androidx.fragment.app.Fragment.performStart(Fragment.java:2730)
+06-02 16:38:25.153 D/StrictMode( 9156): at androidx.fragment.app.FragmentStateManager.start(FragmentStateManager.java:365)
+06-02 16:38:25.153 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1194)
+06-02 16:38:25.153 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1356)
+06-02 16:38:25.153 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.moveFragmentToExpectedState(FragmentManager.java:1434)
+06-02 16:38:25.153 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1497)
+06-02 16:38:25.153 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.completeExecute(FragmentManager.java:2125)
+06-02 16:38:25.153 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager$StartEnterTransitionListener.completeTransaction(FragmentManager.java:3022)
+06-02 16:38:25.153 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.executePostponedTransaction(FragmentManager.java:1895)
+06-02 16:38:25.153 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.ensureExecReady(FragmentManager.java:1803)
+06-02 16:38:25.153 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager.execPendingActions(FragmentManager.java:1843)
+06-02 16:38:25.153 D/StrictMode( 9156): at androidx.fragment.app.FragmentManager$4.run(FragmentManager.java:413)
+06-02 16:38:25.153 D/StrictMode( 9156): at android.os.Handler.handleCallback(Handler.java:873)
+06-02 16:38:25.153 D/StrictMode( 9156): at android.os.Handler.dispatchMessage(Handler.java:99)
+06-02 16:38:25.153 D/StrictMode( 9156): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:25.153 D/StrictMode( 9156): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:25.153 D/StrictMode( 9156): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:25.153 D/StrictMode( 9156): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:25.153 D/StrictMode( 9156): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:25.159 D/GeckoViewStartup( 9156): onEvent GeckoView:SetDefaultPrefs
+06-02 16:38:25.184 I/chatty ( 9156): uid=10099(org.mozilla.fenix.debug) identical 1 line
+06-02 16:38:25.187 D/GeckoViewStartup( 9156): onEvent GeckoView:SetDefaultPrefs
+06-02 16:38:25.190 E/adbd ( 4408): failed to connect to socket 'tcp:2829': Connection refused
+06-02 16:38:25.209 D/GeckoViewStartup( 9156): onEvent GeckoView:SetDefaultPrefs
+06-02 16:38:25.230 W/ctxmgr ( 2473): [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):IndoorOutdoorProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2473). Was: 3 for 57, account#-517948760#
+06-02 16:38:25.244 D/gralloc_ranchu( 1869): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:25.245 D/ ( 1869): HostConnection::get() New Host Connection established 0xcb8a3340, tid 1930
+06-02 16:38:25.246 D/gralloc_ranchu( 1869): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:25.247 W/SurfaceFlinger( 1728): Attempting to set client state on removed layer: Splash Screen org.mozilla.fenix.debug#0
+06-02 16:38:25.247 W/SurfaceFlinger( 1728): Attempting to destroy on removed layer: Splash Screen org.mozilla.fenix.debug#0
+06-02 16:38:25.250 I/PBSessionCacheImpl( 2402): Deleted sessionId[359508686711] from persistence.
+06-02 16:38:25.253 W/SearchService( 2402): Abort, client detached.
+06-02 16:38:25.259 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:25.260 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c3080, tid 1897
+06-02 16:38:25.260 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:25.265 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c3080, tid 1897
+06-02 16:38:25.293 E/adbd ( 4408): failed to connect to socket 'tcp:2829': Connection refused
+06-02 16:38:25.302 D/GeckoViewStartup( 9156): onEvent GeckoView:SetDefaultPrefs
+06-02 16:38:25.318 I/chatty ( 9156): uid=10099(org.mozilla.fenix.debug) identical 3 lines
+06-02 16:38:25.319 D/GeckoViewStartup( 9156): onEvent GeckoView:SetDefaultPrefs
+06-02 16:38:25.324 D/GeckoViewConsole( 9156): onEvent GeckoView:RegisterWebExtension {"allowContentMessaging":true,"id":"webcompat@mozilla.com","locationUri":"resource://android/assets/extensions/webcompat/"}
+06-02 16:38:25.330 D/GeckoViewConsole( 9156): onEvent GeckoView:WebExtension:List null
+06-02 16:38:25.331 D/GeckoViewConsole( 9156): onEvent GeckoView:RegisterWebExtension {"allowContentMessaging":true,"id":"mozacBrowserIcons","locationUri":"resource://android/assets/extensions/browser-icons/"}
+06-02 16:38:25.332 D/GeckoViewConsole( 9156): onEvent GeckoView:RegisterWebExtension {"allowContentMessaging":true,"id":"mozacBrowserAds","locationUri":"resource://android/assets/extensions/ads/"}
+06-02 16:38:25.333 D/GeckoViewConsole( 9156): onEvent GeckoView:RegisterWebExtension {"allowContentMessaging":true,"id":"BrowserCookiesExtension","locationUri":"resource://android/assets/extensions/cookies/"}
+06-02 16:38:25.364 E/BatteryExternalStatsWorker( 1869): no controller energy info supplied for wifi
+06-02 16:38:25.396 E/adbd ( 4408): failed to connect to socket 'tcp:2829': Connection refused
+06-02 16:38:25.415 I/ActivityManager( 1869): Fully drawn org.mozilla.fenix.debug/.App: +2s48ms
+06-02 16:38:25.456 I/DefaultSupportedAddonsChecker( 9156): Register check for new supported add-ons
+06-02 16:38:25.478 I/SupportedAddonsWorker( 9156): Trying to check for new supported add-ons
+06-02 16:38:25.501 E/adbd ( 4408): failed to connect to socket 'tcp:2829': Connection refused
+06-02 16:38:25.554 D/App ( 9156): Installed browser-icons extension
+06-02 16:38:25.596 D/ ( 9156): HostConnection::get() New Host Connection established 0xcb89ecc0, tid 9185
+06-02 16:38:25.597 E/EGL_emulation( 9156): tid 9185: eglBindAPI(1259): error 0x300c (EGL_BAD_PARAMETER)
+06-02 16:38:25.599 D/EGL_emulation( 9156): eglCreateContext: 0xae25e060: maj 3 min 0 rcv 3
+06-02 16:38:25.602 D/EGL_emulation( 9156): eglMakeCurrent: 0xae25e060: ver 3 0 (tinfo 0xaf7eb740)
+06-02 16:38:25.604 E/adbd ( 4408): failed to connect to socket 'tcp:2829': Connection refused
+06-02 16:38:25.656 E/GeckoConsole( 9156): [JavaScript Error: "NetworkError when attempting to fetch resource."]
+06-02 16:38:25.656 E/GeckoConsole( 9156): get@resource://services-settings/RemoteSettingsClient.jsm:350:12
+06-02 16:38:25.710 E/adbd ( 4408): failed to connect to socket 'tcp:2829': Connection refused
+06-02 16:38:25.765 D/glean/ConceptFetchHttpUploader( 9156): Ping successfully sent (200)
+06-02 16:38:25.766 D/glean/PingUploadWorker( 9156): df517eba-b482-412e-a056-0d6679710e3c was deleted: true
+06-02 16:38:25.767 I/WM-WorkerWrapper( 9156): Worker result SUCCESS for Work [ id=0ef26001-ff6c-4e5f-aa78-4728cca0a169, tags={ mozilla.telemetry.glean.scheduler.PingUploadWorker, mozac_service_glean_ping_upload_worker } ]
+06-02 16:38:25.786 W/GeckoConsole( 9156): [JavaScript Warning: "Security wrapper denied access to property "ONE_QUARTER" on privileged Javascript object. Support for exposing privileged objects to untrusted content via __exposedProps__ has been removed - use WebIDL bindings or Components.utils.cloneInto instead. Note that only the first denied property access from a given global object will be reported." {file: "moz-extension://0b97d1ec-4efa-4cee-bc30-34776dc45bb8/data/picture_in_picture_overrides.js" line: 26}]
+06-02 16:38:25.805 D/BrowserIcons( 9156): Loaded icon (source = DOWNLOAD): https://www.wikipedia.org/
+06-02 16:38:25.811 D/BrowserIcons( 9156): Loaded icon (source = DOWNLOAD): https://www.youtube.com/
+06-02 16:38:25.814 E/adbd ( 4408): failed to connect to socket 'tcp:2829': Connection refused
+06-02 16:38:25.856 D/mozac-webcompat( 9156): Installed WebCompat webextension: webcompat@mozilla.com
+06-02 16:38:25.918 E/adbd ( 4408): failed to connect to socket 'tcp:2829': Connection refused
+06-02 16:38:25.935 I/Gecko ( 9156): 1591130305935 Marionette INFO Listening on port 2829
+06-02 16:38:25.949 E/GeckoConsole( 9156): [JavaScript Error: "can't access property "startupData", state is undefined" {file: "resource://gre/modules/addons/XPIProvider.jsm" line: 3079}]
+06-02 16:38:25.949 E/GeckoConsole( 9156): setStartupData@resource://gre/modules/addons/XPIProvider.jsm:3079:5
+06-02 16:38:25.949 E/GeckoConsole( 9156): saveStartupData@resource://gre/modules/Extension.jsm:2035:17
+06-02 16:38:25.949 E/GeckoConsole( 9156): _writePersistentListeners@resource://gre/modules/ExtensionCommon.jsm:2271:15
+06-02 16:38:25.949 E/GeckoConsole( 9156): savePersistentListener@resource://gre/modules/ExtensionCommon.jsm:2362:18
+06-02 16:38:25.949 E/GeckoConsole( 9156): addListener@resource://gre/modules/ExtensionCommon.jsm:2495:20
+06-02 16:38:25.949 E/GeckoConsole( 9156): addListener@resource://gre/modules/ExtensionCommon.jsm:2550:38
+06-02 16:38:25.949 E/GeckoConsole( 9156): recvAddListener@resource://gre/modules/ExtensionParent.jsm:1079:13
+06-02 16:38:25.977 I/chatty ( 9156): uid=10099(org.mozilla.fenix.debug) identical 24 lines
+06-02 16:38:26.006 E/GeckoConsole( 9156): [JavaScript Error: "can't access property "startupData", state is undefined" {file: "resource://gre/modules/addons/XPIProvider.jsm" line: 3079}]
+06-02 16:38:26.006 E/GeckoConsole( 9156): setStartupData@resource://gre/modules/addons/XPIProvider.jsm:3079:5
+06-02 16:38:26.006 E/GeckoConsole( 9156): saveStartupData@resource://gre/modules/Extension.jsm:2035:17
+06-02 16:38:26.006 E/GeckoConsole( 9156): _writePersistentListeners@resource://gre/modules/ExtensionCommon.jsm:2271:15
+06-02 16:38:26.006 E/GeckoConsole( 9156): savePersistentListener@resource://gre/modules/ExtensionCommon.jsm:2362:18
+06-02 16:38:26.006 E/GeckoConsole( 9156): addListener@resource://gre/modules/ExtensionCommon.jsm:2495:20
+06-02 16:38:26.006 E/GeckoConsole( 9156): addListener@resource://gre/modules/ExtensionCommon.jsm:2550:38
+06-02 16:38:26.006 E/GeckoConsole( 9156): recvAddListener@resource://gre/modules/ExtensionParent.jsm:1079:13
+06-02 16:38:26.812 I/WM-WorkerWrapper( 9156): Worker result SUCCESS for Work [ id=460bd936-fd2a-47c2-be1a-4fee8bde7995, tags={ mozilla.components.feature.addons.migration.DefaultSupportedAddonsChecker.periodicWork, mozilla.components.feature.addons.migration.SupportedAddonsWorker } ]
+06-02 16:38:28.256 I/EventLogSendingHelper( 2402): Sending log events.
+06-02 16:38:30.021 I/FenixApplication( 9156): Kicking-off account manager...
+06-02 16:38:30.022 I/FenixApplication( 9156): Running post-visual completeness tasks...
+06-02 16:38:30.022 I/FenixApplication( 9156): Storage initialization...
+06-02 16:38:30.024 I/PlacesHistoryStorage( 9156): Warming up places storage...
+06-02 16:38:30.027 D/RustNativeSupport( 9156): findMegazordLibraryName(places, 0.59.0
+06-02 16:38:30.027 D/RustNativeSupport( 9156): lib in use: none
+06-02 16:38:30.027 D/RustNativeSupport( 9156): lib configured: megazord
+06-02 16:38:30.027 D/RustNativeSupport( 9156): lib version configured: 0.59.0
+06-02 16:38:30.027 D/RustNativeSupport( 9156): settled on megazord
+06-02 16:38:30.028 I/FirefoxAccountStateMachine( 9156): Enabling/updating sync with a new SyncConfig: SyncConfig(supportedEngines=[mozilla.components.service.fxa.SyncEngine$History@623799a, mozilla.components.service.fxa.SyncEngine$Bookmarks@343e2cb, mozilla.components.service.fxa.SyncEngine$Passwords@7039aa8], syncPeriodInMinutes=240)
+06-02 16:38:30.029 D/places_ffi( 9156): places_api_new
+06-02 16:38:30.029 I/BgSyncManager( 9156): Periodic syncing enabled at a 240 interval
+06-02 16:38:30.030 I/FirefoxAccountStateMachine( 9156): Sync is enabled
+06-02 16:38:30.032 I/FenixApplication( 9156): 'Kicking-off account manager' took 10 ms
+06-02 16:38:30.033 I/FirefoxAccountStateMachine( 9156): Processing event Init for state Start. Next state is Start
+06-02 16:38:30.060 I/keystore( 1734): del USRPKEY_org.mozilla.fenix.debug 10099
+06-02 16:38:30.061 I/keystore( 1734): del USRCERT_org.mozilla.fenix.debug 10099
+06-02 16:38:30.061 I/keystore( 1734): del CACERT_org.mozilla.fenix.debug 10099
+06-02 16:38:30.085 D/places::db::schema( 9156): Creating schema
+06-02 16:38:30.085 I/FirefoxAccountStateMachine( 9156): Ran 'Init' side-effects for state Start, got successive event AccountNotFound
+06-02 16:38:30.085 I/FirefoxAccountStateMachine( 9156): Processing event AccountNotFound for state Start. Next state is NotAuthenticated
+06-02 16:38:30.089 D/RustNativeSupport( 9156): findMegazordLibraryName(fxaclient, 0.59.0
+06-02 16:38:30.089 D/RustNativeSupport( 9156): lib in use: none
+06-02 16:38:30.089 D/RustNativeSupport( 9156): lib configured: megazord
+06-02 16:38:30.089 D/RustNativeSupport( 9156): lib version configured: 0.59.0
+06-02 16:38:30.089 D/RustNativeSupport( 9156): settled on megazord
+06-02 16:38:30.090 D/fxaclient_ffi( 9156): fxa_new
+06-02 16:38:30.092 W/FirefoxAccountStateMachine( 9156): Got invalid event Init for state NotAuthenticated.
+06-02 16:38:30.112 D/sql_support::conn_ext( 9156): Transaction commited after 27.875555ms
+06-02 16:38:30.112 D/places_ffi( 9156): places_connection_new
+06-02 16:38:30.116 D/places_ffi( 9156): places_connection_new
+06-02 16:38:30.117 I/PlacesHistoryStorage( 9156): 'Warming up places storage' took 92 ms
+06-02 16:38:30.118 I/PlacesBookmarksStorage( 9156): Warming up places storage...
+06-02 16:38:30.118 D/places_ffi( 9156): places_connection_new
+06-02 16:38:30.152 I/PlacesBookmarksStorage( 9156): 'Warming up places storage' took 33 ms
+06-02 16:38:30.175 I/keystore( 1734): 1 0
+06-02 16:38:30.182 I/SyncableLoginsStorage( 9156): Warming up storage...
+06-02 16:38:30.190 D/RustNativeSupport( 9156): findMegazordLibraryName(logins, 0.59.0
+06-02 16:38:30.190 D/RustNativeSupport( 9156): lib in use: none
+06-02 16:38:30.190 D/RustNativeSupport( 9156): lib configured: megazord
+06-02 16:38:30.190 D/RustNativeSupport( 9156): lib version configured: 0.59.0
+06-02 16:38:30.190 D/RustNativeSupport( 9156): settled on megazord
+06-02 16:38:30.192 D/logins_ffi( 9156): sync15_passwords_state_new
+06-02 16:38:30.195 D/logins::schema( 9156): Creating schema
+06-02 16:38:30.242 W/ctxmgr ( 2473): [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):IndoorOutdoorProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2473). Was: 3 for 57, account#-517948760#
+06-02 16:38:30.288 I/SyncableLoginsStorage( 9156): 'Warming up storage' took 105 ms
+06-02 16:38:30.288 I/FenixApplication( 9156): 'Storage initialization' took 265 ms
+06-02 16:38:30.301 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:33.287 I/EventLogSendingHelper( 2402): Sending log events.
+06-02 16:38:35.144 E/memtrack( 1869): Couldn't load memtrack module
+06-02 16:38:35.144 W/android.os.Debug( 1869): failed to get memory consumption info: -1
+06-02 16:38:35.160 E/memtrack( 1869): Couldn't load memtrack module
+06-02 16:38:35.160 W/android.os.Debug( 1869): failed to get memory consumption info: -1
+06-02 16:38:35.258 W/ctxmgr ( 2473): [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):IndoorOutdoorProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2473). Was: 3 for 57, account#-517948760#
+06-02 16:38:40.266 W/ctxmgr ( 2473): [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):IndoorOutdoorProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2473). Was: 3 for 57, account#-517948760#
+06-02 16:38:42.115 E/netmgr ( 1826): Failed to open QEMU pipe 'qemud:network': Invalid argument
+06-02 16:38:42.116 E/netmgr ( 1826): WifiForwarder unable to open QEMU pipe: Invalid argument
+06-02 16:38:42.976 I/ActivityManager( 1869): Force stopping org.mozilla.fenix.debug appid=10099 user=0: clear data
+06-02 16:38:42.976 I/ActivityManager( 1869): Killing 9156:org.mozilla.fenix.debug/u0a99 (adj 0): stop org.mozilla.fenix.debug
+06-02 16:38:42.978 W/libprocessgroup( 1869): kill(-9156, 9) failed: No such process
+06-02 16:38:42.982 W/ActivityManager( 1869): Force removing ActivityRecord{39437d1 u0 org.mozilla.fenix.debug/.App t389}: app died, no saved state
+06-02 16:38:42.984 I/ServiceChildProcess( 9201): Service has been unbound. Stopping.
+06-02 16:38:42.996 I/ActivityManager( 1869): Killing 9201:org.mozilla.fenix.debug:tab0/u0a99 (adj 0): stop org.mozilla.fenix.debug
+06-02 16:38:43.001 D/ZenLog ( 1869): config: removeAutomaticZenRules,ZenModeConfig[user=0,allowAlarms=true,allowMedia=true,allowSystem=false,allowReminders=false,allowEvents=false,allowCalls=true,allowRepeatCallers=true,allowMessages=false,allowCallsFrom=stars,allowMessagesFrom=contacts,suppressedVisualEffects=511,areChannelsBypassingDnd=false,automaticRules={EVENTS_DEFAULT_RULE=ZenRule[enabled=false,snoozing=false,name=Event,zenMode=ZEN_MODE_IMPORTANT_INTERRUPTIONS,conditionId=condition://android/event?userId=-10000&calendar=&reply=1,condition=Condition[id=condition://android/event?userId=-10000&calendar=&reply=1,summary=...,line1=...,line2=...,icon=0,state=STATE_FALSE,flags=2],component=ComponentInfo{android/com.android.server.notification.EventConditionProvider},id=EVENTS_DEFAULT_RULE,creationTime=1587308662810,enabler=null], EVERY_NIGHT_DEFAULT_RULE=ZenRule[enabled=false,snoozing=false,name=Sleeping,zenMode=ZEN_MODE_IMPORTANT_INTERRUPTIONS,conditionId=condition://android/schedule?days=1.2.3.4.5.6.7&start=22.0&end=7.0&exitAtAlarm=true,condition=Condition[id=condition://android/schedule?days=1.2.3.4.5.6.7&start=22.0&end=7.0&exitAtAlarm=true,summary=...,line1=...,line2=...,icon=0,state=STATE_FALSE,flags=2],component=ComponentInfo{android/com.android.server.notification.ScheduleConditionProvider},id=EVERY_NIGHT_DEFAULT_RULE,creationTime=1587308662810,enabler=null]},manualRule=null],Diff[]
+06-02 16:38:43.001 I/ConditionProviders( 1869): Disallowing condition provider org.mozilla.fenix.debug
+06-02 16:38:43.001 D/ZenLog ( 1869): set_zen_mode: off,removeAutomaticZenRules
+06-02 16:38:43.002 E/memtrack( 1869): Couldn't load memtrack module
+06-02 16:38:43.002 W/android.os.Debug( 1869): failed to get memory consumption info: -1
+06-02 16:38:43.006 I/ActivityManager( 1869): Force stopping org.mozilla.fenix.debug appid=10099 user=-1: clearApplicationUserData
+06-02 16:38:43.009 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:43.010 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c3080, tid 1897
+06-02 16:38:43.010 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:43.010 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c3080, tid 1897
+06-02 16:38:43.010 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:43.010 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c3080, tid 1897
+06-02 16:38:43.010 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:43.011 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c3080, tid 1897
+06-02 16:38:43.011 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:43.011 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c3080, tid 1897
+06-02 16:38:43.011 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:43.016 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c3080, tid 1897
+06-02 16:38:43.023 W/libprocessgroup( 1869): kill(-9156, 9) failed: No such process
+06-02 16:38:43.025 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:43.041 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:43.045 I/keystore( 1734): clear_uid 10099
+06-02 16:38:43.049 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:43.057 D/SurfaceFlinger( 1728): duplicate layer name: changing com.google.android.apps.nexuslauncher/com.google.android.apps.nexuslauncher.NexusLauncherActivity to com.google.android.apps.nexuslauncher/com.google.android.apps.nexuslauncher.NexusLauncherActivity#1
+06-02 16:38:43.062 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:43.069 D/ ( 1728): HostConnection::get() New Host Connection established 0xe9a181c0, tid 1752
+06-02 16:38:43.069 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:43.070 W/libprocessgroup( 1869): kill(-9156, 9) failed: No such process
+06-02 16:38:43.071 E/system_server( 1869): No package ID 7f found for ID 0x7f0801a6.
+06-02 16:38:43.071 E/system_server( 1869): No package ID 7f found for ID 0x7f13011d.
+06-02 16:38:43.071 E/system_server( 1869): No package ID 7f found for ID 0x7f13011d.
+06-02 16:38:43.071 E/system_server( 1869): No package ID 7f found for ID 0x7f0801a4.
+06-02 16:38:43.071 E/system_server( 1869): No package ID 7f found for ID 0x7f13011c.
+06-02 16:38:43.071 E/system_server( 1869): No package ID 7f found for ID 0x7f13011c.
+06-02 16:38:43.073 I/GeofencerStateMachine( 2473): removeGeofences: removeRequest=RemoveGeofencingRequest[REMOVE_ALL packageName=org.mozilla.fenix.debug]
+06-02 16:38:43.079 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:43.081 I/Zygote ( 1729): Process 9201 exited due to signal (9)
+06-02 16:38:43.087 D/CarrierSvcBindHelper( 2121): No carrier app for: 0
+06-02 16:38:43.096 W/InputMethodManagerService( 1869): Got RemoteException sending setActive(false) notification to pid 9156 uid 10099
+06-02 16:38:43.101 D/EGL_emulation( 2488): eglMakeCurrent: 0xe3385ae0: ver 3 0 (tinfo 0xe33838f0)
+06-02 16:38:43.101 I/GoogleInputMethod( 1996): onFinishInput() : Dummy InputConnection bound
+06-02 16:38:43.102 I/GoogleInputMethod( 1996): onStartInput() : Dummy InputConnection bound
+06-02 16:38:43.103 D/EGL_emulation( 2402): eglMakeCurrent: 0xe1911c80: ver 3 0 (tinfo 0xc8cbe260)
+06-02 16:38:43.104 W/InputDispatcher( 1869): channel '99fbb04 org.mozilla.fenix.debug/org.mozilla.fenix.debug.App (server)' ~ Consumer closed input channel or an error occurred. events=0x9
+06-02 16:38:43.104 E/InputDispatcher( 1869): channel '99fbb04 org.mozilla.fenix.debug/org.mozilla.fenix.debug.App (server)' ~ Channel is unrecoverably broken and will be disposed!
+06-02 16:38:43.110 D/ ( 1728): HostConnection::get() New Host Connection established 0xe7e991c0, tid 2107
+06-02 16:38:43.110 I/WindowManager( 1869): WIN DEATH: Window{99fbb04 u0 org.mozilla.fenix.debug/org.mozilla.fenix.debug.App}
+06-02 16:38:43.110 W/InputDispatcher( 1869): Attempted to unregister already unregistered input channel '99fbb04 org.mozilla.fenix.debug/org.mozilla.fenix.debug.App (server)'
+06-02 16:38:43.112 W/libprocessgroup( 1869): kill(-9156, 9) failed: No such process
+06-02 16:38:43.112 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:43.112 D/ ( 1728): HostConnection::get() New Host Connection established 0xe7e991c0, tid 2107
+06-02 16:38:43.113 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:43.113 D/ ( 1728): HostConnection::get() New Host Connection established 0xe90c9100, tid 2107
+06-02 16:38:43.113 W/ActivityManager( 1869): setHasOverlayUi called on unknown pid: 9156
+06-02 16:38:43.113 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:43.113 D/ ( 1728): HostConnection::get() New Host Connection established 0xe90c9100, tid 2107
+06-02 16:38:43.113 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:43.118 W/SurfaceFlinger( 1728): Attempting to destroy on removed layer: AppWindowToken{97cb537 token=Token{bc3a236 ActivityRecord{39437d1 u0 org.mozilla.fenix.debug/.App t389}}}#0
+06-02 16:38:43.118 W/SurfaceFlinger( 1728): Attempting to destroy on removed layer: Task=389#0
+06-02 16:38:43.120 I/Zygote ( 1729): Process 9156 exited due to signal (9)
+06-02 16:38:43.124 D/vold ( 1558): Remounting 10099 as mode read
+06-02 16:38:43.132 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:43.144 I/LocationSettingsChecker( 2660): Removing dialog suppression flag for package org.mozilla.fenix.debug
+06-02 16:38:43.145 W/SessionLifecycleManager( 2402): Handover failed. Creating new session controller.
+06-02 16:38:43.148 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:43.149 W/libprocessgroup( 1869): kill(-9156, 9) failed: No such process
+06-02 16:38:43.149 I/libprocessgroup( 1869): Successfully killed process cgroup uid 10099 pid 9156 in 170ms
+06-02 16:38:43.152 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:43.157 I/ActivityManager( 1869): Force stopping org.mozilla.fenix.debug appid=10099 user=0: from pid 9326
+06-02 16:38:43.160 D/CarrierSvcBindHelper( 2121): No carrier app for: 0
+06-02 16:38:43.160 W/libprocessgroup( 1869): kill(-9201, 9) failed: No such process
+06-02 16:38:43.160 I/libprocessgroup( 1869): Successfully killed process cgroup uid 10099 pid 9201 in 0ms
+06-02 16:38:43.168 I/Icing ( 2660): doRemovePackageData org.mozilla.fenix.debug
+06-02 16:38:43.169 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:43.177 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:43.179 I/ProvidersCache( 4535): Provider returned no roots. Possibly naughty: com.google.android.apps.docs.storage
+06-02 16:38:43.187 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:43.195 D/vold ( 1558): Remounting 10099 as mode write
+06-02 16:38:43.191 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:43.203 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:43.204 W/LocationOracle( 2402): No location history returned by ContextManager
+06-02 16:38:43.207 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:43.228 V/SettingsProvider( 1869): Notifying for 0: content://settings/global/debug_app
+06-02 16:38:43.239 I/chatty ( 2002): uid=10024(com.android.systemui) RenderThread identical 3 lines
+06-02 16:38:43.243 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:43.257 W/ctxmgr ( 2473): [AclManager]No 3 for (accnt=account#-517948760#, com.google.android.gms(10008):UserVelocityProducer, vrsn=13280022, 0, 3pPkg = null , 3pMdlId = null , pid = 2473). Was: 3 for 1, account#-517948760#
+06-02 16:38:43.269 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:43.274 I/MicroDetectionWorker( 2402): #startMicroDetector [speakerMode: 0]
+06-02 16:38:43.275 I/AudioController( 2402): Using mInputStreamFactoryBuilder
+06-02 16:38:43.275 I/AudioController( 2402): Created new AudioSource
+06-02 16:38:43.275 I/MicroDetectionWorker( 2402): onReady
+06-02 16:38:43.276 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:43.290 I/chatty ( 2002): uid=10024(com.android.systemui) RenderThread identical 2 lines
+06-02 16:38:43.293 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:43.303 I/MicroRecognitionRunner( 2402): Starting detection.
+06-02 16:38:43.307 I/MicrophoneInputStream( 2402): mic_starting SR : 16000 CC : 16 SO : 6
+06-02 16:38:43.308 E/ ( 1627): Request requires android.permission.RECORD_AUDIO
+06-02 16:38:43.309 E/AudioPolicyIntefaceImpl( 1627): getInputForAttr permission denied: recording not allowed for uid 10039 pid 2402
+06-02 16:38:43.309 E/AudioFlinger( 1627): createRecord() checkRecordThread_l failed
+06-02 16:38:43.309 E/IAudioFlinger( 2402): createRecord returned error -22
+06-02 16:38:43.309 E/AudioRecord( 2402): AudioFlinger could not create record track, status: -22
+06-02 16:38:43.309 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:43.309 E/AudioRecord-JNI( 2402): Error creating AudioRecord instance: initialization check failed with status -22.
+06-02 16:38:43.311 E/android.media.AudioRecord( 2402): Error code -20 when initializing native AudioRecord object.
+06-02 16:38:43.311 I/MicrophoneInputStream( 2402): mic_started SR : 16000 CC : 16 SO : 6
+06-02 16:38:43.312 E/ActivityThread( 2402): Failed to find provider info for com.google.android.apps.gsa.testing.ui.audio.recorded
+06-02 16:38:43.312 W/SpeechLevelGenerator( 2402): Really low audio levels detected. The audio input may have issues.
+06-02 16:38:43.312 I/MicroDetectionWorker( 2402): onReady
+06-02 16:38:43.314 I/ActivityManager( 1869): Force stopping org.mozilla.fenix.debug appid=10099 user=-1: set debug app
+06-02 16:38:43.315 V/SettingsProvider( 1869): Notifying for 0: content://settings/global/debug_app
+06-02 16:38:43.316 I/MicrophoneInputStream( 2402): mic_close SR : 16000 CC : 16 SO : 6
+06-02 16:38:43.317 I/MicroRecognitionRunner( 2402): Detection finished
+06-02 16:38:43.317 W/ErrorReporter( 2402): reportError [type: 211, code: 524300]: Error reading from input stream
+06-02 16:38:43.319 W/ErrorProcessor( 2402): onFatalError, processing error from engine(4)
+06-02 16:38:43.319 W/ErrorProcessor( 2402): com.google.android.apps.gsa.shared.speech.b.g: Error reading from input stream
+06-02 16:38:43.319 W/ErrorProcessor( 2402): at com.google.android.apps.gsa.staticplugins.microdetection.d.k.a(SourceFile:91)
+06-02 16:38:43.319 W/ErrorProcessor( 2402): at com.google.android.apps.gsa.staticplugins.microdetection.d.l.run(Unknown Source:14)
+06-02 16:38:43.319 W/ErrorProcessor( 2402): at com.google.android.libraries.gsa.runner.a.a.b(SourceFile:32)
+06-02 16:38:43.319 W/ErrorProcessor( 2402): at com.google.android.libraries.gsa.runner.a.c.call(Unknown Source:4)
+06-02 16:38:43.319 W/ErrorProcessor( 2402): at java.util.concurrent.FutureTask.run(FutureTask.java:266)
+06-02 16:38:43.319 W/ErrorProcessor( 2402): at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:458)
+06-02 16:38:43.319 W/ErrorProcessor( 2402): at java.util.concurrent.FutureTask.run(FutureTask.java:266)
+06-02 16:38:43.319 W/ErrorProcessor( 2402): at com.google.android.apps.gsa.shared.util.concurrent.b.g.run(Unknown Source:4)
+06-02 16:38:43.319 W/ErrorProcessor( 2402): at com.google.android.apps.gsa.shared.util.concurrent.b.aw.run(SourceFile:4)
+06-02 16:38:43.319 W/ErrorProcessor( 2402): at com.google.android.apps.gsa.shared.util.concurrent.b.aw.run(SourceFile:4)
+06-02 16:38:43.319 W/ErrorProcessor( 2402): at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1167)
+06-02 16:38:43.319 W/ErrorProcessor( 2402): at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:641)
+06-02 16:38:43.319 W/ErrorProcessor( 2402): at java.lang.Thread.run(Thread.java:764)
+06-02 16:38:43.319 W/ErrorProcessor( 2402): at com.google.android.apps.gsa.shared.util.concurrent.b.i.run(SourceFile:6)
+06-02 16:38:43.319 W/ErrorProcessor( 2402): Caused by: com.google.android.apps.gsa.shared.exception.GsaIOException: Error code: 393238 | Buffer overflow, no available space.
+06-02 16:38:43.319 W/ErrorProcessor( 2402): at com.google.android.apps.gsa.speech.audio.Tee.j(SourceFile:103)
+06-02 16:38:43.319 W/ErrorProcessor( 2402): at com.google.android.apps.gsa.speech.audio.au.read(SourceFile:2)
+06-02 16:38:43.319 W/ErrorProcessor( 2402): at java.io.InputStream.read(InputStream.java:101)
+06-02 16:38:43.319 W/ErrorProcessor( 2402): at com.google.android.apps.gsa.speech.audio.ao.run(SourceFile:17)
+06-02 16:38:43.319 W/ErrorProcessor( 2402): at com.google.android.apps.gsa.speech.audio.an.run(SourceFile:2)
+06-02 16:38:43.319 W/ErrorProcessor( 2402): at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:458)
+06-02 16:38:43.319 W/ErrorProcessor( 2402): ... 10 more
+06-02 16:38:43.319 I/MicroRecognitionRunner( 2402): Stopping hotword detection.
+06-02 16:38:43.320 I/AudioController( 2402): internalShutdown
+06-02 16:38:43.328 I/MicroDetector( 2402): Keeping mic open: false
+06-02 16:38:43.328 I/MicroDetectionWorker( 2402): #onError(false)
+06-02 16:38:43.328 I/DeviceStateChecker( 2402): DeviceStateChecker cancelled
+06-02 16:38:43.350 I/ActivityManager( 1869): START u0 {flg=0x10000000 cmp=org.mozilla.fenix.debug/.App (has extras)} from uid 0
+06-02 16:38:43.358 I/Places ( 2473): ?: PlacesBleScanner start() with priority 2
+06-02 16:38:43.364 I/Places ( 2473): ?: PlacesBleScanner start() with priority 2
+06-02 16:38:43.370 I/PlaceInferenceEngine( 2473): [anon] Changed inference mode: 1
+06-02 16:38:43.384 I/lla.fenix.debu( 9365): Not late-enabling -Xcheck:jni (already on)
+06-02 16:38:43.385 I/ActivityManager( 1869): Start proc 9365:org.mozilla.fenix.debug/u0a99 for activity org.mozilla.fenix.debug/.App
+06-02 16:38:43.388 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:43.395 I/Places ( 2473): ?: PlacesBleScanner start() with priority 2
+06-02 16:38:43.400 D/ ( 1728): HostConnection::get() New Host Connection established 0xe90dda40, tid 2107
+06-02 16:38:43.401 D/ ( 1869): HostConnection::get() New Host Connection established 0xc1b060c0, tid 1930
+06-02 16:38:43.418 W/lla.fenix.debu( 9365): Unexpected CPU variant for X86 using defaults: x86
+06-02 16:38:43.429 I/Places ( 2473): ?: PlacesBleScanner start() with priority 2
+06-02 16:38:43.430 I/PlaceInferenceEngine( 2473): [anon] Changed inference mode: 1
+06-02 16:38:43.457 I/Places ( 2473): Converted 0 out of 1 WiFi scans
+06-02 16:38:43.467 I/PlaceInferenceEngine( 2473): No beacon scan available - ignoring candidates.
+06-02 16:38:43.472 W/ActivityThread( 9365): Application org.mozilla.fenix.debug can be debugged on port 8100...
+06-02 16:38:43.478 I/lla.fenix.debu( 9365): The ClassLoaderContext is a special shared library.
+06-02 16:38:43.488 W/ctxmgr ( 2473): [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):PlacesProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2473). Was: 3 for 18, account#-517948760#
+06-02 16:38:43.492 D/EGL_emulation( 2488): eglMakeCurrent: 0xe3385ae0: ver 3 0 (tinfo 0xe33838f0)
+06-02 16:38:43.492 D/EGL_emulation( 2402): eglMakeCurrent: 0xe1911c80: ver 3 0 (tinfo 0xc8cbe260)
+06-02 16:38:43.494 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:43.495 D/ ( 1728): HostConnection::get() New Host Connection established 0xe90dda40, tid 2107
+06-02 16:38:43.495 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:43.495 D/ ( 1728): HostConnection::get() New Host Connection established 0xe90dda40, tid 2107
+06-02 16:38:43.496 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:43.496 D/ ( 1728): HostConnection::get() New Host Connection established 0xe90dda40, tid 2107
+06-02 16:38:43.496 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:43.497 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:43.499 D/ ( 1728): HostConnection::get() New Host Connection established 0xe50f1400, tid 1952
+06-02 16:38:43.499 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:43.499 D/ ( 1728): HostConnection::get() New Host Connection established 0xe50f1400, tid 1952
+06-02 16:38:43.499 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:43.499 D/ ( 1728): HostConnection::get() New Host Connection established 0xe50f1400, tid 1952
+06-02 16:38:43.499 D/gralloc_ranchu( 1728): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:43.504 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:43.508 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:43.509 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c3080, tid 1897
+06-02 16:38:43.509 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:43.510 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c3080, tid 1897
+06-02 16:38:43.510 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:43.510 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c3080, tid 1897
+06-02 16:38:43.511 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:43.513 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:43.515 D/ ( 1728): HostConnection::get() New Host Connection established 0xe50f1280, tid 1952
+06-02 16:38:43.517 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c3080, tid 1897
+06-02 16:38:43.523 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:43.634 I/chatty ( 2002): uid=10024(com.android.systemui) RenderThread identical 13 lines
+06-02 16:38:43.636 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:44.078 D/FirebaseApp( 9365): Default FirebaseApp failed to initialize because no default options were found. This usually means that com.google.gms:google-services was not applied to your gradle project.
+06-02 16:38:44.078 I/FirebaseInitProvider( 9365): FirebaseApp initialization unsuccessful
+06-02 16:38:44.149 D/FenixApplication( 9365): Initializing Glean (uploadEnabled=true, isFennec=false)
+06-02 16:38:44.163 D/RustNativeSupport( 9365): findMegazordLibraryName(viaduct, 0.59.0
+06-02 16:38:44.163 D/RustNativeSupport( 9365): lib in use: none
+06-02 16:38:44.164 D/RustNativeSupport( 9365): lib configured: megazord
+06-02 16:38:44.164 D/RustNativeSupport( 9365): lib version configured: 0.59.0
+06-02 16:38:44.164 D/RustNativeSupport( 9365): settled on megazord
+06-02 16:38:44.222 D/libglean_ffi( 9365): glean_ffi: Android logging should be hooked up!
+06-02 16:38:44.224 I/glean/Glean( 9365): Registering pings for mozilla.telemetry.glean.GleanMetrics.Pings
+06-02 16:38:44.225 I/libglean_ffi( 9365): glean_core: Creating new Glean
+06-02 16:38:44.225 D/libglean_ffi( 9365): glean_core::database: Database path: "/data/user/0/org.mozilla.fenix.debug/glean_data/db"
+06-02 16:38:44.226 I/libglean_ffi( 9365): glean_core::database: Database initialized
+06-02 16:38:44.226 D/RustNativeSupport( 9365): findMegazordLibraryName(rustlog, 0.59.0
+06-02 16:38:44.226 D/RustNativeSupport( 9365): lib in use: none
+06-02 16:38:44.226 D/RustNativeSupport( 9365): lib configured: megazord
+06-02 16:38:44.226 D/RustNativeSupport( 9365): lib version configured: 0.59.0
+06-02 16:38:44.226 D/RustNativeSupport( 9365): settled on megazord
+06-02 16:38:44.228 I/rc_log_ffi::ios( 9365): rc_log adapter initialized!
+06-02 16:38:44.245 I/GeckoRuntime( 9365): Adding debug configuration from: /data/local/tmp/org.mozilla.fenix.debug-geckoview-config.yaml
+06-02 16:38:44.245 D/GeckoDebugConfig( 9365): Adding environment variables from debug config: {MOZ_CRASHREPORTER=1, MOZ_CRASHREPORTER_NO_REPORT=1, MOZ_CRASHREPORTER_SHUTDOWN=1}
+06-02 16:38:44.246 D/GeckoDebugConfig( 9365): Adding arguments from debug config: [-marionette, -profile, /mnt/sdcard/org.mozilla.fenix.debug-geckodriver-profile]
+06-02 16:38:44.246 I/libglean_ffi( 9365): glean_ffi: Glean initialized
+06-02 16:38:44.247 D/GeckoThread( 9365): State changed to LAUNCHED
+06-02 16:38:44.247 I/GeckoThread( 9365): preparing to run Gecko
+06-02 16:38:44.249 D/GeckoThread( 9365): env var: MOZ_CRASHREPORTER=1
+06-02 16:38:44.249 D/GeckoThread( 9365): env var: MOZ_CRASHREPORTER_NO_REPORT=1
+06-02 16:38:44.249 D/GeckoThread( 9365): env var: MOZ_CRASHREPORTER_SHUTDOWN=1
+06-02 16:38:44.259 D/GeckoRuntime( 9365): Lifecycle: onCreate
+06-02 16:38:44.267 D/GeckoThread( 9365): State changed to MOZGLUE_READY
+06-02 16:38:44.288 W/Settings( 9365): Setting animator_duration_scale has moved from android.provider.Settings.System to android.provider.Settings.Global, returning read-only global URI.
+06-02 16:38:44.292 E/GeckoLibLoad( 9365): Load sqlite start
+06-02 16:38:44.299 I/glean/MetricsPingSched( 9365): The application just updated. Send metrics ping now.
+06-02 16:38:44.303 E/GeckoLibLoad( 9365): Load sqlite done
+06-02 16:38:44.303 E/GeckoLibLoad( 9365): Load nss start
+06-02 16:38:44.303 E/GeckoLibLoad( 9365): Load nss done
+06-02 16:38:44.320 I/glean/MetricsPingSched( 9365): Collecting the 'metrics' ping, now = Tue Jun 02 16:38:44 EDT 2020, startup = true, reason = upgrade
+06-02 16:38:44.345 D/LeakCanary( 9365): Updated AppWatcher.config: Config(no changes)
+06-02 16:38:44.357 E/GeckoLibLoad( 9365): Loaded libs in 53.254794ms total, 20ms(80ms) user, 20ms(30ms) system, 0(0) faults
+06-02 16:38:44.357 D/GeckoThread( 9365): State changed to LIBS_READY
+06-02 16:38:44.357 I/libglean_ffi( 9365): glean_core::ping: Collecting metrics
+06-02 16:38:44.357 I/libglean_ffi( 9365): glean_core::ping: Storage for metrics empty. Bailing out.
+06-02 16:38:44.357 I/libglean_ffi( 9365): glean_core: No content for ping 'metrics', therefore no ping queued.
+06-02 16:38:44.360 D/glean/MetricsPingSched( 9365): Scheduling the 'metrics' ping in 40875709ms
+06-02 16:38:44.361 W/GeckoThread( 9365): zerdatime 4688492 - runGecko
+06-02 16:38:44.363 D/GeckoProfile( 9365): Loading profile at: null name: default
+06-02 16:38:44.364 D/GeckoProfile( 9365): Created new profile dir.
+06-02 16:38:44.365 I/GeckoProfile( 9365): Enqueuing profile init.
+06-02 16:38:44.368 D/GeckoProfile( 9365): Found profile dir: /data/user/0/org.mozilla.fenix.debug/files/mozilla/68sf7sou.default
+06-02 16:38:44.368 D/GeckoProfile( 9365): Attempting to write new client ID properties
+06-02 16:38:44.369 D/GeckoProfile( 9365): Creating profile dir: /data/user/0/org.mozilla.fenix.debug/files/mozilla/68sf7sou.default
+06-02 16:38:44.370 D/LeakCanary( 9365): Updated LeakCanary.config: Config(no changes)
+06-02 16:38:44.373 D/App ( 9365): DebugMetricController: start
+06-02 16:38:44.373 D/App ( 9365): DebugMetricController: start
+06-02 16:38:44.374 W/PushConfig( 9365): No firebase configuration found; cannot support push service.
+06-02 16:38:44.383 I/Gecko:DumpUtils( 9365): Fifo watcher disabled via pref.
+06-02 16:38:44.388 D/StrictMode( 9365): StrictMode policy violation; ~duration=164 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:44.388 D/StrictMode( 9365): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:44.388 D/StrictMode( 9365): at java.io.FileInputStream.<init>(FileInputStream.java:163)
+06-02 16:38:44.388 D/StrictMode( 9365): at org.mozilla.gecko.util.DebugConfig.fromFile(DebugConfig.java:49)
+06-02 16:38:44.388 D/StrictMode( 9365): at org.mozilla.geckoview.GeckoRuntime.init(GeckoRuntime.java:363)
+06-02 16:38:44.388 D/StrictMode( 9365): at org.mozilla.geckoview.GeckoRuntime.create(GeckoRuntime.java:574)
+06-02 16:38:44.388 D/StrictMode( 9365): at GeckoProvider.createRuntime(GeckoProvider.kt:58)
+06-02 16:38:44.388 D/StrictMode( 9365): at GeckoProvider.getOrCreateRuntime(GeckoProvider.kt:28)
+06-02 16:38:44.388 D/StrictMode( 9365): at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:79)
+06-02 16:38:44.388 D/StrictMode( 9365): at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:57)
+06-02 16:38:44.388 D/StrictMode( 9365): at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+06-02 16:38:44.388 D/StrictMode( 9365): at org.mozilla.fenix.components.Core.getEngine(Unknown Source:7)
+06-02 16:38:44.388 D/StrictMode( 9365): at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:128)
+06-02 16:38:44.388 D/StrictMode( 9365): at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+06-02 16:38:44.388 D/StrictMode( 9365): at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+06-02 16:38:44.388 D/StrictMode( 9365): at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+06-02 16:38:44.388 D/StrictMode( 9365): at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+06-02 16:38:44.388 D/StrictMode( 9365): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+06-02 16:38:44.388 D/StrictMode( 9365): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:44.388 D/StrictMode( 9365): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:44.388 D/StrictMode( 9365): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:44.388 D/StrictMode( 9365): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:44.388 D/StrictMode( 9365): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:44.388 D/StrictMode( 9365): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:44.390 D/StrictMode( 9365): StrictMode policy violation; ~duration=159 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:44.390 D/StrictMode( 9365): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:44.390 D/StrictMode( 9365): at libcore.io.BlockGuardOs.read(BlockGuardOs.java:253)
+06-02 16:38:44.390 D/StrictMode( 9365): at libcore.io.IoBridge.read(IoBridge.java:501)
+06-02 16:38:44.390 D/StrictMode( 9365): at java.io.FileInputStream.read(FileInputStream.java:307)
+06-02 16:38:44.390 D/StrictMode( 9365): at java.io.FilterInputStream.read(FilterInputStream.java:133)
+06-02 16:38:44.390 D/StrictMode( 9365): at java.io.PushbackInputStream.read(PushbackInputStream.java:186)
+06-02 16:38:44.390 D/StrictMode( 9365): at org.yaml.snakeyaml.reader.UnicodeReader.init(UnicodeReader.java:92)
+06-02 16:38:44.390 D/StrictMode( 9365): at org.yaml.snakeyaml.reader.UnicodeReader.read(UnicodeReader.java:124)
+06-02 16:38:44.390 D/StrictMode( 9365): at org.yaml.snakeyaml.reader.StreamReader.update(StreamReader.java:183)
+06-02 16:38:44.390 D/StrictMode( 9365): at org.yaml.snakeyaml.reader.StreamReader.ensureEnoughData(StreamReader.java:176)
+06-02 16:38:44.390 D/StrictMode( 9365): at org.yaml.snakeyaml.reader.StreamReader.ensureEnoughData(StreamReader.java:171)
+06-02 16:38:44.390 D/StrictMode( 9365): at org.yaml.snakeyaml.reader.StreamReader.peek(StreamReader.java:126)
+06-02 16:38:44.390 D/StrictMode( 9365): at org.yaml.snakeyaml.scanner.ScannerImpl.scanToNextToken(ScannerImpl.java:1177)
+06-02 16:38:44.390 D/StrictMode( 9365): at org.yaml.snakeyaml.scanner.ScannerImpl.fetchMoreTokens(ScannerImpl.java:287)
+06-02 16:38:44.390 D/StrictMode( 9365): at org.yaml.snakeyaml.scanner.ScannerImpl.checkToken(ScannerImpl.java:227)
+06-02 16:38:44.390 D/StrictMode( 9365): at org.yaml.snakeyaml.parser.ParserImpl$ParseImplicitDocumentStart.produce(ParserImpl.java:195)
+06-02 16:38:44.390 D/StrictMode( 9365): at org.yaml.snakeyaml.parser.ParserImpl.peekEvent(ParserImpl.java:158)
+06-02 16:38:44.390 D/StrictMode( 9365): at org.yaml.snakeyaml.parser.ParserImpl.checkEvent(ParserImpl.java:148)
+06-02 16:38:44.390 D/StrictMode( 9365): at org.yaml.snakeyaml.composer.Composer.getSingleNode(Composer.java:107)
+06-02 16:38:44.390 D/StrictMode( 9365): at org.yaml.snakeyaml.constructor.BaseConstructor.getSingleData(BaseConstructor.java:141)
+06-02 16:38:44.390 D/StrictMode( 9365): at org.yaml.snakeyaml.Yaml.loadFromReader(Yaml.java:525)
+06-02 16:38:44.390 D/StrictMode( 9365): at org.yaml.snakeyaml.Yaml.load(Yaml.java:453)
+06-02 16:38:44.390 D/StrictMode( 9365): at org.mozilla.gecko.util.DebugConfig.fromFile(DebugConfig.java:51)
+06-02 16:38:44.390 D/StrictMode( 9365): at org.mozilla.geckoview.GeckoRuntime.init(GeckoRuntime.java:363)
+06-02 16:38:44.390 D/StrictMode( 9365): at org.mozilla.geckoview.GeckoRuntime.create(GeckoRuntime.java:574)
+06-02 16:38:44.390 D/StrictMode( 9365): at GeckoProvider.createRuntime(GeckoProvider.kt:58)
+06-02 16:38:44.390 D/StrictMode( 9365): at GeckoProvider.getOrCreateRuntime(GeckoProvider.kt:28)
+06-02 16:38:44.390 D/StrictMode( 9365): at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:79)
+06-02 16:38:44.390 D/StrictMode( 9365): at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:57)
+06-02 16:38:44.390 D/StrictMode( 9365): at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+06-02 16:38:44.390 D/StrictMode( 9365): at org.mozilla.fenix.components.Core.getEngine(Unknown Source:7)
+06-02 16:38:44.390 D/StrictMode( 9365): at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:128)
+06-02 16:38:44.390 D/StrictMode( 9365): at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+06-02 16:38:44.390 D/StrictMode( 9365): at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+06-02 16:38:44.390 D/StrictMode( 9365): at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+06-02 16:38:44.390 D/StrictMode( 9365): at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+06-02 16:38:44.390 D/StrictMode( 9365): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+06-02 16:38:44.390 D/StrictMode( 9365): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:44.390 D/StrictMode( 9365): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:44.390 D/StrictMode( 9365): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:44.390 D/StrictMode( 9365): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:44.390 D/StrictMode( 9365): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:44.390 D/StrictMode( 9365): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:44.392 D/StrictMode( 9365): StrictMode policy violation; ~duration=159 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:44.392 D/StrictMode( 9365): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:44.392 D/StrictMode( 9365): at libcore.io.BlockGuardOs.read(BlockGuardOs.java:253)
+06-02 16:38:44.392 D/StrictMode( 9365): at libcore.io.IoBridge.read(IoBridge.java:501)
+06-02 16:38:44.392 D/StrictMode( 9365): at java.io.FileInputStream.read(FileInputStream.java:307)
+06-02 16:38:44.392 D/StrictMode( 9365): at java.io.FilterInputStream.read(FilterInputStream.java:133)
+06-02 16:38:44.392 D/StrictMode( 9365): at java.io.PushbackInputStream.read(PushbackInputStream.java:186)
+06-02 16:38:44.392 D/StrictMode( 9365): at sun.nio.cs.StreamDecoder.readBytes(StreamDecoder.java:288)
+06-02 16:38:44.392 D/StrictMode( 9365): at sun.nio.cs.StreamDecoder.implRead(StreamDecoder.java:351)
+06-02 16:38:44.392 D/StrictMode( 9365): at sun.nio.cs.StreamDecoder.read(StreamDecoder.java:180)
+06-02 16:38:44.392 D/StrictMode( 9365): at java.io.InputStreamReader.read(InputStreamReader.java:184)
+06-02 16:38:44.392 D/StrictMode( 9365): at org.yaml.snakeyaml.reader.UnicodeReader.read(UnicodeReader.java:125)
+06-02 16:38:44.392 D/StrictMode( 9365): at org.yaml.snakeyaml.reader.StreamReader.update(StreamReader.java:183)
+06-02 16:38:44.392 D/StrictMode( 9365): at org.yaml.snakeyaml.reader.StreamReader.ensureEnoughData(StreamReader.java:176)
+06-02 16:38:44.392 D/StrictMode( 9365): at org.yaml.snakeyaml.reader.StreamReader.ensureEnoughData(StreamReader.java:171)
+06-02 16:38:44.392 D/StrictMode( 9365): at org.yaml.snakeyaml.reader.StreamReader.peek(StreamReader.java:126)
+06-02 16:38:44.392 D/StrictMode( 9365): at org.yaml.snakeyaml.scanner.ScannerImpl.scanToNextToken(ScannerImpl.java:1177)
+06-02 16:38:44.392 D/StrictMode( 9365): at org.yaml.snakeyaml.scanner.ScannerImpl.fetchMoreTokens(ScannerImpl.java:287)
+06-02 16:38:44.392 D/StrictMode( 9365): at org.yaml.snakeyaml.scanner.ScannerImpl.checkToken(ScannerImpl.java:227)
+06-02 16:38:44.392 D/StrictMode( 9365): at org.yaml.snakeyaml.parser.ParserImpl$ParseImplicitDocumentStart.produce(ParserImpl.java:195)
+06-02 16:38:44.392 D/StrictMode( 9365): at org.yaml.snakeyaml.parser.ParserImpl.peekEvent(ParserImpl.java:158)
+06-02 16:38:44.392 D/StrictMode( 9365): at org.yaml.snakeyaml.parser.ParserImpl.checkEvent(ParserImpl.java:148)
+06-02 16:38:44.392 D/StrictMode( 9365): at org.yaml.snakeyaml.composer.Composer.getSingleNode(Composer.java:107)
+06-02 16:38:44.392 D/StrictMode( 9365): at org.yaml.snakeyaml.constructor.BaseConstructor.getSingleData(BaseConstructor.java:141)
+06-02 16:38:44.392 D/StrictMode( 9365): at org.yaml.snakeyaml.Yaml.loadFromReader(Yaml.java:525)
+06-02 16:38:44.392 D/StrictMode( 9365): at org.yaml.snakeyaml.Yaml.load(Yaml.java:453)
+06-02 16:38:44.392 D/StrictMode( 9365): at org.mozilla.gecko.util.DebugConfig.fromFile(DebugConfig.java:51)
+06-02 16:38:44.392 D/StrictMode( 9365): at org.mozilla.geckoview.GeckoRuntime.init(GeckoRuntime.java:363)
+06-02 16:38:44.392 D/StrictMode( 9365): at org.mozilla.geckoview.GeckoRuntime.create(GeckoRuntime.java:574)
+06-02 16:38:44.392 D/StrictMode( 9365): at GeckoProvider.createRuntime(GeckoProvider.kt:58)
+06-02 16:38:44.392 D/StrictMode( 9365): at GeckoProvider.getOrCreateRuntime(GeckoProvider.kt:28)
+06-02 16:38:44.392 D/StrictMode( 9365): at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:79)
+06-02 16:38:44.392 D/StrictMode( 9365): at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:57)
+06-02 16:38:44.392 D/StrictMode( 9365): at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+06-02 16:38:44.392 D/StrictMode( 9365): at org.mozilla.fenix.components.Core.getEngine(Unknown Source:7)
+06-02 16:38:44.392 D/StrictMode( 9365): at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:128)
+06-02 16:38:44.392 D/StrictMode( 9365): at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+06-02 16:38:44.392 D/StrictMode( 9365): at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+06-02 16:38:44.392 D/StrictMode( 9365): at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+06-02 16:38:44.392 D/StrictMode( 9365): at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+06-02 16:38:44.392 D/StrictMode( 9365): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+06-02 16:38:44.392 D/StrictMode( 9365): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:44.392 D/StrictMode( 9365): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:44.392 D/StrictMode( 9365): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:44.392 D/StrictMode( 9365): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:44.392 D/StrictMode( 9365): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:44.392 D/StrictMode( 9365): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:44.396 D/StrictMode( 9365): StrictMode policy violation; ~duration=146 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:44.396 D/StrictMode( 9365): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:44.396 D/StrictMode( 9365): at libcore.io.BlockGuardOs.read(BlockGuardOs.java:253)
+06-02 16:38:44.396 D/StrictMode( 9365): at libcore.io.IoBridge.read(IoBridge.java:501)
+06-02 16:38:44.396 D/StrictMode( 9365): at java.io.FileInputStream.read(FileInputStream.java:307)
+06-02 16:38:44.396 D/StrictMode( 9365): at java.io.FilterInputStream.read(FilterInputStream.java:133)
+06-02 16:38:44.396 D/StrictMode( 9365): at java.io.PushbackInputStream.read(PushbackInputStream.java:186)
+06-02 16:38:44.396 D/StrictMode( 9365): at sun.nio.cs.StreamDecoder.readBytes(StreamDecoder.java:288)
+06-02 16:38:44.396 D/StrictMode( 9365): at sun.nio.cs.StreamDecoder.implRead(StreamDecoder.java:351)
+06-02 16:38:44.396 D/StrictMode( 9365): at sun.nio.cs.StreamDecoder.read(StreamDecoder.java:180)
+06-02 16:38:44.396 D/StrictMode( 9365): at java.io.InputStreamReader.read(InputStreamReader.java:184)
+06-02 16:38:44.396 D/StrictMode( 9365): at org.yaml.snakeyaml.reader.UnicodeReader.read(UnicodeReader.java:125)
+06-02 16:38:44.396 D/StrictMode( 9365): at org.yaml.snakeyaml.reader.StreamReader.update(StreamReader.java:183)
+06-02 16:38:44.396 D/StrictMode( 9365): at org.yaml.snakeyaml.reader.StreamReader.ensureEnoughData(StreamReader.java:176)
+06-02 16:38:44.396 D/StrictMode( 9365): at org.yaml.snakeyaml.reader.StreamReader.peek(StreamReader.java:136)
+06-02 16:38:44.396 D/StrictMode( 9365): at org.yaml.snakeyaml.scanner.ScannerImpl.scanPlain(ScannerImpl.java:1999)
+06-02 16:38:44.396 D/StrictMode( 9365): at org.yaml.snakeyaml.scanner.ScannerImpl.fetchPlain(ScannerImpl.java:1044)
+06-02 16:38:44.396 D/StrictMode( 9365): at org.yaml.snakeyaml.scanner.ScannerImpl.fetchMoreTokens(ScannerImpl.java:399)
+06-02 16:38:44.396 D/StrictMode( 9365): at org.yaml.snakeyaml.scanner.ScannerImpl.checkToken(ScannerImpl.java:227)
+06-02 16:38:44.396 D/StrictMode( 9365): at org.yaml.snakeyaml.parser.ParserImpl$ParseBlockSequenceEntry.produce(ParserImpl.java:504)
+06-02 16:38:44.396 D/StrictMode( 9365): at org.yaml.snakeyaml.parser.ParserImpl.peekEvent(ParserImpl.java:158)
+06-02 16:38:44.396 D/StrictMode( 9365): at org.yaml.snakeyaml.parser.ParserImpl.checkEvent(ParserImpl.java:148)
+06-02 16:38:44.396 D/StrictMode( 9365): at org.yaml.snakeyaml.composer.Composer.composeSequenceNode(Composer.java:188)
+06-02 16:38:44.396 D/StrictMode( 9365): at org.yaml.snakeyaml.composer.Composer.composeNode(Composer.java:142)
+06-02 16:38:44.396 D/StrictMode( 9365): at org.yaml.snakeyaml.composer.Composer.composeValueNode(Composer.java:236)
+06-02 16:38:44.396 D/StrictMode( 9365): at org.yaml.snakeyaml.composer.Composer.composeMappingChildren(Composer.java:227)
+06-02 16:38:44.396 D/StrictMode( 9365): at org.yaml.snakeyaml.composer.Composer.composeMappingNode(Composer.java:215)
+06-02 16:38:44.396 D/StrictMode( 9365): at org.yaml.snakeyaml.composer.Composer.composeNode(Composer.java:144)
+06-02 16:38:44.396 D/StrictMode( 9365): at org.yaml.snakeyaml.composer.Composer.getNode(Composer.java:85)
+06-02 16:38:44.396 D/StrictMode( 9365): at org.yaml.snakeyaml.composer.Composer.getSingleNode(Composer.java:108)
+06-02 16:38:44.396 D/StrictMode( 9365): at org.yaml.snakeyaml.constructor.BaseConstructor.getSingleData(BaseConstructor.java:141)
+06-02 16:38:44.396 D/StrictMode( 9365): at org.yaml.snakeyaml.Yaml.loadFromReader(Yaml.java:525)
+06-02 16:38:44.396 D/StrictMode( 9365): at org.yaml.snakeyaml.Yaml.load(Yaml.java:453)
+06-02 16:38:44.396 D/StrictMode( 9365): at org.mozilla.gecko.util.DebugConfig.fromFile(DebugConfig.java:51)
+06-02 16:38:44.396 D/StrictMode( 9365): at org.mozilla.geckoview.GeckoRuntime.init(GeckoRuntime.java:363)
+06-02 16:38:44.396 D/StrictMode( 9365): at org.mozilla.geckoview.GeckoRuntime.create(GeckoRuntime.java:574)
+06-02 16:38:44.396 D/StrictMode( 9365): at GeckoProvider.createRuntime(GeckoProvider.kt:58)
+06-02 16:38:44.396 D/StrictMode( 9365): at GeckoProvider.getOrCreateRuntime(GeckoProvider.kt:28)
+06-02 16:38:44.396 D/StrictMode( 9365): at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:79)
+06-02 16:38:44.396 D/StrictMode( 9365): at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:57)
+06-02 16:38:44.396 D/StrictMode( 9365): at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+06-02 16:38:44.396 D/StrictMode( 9365): at org.mozilla.fenix.components.Core.getEngine(Unknown Source:7)
+06-02 16:38:44.396 D/StrictMode( 9365): at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:128)
+06-02 16:38:44.396 D/StrictMode( 9365): at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+06-02 16:38:44.396 D/StrictMode( 9365): at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+06-02 16:38:44.396 D/StrictMode( 9365): at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+06-02 16:38:44.396 D/StrictMode( 9365): at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+06-02 16:38:44.396 D/StrictMode( 9365): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+06-02 16:38:44.396 D/StrictMode( 9365): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:44.396 D/StrictMode( 9365): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:44.396 D/StrictMode( 9365): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:44.396 D/StrictMode( 9365): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:44.396 D/StrictMode( 9365): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:44.396 D/StrictMode( 9365): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:44.399 D/StrictMode( 9365): StrictMode policy violation; ~duration=120 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:44.399 D/StrictMode( 9365): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:44.399 D/StrictMode( 9365): at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+06-02 16:38:44.399 D/StrictMode( 9365): at java.io.File.exists(File.java:815)
+06-02 16:38:44.399 D/StrictMode( 9365): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:605)
+06-02 16:38:44.399 D/StrictMode( 9365): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:596)
+06-02 16:38:44.399 D/StrictMode( 9365): at android.app.ContextImpl.getPreferencesDir(ContextImpl.java:552)
+06-02 16:38:44.399 D/StrictMode( 9365): at android.app.ContextImpl.getSharedPreferencesPath(ContextImpl.java:747)
+06-02 16:38:44.399 D/StrictMode( 9365): at android.app.ContextImpl.getSharedPreferences(ContextImpl.java:400)
+06-02 16:38:44.399 D/StrictMode( 9365): at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+06-02 16:38:44.399 D/StrictMode( 9365): at mozilla.components.browser.engine.gecko.GeckoEngine.<init>(GeckoEngine.kt:68)
+06-02 16:38:44.399 D/StrictMode( 9365): at mozilla.components.browser.engine.gecko.GeckoEngine.<init>(GeckoEngine.kt:63)
+06-02 16:38:44.399 D/StrictMode( 9365): at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:76)
+06-02 16:38:44.399 D/StrictMode( 9365): at org.mozilla.fenix.components.Core$engine$2.invoke(Core.kt:57)
+06-02 16:38:44.399 D/StrictMode( 9365): at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+06-02 16:38:44.399 D/StrictMode( 9365): at org.mozilla.fenix.components.Core.getEngine(Unknown Source:7)
+06-02 16:38:44.399 D/StrictMode( 9365): at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:128)
+06-02 16:38:44.399 D/StrictMode( 9365): at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+06-02 16:38:44.399 D/StrictMode( 9365): at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+06-02 16:38:44.399 D/StrictMode( 9365): at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+06-02 16:38:44.399 D/StrictMode( 9365): at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+06-02 16:38:44.399 D/StrictMode( 9365): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+06-02 16:38:44.399 D/StrictMode( 9365): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:44.399 D/StrictMode( 9365): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:44.399 D/StrictMode( 9365): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:44.399 D/StrictMode( 9365): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:44.399 D/StrictMode( 9365): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:44.399 D/StrictMode( 9365): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:44.402 D/StrictMode( 9365): StrictMode policy violation; ~duration=46 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:44.402 D/StrictMode( 9365): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:44.402 D/StrictMode( 9365): at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+06-02 16:38:44.402 D/StrictMode( 9365): at java.io.File.exists(File.java:815)
+06-02 16:38:44.402 D/StrictMode( 9365): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:605)
+06-02 16:38:44.402 D/StrictMode( 9365): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:596)
+06-02 16:38:44.402 D/StrictMode( 9365): at android.app.ContextImpl.getPreferencesDir(ContextImpl.java:552)
+06-02 16:38:44.402 D/StrictMode( 9365): at android.app.ContextImpl.getSharedPreferencesPath(ContextImpl.java:747)
+06-02 16:38:44.402 D/StrictMode( 9365): at android.app.ContextImpl.getSharedPreferences(ContextImpl.java:400)
+06-02 16:38:44.402 D/StrictMode( 9365): at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+06-02 16:38:44.402 D/StrictMode( 9365): at androidx.preference.PreferenceManager.getDefaultSharedPreferences(PreferenceManager.java:119)
+06-02 16:38:44.402 D/StrictMode( 9365): at org.mozilla.fenix.DebugFenixApplication.setupLeakCanary(DebugFenixApplication.kt:15)
+06-02 16:38:44.402 D/StrictMode( 9365): at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:140)
+06-02 16:38:44.402 D/StrictMode( 9365): at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+06-02 16:38:44.402 D/StrictMode( 9365): at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+06-02 16:38:44.402 D/StrictMode( 9365): at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+06-02 16:38:44.402 D/StrictMode( 9365): at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+06-02 16:38:44.402 D/StrictMode( 9365): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+06-02 16:38:44.402 D/StrictMode( 9365): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:44.402 D/StrictMode( 9365): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:44.402 D/StrictMode( 9365): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:44.402 D/StrictMode( 9365): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:44.402 D/StrictMode( 9365): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:44.402 D/StrictMode( 9365): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:44.404 D/StrictMode( 9365): StrictMode policy violation; ~duration=43 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:44.404 D/StrictMode( 9365): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:44.404 D/StrictMode( 9365): at android.app.SharedPreferencesImpl.awaitLoadedLocked(SharedPreferencesImpl.java:256)
+06-02 16:38:44.404 D/StrictMode( 9365): at android.app.SharedPreferencesImpl.getBoolean(SharedPreferencesImpl.java:325)
+06-02 16:38:44.404 D/StrictMode( 9365): at org.mozilla.fenix.DebugFenixApplication.setupLeakCanary(DebugFenixApplication.kt:16)
+06-02 16:38:44.404 D/StrictMode( 9365): at org.mozilla.fenix.FenixApplication.setupInMainProcessOnly(FenixApplication.kt:140)
+06-02 16:38:44.404 D/StrictMode( 9365): at org.mozilla.fenix.FenixApplication.onCreate(FenixApplication.kt:90)
+06-02 16:38:44.404 D/StrictMode( 9365): at android.app.Instrumentation.callApplicationOnCreate(Instrumentation.java:1154)
+06-02 16:38:44.404 D/StrictMode( 9365): at android.app.ActivityThread.handleBindApplication(ActivityThread.java:5871)
+06-02 16:38:44.404 D/StrictMode( 9365): at android.app.ActivityThread.access$1100(ActivityThread.java:199)
+06-02 16:38:44.404 D/StrictMode( 9365): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1650)
+06-02 16:38:44.404 D/StrictMode( 9365): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:44.404 D/StrictMode( 9365): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:44.404 D/StrictMode( 9365): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:44.404 D/StrictMode( 9365): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:44.404 D/StrictMode( 9365): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:44.404 D/StrictMode( 9365): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:44.406 D/GeckoSysInfo( 9365): System memory: 1494MB.
+06-02 16:38:44.406 W/lla.fenix.debu( 9365): Accessing hidden method Landroid/os/MessageQueue;->next()Landroid/os/Message; (light greylist, JNI)
+06-02 16:38:44.406 D/StrictMode( 9365): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/os/MessageQueue;->next()Landroid/os/Message;
+06-02 16:38:44.406 D/StrictMode( 9365): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:44.406 D/StrictMode( 9365): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:44.406 D/StrictMode( 9365): at org.mozilla.gecko.mozglue.GeckoLoader.nativeRun(Native Method)
+06-02 16:38:44.406 D/StrictMode( 9365): at org.mozilla.gecko.GeckoThread.run(GeckoThread.java:449)
+06-02 16:38:44.406 W/lla.fenix.debu( 9365): Accessing hidden field Landroid/os/MessageQueue;->mMessages:Landroid/os/Message; (light greylist, JNI)
+06-02 16:38:44.406 D/StrictMode( 9365): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/os/MessageQueue;->mMessages:Landroid/os/Message;
+06-02 16:38:44.406 D/StrictMode( 9365): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:44.406 D/StrictMode( 9365): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:44.406 D/StrictMode( 9365): at org.mozilla.gecko.mozglue.GeckoLoader.nativeRun(Native Method)
+06-02 16:38:44.406 D/StrictMode( 9365): at org.mozilla.gecko.GeckoThread.run(GeckoThread.java:449)
+06-02 16:38:44.408 W/lla.fenix.debu( 9365): Accessing hidden field Ljava/lang/Boolean;->value:Z (light greylist, JNI)
+06-02 16:38:44.409 D/StrictMode( 9365): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Ljava/lang/Boolean;->value:Z
+06-02 16:38:44.409 D/StrictMode( 9365): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:44.409 D/StrictMode( 9365): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:44.409 D/StrictMode( 9365): at org.mozilla.gecko.mozglue.GeckoLoader.nativeRun(Native Method)
+06-02 16:38:44.409 D/StrictMode( 9365): at org.mozilla.gecko.GeckoThread.run(GeckoThread.java:449)
+06-02 16:38:44.409 W/lla.fenix.debu( 9365): Accessing hidden field Ljava/lang/Integer;->value:I (light greylist, JNI)
+06-02 16:38:44.409 D/StrictMode( 9365): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Ljava/lang/Integer;->value:I
+06-02 16:38:44.409 D/StrictMode( 9365): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:44.409 D/StrictMode( 9365): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:44.409 D/StrictMode( 9365): at org.mozilla.gecko.mozglue.GeckoLoader.nativeRun(Native Method)
+06-02 16:38:44.409 D/StrictMode( 9365): at org.mozilla.gecko.GeckoThread.run(GeckoThread.java:449)
+06-02 16:38:44.409 W/lla.fenix.debu( 9365): Accessing hidden field Ljava/lang/Double;->value:D (light greylist, JNI)
+06-02 16:38:44.410 D/StrictMode( 9365): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Ljava/lang/Double;->value:D
+06-02 16:38:44.410 D/StrictMode( 9365): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:44.410 D/StrictMode( 9365): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:44.410 D/StrictMode( 9365): at org.mozilla.gecko.mozglue.GeckoLoader.nativeRun(Native Method)
+06-02 16:38:44.410 D/StrictMode( 9365): at org.mozilla.gecko.GeckoThread.run(GeckoThread.java:449)
+06-02 16:38:44.410 D/GeckoThread( 9365): State changed to JNI_READY
+06-02 16:38:44.456 D/ServiceAllocator( 9365): org.mozilla.gecko.process.GeckoChildProcessServices$tab0 updateBindings: BACKGROUND priority, 0 importance, 2 successful binds, 0 failed binds, 0 successful unbinds, 0 failed unbinds
+06-02 16:38:44.465 I/enix.debug:tab( 9414): Not late-enabling -Xcheck:jni (already on)
+06-02 16:38:44.465 I/ActivityManager( 1869): Start proc 9414:org.mozilla.fenix.debug:tab0/u0a99 for service org.mozilla.fenix.debug/org.mozilla.gecko.process.GeckoChildProcessServices$tab0
+06-02 16:38:44.493 W/lla.fenix.debu( 9365): Accessing hidden method Landroid/content/res/Resources$Theme;->rebase()V (dark greylist, reflection)
+06-02 16:38:44.493 D/StrictMode( 9365): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/content/res/Resources$Theme;->rebase()V
+06-02 16:38:44.493 D/StrictMode( 9365): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:44.493 D/StrictMode( 9365): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:44.493 D/StrictMode( 9365): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:44.493 D/StrictMode( 9365): at java.lang.Class.getMethod(Class.java:2064)
+06-02 16:38:44.493 D/StrictMode( 9365): at java.lang.Class.getDeclaredMethod(Class.java:2047)
+06-02 16:38:44.493 D/StrictMode( 9365): at androidx.core.content.res.ResourcesCompat$ThemeCompat$ImplApi23.rebase(ResourcesCompat.java:501)
+06-02 16:38:44.493 D/StrictMode( 9365): at androidx.core.content.res.ResourcesCompat$ThemeCompat.rebase(ResourcesCompat.java:477)
+06-02 16:38:44.493 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatDelegateImpl.attachBaseContext2(AppCompatDelegateImpl.java:465)
+06-02 16:38:44.493 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatActivity.attachBaseContext(AppCompatActivity.java:107)
+06-02 16:38:44.493 D/StrictMode( 9365): at mozilla.components.support.locale.LocaleAwareAppCompatActivity.attachBaseContext(LocaleAwareAppCompatActivity.kt:19)
+06-02 16:38:44.493 D/StrictMode( 9365): at android.app.Activity.attach(Activity.java:7051)
+06-02 16:38:44.493 D/StrictMode( 9365): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2873)
+06-02 16:38:44.493 D/StrictMode( 9365): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3048)
+06-02 16:38:44.493 D/StrictMode( 9365): at android.app.servertransaction.LaunchActivityItem.execute(LaunchActivityItem.java:78)
+06-02 16:38:44.493 D/StrictMode( 9365): at android.app.servertransaction.TransactionExecutor.executeCallbacks(TransactionExecutor.java:108)
+06-02 16:38:44.493 D/StrictMode( 9365): at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:68)
+06-02 16:38:44.493 D/StrictMode( 9365): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1808)
+06-02 16:38:44.493 D/StrictMode( 9365): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:44.493 D/StrictMode( 9365): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:44.493 D/StrictMode( 9365): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:44.493 D/StrictMode( 9365): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:44.493 D/StrictMode( 9365): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:44.493 D/StrictMode( 9365): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:44.494 W/enix.debug:tab( 9414): Unexpected CPU variant for X86 using defaults: x86
+06-02 16:38:44.495 I/ResourcesCompat( 9365): Failed to retrieve rebase() method
+06-02 16:38:44.495 I/ResourcesCompat( 9365): java.lang.NoSuchMethodException: rebase []
+06-02 16:38:44.495 I/ResourcesCompat( 9365): at java.lang.Class.getMethod(Class.java:2068)
+06-02 16:38:44.495 I/ResourcesCompat( 9365): at java.lang.Class.getDeclaredMethod(Class.java:2047)
+06-02 16:38:44.495 I/ResourcesCompat( 9365): at androidx.core.content.res.ResourcesCompat$ThemeCompat$ImplApi23.rebase(ResourcesCompat.java:501)
+06-02 16:38:44.495 I/ResourcesCompat( 9365): at androidx.core.content.res.ResourcesCompat$ThemeCompat.rebase(ResourcesCompat.java:477)
+06-02 16:38:44.495 I/ResourcesCompat( 9365): at androidx.appcompat.app.AppCompatDelegateImpl.attachBaseContext2(AppCompatDelegateImpl.java:465)
+06-02 16:38:44.495 I/ResourcesCompat( 9365): at androidx.appcompat.app.AppCompatActivity.attachBaseContext(AppCompatActivity.java:107)
+06-02 16:38:44.495 I/ResourcesCompat( 9365): at mozilla.components.support.locale.LocaleAwareAppCompatActivity.attachBaseContext(LocaleAwareAppCompatActivity.kt:19)
+06-02 16:38:44.495 I/ResourcesCompat( 9365): at android.app.Activity.attach(Activity.java:7051)
+06-02 16:38:44.495 I/ResourcesCompat( 9365): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2873)
+06-02 16:38:44.495 I/ResourcesCompat( 9365): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3048)
+06-02 16:38:44.495 I/ResourcesCompat( 9365): at android.app.servertransaction.LaunchActivityItem.execute(LaunchActivityItem.java:78)
+06-02 16:38:44.495 I/ResourcesCompat( 9365): at android.app.servertransaction.TransactionExecutor.executeCallbacks(TransactionExecutor.java:108)
+06-02 16:38:44.495 I/ResourcesCompat( 9365): at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:68)
+06-02 16:38:44.495 I/ResourcesCompat( 9365): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1808)
+06-02 16:38:44.495 I/ResourcesCompat( 9365): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:44.495 I/ResourcesCompat( 9365): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:44.495 I/ResourcesCompat( 9365): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:44.495 I/ResourcesCompat( 9365): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:44.495 I/ResourcesCompat( 9365): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:44.495 I/ResourcesCompat( 9365): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:44.515 W/lla.fenix.debu( 9365): Accessing hidden method Landroid/graphics/drawable/Drawable;->getOpticalInsets()Landroid/graphics/Insets; (light greylist, linking)
+06-02 16:38:44.515 W/lla.fenix.debu( 9365): Accessing hidden field Landroid/graphics/Insets;->left:I (light greylist, linking)
+06-02 16:38:44.515 W/lla.fenix.debu( 9365): Accessing hidden field Landroid/graphics/Insets;->right:I (light greylist, linking)
+06-02 16:38:44.515 W/lla.fenix.debu( 9365): Accessing hidden field Landroid/graphics/Insets;->top:I (light greylist, linking)
+06-02 16:38:44.515 W/lla.fenix.debu( 9365): Accessing hidden field Landroid/graphics/Insets;->bottom:I (light greylist, linking)
+06-02 16:38:44.531 I/enix.debug:tab( 9414): The ClassLoaderContext is a special shared library.
+06-02 16:38:44.554 D/GeckoViewStartup( 9365): observe: app-startup
+06-02 16:38:44.560 W/lla.fenix.debu( 9365): Accessing hidden method Landroid/view/View;->getAccessibilityDelegate()Landroid/view/View$AccessibilityDelegate; (light greylist, linking)
+06-02 16:38:44.561 D/GeckoViewConsole( 9365): enabled = true
+06-02 16:38:44.564 W/lla.fenix.debu( 9365): Accessing hidden method Landroid/view/View;->computeFitSystemWindows(Landroid/graphics/Rect;Landroid/graphics/Rect;)Z (light greylist, reflection)
+06-02 16:38:44.568 D/StrictMode( 9365): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/view/View;->computeFitSystemWindows(Landroid/graphics/Rect;Landroid/graphics/Rect;)Z
+06-02 16:38:44.568 D/StrictMode( 9365): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:44.568 D/StrictMode( 9365): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:44.568 D/StrictMode( 9365): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:44.568 D/StrictMode( 9365): at java.lang.Class.getMethod(Class.java:2064)
+06-02 16:38:44.568 D/StrictMode( 9365): at java.lang.Class.getDeclaredMethod(Class.java:2047)
+06-02 16:38:44.568 D/StrictMode( 9365): at androidx.appcompat.widget.ViewUtils.<clinit>(ViewUtils.java:44)
+06-02 16:38:44.568 D/StrictMode( 9365): at androidx.appcompat.widget.ViewUtils.makeOptionalFitsSystemWindows(ViewUtils.java:80)
+06-02 16:38:44.568 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatDelegateImpl.createSubDecor(AppCompatDelegateImpl.java:970)
+06-02 16:38:44.568 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatDelegateImpl.ensureSubDecor(AppCompatDelegateImpl.java:803)
+06-02 16:38:44.568 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatDelegateImpl.setContentView(AppCompatDelegateImpl.java:692)
+06-02 16:38:44.568 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatActivity.setContentView(AppCompatActivity.java:170)
+06-02 16:38:44.568 D/StrictMode( 9365): at org.mozilla.fenix.HomeActivity.onCreate(HomeActivity.kt:130)
+06-02 16:38:44.568 D/StrictMode( 9365): at android.app.Activity.performCreate(Activity.java:7136)
+06-02 16:38:44.568 D/StrictMode( 9365): at android.app.Activity.performCreate(Activity.java:7127)
+06-02 16:38:44.568 D/StrictMode( 9365): at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1271)
+06-02 16:38:44.568 D/StrictMode( 9365): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2893)
+06-02 16:38:44.568 D/StrictMode( 9365): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3048)
+06-02 16:38:44.568 D/StrictMode( 9365): at android.app.servertransaction.LaunchActivityItem.execute(LaunchActivityItem.java:78)
+06-02 16:38:44.568 D/StrictMode( 9365): at android.app.servertransaction.TransactionExecutor.executeCallbacks(TransactionExecutor.java:108)
+06-02 16:38:44.568 D/StrictMode( 9365): at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:68)
+06-02 16:38:44.568 D/StrictMode( 9365): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1808)
+06-02 16:38:44.568 D/StrictMode( 9365): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:44.568 D/StrictMode( 9365): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:44.568 D/StrictMode( 9365): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:44.568 D/StrictMode( 9365): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:44.568 D/StrictMode( 9365): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:44.568 D/StrictMode( 9365): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:44.569 W/lla.fenix.debu( 9365): Accessing hidden method Landroid/view/ViewGroup;->makeOptionalFitsSystemWindows()V (light greylist, reflection)
+06-02 16:38:44.573 E/storaged( 1741): getDiskStats failed with result NOT_SUPPORTED and size 0
+06-02 16:38:44.577 D/StrictMode( 9365): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/view/ViewGroup;->makeOptionalFitsSystemWindows()V
+06-02 16:38:44.577 D/StrictMode( 9365): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:44.577 D/StrictMode( 9365): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:44.577 D/StrictMode( 9365): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:44.577 D/StrictMode( 9365): at java.lang.Class.getPublicMethodRecursive(Class.java:2075)
+06-02 16:38:44.577 D/StrictMode( 9365): at java.lang.Class.getMethod(Class.java:2063)
+06-02 16:38:44.577 D/StrictMode( 9365): at java.lang.Class.getMethod(Class.java:1690)
+06-02 16:38:44.577 D/StrictMode( 9365): at androidx.appcompat.widget.ViewUtils.makeOptionalFitsSystemWindows(ViewUtils.java:84)
+06-02 16:38:44.577 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatDelegateImpl.createSubDecor(AppCompatDelegateImpl.java:970)
+06-02 16:38:44.577 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatDelegateImpl.ensureSubDecor(AppCompatDelegateImpl.java:803)
+06-02 16:38:44.577 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatDelegateImpl.setContentView(AppCompatDelegateImpl.java:692)
+06-02 16:38:44.577 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatActivity.setContentView(AppCompatActivity.java:170)
+06-02 16:38:44.577 D/StrictMode( 9365): at org.mozilla.fenix.HomeActivity.onCreate(HomeActivity.kt:130)
+06-02 16:38:44.577 D/StrictMode( 9365): at android.app.Activity.performCreate(Activity.java:7136)
+06-02 16:38:44.577 D/StrictMode( 9365): at android.app.Activity.performCreate(Activity.java:7127)
+06-02 16:38:44.577 D/StrictMode( 9365): at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1271)
+06-02 16:38:44.577 D/StrictMode( 9365): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2893)
+06-02 16:38:44.577 D/StrictMode( 9365): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3048)
+06-02 16:38:44.577 D/StrictMode( 9365): at android.app.servertransaction.LaunchActivityItem.execute(LaunchActivityItem.java:78)
+06-02 16:38:44.577 D/StrictMode( 9365): at android.app.servertransaction.TransactionExecutor.executeCallbacks(TransactionExecutor.java:108)
+06-02 16:38:44.577 D/StrictMode( 9365): at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:68)
+06-02 16:38:44.577 D/StrictMode( 9365): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1808)
+06-02 16:38:44.577 D/StrictMode( 9365): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:44.577 D/StrictMode( 9365): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:44.577 D/StrictMode( 9365): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:44.577 D/StrictMode( 9365): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:44.577 D/StrictMode( 9365): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:44.577 D/StrictMode( 9365): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:44.635 I/AJC ( 9365): isPerformanceTest
+06-02 16:38:44.636 I/AJC ( 9365): isPerformanceTest : isPhonePlugged: true
+06-02 16:38:44.636 I/AJC ( 9365): isPerformanceTest : isAdbEnabled: true
+06-02 16:38:44.640 D/App ( 9365): DebugMetricController: track event: org.mozilla.fenix.components.metrics.Event$DismissedOnboarding@da9a582
+06-02 16:38:44.642 I/GeckoConsole( 9365): No chrome package registered for chrome://browser/content/built_in_addons.json
+06-02 16:38:44.766 W/lla.fenix.debu( 9365): Accessing hidden method Landroid/graphics/FontFamily;-><init>()V (light greylist, reflection)
+06-02 16:38:44.772 D/StrictMode( 9365): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/graphics/FontFamily;-><init>()V
+06-02 16:38:44.772 D/StrictMode( 9365): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:44.772 D/StrictMode( 9365): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:44.772 D/StrictMode( 9365): at java.lang.Class.getDeclaredConstructorInternal(Native Method)
+06-02 16:38:44.772 D/StrictMode( 9365): at java.lang.Class.getConstructor0(Class.java:2325)
+06-02 16:38:44.772 D/StrictMode( 9365): at java.lang.Class.getConstructor(Class.java:1725)
+06-02 16:38:44.772 D/StrictMode( 9365): at androidx.core.graphics.TypefaceCompatApi26Impl.obtainFontFamilyCtor(TypefaceCompatApi26Impl.java:321)
+06-02 16:38:44.772 D/StrictMode( 9365): at androidx.core.graphics.TypefaceCompatApi26Impl.<init>(TypefaceCompatApi26Impl.java:84)
+06-02 16:38:44.772 D/StrictMode( 9365): at androidx.core.graphics.TypefaceCompatApi28Impl.<init>(TypefaceCompatApi28Impl.java:36)
+06-02 16:38:44.772 D/StrictMode( 9365): at androidx.core.graphics.TypefaceCompat.<clinit>(TypefaceCompat.java:51)
+06-02 16:38:44.772 D/StrictMode( 9365): at androidx.core.graphics.TypefaceCompat.create(TypefaceCompat.java:194)
+06-02 16:38:44.772 D/StrictMode( 9365): at androidx.appcompat.widget.AppCompatTextView.setTypeface(AppCompatTextView.java:708)
+06-02 16:38:44.772 D/StrictMode( 9365): at android.widget.TextView.resolveStyleAndSetTypeface(TextView.java:2037)
+06-02 16:38:44.772 D/StrictMode( 9365): at android.widget.TextView.setTypefaceFromAttrs(TextView.java:2008)
+06-02 16:38:44.772 D/StrictMode( 9365): at android.widget.TextView.applyTextAppearance(TextView.java:3640)
+06-02 16:38:44.772 D/StrictMode( 9365): at android.widget.TextView.<init>(TextView.java:1498)
+06-02 16:38:44.772 D/StrictMode( 9365): at android.widget.TextView.<init>(TextView.java:869)
+06-02 16:38:44.772 D/StrictMode( 9365): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:100)
+06-02 16:38:44.772 D/StrictMode( 9365): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:95)
+06-02 16:38:44.772 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatViewInflater.createTextView(AppCompatViewInflater.java:194)
+06-02 16:38:44.772 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatViewInflater.createView(AppCompatViewInflater.java:115)
+06-02 16:38:44.772 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatDelegateImpl.createView(AppCompatDelegateImpl.java:1548)
+06-02 16:38:44.772 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatDelegateImpl.onCreateView(AppCompatDelegateImpl.java:1599)
+06-02 16:38:44.772 D/StrictMode( 9365): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:772)
+06-02 16:38:44.772 D/StrictMode( 9365): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:44.772 D/StrictMode( 9365): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:44.772 D/StrictMode( 9365): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:44.772 D/StrictMode( 9365): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:44.772 D/StrictMode( 9365): at android.view.LayoutInflater.inflate(LayoutInflater.java:489)
+06-02 16:38:44.772 D/StrictMode( 9365): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:44.772 D/StrictMode( 9365): at android.view.LayoutInflater.inflate(LayoutInflater.java:374)
+06-02 16:38:44.772 D/StrictMode( 9365): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:30)
+06-02 16:38:44.772 D/StrictMode( 9365): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:22)
+06-02 16:38:44.772 D/StrictMode( 9365): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(Unknown Source:6)
+06-02 16:38:44.772 D/StrictMode( 9365): at java.lang.reflect.Constructor.newInstance0(Native Method)
+06-02 16:38:44.772 D/StrictMode( 9365): at java.lang.reflect.Constructor.newInstance(Constructor.java:343)
+06-02 16:38:44.772 D/StrictMode( 9365): at android.view.LayoutInflater.createView(LayoutInflater.java:647)
+06-02 16:38:44.772 D/StrictMode( 9365): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:790)
+06-02 16:38:44.772 D/StrictMode( 9365): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:44.772 D/StrictMode( 9365): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:44.772 D/StrictMode( 9365): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:44.772 D/StrictMode( 9365): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:44.772 D/StrictMode( 9365): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:44.772 D/StrictMode( 9365): at android.view.LayoutInflater.inflate(LayoutInflater.java:515)
+06-02 16:38:44.772 D/StrictMode( 9365): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:44.772 D/StrictMode( 9365): at org.mozilla.fenix.home.HomeFragment.onCreateView(HomeFragment.kt:183)
+06-02 16:38:44.772 D/StrictMode( 9365): at androidx.fragment.app.Fragment.performCreateView(Fragment.java:2698)
+06-02 16:38:44.772 D/StrictMode( 9365): at androidx.fragment.app.FragmentStateManager.createView(FragmentStateManager.java:320)
+06-02 16:38:44.772 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1187)
+06-02 16:38:44.772 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.addAddedFragments(FragmentManager.java:2224)
+06-02 16:38:44.772 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.executeOpsTogether(FragmentManager.java:1997)
+06-02 16:38:44.772 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.removeRedundantOperationsAndExecute(FragmentManager.java:1953)
+06-02 16:38:44.772 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.execPendingActions(FragmentManager.java:1849)
+06-02 16:38:44.772 D/StrictMode( 9365): at
+06-02 16:38:44.772 W/lla.fenix.debu( 9365): Accessing hidden method Landroid/graphics/FontFamily;->addFontFromAssetManager(Landroid/content/res/AssetManager;Ljava/lang/String;IZIII[Landroid/graphics/fonts/FontVariationAxis;)Z (light greylist, reflection)
+06-02 16:38:44.774 D/StrictMode( 9365): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/graphics/FontFamily;->addFontFromAssetManager(Landroid/content/res/AssetManager;Ljava/lang/String;IZIII[Landroid/graphics/fonts/FontVariationAxis;)Z
+06-02 16:38:44.774 D/StrictMode( 9365): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:44.774 D/StrictMode( 9365): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:44.774 D/StrictMode( 9365): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:44.774 D/StrictMode( 9365): at java.lang.Class.getPublicMethodRecursive(Class.java:2075)
+06-02 16:38:44.774 D/StrictMode( 9365): at java.lang.Class.getMethod(Class.java:2063)
+06-02 16:38:44.774 D/StrictMode( 9365): at java.lang.Class.getMethod(Class.java:1690)
+06-02 16:38:44.774 D/StrictMode( 9365): at androidx.core.graphics.TypefaceCompatApi26Impl.obtainAddFontFromAssetManagerMethod(TypefaceCompatApi26Impl.java:326)
+06-02 16:38:44.774 D/StrictMode( 9365): at androidx.core.graphics.TypefaceCompatApi26Impl.<init>(TypefaceCompatApi26Impl.java:85)
+06-02 16:38:44.774 D/StrictMode( 9365): at androidx.core.graphics.TypefaceCompatApi28Impl.<init>(TypefaceCompatApi28Impl.java:36)
+06-02 16:38:44.774 D/StrictMode( 9365): at androidx.core.graphics.TypefaceCompat.<clinit>(TypefaceCompat.java:51)
+06-02 16:38:44.774 D/StrictMode( 9365): at androidx.core.graphics.TypefaceCompat.create(TypefaceCompat.java:194)
+06-02 16:38:44.774 D/StrictMode( 9365): at androidx.appcompat.widget.AppCompatTextView.setTypeface(AppCompatTextView.java:708)
+06-02 16:38:44.774 D/StrictMode( 9365): at android.widget.TextView.resolveStyleAndSetTypeface(TextView.java:2037)
+06-02 16:38:44.774 D/StrictMode( 9365): at android.widget.TextView.setTypefaceFromAttrs(TextView.java:2008)
+06-02 16:38:44.774 D/StrictMode( 9365): at android.widget.TextView.applyTextAppearance(TextView.java:3640)
+06-02 16:38:44.774 D/StrictMode( 9365): at android.widget.TextView.<init>(TextView.java:1498)
+06-02 16:38:44.774 D/StrictMode( 9365): at android.widget.TextView.<init>(TextView.java:869)
+06-02 16:38:44.774 D/StrictMode( 9365): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:100)
+06-02 16:38:44.774 D/StrictMode( 9365): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:95)
+06-02 16:38:44.774 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatViewInflater.createTextView(AppCompatViewInflater.java:194)
+06-02 16:38:44.774 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatViewInflater.createView(AppCompatViewInflater.java:115)
+06-02 16:38:44.774 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatDelegateImpl.createView(AppCompatDelegateImpl.java:1548)
+06-02 16:38:44.774 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatDelegateImpl.onCreateView(AppCompatDelegateImpl.java:1599)
+06-02 16:38:44.774 D/StrictMode( 9365): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:772)
+06-02 16:38:44.774 D/StrictMode( 9365): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:44.774 D/StrictMode( 9365): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:44.774 D/StrictMode( 9365): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:44.774 D/StrictMode( 9365): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:44.774 D/StrictMode( 9365): at android.view.LayoutInflater.inflate(LayoutInflater.java:489)
+06-02 16:38:44.774 D/StrictMode( 9365): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:44.774 D/StrictMode( 9365): at android.view.LayoutInflater.inflate(LayoutInflater.java:374)
+06-02 16:38:44.774 D/StrictMode( 9365): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:30)
+06-02 16:38:44.774 D/StrictMode( 9365): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:22)
+06-02 16:38:44.774 D/StrictMode( 9365): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(Unknown Source:6)
+06-02 16:38:44.774 D/StrictMode( 9365): at java.lang.reflect.Constructor.newInstance0(Native Method)
+06-02 16:38:44.774 D/StrictMode( 9365): at java.lang.reflect.Constructor.newInstance(Constructor.java:343)
+06-02 16:38:44.774 D/StrictMode( 9365): at android.view.LayoutInflater.createView(LayoutInflater.java:647)
+06-02 16:38:44.774 D/StrictMode( 9365): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:790)
+06-02 16:38:44.774 D/StrictMode( 9365): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:44.774 D/StrictMode( 9365): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:44.774 D/StrictMode( 9365): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:44.774 D/StrictMode( 9365): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:44.774 D/StrictMode( 9365): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:44.774 D/StrictMode( 9365): at android.view.LayoutInflater.inflate(LayoutInflater.java:515)
+06-02 16:38:44.774 D/StrictMode( 9365): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:44.774 D/StrictMode( 9365): at org.mozilla.fenix.home.HomeFragment.onCreateView(HomeFragment.kt:183)
+06-02 16:38:44.774 D/StrictMode( 9365): at androidx.fragment.app.Fragment.performCreateView(Fragment.java:2698)
+06-02 16:38:44.774 D/StrictMode( 9365): at androidx.fragment.app.FragmentStateManager.createView(FragmentStateManager.java:320)
+06-02 16:38:44.774 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1187)
+06-02 16:38:44.774 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.addAddedFragments(FragmentManager.java:2224)
+06-02 16:38:44.774 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.executeOpsTogether(FragmentManager.java:1997)
+06-02 16:38:44.774 D/StrictMode( 9365): at androidx.fragme
+06-02 16:38:44.774 W/lla.fenix.debu( 9365): Accessing hidden method Landroid/graphics/FontFamily;->addFontFromBuffer(Ljava/nio/ByteBuffer;I[Landroid/graphics/fonts/FontVariationAxis;II)Z (light greylist, reflection)
+06-02 16:38:44.777 D/StrictMode( 9365): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/graphics/FontFamily;->addFontFromBuffer(Ljava/nio/ByteBuffer;I[Landroid/graphics/fonts/FontVariationAxis;II)Z
+06-02 16:38:44.777 D/StrictMode( 9365): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:44.777 D/StrictMode( 9365): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:44.777 D/StrictMode( 9365): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:44.777 D/StrictMode( 9365): at java.lang.Class.getPublicMethodRecursive(Class.java:2075)
+06-02 16:38:44.777 D/StrictMode( 9365): at java.lang.Class.getMethod(Class.java:2063)
+06-02 16:38:44.777 D/StrictMode( 9365): at java.lang.Class.getMethod(Class.java:1690)
+06-02 16:38:44.777 D/StrictMode( 9365): at androidx.core.graphics.TypefaceCompatApi26Impl.obtainAddFontFromBufferMethod(TypefaceCompatApi26Impl.java:333)
+06-02 16:38:44.777 D/StrictMode( 9365): at androidx.core.graphics.TypefaceCompatApi26Impl.<init>(TypefaceCompatApi26Impl.java:86)
+06-02 16:38:44.777 D/StrictMode( 9365): at androidx.core.graphics.TypefaceCompatApi28Impl.<init>(TypefaceCompatApi28Impl.java:36)
+06-02 16:38:44.777 D/StrictMode( 9365): at androidx.core.graphics.TypefaceCompat.<clinit>(TypefaceCompat.java:51)
+06-02 16:38:44.777 D/StrictMode( 9365): at androidx.core.graphics.TypefaceCompat.create(TypefaceCompat.java:194)
+06-02 16:38:44.777 D/StrictMode( 9365): at androidx.appcompat.widget.AppCompatTextView.setTypeface(AppCompatTextView.java:708)
+06-02 16:38:44.777 D/StrictMode( 9365): at android.widget.TextView.resolveStyleAndSetTypeface(TextView.java:2037)
+06-02 16:38:44.777 D/StrictMode( 9365): at android.widget.TextView.setTypefaceFromAttrs(TextView.java:2008)
+06-02 16:38:44.777 D/StrictMode( 9365): at android.widget.TextView.applyTextAppearance(TextView.java:3640)
+06-02 16:38:44.777 D/StrictMode( 9365): at android.widget.TextView.<init>(TextView.java:1498)
+06-02 16:38:44.777 D/StrictMode( 9365): at android.widget.TextView.<init>(TextView.java:869)
+06-02 16:38:44.777 D/StrictMode( 9365): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:100)
+06-02 16:38:44.777 D/StrictMode( 9365): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:95)
+06-02 16:38:44.777 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatViewInflater.createTextView(AppCompatViewInflater.java:194)
+06-02 16:38:44.777 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatViewInflater.createView(AppCompatViewInflater.java:115)
+06-02 16:38:44.777 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatDelegateImpl.createView(AppCompatDelegateImpl.java:1548)
+06-02 16:38:44.777 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatDelegateImpl.onCreateView(AppCompatDelegateImpl.java:1599)
+06-02 16:38:44.777 D/StrictMode( 9365): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:772)
+06-02 16:38:44.777 D/StrictMode( 9365): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:44.777 D/StrictMode( 9365): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:44.777 D/StrictMode( 9365): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:44.777 D/StrictMode( 9365): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:44.777 D/StrictMode( 9365): at android.view.LayoutInflater.inflate(LayoutInflater.java:489)
+06-02 16:38:44.777 D/StrictMode( 9365): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:44.777 D/StrictMode( 9365): at android.view.LayoutInflater.inflate(LayoutInflater.java:374)
+06-02 16:38:44.777 D/StrictMode( 9365): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:30)
+06-02 16:38:44.777 D/StrictMode( 9365): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:22)
+06-02 16:38:44.777 D/StrictMode( 9365): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(Unknown Source:6)
+06-02 16:38:44.777 D/StrictMode( 9365): at java.lang.reflect.Constructor.newInstance0(Native Method)
+06-02 16:38:44.777 D/StrictMode( 9365): at java.lang.reflect.Constructor.newInstance(Constructor.java:343)
+06-02 16:38:44.777 D/StrictMode( 9365): at android.view.LayoutInflater.createView(LayoutInflater.java:647)
+06-02 16:38:44.777 D/StrictMode( 9365): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:790)
+06-02 16:38:44.777 D/StrictMode( 9365): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:44.777 D/StrictMode( 9365): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:44.777 D/StrictMode( 9365): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:44.777 D/StrictMode( 9365): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:44.777 D/StrictMode( 9365): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:44.777 D/StrictMode( 9365): at android.view.LayoutInflater.inflate(LayoutInflater.java:515)
+06-02 16:38:44.777 D/StrictMode( 9365): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:44.777 D/StrictMode( 9365): at org.mozilla.fenix.home.HomeFragment.onCreateView(HomeFragment.kt:183)
+06-02 16:38:44.777 D/StrictMode( 9365): at androidx.fragment.app.Fragment.performCreateView(Fragment.java:2698)
+06-02 16:38:44.777 D/StrictMode( 9365): at androidx.fragment.app.FragmentStateManager.createView(FragmentStateManager.java:320)
+06-02 16:38:44.777 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1187)
+06-02 16:38:44.777 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.addAddedFragments(FragmentManager.java:2224)
+06-02 16:38:44.777 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.executeOpsTogether(FragmentManager.java:1997)
+06-02 16:38:44.777 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.removeRedundantOperati
+06-02 16:38:44.777 W/lla.fenix.debu( 9365): Accessing hidden method Landroid/graphics/FontFamily;->freeze()Z (light greylist, reflection)
+06-02 16:38:44.780 D/StrictMode( 9365): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/graphics/FontFamily;->freeze()Z
+06-02 16:38:44.780 D/StrictMode( 9365): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:44.780 D/StrictMode( 9365): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:44.780 D/StrictMode( 9365): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:44.780 D/StrictMode( 9365): at java.lang.Class.getPublicMethodRecursive(Class.java:2075)
+06-02 16:38:44.780 D/StrictMode( 9365): at java.lang.Class.getMethod(Class.java:2063)
+06-02 16:38:44.780 D/StrictMode( 9365): at java.lang.Class.getMethod(Class.java:1690)
+06-02 16:38:44.780 D/StrictMode( 9365): at androidx.core.graphics.TypefaceCompatApi26Impl.obtainFreezeMethod(TypefaceCompatApi26Impl.java:339)
+06-02 16:38:44.780 D/StrictMode( 9365): at androidx.core.graphics.TypefaceCompatApi26Impl.<init>(TypefaceCompatApi26Impl.java:87)
+06-02 16:38:44.780 D/StrictMode( 9365): at androidx.core.graphics.TypefaceCompatApi28Impl.<init>(TypefaceCompatApi28Impl.java:36)
+06-02 16:38:44.780 D/StrictMode( 9365): at androidx.core.graphics.TypefaceCompat.<clinit>(TypefaceCompat.java:51)
+06-02 16:38:44.780 D/StrictMode( 9365): at androidx.core.graphics.TypefaceCompat.create(TypefaceCompat.java:194)
+06-02 16:38:44.780 D/StrictMode( 9365): at androidx.appcompat.widget.AppCompatTextView.setTypeface(AppCompatTextView.java:708)
+06-02 16:38:44.780 D/StrictMode( 9365): at android.widget.TextView.resolveStyleAndSetTypeface(TextView.java:2037)
+06-02 16:38:44.780 D/StrictMode( 9365): at android.widget.TextView.setTypefaceFromAttrs(TextView.java:2008)
+06-02 16:38:44.780 D/StrictMode( 9365): at android.widget.TextView.applyTextAppearance(TextView.java:3640)
+06-02 16:38:44.780 D/StrictMode( 9365): at android.widget.TextView.<init>(TextView.java:1498)
+06-02 16:38:44.780 D/StrictMode( 9365): at android.widget.TextView.<init>(TextView.java:869)
+06-02 16:38:44.780 D/StrictMode( 9365): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:100)
+06-02 16:38:44.780 D/StrictMode( 9365): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:95)
+06-02 16:38:44.780 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatViewInflater.createTextView(AppCompatViewInflater.java:194)
+06-02 16:38:44.780 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatViewInflater.createView(AppCompatViewInflater.java:115)
+06-02 16:38:44.780 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatDelegateImpl.createView(AppCompatDelegateImpl.java:1548)
+06-02 16:38:44.780 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatDelegateImpl.onCreateView(AppCompatDelegateImpl.java:1599)
+06-02 16:38:44.780 D/StrictMode( 9365): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:772)
+06-02 16:38:44.780 D/StrictMode( 9365): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:44.780 D/StrictMode( 9365): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:44.780 D/StrictMode( 9365): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:44.780 D/StrictMode( 9365): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:44.780 D/StrictMode( 9365): at android.view.LayoutInflater.inflate(LayoutInflater.java:489)
+06-02 16:38:44.780 D/StrictMode( 9365): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:44.780 D/StrictMode( 9365): at android.view.LayoutInflater.inflate(LayoutInflater.java:374)
+06-02 16:38:44.780 D/StrictMode( 9365): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:30)
+06-02 16:38:44.780 D/StrictMode( 9365): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:22)
+06-02 16:38:44.780 D/StrictMode( 9365): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(Unknown Source:6)
+06-02 16:38:44.780 D/StrictMode( 9365): at java.lang.reflect.Constructor.newInstance0(Native Method)
+06-02 16:38:44.780 D/StrictMode( 9365): at java.lang.reflect.Constructor.newInstance(Constructor.java:343)
+06-02 16:38:44.780 D/StrictMode( 9365): at android.view.LayoutInflater.createView(LayoutInflater.java:647)
+06-02 16:38:44.780 D/StrictMode( 9365): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:790)
+06-02 16:38:44.780 D/StrictMode( 9365): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:44.780 D/StrictMode( 9365): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:44.780 D/StrictMode( 9365): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:44.780 D/StrictMode( 9365): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:44.780 D/StrictMode( 9365): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:44.780 D/StrictMode( 9365): at android.view.LayoutInflater.inflate(LayoutInflater.java:515)
+06-02 16:38:44.780 D/StrictMode( 9365): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:44.780 D/StrictMode( 9365): at org.mozilla.fenix.home.HomeFragment.onCreateView(HomeFragment.kt:183)
+06-02 16:38:44.780 D/StrictMode( 9365): at androidx.fragment.app.Fragment.performCreateView(Fragment.java:2698)
+06-02 16:38:44.780 D/StrictMode( 9365): at androidx.fragment.app.FragmentStateManager.createView(FragmentStateManager.java:320)
+06-02 16:38:44.780 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1187)
+06-02 16:38:44.780 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.addAddedFragments(FragmentManager.java:2224)
+06-02 16:38:44.780 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.executeOpsTogether(FragmentManager.java:1997)
+06-02 16:38:44.780 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.removeRedundantOperationsAndExecute(FragmentManager.java:1953)
+06-02 16:38:44.780 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.execPe
+06-02 16:38:44.780 W/lla.fenix.debu( 9365): Accessing hidden method Landroid/graphics/FontFamily;->abortCreation()V (light greylist, reflection)
+06-02 16:38:44.781 D/StrictMode( 9365): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/graphics/FontFamily;->abortCreation()V
+06-02 16:38:44.781 D/StrictMode( 9365): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:44.781 D/StrictMode( 9365): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:44.781 D/StrictMode( 9365): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:44.781 D/StrictMode( 9365): at java.lang.Class.getPublicMethodRecursive(Class.java:2075)
+06-02 16:38:44.781 D/StrictMode( 9365): at java.lang.Class.getMethod(Class.java:2063)
+06-02 16:38:44.781 D/StrictMode( 9365): at java.lang.Class.getMethod(Class.java:1690)
+06-02 16:38:44.781 D/StrictMode( 9365): at androidx.core.graphics.TypefaceCompatApi26Impl.obtainAbortCreationMethod(TypefaceCompatApi26Impl.java:343)
+06-02 16:38:44.781 D/StrictMode( 9365): at androidx.core.graphics.TypefaceCompatApi26Impl.<init>(TypefaceCompatApi26Impl.java:88)
+06-02 16:38:44.781 D/StrictMode( 9365): at androidx.core.graphics.TypefaceCompatApi28Impl.<init>(TypefaceCompatApi28Impl.java:36)
+06-02 16:38:44.781 D/StrictMode( 9365): at androidx.core.graphics.TypefaceCompat.<clinit>(TypefaceCompat.java:51)
+06-02 16:38:44.781 D/StrictMode( 9365): at androidx.core.graphics.TypefaceCompat.create(TypefaceCompat.java:194)
+06-02 16:38:44.781 D/StrictMode( 9365): at androidx.appcompat.widget.AppCompatTextView.setTypeface(AppCompatTextView.java:708)
+06-02 16:38:44.781 D/StrictMode( 9365): at android.widget.TextView.resolveStyleAndSetTypeface(TextView.java:2037)
+06-02 16:38:44.781 D/StrictMode( 9365): at android.widget.TextView.setTypefaceFromAttrs(TextView.java:2008)
+06-02 16:38:44.781 D/StrictMode( 9365): at android.widget.TextView.applyTextAppearance(TextView.java:3640)
+06-02 16:38:44.781 D/StrictMode( 9365): at android.widget.TextView.<init>(TextView.java:1498)
+06-02 16:38:44.781 D/StrictMode( 9365): at android.widget.TextView.<init>(TextView.java:869)
+06-02 16:38:44.781 D/StrictMode( 9365): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:100)
+06-02 16:38:44.781 D/StrictMode( 9365): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:95)
+06-02 16:38:44.781 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatViewInflater.createTextView(AppCompatViewInflater.java:194)
+06-02 16:38:44.781 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatViewInflater.createView(AppCompatViewInflater.java:115)
+06-02 16:38:44.781 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatDelegateImpl.createView(AppCompatDelegateImpl.java:1548)
+06-02 16:38:44.781 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatDelegateImpl.onCreateView(AppCompatDelegateImpl.java:1599)
+06-02 16:38:44.781 D/StrictMode( 9365): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:772)
+06-02 16:38:44.781 D/StrictMode( 9365): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:44.781 D/StrictMode( 9365): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:44.781 D/StrictMode( 9365): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:44.781 D/StrictMode( 9365): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:44.781 D/StrictMode( 9365): at android.view.LayoutInflater.inflate(LayoutInflater.java:489)
+06-02 16:38:44.781 D/StrictMode( 9365): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:44.781 D/StrictMode( 9365): at android.view.LayoutInflater.inflate(LayoutInflater.java:374)
+06-02 16:38:44.781 D/StrictMode( 9365): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:30)
+06-02 16:38:44.781 D/StrictMode( 9365): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:22)
+06-02 16:38:44.781 D/StrictMode( 9365): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(Unknown Source:6)
+06-02 16:38:44.781 D/StrictMode( 9365): at java.lang.reflect.Constructor.newInstance0(Native Method)
+06-02 16:38:44.781 D/StrictMode( 9365): at java.lang.reflect.Constructor.newInstance(Constructor.java:343)
+06-02 16:38:44.781 D/StrictMode( 9365): at android.view.LayoutInflater.createView(LayoutInflater.java:647)
+06-02 16:38:44.781 D/StrictMode( 9365): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:790)
+06-02 16:38:44.781 D/StrictMode( 9365): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:44.781 D/StrictMode( 9365): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:44.781 D/StrictMode( 9365): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:44.781 D/StrictMode( 9365): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:44.781 D/StrictMode( 9365): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:44.781 D/StrictMode( 9365): at android.view.LayoutInflater.inflate(LayoutInflater.java:515)
+06-02 16:38:44.781 D/StrictMode( 9365): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:44.781 D/StrictMode( 9365): at org.mozilla.fenix.home.HomeFragment.onCreateView(HomeFragment.kt:183)
+06-02 16:38:44.781 D/StrictMode( 9365): at androidx.fragment.app.Fragment.performCreateView(Fragment.java:2698)
+06-02 16:38:44.781 D/StrictMode( 9365): at androidx.fragment.app.FragmentStateManager.createView(FragmentStateManager.java:320)
+06-02 16:38:44.781 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1187)
+06-02 16:38:44.781 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.addAddedFragments(FragmentManager.java:2224)
+06-02 16:38:44.781 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.executeOpsTogether(FragmentManager.java:1997)
+06-02 16:38:44.781 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.removeRedundantOperationsAndExecute(FragmentManager.java:1953)
+06-02 16:38:44.781 D/StrictMode( 9365): at androidx.fragment.app.Fragment
+06-02 16:38:44.781 W/lla.fenix.debu( 9365): Accessing hidden method Landroid/graphics/Typeface;->createFromFamiliesWithDefault([Landroid/graphics/FontFamily;Ljava/lang/String;II)Landroid/graphics/Typeface; (light greylist, reflection)
+06-02 16:38:44.788 D/StrictMode( 9365): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/graphics/Typeface;->createFromFamiliesWithDefault([Landroid/graphics/FontFamily;Ljava/lang/String;II)Landroid/graphics/Typeface;
+06-02 16:38:44.788 D/StrictMode( 9365): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:44.788 D/StrictMode( 9365): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:44.788 D/StrictMode( 9365): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:44.788 D/StrictMode( 9365): at java.lang.Class.getMethod(Class.java:2064)
+06-02 16:38:44.788 D/StrictMode( 9365): at java.lang.Class.getDeclaredMethod(Class.java:2047)
+06-02 16:38:44.788 D/StrictMode( 9365): at androidx.core.graphics.TypefaceCompatApi28Impl.obtainCreateFromFamiliesWithDefaultMethod(TypefaceCompatApi28Impl.java:62)
+06-02 16:38:44.788 D/StrictMode( 9365): at androidx.core.graphics.TypefaceCompatApi26Impl.<init>(TypefaceCompatApi26Impl.java:89)
+06-02 16:38:44.788 D/StrictMode( 9365): at androidx.core.graphics.TypefaceCompatApi28Impl.<init>(TypefaceCompatApi28Impl.java:36)
+06-02 16:38:44.788 D/StrictMode( 9365): at androidx.core.graphics.TypefaceCompat.<clinit>(TypefaceCompat.java:51)
+06-02 16:38:44.788 D/StrictMode( 9365): at androidx.core.graphics.TypefaceCompat.create(TypefaceCompat.java:194)
+06-02 16:38:44.788 D/StrictMode( 9365): at androidx.appcompat.widget.AppCompatTextView.setTypeface(AppCompatTextView.java:708)
+06-02 16:38:44.788 D/StrictMode( 9365): at android.widget.TextView.resolveStyleAndSetTypeface(TextView.java:2037)
+06-02 16:38:44.788 D/StrictMode( 9365): at android.widget.TextView.setTypefaceFromAttrs(TextView.java:2008)
+06-02 16:38:44.788 D/StrictMode( 9365): at android.widget.TextView.applyTextAppearance(TextView.java:3640)
+06-02 16:38:44.788 D/StrictMode( 9365): at android.widget.TextView.<init>(TextView.java:1498)
+06-02 16:38:44.788 D/StrictMode( 9365): at android.widget.TextView.<init>(TextView.java:869)
+06-02 16:38:44.788 D/StrictMode( 9365): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:100)
+06-02 16:38:44.788 D/StrictMode( 9365): at androidx.appcompat.widget.AppCompatTextView.<init>(AppCompatTextView.java:95)
+06-02 16:38:44.788 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatViewInflater.createTextView(AppCompatViewInflater.java:194)
+06-02 16:38:44.788 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatViewInflater.createView(AppCompatViewInflater.java:115)
+06-02 16:38:44.788 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatDelegateImpl.createView(AppCompatDelegateImpl.java:1548)
+06-02 16:38:44.788 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatDelegateImpl.onCreateView(AppCompatDelegateImpl.java:1599)
+06-02 16:38:44.788 D/StrictMode( 9365): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:772)
+06-02 16:38:44.788 D/StrictMode( 9365): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:44.788 D/StrictMode( 9365): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:44.788 D/StrictMode( 9365): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:44.788 D/StrictMode( 9365): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:44.788 D/StrictMode( 9365): at android.view.LayoutInflater.inflate(LayoutInflater.java:489)
+06-02 16:38:44.788 D/StrictMode( 9365): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:44.788 D/StrictMode( 9365): at android.view.LayoutInflater.inflate(LayoutInflater.java:374)
+06-02 16:38:44.788 D/StrictMode( 9365): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:30)
+06-02 16:38:44.788 D/StrictMode( 9365): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(TabCounter.kt:22)
+06-02 16:38:44.788 D/StrictMode( 9365): at org.mozilla.fenix.components.toolbar.TabCounter.<init>(Unknown Source:6)
+06-02 16:38:44.788 D/StrictMode( 9365): at java.lang.reflect.Constructor.newInstance0(Native Method)
+06-02 16:38:44.788 D/StrictMode( 9365): at java.lang.reflect.Constructor.newInstance(Constructor.java:343)
+06-02 16:38:44.788 D/StrictMode( 9365): at android.view.LayoutInflater.createView(LayoutInflater.java:647)
+06-02 16:38:44.788 D/StrictMode( 9365): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:790)
+06-02 16:38:44.788 D/StrictMode( 9365): at android.view.LayoutInflater.createViewFromTag(LayoutInflater.java:730)
+06-02 16:38:44.788 D/StrictMode( 9365): at android.view.LayoutInflater.rInflate(LayoutInflater.java:863)
+06-02 16:38:44.788 D/StrictMode( 9365): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:44.788 D/StrictMode( 9365): at android.view.LayoutInflater.rInflate(LayoutInflater.java:866)
+06-02 16:38:44.788 D/StrictMode( 9365): at android.view.LayoutInflater.rInflateChildren(LayoutInflater.java:824)
+06-02 16:38:44.788 D/StrictMode( 9365): at android.view.LayoutInflater.inflate(LayoutInflater.java:515)
+06-02 16:38:44.788 D/StrictMode( 9365): at android.view.LayoutInflater.inflate(LayoutInflater.java:423)
+06-02 16:38:44.788 D/StrictMode( 9365): at org.mozilla.fenix.home.HomeFragment.onCreateView(HomeFragment.kt:183)
+06-02 16:38:44.788 D/StrictMode( 9365): at androidx.fragment.app.Fragment.performCreateView(Fragment.java:2698)
+06-02 16:38:44.788 D/StrictMode( 9365): at androidx.fragment.app.FragmentStateManager.createView(FragmentStateManager.java:320)
+06-02 16:38:44.788 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1187)
+06-02 16:38:44.788 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.addAddedFragments(FragmentManager.java:2224)
+06-02 16:38:44.788 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.executeOpsTogether(FragmentManager.java:1997)
+06-02 16:38:44.788 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.removeRedundantOperationsAndExecute(FragmentMa
+06-02 16:38:44.960 D/GeckoRuntime( 9365): Lifecycle: onStart
+06-02 16:38:44.963 D/GeckoRuntime( 9365): Lifecycle: onResume
+06-02 16:38:44.966 D/GeckoNetworkManager( 9365): Incoming event start for state OffNoListeners -> OnNoListeners
+06-02 16:38:44.968 D/GeckoNetworkManager( 9365): New network state: UP, WIFI, WIFI
+06-02 16:38:44.972 D/OpenGLRenderer( 9365): Skia GL Pipeline
+06-02 16:38:44.987 E/SurfaceFlinger( 1728): ro.sf.lcd_density must be defined as a build property
+06-02 16:38:44.998 D/StrictMode( 9365): StrictMode policy violation; ~duration=528 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:44.998 D/StrictMode( 9365): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:44.998 D/StrictMode( 9365): at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+06-02 16:38:44.998 D/StrictMode( 9365): at java.io.File.exists(File.java:815)
+06-02 16:38:44.998 D/StrictMode( 9365): at android.app.ContextImpl.getDataDir(ContextImpl.java:2253)
+06-02 16:38:44.998 D/StrictMode( 9365): at android.app.ContextImpl.getPreferencesDir(ContextImpl.java:550)
+06-02 16:38:44.998 D/StrictMode( 9365): at android.app.ContextImpl.getSharedPreferencesPath(ContextImpl.java:747)
+06-02 16:38:44.998 D/StrictMode( 9365): at android.app.ContextImpl.getSharedPreferences(ContextImpl.java:400)
+06-02 16:38:44.998 D/StrictMode( 9365): at mozilla.components.support.locale.LocaleManager$Storage.getSharedPreferences(LocaleManager.kt:123)
+06-02 16:38:44.998 D/StrictMode( 9365): at mozilla.components.support.locale.LocaleManager$Storage.getLocale(LocaleManager.kt:99)
+06-02 16:38:44.998 D/StrictMode( 9365): at mozilla.components.support.locale.LocaleManager.getCurrentLocale(LocaleManager.kt:42)
+06-02 16:38:44.998 D/StrictMode( 9365): at mozilla.components.support.locale.LocaleManager.updateResources$support_locale_release(LocaleManager.kt:72)
+06-02 16:38:44.998 D/StrictMode( 9365): at mozilla.components.support.locale.LocaleAwareAppCompatActivity.attachBaseContext(LocaleAwareAppCompatActivity.kt:18)
+06-02 16:38:44.998 D/StrictMode( 9365): at android.app.Activity.attach(Activity.java:7051)
+06-02 16:38:44.998 D/StrictMode( 9365): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2873)
+06-02 16:38:44.998 D/StrictMode( 9365): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3048)
+06-02 16:38:44.998 D/StrictMode( 9365): at android.app.servertransaction.LaunchActivityItem.execute(LaunchActivityItem.java:78)
+06-02 16:38:44.998 D/StrictMode( 9365): at android.app.servertransaction.TransactionExecutor.executeCallbacks(TransactionExecutor.java:108)
+06-02 16:38:44.998 D/StrictMode( 9365): at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:68)
+06-02 16:38:44.998 D/StrictMode( 9365): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1808)
+06-02 16:38:44.998 D/StrictMode( 9365): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:44.998 D/StrictMode( 9365): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:44.998 D/StrictMode( 9365): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:44.998 D/StrictMode( 9365): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:44.998 D/StrictMode( 9365): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:44.998 D/StrictMode( 9365): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:45.001 D/StrictMode( 9365): StrictMode policy violation; ~duration=528 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:45.001 D/StrictMode( 9365): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:45.001 D/StrictMode( 9365): at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+06-02 16:38:45.001 D/StrictMode( 9365): at java.io.File.exists(File.java:815)
+06-02 16:38:45.001 D/StrictMode( 9365): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:605)
+06-02 16:38:45.001 D/StrictMode( 9365): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:596)
+06-02 16:38:45.001 D/StrictMode( 9365): at android.app.ContextImpl.getPreferencesDir(ContextImpl.java:552)
+06-02 16:38:45.001 D/StrictMode( 9365): at android.app.ContextImpl.getSharedPreferencesPath(ContextImpl.java:747)
+06-02 16:38:45.001 D/StrictMode( 9365): at android.app.ContextImpl.getSharedPreferences(ContextImpl.java:400)
+06-02 16:38:45.001 D/StrictMode( 9365): at mozilla.components.support.locale.LocaleManager$Storage.getSharedPreferences(LocaleManager.kt:123)
+06-02 16:38:45.001 D/StrictMode( 9365): at mozilla.components.support.locale.LocaleManager$Storage.getLocale(LocaleManager.kt:99)
+06-02 16:38:45.001 D/StrictMode( 9365): at mozilla.components.support.locale.LocaleManager.getCurrentLocale(LocaleManager.kt:42)
+06-02 16:38:45.001 D/StrictMode( 9365): at mozilla.components.support.locale.LocaleManager.updateResources$support_locale_release(LocaleManager.kt:72)
+06-02 16:38:45.001 D/StrictMode( 9365): at mozilla.components.support.locale.LocaleAwareAppCompatActivity.attachBaseContext(LocaleAwareAppCompatActivity.kt:18)
+06-02 16:38:45.001 D/StrictMode( 9365): at android.app.Activity.attach(Activity.java:7051)
+06-02 16:38:45.001 D/StrictMode( 9365): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2873)
+06-02 16:38:45.001 D/StrictMode( 9365): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3048)
+06-02 16:38:45.001 D/StrictMode( 9365): at android.app.servertransaction.LaunchActivityItem.execute(LaunchActivityItem.java:78)
+06-02 16:38:45.001 D/StrictMode( 9365): at android.app.servertransaction.TransactionExecutor.executeCallbacks(TransactionExecutor.java:108)
+06-02 16:38:45.001 D/StrictMode( 9365): at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:68)
+06-02 16:38:45.001 D/StrictMode( 9365): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1808)
+06-02 16:38:45.001 D/StrictMode( 9365): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:45.001 D/StrictMode( 9365): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:45.001 D/StrictMode( 9365): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:45.001 D/StrictMode( 9365): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:45.001 D/StrictMode( 9365): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:45.001 D/StrictMode( 9365): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:45.005 D/StrictMode( 9365): StrictMode policy violation; ~duration=360 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:45.005 D/StrictMode( 9365): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:45.005 D/StrictMode( 9365): at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+06-02 16:38:45.005 D/StrictMode( 9365): at java.io.File.exists(File.java:815)
+06-02 16:38:45.005 D/StrictMode( 9365): at android.app.ContextImpl.getDataDir(ContextImpl.java:2253)
+06-02 16:38:45.005 D/StrictMode( 9365): at android.app.ContextImpl.getPreferencesDir(ContextImpl.java:550)
+06-02 16:38:45.005 D/StrictMode( 9365): at android.app.ContextImpl.getSharedPreferencesPath(ContextImpl.java:747)
+06-02 16:38:45.005 D/StrictMode( 9365): at android.app.ContextImpl.getSharedPreferences(ContextImpl.java:400)
+06-02 16:38:45.005 D/StrictMode( 9365): at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+06-02 16:38:45.005 D/StrictMode( 9365): at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+06-02 16:38:45.005 D/StrictMode( 9365): at org.mozilla.fenix.onboarding.FenixOnboarding.<init>(FenixOnboarding.kt:15)
+06-02 16:38:45.005 D/StrictMode( 9365): at org.mozilla.fenix.perf.Performance.disableOnboarding(Performance.kt:72)
+06-02 16:38:45.005 D/StrictMode( 9365): at org.mozilla.fenix.perf.Performance.processIntentIfPerformanceTest(Performance.kt:32)
+06-02 16:38:45.005 D/StrictMode( 9365): at org.mozilla.fenix.HomeActivity.onCreate(HomeActivity.kt:145)
+06-02 16:38:45.005 D/StrictMode( 9365): at android.app.Activity.performCreate(Activity.java:7136)
+06-02 16:38:45.005 D/StrictMode( 9365): at android.app.Activity.performCreate(Activity.java:7127)
+06-02 16:38:45.005 D/StrictMode( 9365): at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1271)
+06-02 16:38:45.005 D/StrictMode( 9365): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2893)
+06-02 16:38:45.005 D/StrictMode( 9365): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3048)
+06-02 16:38:45.005 D/StrictMode( 9365): at android.app.servertransaction.LaunchActivityItem.execute(LaunchActivityItem.java:78)
+06-02 16:38:45.005 D/StrictMode( 9365): at android.app.servertransaction.TransactionExecutor.executeCallbacks(TransactionExecutor.java:108)
+06-02 16:38:45.005 D/StrictMode( 9365): at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:68)
+06-02 16:38:45.005 D/StrictMode( 9365): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1808)
+06-02 16:38:45.005 D/StrictMode( 9365): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:45.005 D/StrictMode( 9365): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:45.005 D/StrictMode( 9365): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:45.005 D/StrictMode( 9365): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:45.005 D/StrictMode( 9365): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:45.005 D/StrictMode( 9365): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:45.008 D/StrictMode( 9365): StrictMode policy violation; ~duration=360 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:45.008 D/StrictMode( 9365): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:45.008 D/StrictMode( 9365): at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+06-02 16:38:45.008 D/StrictMode( 9365): at java.io.File.exists(File.java:815)
+06-02 16:38:45.008 D/StrictMode( 9365): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:605)
+06-02 16:38:45.008 D/StrictMode( 9365): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:596)
+06-02 16:38:45.008 D/StrictMode( 9365): at android.app.ContextImpl.getPreferencesDir(ContextImpl.java:552)
+06-02 16:38:45.008 D/StrictMode( 9365): at android.app.ContextImpl.getSharedPreferencesPath(ContextImpl.java:747)
+06-02 16:38:45.008 D/StrictMode( 9365): at android.app.ContextImpl.getSharedPreferences(ContextImpl.java:400)
+06-02 16:38:45.008 D/StrictMode( 9365): at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+06-02 16:38:45.008 D/StrictMode( 9365): at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+06-02 16:38:45.008 D/StrictMode( 9365): at org.mozilla.fenix.onboarding.FenixOnboarding.<init>(FenixOnboarding.kt:15)
+06-02 16:38:45.008 D/StrictMode( 9365): at org.mozilla.fenix.perf.Performance.disableOnboarding(Performance.kt:72)
+06-02 16:38:45.008 D/StrictMode( 9365): at org.mozilla.fenix.perf.Performance.processIntentIfPerformanceTest(Performance.kt:32)
+06-02 16:38:45.008 D/StrictMode( 9365): at org.mozilla.fenix.HomeActivity.onCreate(HomeActivity.kt:145)
+06-02 16:38:45.008 D/StrictMode( 9365): at android.app.Activity.performCreate(Activity.java:7136)
+06-02 16:38:45.008 D/StrictMode( 9365): at android.app.Activity.performCreate(Activity.java:7127)
+06-02 16:38:45.008 D/StrictMode( 9365): at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1271)
+06-02 16:38:45.008 D/StrictMode( 9365): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2893)
+06-02 16:38:45.008 D/StrictMode( 9365): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3048)
+06-02 16:38:45.008 D/StrictMode( 9365): at android.app.servertransaction.LaunchActivityItem.execute(LaunchActivityItem.java:78)
+06-02 16:38:45.008 D/StrictMode( 9365): at android.app.servertransaction.TransactionExecutor.executeCallbacks(TransactionExecutor.java:108)
+06-02 16:38:45.008 D/StrictMode( 9365): at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:68)
+06-02 16:38:45.008 D/StrictMode( 9365): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1808)
+06-02 16:38:45.008 D/StrictMode( 9365): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:45.008 D/StrictMode( 9365): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:45.008 D/StrictMode( 9365): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:45.008 D/StrictMode( 9365): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:45.008 D/StrictMode( 9365): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:45.008 D/StrictMode( 9365): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:45.009 D/StrictMode( 9365): StrictMode policy violation; ~duration=359 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:45.009 D/StrictMode( 9365): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:45.009 D/StrictMode( 9365): at android.app.SharedPreferencesImpl.awaitLoadedLocked(SharedPreferencesImpl.java:256)
+06-02 16:38:45.009 D/StrictMode( 9365): at android.app.SharedPreferencesImpl.edit(SharedPreferencesImpl.java:349)
+06-02 16:38:45.009 D/StrictMode( 9365): at org.mozilla.fenix.onboarding.FenixOnboarding.setOnboardedVersion(FenixOnboarding.kt:42)
+06-02 16:38:45.009 D/StrictMode( 9365): at org.mozilla.fenix.onboarding.FenixOnboarding.finish(FenixOnboarding.kt:25)
+06-02 16:38:45.009 D/StrictMode( 9365): at org.mozilla.fenix.perf.Performance.disableOnboarding(Performance.kt:72)
+06-02 16:38:45.009 D/StrictMode( 9365): at org.mozilla.fenix.perf.Performance.processIntentIfPerformanceTest(Performance.kt:32)
+06-02 16:38:45.009 D/StrictMode( 9365): at org.mozilla.fenix.HomeActivity.onCreate(HomeActivity.kt:145)
+06-02 16:38:45.009 D/StrictMode( 9365): at android.app.Activity.performCreate(Activity.java:7136)
+06-02 16:38:45.009 D/StrictMode( 9365): at android.app.Activity.performCreate(Activity.java:7127)
+06-02 16:38:45.009 D/StrictMode( 9365): at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1271)
+06-02 16:38:45.009 D/StrictMode( 9365): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2893)
+06-02 16:38:45.009 D/StrictMode( 9365): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3048)
+06-02 16:38:45.009 D/StrictMode( 9365): at android.app.servertransaction.LaunchActivityItem.execute(LaunchActivityItem.java:78)
+06-02 16:38:45.009 D/StrictMode( 9365): at android.app.servertransaction.TransactionExecutor.executeCallbacks(TransactionExecutor.java:108)
+06-02 16:38:45.009 D/StrictMode( 9365): at android.app.servertransaction.TransactionExecutor.execute(TransactionExecutor.java:68)
+06-02 16:38:45.009 D/StrictMode( 9365): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1808)
+06-02 16:38:45.009 D/StrictMode( 9365): at android.os.Handler.dispatchMessage(Handler.java:106)
+06-02 16:38:45.009 D/StrictMode( 9365): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:45.009 D/StrictMode( 9365): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:45.009 D/StrictMode( 9365): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:45.009 D/StrictMode( 9365): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:45.009 D/StrictMode( 9365): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:45.017 D/StrictMode( 9365): StrictMode policy violation; ~duration=138 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:45.017 D/StrictMode( 9365): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:45.017 D/StrictMode( 9365): at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+06-02 16:38:45.017 D/StrictMode( 9365): at java.io.File.exists(File.java:815)
+06-02 16:38:45.017 D/StrictMode( 9365): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:605)
+06-02 16:38:45.017 D/StrictMode( 9365): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:596)
+06-02 16:38:45.017 D/StrictMode( 9365): at android.app.ContextImpl.getPreferencesDir(ContextImpl.java:552)
+06-02 16:38:45.017 D/StrictMode( 9365): at android.app.ContextImpl.getSharedPreferencesPath(ContextImpl.java:747)
+06-02 16:38:45.017 D/StrictMode( 9365): at android.app.ContextImpl.getSharedPreferences(ContextImpl.java:400)
+06-02 16:38:45.017 D/StrictMode( 9365): at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+06-02 16:38:45.017 D/StrictMode( 9365): at mozilla.components.support.locale.LocaleManager$Storage.getSharedPreferences(LocaleManager.kt:123)
+06-02 16:38:45.017 D/StrictMode( 9365): at mozilla.components.support.locale.LocaleManager$Storage.getLocale(LocaleManager.kt:99)
+06-02 16:38:45.017 D/StrictMode( 9365): at mozilla.components.support.locale.LocaleManager.getCurrentLocale(LocaleManager.kt:42)
+06-02 16:38:45.017 D/StrictMode( 9365): at org.mozilla.fenix.settings.advanced.LocaleManagerExtensionKt.getSelectedLocale(LocaleManagerExtension.kt:39)
+06-02 16:38:45.017 D/StrictMode( 9365): at org.mozilla.fenix.settings.advanced.LocaleManagerExtensionKt.getSelectedLocale$default(LocaleManagerExtension.kt:37)
+06-02 16:38:45.017 D/StrictMode( 9365): at org.mozilla.fenix.components.TopSiteStorage.addDefaultTopSites(TopSiteStorage.kt:57)
+06-02 16:38:45.017 D/StrictMode( 9365): at org.mozilla.fenix.components.TopSiteStorage.<init>(TopSiteStorage.kt:30)
+06-02 16:38:45.017 D/StrictMode( 9365): at org.mozilla.fenix.components.Core$topSiteStorage$2.invoke(Core.kt:216)
+06-02 16:38:45.017 D/StrictMode( 9365): at org.mozilla.fenix.components.Core$topSiteStorage$2.invoke(Core.kt:57)
+06-02 16:38:45.017 D/StrictMode( 9365): at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+06-02 16:38:45.017 D/StrictMode( 9365): at org.mozilla.fenix.components.Core.getTopSiteStorage(Unknown Source:8)
+06-02 16:38:45.017 D/StrictMode( 9365): at org.mozilla.fenix.home.HomeFragment$onCreateView$2.invoke(HomeFragment.kt:210)
+06-02 16:38:45.017 D/StrictMode( 9365): at org.mozilla.fenix.home.HomeFragment$onCreateView$2.invoke(HomeFragment.kt:114)
+06-02 16:38:45.017 D/StrictMode( 9365): at org.mozilla.fenix.components.StoreProviderFactory.create(StoreProvider.kt:42)
+06-02 16:38:45.017 D/StrictMode( 9365): at androidx.lifecycle.ViewModelProvider.get(ViewModelProvider.java:187)
+06-02 16:38:45.017 D/StrictMode( 9365): at androidx.lifecycle.ViewModelProvider.get(ViewModelProvider.java:150)
+06-02 16:38:45.017 D/StrictMode( 9365): at org.mozilla.fenix.components.StoreProvider$Companion.get(StoreProvider.kt:46)
+06-02 16:38:45.017 D/StrictMode( 9365): at org.mozilla.fenix.home.HomeFragment.onCreateView(HomeFragment.kt:203)
+06-02 16:38:45.017 D/StrictMode( 9365): at androidx.fragment.app.Fragment.performCreateView(Fragment.java:2698)
+06-02 16:38:45.017 D/StrictMode( 9365): at androidx.fragment.app.FragmentStateManager.createView(FragmentStateManager.java:320)
+06-02 16:38:45.017 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1187)
+06-02 16:38:45.017 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.addAddedFragments(FragmentManager.java:2224)
+06-02 16:38:45.017 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.executeOpsTogether(FragmentManager.java:1997)
+06-02 16:38:45.017 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.removeRedundantOperationsAndExecute(FragmentManager.java:1953)
+06-02 16:38:45.017 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.execPendingActions(FragmentManager.java:1849)
+06-02 16:38:45.017 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.dispatchStateChange(FragmentManager.java:2629)
+06-02 16:38:45.017 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.dispatchActivityCreated(FragmentManager.java:2577)
+06-02 16:38:45.017 D/StrictMode( 9365): at androidx.fragment.app.Fragment.performActivityCreated(Fragment.java:2722)
+06-02 16:38:45.017 D/StrictMode( 9365): at androidx.fragment.app.FragmentStateManager.activityCreated(FragmentStateManager.java:346)
+06-02 16:38:45.017 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1188)
+06-02 16:38:45.017 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1356)
+06-02 16:38:45.017 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.moveFragmentToExpectedState(FragmentManager.java:1434)
+06-02 16:38:45.017 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1497)
+06-02 16:38:45.017 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.dispatchStateChange(FragmentManager.java:2625)
+06-02 16:38:45.017 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.dispatchActivityCreated(FragmentManager.java:2577)
+06-02 16:38:45.017 D/StrictMode( 9365): at androidx.fragment.app.FragmentController.dispatchActivityCreated(FragmentController.java:247)
+06-02 16:38:45.017 D/StrictMode( 9365): at androidx.fragment.app.FragmentActivity.onStart(FragmentActivity.java:541)
+06-02 16:38:45.017 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatActivity.onStart(AppCompatActivity.java:210)
+06-02 16:38:45.017 D/StrictMode( 9365): at android.app.Instrumentation.callActivityOnStart(Instrumentation.java:1391)
+06-02 16:38:45.017 D/StrictMode( 9365):
+06-02 16:38:45.023 D/StrictMode( 9365): StrictMode policy violation; ~duration=66 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:45.023 D/StrictMode( 9365): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:45.023 D/StrictMode( 9365): at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+06-02 16:38:45.023 D/StrictMode( 9365): at java.io.File.exists(File.java:815)
+06-02 16:38:45.023 D/StrictMode( 9365): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:605)
+06-02 16:38:45.023 D/StrictMode( 9365): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:596)
+06-02 16:38:45.023 D/StrictMode( 9365): at android.app.ContextImpl.getPreferencesDir(ContextImpl.java:552)
+06-02 16:38:45.023 D/StrictMode( 9365): at android.app.ContextImpl.getSharedPreferencesPath(ContextImpl.java:747)
+06-02 16:38:45.023 D/StrictMode( 9365): at android.app.ContextImpl.getSharedPreferences(ContextImpl.java:400)
+06-02 16:38:45.023 D/StrictMode( 9365): at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+06-02 16:38:45.023 D/StrictMode( 9365): at org.mozilla.fenix.components.AccountAbnormalities.<init>(AccountAbnormalities.kt:78)
+06-02 16:38:45.023 D/StrictMode( 9365): at org.mozilla.fenix.components.AccountAbnormalities.<init>(AccountAbnormalities.kt:60)
+06-02 16:38:45.023 D/StrictMode( 9365): at org.mozilla.fenix.components.BackgroundServices.<init>(BackgroundServices.kt:103)
+06-02 16:38:45.023 D/StrictMode( 9365): at org.mozilla.fenix.components.Components$backgroundServices$2.invoke(Components.kt:34)
+06-02 16:38:45.023 D/StrictMode( 9365): at org.mozilla.fenix.components.Components$backgroundServices$2.invoke(Components.kt:32)
+06-02 16:38:45.023 D/StrictMode( 9365): at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+06-02 16:38:45.023 D/StrictMode( 9365): at org.mozilla.fenix.components.Components.getBackgroundServices(Unknown Source:7)
+06-02 16:38:45.023 D/StrictMode( 9365): at org.mozilla.fenix.home.HomeMenu$coreMenuItems$2.invoke(HomeMenu.kt:131)
+06-02 16:38:45.023 D/StrictMode( 9365): at org.mozilla.fenix.home.HomeMenu$coreMenuItems$2.invoke(HomeMenu.kt:31)
+06-02 16:38:45.023 D/StrictMode( 9365): at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+06-02 16:38:45.023 D/StrictMode( 9365): at org.mozilla.fenix.home.HomeMenu.getCoreMenuItems(Unknown Source:7)
+06-02 16:38:45.023 D/StrictMode( 9365): at org.mozilla.fenix.home.HomeMenu.<init>(HomeMenu.kt:170)
+06-02 16:38:45.023 D/StrictMode( 9365): at org.mozilla.fenix.home.HomeFragment.createHomeMenu(HomeFragment.kt:668)
+06-02 16:38:45.023 D/StrictMode( 9365): at org.mozilla.fenix.home.HomeFragment.onViewCreated(HomeFragment.kt:337)
+06-02 16:38:45.023 D/StrictMode( 9365): at androidx.fragment.app.FragmentStateManager.createView(FragmentStateManager.java:332)
+06-02 16:38:45.023 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1187)
+06-02 16:38:45.023 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.addAddedFragments(FragmentManager.java:2224)
+06-02 16:38:45.023 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.executeOpsTogether(FragmentManager.java:1997)
+06-02 16:38:45.023 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.removeRedundantOperationsAndExecute(FragmentManager.java:1953)
+06-02 16:38:45.023 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.execPendingActions(FragmentManager.java:1849)
+06-02 16:38:45.023 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.dispatchStateChange(FragmentManager.java:2629)
+06-02 16:38:45.023 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.dispatchActivityCreated(FragmentManager.java:2577)
+06-02 16:38:45.023 D/StrictMode( 9365): at androidx.fragment.app.Fragment.performActivityCreated(Fragment.java:2722)
+06-02 16:38:45.023 D/StrictMode( 9365): at androidx.fragment.app.FragmentStateManager.activityCreated(FragmentStateManager.java:346)
+06-02 16:38:45.023 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1188)
+06-02 16:38:45.023 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1356)
+06-02 16:38:45.023 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.moveFragmentToExpectedState(FragmentManager.java:1434)
+06-02 16:38:45.023 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1497)
+06-02 16:38:45.023 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.dispatchStateChange(FragmentManager.java:2625)
+06-02 16:38:45.023 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.dispatchActivityCreated(FragmentManager.java:2577)
+06-02 16:38:45.023 D/StrictMode( 9365): at androidx.fragment.app.FragmentController.dispatchActivityCreated(FragmentController.java:247)
+06-02 16:38:45.023 D/StrictMode( 9365): at androidx.fragment.app.FragmentActivity.onStart(FragmentActivity.java:541)
+06-02 16:38:45.023 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatActivity.onStart(AppCompatActivity.java:210)
+06-02 16:38:45.023 D/StrictMode( 9365): at android.app.Instrumentation.callActivityOnStart(Instrumentation.java:1391)
+06-02 16:38:45.023 D/StrictMode( 9365): at android.app.Activity.performStart(Activity.java:7157)
+06-02 16:38:45.023 D/StrictMode( 9365): at android.app.ActivityThread.handleStartActivity(ActivityThread.java:2937)
+06-02 16:38:45.023 D/StrictMode( 9365): at android.app.servertransaction.TransactionExecutor.performLifecycleSequence(TransactionExecutor.java:180)
+06-02 16:38:45.023 D/StrictMode( 9365): at android.app.servertransaction.TransactionExecutor.cycleToPath(TransactionExecutor.java:165)
+06-02 16:38:45.023 D/StrictMode( 9365): at android.app.servertransaction.TransactionExecutor.executeLifecycleState(TransactionExecutor.java:142)
+06-02 16:38:45.023 D/StrictMode( 9365): at android.app.servertransaction.TransactionExecutor.execute(Transac
+06-02 16:38:45.029 D/StrictMode( 9365): StrictMode policy violation; ~duration=48 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:45.029 D/StrictMode( 9365): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:45.029 D/StrictMode( 9365): at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+06-02 16:38:45.029 D/StrictMode( 9365): at java.io.File.exists(File.java:815)
+06-02 16:38:45.029 D/StrictMode( 9365): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:605)
+06-02 16:38:45.029 D/StrictMode( 9365): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:596)
+06-02 16:38:45.029 D/StrictMode( 9365): at android.app.ContextImpl.getPreferencesDir(ContextImpl.java:552)
+06-02 16:38:45.029 D/StrictMode( 9365): at android.app.ContextImpl.getSharedPreferencesPath(ContextImpl.java:747)
+06-02 16:38:45.029 D/StrictMode( 9365): at android.app.ContextImpl.getSharedPreferences(ContextImpl.java:400)
+06-02 16:38:45.029 D/StrictMode( 9365): at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+06-02 16:38:45.029 D/StrictMode( 9365): at android.content.ContextWrapper.getSharedPreferences(ContextWrapper.java:174)
+06-02 16:38:45.029 D/StrictMode( 9365): at android.preference.PreferenceManager.getDefaultSharedPreferences(PreferenceManager.java:526)
+06-02 16:38:45.029 D/StrictMode( 9365): at org.mozilla.fenix.whatsnew.SharedPreferenceWhatsNewStorage.<init>(WhatsNewStorage.kt:35)
+06-02 16:38:45.029 D/StrictMode( 9365): at org.mozilla.fenix.whatsnew.WhatsNew$Companion.shouldHighlightWhatsNew(WhatsNew.kt:71)
+06-02 16:38:45.029 D/StrictMode( 9365): at org.mozilla.fenix.home.HomeMenu$coreMenuItems$2$whatsNewItem$1.invoke(HomeMenu.kt:92)
+06-02 16:38:45.029 D/StrictMode( 9365): at org.mozilla.fenix.home.HomeMenu$coreMenuItems$2$whatsNewItem$1.invoke(HomeMenu.kt:31)
+06-02 16:38:45.029 D/StrictMode( 9365): at mozilla.components.browser.menu.ext.BrowserMenuItemKt$getHighlight$3.invoke(BrowserMenuItem.kt:18)
+06-02 16:38:45.029 D/StrictMode( 9365): at mozilla.components.browser.menu.ext.BrowserMenuItemKt$getHighlight$3.invoke(Unknown Source:2)
+06-02 16:38:45.029 D/StrictMode( 9365): at kotlin.sequences.FilteringSequence$iterator$1.calcNext(Sequences.kt:133)
+06-02 16:38:45.029 D/StrictMode( 9365): at kotlin.sequences.FilteringSequence$iterator$1.hasNext(Sequences.kt:156)
+06-02 16:38:45.029 D/StrictMode( 9365): at kotlin.sequences.TransformingSequence$iterator$1.hasNext(Sequences.kt:176)
+06-02 16:38:45.029 D/StrictMode( 9365): at kotlin.sequences.FilteringSequence$iterator$1.calcNext(Sequences.kt:131)
+06-02 16:38:45.029 D/StrictMode( 9365): at kotlin.sequences.FilteringSequence$iterator$1.hasNext(Sequences.kt:156)
+06-02 16:38:45.029 D/StrictMode( 9365): at mozilla.components.browser.menu.ext.BrowserMenuItemKt.getHighlight(BrowserMenuItem.kt:31)
+06-02 16:38:45.029 D/StrictMode( 9365): at org.mozilla.fenix.home.HomeMenu$coreMenuItems$2.invoke(HomeMenu.kt:149)
+06-02 16:38:45.029 D/StrictMode( 9365): at org.mozilla.fenix.home.HomeMenu$coreMenuItems$2.invoke(HomeMenu.kt:31)
+06-02 16:38:45.029 D/StrictMode( 9365): at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+06-02 16:38:45.029 D/StrictMode( 9365): at org.mozilla.fenix.home.HomeMenu.getCoreMenuItems(Unknown Source:7)
+06-02 16:38:45.029 D/StrictMode( 9365): at org.mozilla.fenix.home.HomeMenu.<init>(HomeMenu.kt:170)
+06-02 16:38:45.029 D/StrictMode( 9365): at org.mozilla.fenix.home.HomeFragment.createHomeMenu(HomeFragment.kt:668)
+06-02 16:38:45.029 D/StrictMode( 9365): at org.mozilla.fenix.home.HomeFragment.onViewCreated(HomeFragment.kt:337)
+06-02 16:38:45.029 D/StrictMode( 9365): at androidx.fragment.app.FragmentStateManager.createView(FragmentStateManager.java:332)
+06-02 16:38:45.029 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1187)
+06-02 16:38:45.029 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.addAddedFragments(FragmentManager.java:2224)
+06-02 16:38:45.029 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.executeOpsTogether(FragmentManager.java:1997)
+06-02 16:38:45.029 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.removeRedundantOperationsAndExecute(FragmentManager.java:1953)
+06-02 16:38:45.029 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.execPendingActions(FragmentManager.java:1849)
+06-02 16:38:45.029 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.dispatchStateChange(FragmentManager.java:2629)
+06-02 16:38:45.029 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.dispatchActivityCreated(FragmentManager.java:2577)
+06-02 16:38:45.029 D/StrictMode( 9365): at androidx.fragment.app.Fragment.performActivityCreated(Fragment.java:2722)
+06-02 16:38:45.029 D/StrictMode( 9365): at androidx.fragment.app.FragmentStateManager.activityCreated(FragmentStateManager.java:346)
+06-02 16:38:45.029 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1188)
+06-02 16:38:45.029 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1356)
+06-02 16:38:45.029 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.moveFragmentToExpectedState(FragmentManager.java:1434)
+06-02 16:38:45.029 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1497)
+06-02 16:38:45.029 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.dispatchStateChange(FragmentManager.java:2625)
+06-02 16:38:45.029 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.dispatchActivityCreated(FragmentManager.java:2577)
+06-02 16:38:45.029 D/StrictMode( 9365): at androidx.fragment.app.FragmentController.dispatchActivityCreated(FragmentController.java:247)
+06-02 16:38:45.029 D/StrictMode( 9365): at androidx.fragment.app.FragmentActivity.onStart(FragmentActivity.java:541)
+06-02 16:38:45.029 D/StrictMode( 9365): at androidx.appcompat.app.AppCompatAc
+06-02 16:38:45.030 D/LeakCanary( 9365): LeakCanary is running and ready to detect leaks
+06-02 16:38:45.055 I/libglean_ffi( 9365): glean_core::ping: Collecting baseline
+06-02 16:38:45.066 D/libglean_ffi( 9365): glean_core::ping: Storing ping 'f2c83a47-027f-4c73-8ff2-9bc7dc060222' at '/data/user/0/org.mozilla.fenix.debug/glean_data/pending_pings/f2c83a47-027f-4c73-8ff2-9bc7dc060222'
+06-02 16:38:45.066 I/libglean_ffi( 9365): glean_core: The ping 'baseline' was submitted and will be sent as soon as possible
+06-02 16:38:45.068 D/GeckoNetworkManager( 9365): Incoming event enableNotifications for state OnNoListeners -> OnWithListeners
+06-02 16:38:45.070 D/GeckoNetworkManager( 9365): New network state: UP, WIFI, WIFI
+06-02 16:38:45.071 W/ActivityManager( 1869): Receiver with filter android.content.IntentFilter@d026f98 already registered for pid 9365, callerPackage is org.mozilla.fenix.debug
+06-02 16:38:45.087 D/GeckoNetworkManager( 9365): Incoming event receivedUpdate for state OnWithListeners -> OnWithListeners
+06-02 16:38:45.090 D/GeckoNetworkManager( 9365): New network state: UP, WIFI, WIFI
+06-02 16:38:45.117 D/GeckoThread( 9365): State changed to PROFILE_READY
+06-02 16:38:45.186 D/GeckoViewStartup( 9365): observe: profile-after-change
+06-02 16:38:45.190 D/WIFI_UT ( 1869): got request NetworkRequest [ TRACK_DEFAULT id=35, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10099] ] with score 60
+06-02 16:38:45.190 D/WIFI ( 1869): got request NetworkRequest [ TRACK_DEFAULT id=35, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10099] ] with score 60
+06-02 16:38:45.190 D/PhoneSwitcherNetworkRequstListener( 2121): got request NetworkRequest [ TRACK_DEFAULT id=35, [ Capabilities: INTERNET&NOT_RESTRICTED&TRUSTED Unwanted: Uid: 10099] ] with score 60
+06-02 16:38:45.195 D/GeckoViewTelemetryController( 9365): setup - canRecordPrereleaseData true, canRecordReleaseData true
+06-02 16:38:45.244 D/GeckoThread( 9365): State changed to RUNNING
+06-02 16:38:45.246 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:45.252 D/ ( 1728): HostConnection::get() New Host Connection established 0xe90ddf00, tid 1756
+06-02 16:38:45.253 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:45.254 I/lla.fenix.debu( 9365): Background concurrent copying GC freed 21920(1766KB) AllocSpace objects, 40(1216KB) LOS objects, 49% free, 4MB/9MB, paused 1.515ms total 189.223ms
+06-02 16:38:45.261 D/gralloc_ranchu( 1619): gralloc_alloc: Creating ashmem region of size 9334784
+06-02 16:38:45.263 I/Gecko ( 9365): -*- nsDNSServiceDiscovery.js : nsDNSServiceDiscovery
+06-02 16:38:45.275 D/GeckoThread( 9414): State changed to LAUNCHED
+06-02 16:38:45.277 I/GeckoThread( 9414): preparing to run Gecko
+06-02 16:38:45.278 W/ctxmgr ( 2473): [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):IndoorOutdoorProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2473). Was: 3 for 57, account#-517948760#
+06-02 16:38:45.280 D/ ( 9365): HostConnection::get() New Host Connection established 0xe51aef80, tid 9452
+06-02 16:38:45.283 I/ConfigStore( 9365): android::hardware::configstore::V1_0::ISurfaceFlingerConfigs::hasWideColorDisplay retrieved: 0
+06-02 16:38:45.284 I/ConfigStore( 9365): android::hardware::configstore::V1_0::ISurfaceFlingerConfigs::hasHDRDisplay retrieved: 0
+06-02 16:38:45.284 I/OpenGLRenderer( 9365): Initialized EGL, version 1.4
+06-02 16:38:45.284 D/OpenGLRenderer( 9365): Swap behavior 1
+06-02 16:38:45.284 W/OpenGLRenderer( 9365): Failed to choose config with EGL_SWAP_BEHAVIOR_PRESERVED, retrying without...
+06-02 16:38:45.284 D/OpenGLRenderer( 9365): Swap behavior 0
+06-02 16:38:45.285 D/EGL_emulation( 9365): eglCreateContext: 0xe51a88e0: maj 3 min 0 rcv 3
+06-02 16:38:45.286 D/EGL_emulation( 9365): eglMakeCurrent: 0xe51a88e0: ver 3 0 (tinfo 0xe1995a60)
+06-02 16:38:45.288 E/SurfaceFlinger( 1728): ro.sf.lcd_density must be defined as a build property
+06-02 16:38:45.307 W/lla.fenix.debu( 9365): Accessing hidden field Landroid/os/Trace;->TRACE_TAG_APP:J (light greylist, reflection)
+06-02 16:38:45.312 D/StrictMode( 9365): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/os/Trace;->TRACE_TAG_APP:J
+06-02 16:38:45.312 D/StrictMode( 9365): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:45.312 D/StrictMode( 9365): at java.lang.Class.getPublicFieldRecursive(Native Method)
+06-02 16:38:45.312 D/StrictMode( 9365): at java.lang.Class.getField(Class.java:1599)
+06-02 16:38:45.312 D/StrictMode( 9365): at androidx.core.os.TraceCompat.<clinit>(TraceCompat.java:48)
+06-02 16:38:45.312 D/StrictMode( 9365): at androidx.core.os.TraceCompat.beginSection(TraceCompat.java:100)
+06-02 16:38:45.312 D/StrictMode( 9365): at androidx.recyclerview.widget.RecyclerView.onLayout(RecyclerView.java:4403)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.312 D/StrictMode( 9365): at com.google.android.material.appbar.HeaderScrollingViewBehavior.layoutChild(HeaderScrollingViewBehavior.java:148)
+06-02 16:38:45.312 D/StrictMode( 9365): at com.google.android.material.appbar.ViewOffsetBehavior.onLayoutChild(ViewOffsetBehavior.java:43)
+06-02 16:38:45.312 D/StrictMode( 9365): at com.google.android.material.appbar.AppBarLayout$ScrollingViewBehavior.onLayoutChild(AppBarLayout.java:1892)
+06-02 16:38:45.312 D/StrictMode( 9365): at androidx.coordinatorlayout.widget.CoordinatorLayout.onLayout(CoordinatorLayout.java:918)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:45.312 D/StrictMode( 9365): at com.android.internal.policy.DecorView.onLayout(DecorView.java:753)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.view.ViewRootImpl.performLayout(ViewRootImpl.java:2792)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.view.ViewRootImpl.performTraversals(ViewRootImpl.java:2319)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.view.ViewRootImpl.doTraversal(ViewRootImpl.java:1460)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.view.ViewRootImpl$TraversalRunnable.run(ViewRootImpl.java:7183)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.view.Choreographer$CallbackRecord.run(Choreographer.java:949)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.view.Choreographer.doCallbacks(Choreographer.java:761)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.view.Choreographer.doFrame(Choreographer.java:696)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.view.Choreographer$FrameDisplayEventReceiver.run(Choreographer.java:935)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.os.Handler.handleCallback(Handler.java:873)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.os.Handler.dispatchMessage(Handler.java:99)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:45.312 D/StrictMode( 9365): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:45.312 D/StrictMode( 9365): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:45.312 D/StrictMode( 9365): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:45.312 D/StrictMode( 9365): at com.and
+06-02 16:38:45.312 W/lla.fenix.debu( 9365): Accessing hidden method Landroid/os/Trace;->isTagEnabled(J)Z (light greylist, reflection)
+06-02 16:38:45.316 D/StrictMode( 9365): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/os/Trace;->isTagEnabled(J)Z
+06-02 16:38:45.316 D/StrictMode( 9365): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:45.316 D/StrictMode( 9365): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:45.316 D/StrictMode( 9365): at java.lang.Class.getPublicMethodRecursive(Class.java:2075)
+06-02 16:38:45.316 D/StrictMode( 9365): at java.lang.Class.getMethod(Class.java:2063)
+06-02 16:38:45.316 D/StrictMode( 9365): at java.lang.Class.getMethod(Class.java:1690)
+06-02 16:38:45.316 D/StrictMode( 9365): at androidx.core.os.TraceCompat.<clinit>(TraceCompat.java:51)
+06-02 16:38:45.316 D/StrictMode( 9365): at androidx.core.os.TraceCompat.beginSection(TraceCompat.java:100)
+06-02 16:38:45.316 D/StrictMode( 9365): at androidx.recyclerview.widget.RecyclerView.onLayout(RecyclerView.java:4403)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.316 D/StrictMode( 9365): at com.google.android.material.appbar.HeaderScrollingViewBehavior.layoutChild(HeaderScrollingViewBehavior.java:148)
+06-02 16:38:45.316 D/StrictMode( 9365): at com.google.android.material.appbar.ViewOffsetBehavior.onLayoutChild(ViewOffsetBehavior.java:43)
+06-02 16:38:45.316 D/StrictMode( 9365): at com.google.android.material.appbar.AppBarLayout$ScrollingViewBehavior.onLayoutChild(AppBarLayout.java:1892)
+06-02 16:38:45.316 D/StrictMode( 9365): at androidx.coordinatorlayout.widget.CoordinatorLayout.onLayout(CoordinatorLayout.java:918)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:45.316 D/StrictMode( 9365): at com.android.internal.policy.DecorView.onLayout(DecorView.java:753)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.view.ViewRootImpl.performLayout(ViewRootImpl.java:2792)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.view.ViewRootImpl.performTraversals(ViewRootImpl.java:2319)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.view.ViewRootImpl.doTraversal(ViewRootImpl.java:1460)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.view.ViewRootImpl$TraversalRunnable.run(ViewRootImpl.java:7183)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.view.Choreographer$CallbackRecord.run(Choreographer.java:949)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.view.Choreographer.doCallbacks(Choreographer.java:761)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.view.Choreographer.doFrame(Choreographer.java:696)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.view.Choreographer$FrameDisplayEventReceiver.run(Choreographer.java:935)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.os.Handler.handleCallback(Handler.java:873)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.os.Handler.dispatchMessage(Handler.java:99)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:45.316 D/StrictMode( 9365): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:45.316 D/StrictMode( 9365): at java.lang.reflect.Method.invoke
+06-02 16:38:45.316 W/lla.fenix.debu( 9365): Accessing hidden method Landroid/os/Trace;->asyncTraceBegin(JLjava/lang/String;I)V (light greylist, reflection)
+06-02 16:38:45.323 D/StrictMode( 9365): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/os/Trace;->asyncTraceBegin(JLjava/lang/String;I)V
+06-02 16:38:45.323 D/StrictMode( 9365): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:45.323 D/StrictMode( 9365): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:45.323 D/StrictMode( 9365): at java.lang.Class.getPublicMethodRecursive(Class.java:2075)
+06-02 16:38:45.323 D/StrictMode( 9365): at java.lang.Class.getMethod(Class.java:2063)
+06-02 16:38:45.323 D/StrictMode( 9365): at java.lang.Class.getMethod(Class.java:1690)
+06-02 16:38:45.323 D/StrictMode( 9365): at androidx.core.os.TraceCompat.<clinit>(TraceCompat.java:52)
+06-02 16:38:45.323 D/StrictMode( 9365): at androidx.core.os.TraceCompat.beginSection(TraceCompat.java:100)
+06-02 16:38:45.323 D/StrictMode( 9365): at androidx.recyclerview.widget.RecyclerView.onLayout(RecyclerView.java:4403)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.323 D/StrictMode( 9365): at com.google.android.material.appbar.HeaderScrollingViewBehavior.layoutChild(HeaderScrollingViewBehavior.java:148)
+06-02 16:38:45.323 D/StrictMode( 9365): at com.google.android.material.appbar.ViewOffsetBehavior.onLayoutChild(ViewOffsetBehavior.java:43)
+06-02 16:38:45.323 D/StrictMode( 9365): at com.google.android.material.appbar.AppBarLayout$ScrollingViewBehavior.onLayoutChild(AppBarLayout.java:1892)
+06-02 16:38:45.323 D/StrictMode( 9365): at androidx.coordinatorlayout.widget.CoordinatorLayout.onLayout(CoordinatorLayout.java:918)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:45.323 D/StrictMode( 9365): at com.android.internal.policy.DecorView.onLayout(DecorView.java:753)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.view.ViewRootImpl.performLayout(ViewRootImpl.java:2792)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.view.ViewRootImpl.performTraversals(ViewRootImpl.java:2319)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.view.ViewRootImpl.doTraversal(ViewRootImpl.java:1460)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.view.ViewRootImpl$TraversalRunnable.run(ViewRootImpl.java:7183)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.view.Choreographer$CallbackRecord.run(Choreographer.java:949)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.view.Choreographer.doCallbacks(Choreographer.java:761)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.view.Choreographer.doFrame(Choreographer.java:696)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.view.Choreographer$FrameDisplayEventReceiver.run(Choreographer.java:935)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.os.Handler.handleCallback(Handler.java:873)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.os.Handler.dispatchMessage(Handler.java:99)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:45.323 D/StrictMode( 9365): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:45.323 D/StrictMode( 9365): at java.lang
+06-02 16:38:45.323 W/lla.fenix.debu( 9365): Accessing hidden method Landroid/os/Trace;->asyncTraceEnd(JLjava/lang/String;I)V (light greylist, reflection)
+06-02 16:38:45.326 D/StrictMode( 9365): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/os/Trace;->asyncTraceEnd(JLjava/lang/String;I)V
+06-02 16:38:45.326 D/StrictMode( 9365): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:45.326 D/StrictMode( 9365): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:45.326 D/StrictMode( 9365): at java.lang.Class.getPublicMethodRecursive(Class.java:2075)
+06-02 16:38:45.326 D/StrictMode( 9365): at java.lang.Class.getMethod(Class.java:2063)
+06-02 16:38:45.326 D/StrictMode( 9365): at java.lang.Class.getMethod(Class.java:1690)
+06-02 16:38:45.326 D/StrictMode( 9365): at androidx.core.os.TraceCompat.<clinit>(TraceCompat.java:54)
+06-02 16:38:45.326 D/StrictMode( 9365): at androidx.core.os.TraceCompat.beginSection(TraceCompat.java:100)
+06-02 16:38:45.326 D/StrictMode( 9365): at androidx.recyclerview.widget.RecyclerView.onLayout(RecyclerView.java:4403)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.326 D/StrictMode( 9365): at com.google.android.material.appbar.HeaderScrollingViewBehavior.layoutChild(HeaderScrollingViewBehavior.java:148)
+06-02 16:38:45.326 D/StrictMode( 9365): at com.google.android.material.appbar.ViewOffsetBehavior.onLayoutChild(ViewOffsetBehavior.java:43)
+06-02 16:38:45.326 D/StrictMode( 9365): at com.google.android.material.appbar.AppBarLayout$ScrollingViewBehavior.onLayoutChild(AppBarLayout.java:1892)
+06-02 16:38:45.326 D/StrictMode( 9365): at androidx.coordinatorlayout.widget.CoordinatorLayout.onLayout(CoordinatorLayout.java:918)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:45.326 D/StrictMode( 9365): at com.android.internal.policy.DecorView.onLayout(DecorView.java:753)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.view.ViewRootImpl.performLayout(ViewRootImpl.java:2792)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.view.ViewRootImpl.performTraversals(ViewRootImpl.java:2319)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.view.ViewRootImpl.doTraversal(ViewRootImpl.java:1460)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.view.ViewRootImpl$TraversalRunnable.run(ViewRootImpl.java:7183)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.view.Choreographer$CallbackRecord.run(Choreographer.java:949)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.view.Choreographer.doCallbacks(Choreographer.java:761)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.view.Choreographer.doFrame(Choreographer.java:696)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.view.Choreographer$FrameDisplayEventReceiver.run(Choreographer.java:935)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.os.Handler.handleCallback(Handler.java:873)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.os.Handler.dispatchMessage(Handler.java:99)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:45.326 D/StrictMode( 9365): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:45.326 D/StrictMode( 9365): at java.lang.r
+06-02 16:38:45.327 W/lla.fenix.debu( 9365): Accessing hidden method Landroid/os/Trace;->traceCounter(JLjava/lang/String;I)V (light greylist, reflection)
+06-02 16:38:45.330 D/StrictMode( 9365): StrictMode policy violation: android.os.strictmode.NonSdkApiUsedViolation: Landroid/os/Trace;->traceCounter(JLjava/lang/String;I)V
+06-02 16:38:45.330 D/StrictMode( 9365): at android.os.StrictMode.lambda$static$1(StrictMode.java:428)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.os.-$$Lambda$StrictMode$lu9ekkHJ2HMz0jd3F8K8MnhenxQ.accept(Unknown Source:2)
+06-02 16:38:45.330 D/StrictMode( 9365): at java.lang.Class.getDeclaredMethodInternal(Native Method)
+06-02 16:38:45.330 D/StrictMode( 9365): at java.lang.Class.getPublicMethodRecursive(Class.java:2075)
+06-02 16:38:45.330 D/StrictMode( 9365): at java.lang.Class.getMethod(Class.java:2063)
+06-02 16:38:45.330 D/StrictMode( 9365): at java.lang.Class.getMethod(Class.java:1690)
+06-02 16:38:45.330 D/StrictMode( 9365): at androidx.core.os.TraceCompat.<clinit>(TraceCompat.java:56)
+06-02 16:38:45.330 D/StrictMode( 9365): at androidx.core.os.TraceCompat.beginSection(TraceCompat.java:100)
+06-02 16:38:45.330 D/StrictMode( 9365): at androidx.recyclerview.widget.RecyclerView.onLayout(RecyclerView.java:4403)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.330 D/StrictMode( 9365): at com.google.android.material.appbar.HeaderScrollingViewBehavior.layoutChild(HeaderScrollingViewBehavior.java:148)
+06-02 16:38:45.330 D/StrictMode( 9365): at com.google.android.material.appbar.ViewOffsetBehavior.onLayoutChild(ViewOffsetBehavior.java:43)
+06-02 16:38:45.330 D/StrictMode( 9365): at com.google.android.material.appbar.AppBarLayout$ScrollingViewBehavior.onLayoutChild(AppBarLayout.java:1892)
+06-02 16:38:45.330 D/StrictMode( 9365): at androidx.coordinatorlayout.widget.CoordinatorLayout.onLayout(CoordinatorLayout.java:918)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.widget.LinearLayout.setChildFrame(LinearLayout.java:1812)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.widget.LinearLayout.layoutVertical(LinearLayout.java:1656)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.widget.LinearLayout.onLayout(LinearLayout.java:1565)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.widget.FrameLayout.layoutChildren(FrameLayout.java:323)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.widget.FrameLayout.onLayout(FrameLayout.java:261)
+06-02 16:38:45.330 D/StrictMode( 9365): at com.android.internal.policy.DecorView.onLayout(DecorView.java:753)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.view.View.layout(View.java:20672)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.view.ViewGroup.layout(ViewGroup.java:6194)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.view.ViewRootImpl.performLayout(ViewRootImpl.java:2792)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.view.ViewRootImpl.performTraversals(ViewRootImpl.java:2319)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.view.ViewRootImpl.doTraversal(ViewRootImpl.java:1460)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.view.ViewRootImpl$TraversalRunnable.run(ViewRootImpl.java:7183)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.view.Choreographer$CallbackRecord.run(Choreographer.java:949)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.view.Choreographer.doCallbacks(Choreographer.java:761)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.view.Choreographer.doFrame(Choreographer.java:696)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.view.Choreographer$FrameDisplayEventReceiver.run(Choreographer.java:935)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.os.Handler.handleCallback(Handler.java:873)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.os.Handler.dispatchMessage(Handler.java:99)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:45.330 D/StrictMode( 9365): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:45.330 D/StrictMode( 9365): at java.lang.re
+06-02 16:38:45.335 D/GeckoNetworkManager( 9365): Incoming event receivedUpdate for state OnWithListeners -> OnWithListeners
+06-02 16:38:45.336 D/GeckoNetworkManager( 9365): New network state: UP, WIFI, WIFI
+06-02 16:38:45.363 D/EGL_emulation( 9365): eglMakeCurrent: 0xe51a88e0: ver 3 0 (tinfo 0xe1995a60)
+06-02 16:38:45.406 D/glean/PingUploadWorker( 9365): Processing persisted pings at /data/user/0/org.mozilla.fenix.debug/glean_data/pending_pings
+06-02 16:38:45.406 D/glean/PingUploadWorker( 9365): Processing ping: f2c83a47-027f-4c73-8ff2-9bc7dc060222
+06-02 16:38:45.413 I/ActivityManager( 1869): Displayed org.mozilla.fenix.debug/.App: +2s34ms
+06-02 16:38:45.414 I/GoogleInputMethod( 1996): onFinishInput() : Dummy InputConnection bound
+06-02 16:38:45.414 I/GoogleInputMethod( 1996): onStartInput() : Dummy InputConnection bound
+06-02 16:38:45.418 D/glean/ConceptFetchHttpUploader( 9365): Submitting ping to: https://incoming.telemetry.mozilla.org/submit/org-mozilla-fenix-debug/baseline/1/f2c83a47-027f-4c73-8ff2-9bc7dc060222
+06-02 16:38:45.426 E/adbd ( 4408): failed to connect to socket 'tcp:2829': Connection refused
+06-02 16:38:45.443 D/GeckoViewStartup( 9365): onEvent GeckoView:SetLocale
+06-02 16:38:45.444 D/GeckoViewStartup( 9365): onEvent GeckoView:ResetUserPrefs
+06-02 16:38:45.459 D/GeckoViewRemoteDebugger( 9365): onInit
+06-02 16:38:45.459 D/GeckoViewConsole( 9365): enabled = false
+06-02 16:38:45.475 D/StrictMode( 9365): StrictMode policy violation; ~duration=23 ms: android.os.strictmode.DiskReadViolation
+06-02 16:38:45.475 D/StrictMode( 9365): at android.os.StrictMode$AndroidBlockGuardPolicy.onReadFromDisk(StrictMode.java:1504)
+06-02 16:38:45.475 D/StrictMode( 9365): at java.io.UnixFileSystem.checkAccess(UnixFileSystem.java:251)
+06-02 16:38:45.475 D/StrictMode( 9365): at java.io.File.exists(File.java:815)
+06-02 16:38:45.475 D/StrictMode( 9365): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:605)
+06-02 16:38:45.475 D/StrictMode( 9365): at android.app.ContextImpl.ensurePrivateDirExists(ContextImpl.java:596)
+06-02 16:38:45.475 D/StrictMode( 9365): at android.app.ContextImpl.getFilesDir(ContextImpl.java:641)
+06-02 16:38:45.475 D/StrictMode( 9365): at android.content.ContextWrapper.getFilesDir(ContextWrapper.java:239)
+06-02 16:38:45.475 D/StrictMode( 9365): at mozilla.components.feature.tab.collections.TabCollectionStorage.<init>(TabCollectionStorage.kt:29)
+06-02 16:38:45.475 D/StrictMode( 9365): at org.mozilla.fenix.components.TabCollectionStorage$collectionStorage$2.invoke(TabCollectionStorage.kt:52)
+06-02 16:38:45.475 D/StrictMode( 9365): at org.mozilla.fenix.components.TabCollectionStorage$collectionStorage$2.invoke(TabCollectionStorage.kt:23)
+06-02 16:38:45.475 D/StrictMode( 9365): at kotlin.SynchronizedLazyImpl.getValue(LazyJVM.kt:74)
+06-02 16:38:45.475 D/StrictMode( 9365): at org.mozilla.fenix.components.TabCollectionStorage.getCollectionStorage(Unknown Source:7)
+06-02 16:38:45.475 D/StrictMode( 9365): at org.mozilla.fenix.components.TabCollectionStorage.getCollections(TabCollectionStorage.kt:70)
+06-02 16:38:45.475 D/StrictMode( 9365): at org.mozilla.fenix.components.TabCollectionStorage.getCollections$default(TabCollectionStorage.kt:69)
+06-02 16:38:45.475 D/StrictMode( 9365): at org.mozilla.fenix.home.HomeFragment.subscribeToTabCollections(HomeFragment.kt:750)
+06-02 16:38:45.475 D/StrictMode( 9365): at org.mozilla.fenix.home.HomeFragment.onStart(HomeFragment.kt:404)
+06-02 16:38:45.475 D/StrictMode( 9365): at androidx.fragment.app.Fragment.performStart(Fragment.java:2730)
+06-02 16:38:45.475 D/StrictMode( 9365): at androidx.fragment.app.FragmentStateManager.start(FragmentStateManager.java:365)
+06-02 16:38:45.475 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1194)
+06-02 16:38:45.475 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1356)
+06-02 16:38:45.475 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.moveFragmentToExpectedState(FragmentManager.java:1434)
+06-02 16:38:45.475 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.moveToState(FragmentManager.java:1497)
+06-02 16:38:45.475 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.completeExecute(FragmentManager.java:2125)
+06-02 16:38:45.475 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager$StartEnterTransitionListener.completeTransaction(FragmentManager.java:3022)
+06-02 16:38:45.475 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.executePostponedTransaction(FragmentManager.java:1895)
+06-02 16:38:45.475 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.ensureExecReady(FragmentManager.java:1803)
+06-02 16:38:45.475 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager.execPendingActions(FragmentManager.java:1843)
+06-02 16:38:45.475 D/StrictMode( 9365): at androidx.fragment.app.FragmentManager$4.run(FragmentManager.java:413)
+06-02 16:38:45.475 D/StrictMode( 9365): at android.os.Handler.handleCallback(Handler.java:873)
+06-02 16:38:45.475 D/StrictMode( 9365): at android.os.Handler.dispatchMessage(Handler.java:99)
+06-02 16:38:45.475 D/StrictMode( 9365): at android.os.Looper.loop(Looper.java:193)
+06-02 16:38:45.475 D/StrictMode( 9365): at android.app.ActivityThread.main(ActivityThread.java:6669)
+06-02 16:38:45.475 D/StrictMode( 9365): at java.lang.reflect.Method.invoke(Native Method)
+06-02 16:38:45.475 D/StrictMode( 9365): at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:493)
+06-02 16:38:45.475 D/StrictMode( 9365): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:858)
+06-02 16:38:45.518 D/GeckoViewStartup( 9365): onEvent GeckoView:SetLocale
+06-02 16:38:45.519 D/GeckoViewStartup( 9365): onEvent GeckoView:SetDefaultPrefs
+06-02 16:38:45.528 D/GeckoViewStartup( 9365): onEvent GeckoView:SetDefaultPrefs
+06-02 16:38:45.532 E/adbd ( 4408): failed to connect to socket 'tcp:2829': Connection refused
+06-02 16:38:45.534 D/GeckoViewStartup( 9365): onEvent GeckoView:SetDefaultPrefs
+06-02 16:38:45.549 I/chatty ( 9365): uid=10099(org.mozilla.fenix.debug) Gecko identical 2 lines
+06-02 16:38:45.562 D/GeckoViewStartup( 9365): onEvent GeckoView:SetDefaultPrefs
+06-02 16:38:45.568 D/gralloc_ranchu( 1869): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:45.569 D/ ( 1869): HostConnection::get() New Host Connection established 0xc1b060c0, tid 1930
+06-02 16:38:45.570 D/gralloc_ranchu( 1869): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:45.571 D/ ( 1869): HostConnection::get() New Host Connection established 0xc1b060c0, tid 1930
+06-02 16:38:45.571 D/gralloc_ranchu( 1869): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:45.572 D/ ( 1869): HostConnection::get() New Host Connection established 0xc1b060c0, tid 1930
+06-02 16:38:45.572 D/gralloc_ranchu( 1869): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:45.573 W/SurfaceFlinger( 1728): Attempting to set client state on removed layer: Splash Screen org.mozilla.fenix.debug#0
+06-02 16:38:45.573 W/SurfaceFlinger( 1728): Attempting to destroy on removed layer: Splash Screen org.mozilla.fenix.debug#0
+06-02 16:38:45.580 D/GeckoViewStartup( 9365): onEvent GeckoView:SetDefaultPrefs
+06-02 16:38:45.584 D/GeckoViewStartup( 9365): onEvent GeckoView:SetDefaultPrefs
+06-02 16:38:45.584 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:45.585 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c3080, tid 1897
+06-02 16:38:45.585 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:45.585 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c3080, tid 1897
+06-02 16:38:45.586 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:45.586 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c3080, tid 1897
+06-02 16:38:45.586 D/gralloc_ranchu( 1897): gralloc_unregister_buffer: exiting HostConnection (is buffer-handling thread)
+06-02 16:38:45.588 D/GeckoViewStartup( 9365): onEvent GeckoView:SetDefaultPrefs
+06-02 16:38:45.594 D/ ( 1897): HostConnection::get() New Host Connection established 0xed7c3080, tid 1897
+06-02 16:38:45.595 D/GeckoViewStartup( 9365): onEvent GeckoView:SetDefaultPrefs
+06-02 16:38:45.595 D/GeckoViewStartup( 9365): onEvent GeckoView:SetDefaultPrefs
+06-02 16:38:45.602 D/GeckoViewConsole( 9365): onEvent GeckoView:RegisterWebExtension {"allowContentMessaging":true,"id":"webcompat@mozilla.com","locationUri":"resource://android/assets/extensions/webcompat/"}
+06-02 16:38:45.607 D/GeckoViewConsole( 9365): onEvent GeckoView:WebExtension:List null
+06-02 16:38:45.609 D/GeckoViewConsole( 9365): onEvent GeckoView:RegisterWebExtension {"allowContentMessaging":true,"id":"mozacBrowserIcons","locationUri":"resource://android/assets/extensions/browser-icons/"}
+06-02 16:38:45.610 D/GeckoViewConsole( 9365): onEvent GeckoView:RegisterWebExtension {"allowContentMessaging":true,"id":"mozacBrowserAds","locationUri":"resource://android/assets/extensions/ads/"}
+06-02 16:38:45.611 D/GeckoViewConsole( 9365): onEvent GeckoView:RegisterWebExtension {"allowContentMessaging":true,"id":"BrowserCookiesExtension","locationUri":"resource://android/assets/extensions/cookies/"}
+06-02 16:38:45.638 E/adbd ( 4408): failed to connect to socket 'tcp:2829': Connection refused
+06-02 16:38:45.730 I/ActivityManager( 1869): Fully drawn org.mozilla.fenix.debug/.App: +2s360ms
+06-02 16:38:45.736 I/Gecko ( 9365): 1591130325735 Marionette INFO Listening on port 2829
+06-02 16:38:45.738 I/DefaultSupportedAddonsChecker( 9365): Register check for new supported add-ons
+06-02 16:38:45.798 I/SupportedAddonsWorker( 9365): Trying to check for new supported add-ons
+06-02 16:38:45.922 D/App ( 9365): Installed browser-icons extension
+06-02 16:38:45.933 D/ ( 9365): HostConnection::get() New Host Connection established 0xce299840, tid 9393
+06-02 16:38:45.934 E/EGL_emulation( 9365): tid 9393: eglBindAPI(1259): error 0x300c (EGL_BAD_PARAMETER)
+06-02 16:38:45.936 D/EGL_emulation( 9365): eglCreateContext: 0xae005040: maj 3 min 0 rcv 3
+06-02 16:38:45.938 D/EGL_emulation( 9365): eglMakeCurrent: 0xae005040: ver 3 0 (tinfo 0xe9131ba0)
+06-02 16:38:45.976 E/GeckoConsole( 9365): [JavaScript Error: "NetworkError when attempting to fetch resource."]
+06-02 16:38:45.976 E/GeckoConsole( 9365): get@resource://services-settings/RemoteSettingsClient.jsm:350:12
+06-02 16:38:46.028 D/glean/ConceptFetchHttpUploader( 9365): Ping successfully sent (200)
+06-02 16:38:46.028 D/glean/PingUploadWorker( 9365): f2c83a47-027f-4c73-8ff2-9bc7dc060222 was deleted: true
+06-02 16:38:46.031 I/WM-WorkerWrapper( 9365): Worker result SUCCESS for Work [ id=07b3427a-abbb-48f8-b3a5-aa6f1e13a994, tags={ mozilla.telemetry.glean.scheduler.PingUploadWorker, mozac_service_glean_ping_upload_worker } ]
+06-02 16:38:46.112 D/BrowserIcons( 9365): Loaded icon (source = DOWNLOAD): https://www.youtube.com/
+06-02 16:38:46.163 D/BrowserIcons( 9365): Loaded icon (source = DOWNLOAD): https://www.wikipedia.org/
+06-02 16:38:46.216 W/GeckoConsole( 9365): [JavaScript Warning: "Security wrapper denied access to property "ONE_QUARTER" on privileged Javascript object. Support for exposing privileged objects to untrusted content via __exposedProps__ has been removed - use WebIDL bindings or Components.utils.cloneInto instead. Note that only the first denied property access from a given global object will be reported." {file: "moz-extension://d5e9c9f4-8e39-47cd-b0bd-15e32f8acf11/data/picture_in_picture_overrides.js" line: 26}]
+06-02 16:38:46.227 W/SurfaceFlinger( 1728): couldn't log to binary event log: overflow.
+06-02 16:38:46.234 D/mozac-webcompat( 9365): Installed WebCompat webextension: webcompat@mozilla.com
+06-02 16:38:46.282 E/GeckoConsole( 9365): [JavaScript Error: "can't access property "startupData", state is undefined" {file: "resource://gre/modules/addons/XPIProvider.jsm" line: 3079}]
+06-02 16:38:46.282 E/GeckoConsole( 9365): setStartupData@resource://gre/modules/addons/XPIProvider.jsm:3079:5
+06-02 16:38:46.282 E/GeckoConsole( 9365): saveStartupData@resource://gre/modules/Extension.jsm:2035:17
+06-02 16:38:46.282 E/GeckoConsole( 9365): _writePersistentListeners@resource://gre/modules/ExtensionCommon.jsm:2271:15
+06-02 16:38:46.282 E/GeckoConsole( 9365): savePersistentListener@resource://gre/modules/ExtensionCommon.jsm:2362:18
+06-02 16:38:46.282 E/GeckoConsole( 9365): addListener@resource://gre/modules/ExtensionCommon.jsm:2495:20
+06-02 16:38:46.282 E/GeckoConsole( 9365): addListener@resource://gre/modules/ExtensionCommon.jsm:2550:38
+06-02 16:38:46.282 E/GeckoConsole( 9365): recvAddListener@resource://gre/modules/ExtensionParent.jsm:1079:13
+06-02 16:38:46.294 I/chatty ( 9365): uid=10099(org.mozilla.fenix.debug) Gecko identical 24 lines
+06-02 16:38:46.336 E/GeckoConsole( 9365): [JavaScript Error: "can't access property "startupData", state is undefined" {file: "resource://gre/modules/addons/XPIProvider.jsm" line: 3079}]
+06-02 16:38:46.336 E/GeckoConsole( 9365): setStartupData@resource://gre/modules/addons/XPIProvider.jsm:3079:5
+06-02 16:38:46.336 E/GeckoConsole( 9365): saveStartupData@resource://gre/modules/Extension.jsm:2035:17
+06-02 16:38:46.336 E/GeckoConsole( 9365): _writePersistentListeners@resource://gre/modules/ExtensionCommon.jsm:2271:15
+06-02 16:38:46.336 E/GeckoConsole( 9365): savePersistentListener@resource://gre/modules/ExtensionCommon.jsm:2362:18
+06-02 16:38:46.336 E/GeckoConsole( 9365): addListener@resource://gre/modules/ExtensionCommon.jsm:2495:20
+06-02 16:38:46.336 E/GeckoConsole( 9365): addListener@resource://gre/modules/ExtensionCommon.jsm:2550:38
+06-02 16:38:46.336 E/GeckoConsole( 9365): recvAddListener@resource://gre/modules/ExtensionParent.jsm:1079:13
+06-02 16:38:46.342 I/PBSessionCacheImpl( 2402): Deleted sessionId[359508686912] from persistence.
+06-02 16:38:46.345 W/SearchService( 2402): Abort, client detached.
+06-02 16:38:46.471 D/WificondControl( 1869): Scan result ready event
+06-02 16:38:46.796 I/WM-WorkerWrapper( 9365): Worker result SUCCESS for Work [ id=2fa60760-cfd5-4e94-b83f-a9793a0ebc54, tags={ mozilla.components.feature.addons.migration.DefaultSupportedAddonsChecker.periodicWork, mozilla.components.feature.addons.migration.SupportedAddonsWorker } ]
+06-02 16:38:48.279 I/EventLogSendingHelper( 2402): Sending log events.
+06-02 16:38:50.279 W/ctxmgr ( 2473): [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):IndoorOutdoorProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2473). Was: 3 for 57, account#-517948760#
+06-02 16:38:50.338 I/FenixApplication( 9365): Kicking-off account manager...
+06-02 16:38:50.338 I/FenixApplication( 9365): Running post-visual completeness tasks...
+06-02 16:38:50.338 I/FenixApplication( 9365): Storage initialization...
+06-02 16:38:50.341 I/PlacesHistoryStorage( 9365): Warming up places storage...
+06-02 16:38:50.344 D/RustNativeSupport( 9365): findMegazordLibraryName(places, 0.59.0
+06-02 16:38:50.344 D/RustNativeSupport( 9365): lib in use: none
+06-02 16:38:50.344 D/RustNativeSupport( 9365): lib configured: megazord
+06-02 16:38:50.344 D/RustNativeSupport( 9365): lib version configured: 0.59.0
+06-02 16:38:50.344 D/RustNativeSupport( 9365): settled on megazord
+06-02 16:38:50.344 I/FirefoxAccountStateMachine( 9365): Enabling/updating sync with a new SyncConfig: SyncConfig(supportedEngines=[mozilla.components.service.fxa.SyncEngine$History@49c28e, mozilla.components.service.fxa.SyncEngine$Bookmarks@834aaf, mozilla.components.service.fxa.SyncEngine$Passwords@b024ebc], syncPeriodInMinutes=240)
+06-02 16:38:50.346 I/BgSyncManager( 9365): Periodic syncing enabled at a 240 interval
+06-02 16:38:50.346 D/places_ffi( 9365): places_api_new
+06-02 16:38:50.346 I/FirefoxAccountStateMachine( 9365): Sync is enabled
+06-02 16:38:50.348 I/FenixApplication( 9365): 'Kicking-off account manager' took 10 ms
+06-02 16:38:50.349 I/FirefoxAccountStateMachine( 9365): Processing event Init for state Start. Next state is Start
+06-02 16:38:50.363 I/keystore( 1734): del USRPKEY_org.mozilla.fenix.debug 10099
+06-02 16:38:50.364 I/keystore( 1734): del USRCERT_org.mozilla.fenix.debug 10099
+06-02 16:38:50.364 I/keystore( 1734): del CACERT_org.mozilla.fenix.debug 10099
+06-02 16:38:50.367 D/places::db::schema( 9365): Creating schema
+06-02 16:38:50.382 I/FirefoxAccountStateMachine( 9365): Ran 'Init' side-effects for state Start, got successive event AccountNotFound
+06-02 16:38:50.382 I/FirefoxAccountStateMachine( 9365): Processing event AccountNotFound for state Start. Next state is NotAuthenticated
+06-02 16:38:50.387 D/RustNativeSupport( 9365): findMegazordLibraryName(fxaclient, 0.59.0
+06-02 16:38:50.387 D/RustNativeSupport( 9365): lib in use: none
+06-02 16:38:50.387 D/RustNativeSupport( 9365): lib configured: megazord
+06-02 16:38:50.387 D/RustNativeSupport( 9365): lib version configured: 0.59.0
+06-02 16:38:50.387 D/RustNativeSupport( 9365): settled on megazord
+06-02 16:38:50.389 D/fxaclient_ffi( 9365): fxa_new
+06-02 16:38:50.391 W/FirefoxAccountStateMachine( 9365): Got invalid event Init for state NotAuthenticated.
+06-02 16:38:50.415 D/sql_support::conn_ext( 9365): Transaction commited after 48.284882ms
+06-02 16:38:50.415 D/places_ffi( 9365): places_connection_new
+06-02 16:38:50.421 D/places_ffi( 9365): places_connection_new
+06-02 16:38:50.425 I/PlacesHistoryStorage( 9365): 'Warming up places storage' took 84 ms
+06-02 16:38:50.427 I/PlacesBookmarksStorage( 9365): Warming up places storage...
+06-02 16:38:50.427 D/places_ffi( 9365): places_connection_new
+06-02 16:38:50.431 I/PlacesBookmarksStorage( 9365): 'Warming up places storage' took 3 ms
+06-02 16:38:50.450 I/keystore( 1734): 1 0
+06-02 16:38:50.461 I/SyncableLoginsStorage( 9365): Warming up storage...
+06-02 16:38:50.479 D/RustNativeSupport( 9365): findMegazordLibraryName(logins, 0.59.0
+06-02 16:38:50.479 D/RustNativeSupport( 9365): lib in use: none
+06-02 16:38:50.479 D/RustNativeSupport( 9365): lib configured: megazord
+06-02 16:38:50.479 D/RustNativeSupport( 9365): lib version configured: 0.59.0
+06-02 16:38:50.479 D/RustNativeSupport( 9365): settled on megazord
+06-02 16:38:50.484 D/logins_ffi( 9365): sync15_passwords_state_new
+06-02 16:38:50.493 D/logins::schema( 9365): Creating schema
+06-02 16:38:50.609 I/SyncableLoginsStorage( 9365): 'Warming up storage' took 147 ms
+06-02 16:38:50.609 I/FenixApplication( 9365): 'Storage initialization' took 270 ms
+06-02 16:38:51.401 D/EGL_emulation( 2002): eglMakeCurrent: 0xe3385900: ver 3 0 (tinfo 0xe33839b0)
+06-02 16:38:53.250 I/EventLogSendingHelper( 2402): Sending log events.
+06-02 16:38:53.453 E/memtrack( 1869): Couldn't load memtrack module
+06-02 16:38:53.453 W/android.os.Debug( 1869): failed to get memory consumption info: -1
+06-02 16:38:55.290 W/ctxmgr ( 2473): [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):IndoorOutdoorProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2473). Was: 3 for 57, account#-517948760#
+06-02 16:38:55.416 E/memtrack( 1869): Couldn't load memtrack module
+06-02 16:38:55.417 W/android.os.Debug( 1869): failed to get memory consumption info: -1
+06-02 16:38:55.430 E/memtrack( 1869): Couldn't load memtrack module
+06-02 16:38:55.430 W/android.os.Debug( 1869): failed to get memory consumption info: -1
+06-02 16:39:00.023 D/hwcomposer( 1897): hw_composer sent 60 syncs in 60s
+06-02 16:39:00.309 W/ctxmgr ( 2473): [AclManager]No 2 for (accnt=account#-517948760#, com.google.android.gms(10008):IndoorOutdoorProducer, vrsn=13280000, 0, 3pPkg = null , 3pMdlId = null , pid = 2473). Was: 3 for 57, account#-517948760#
diff --git a/python/mozperftest/mozperftest/tests/data/mozinfo.json b/python/mozperftest/mozperftest/tests/data/mozinfo.json
new file mode 100644
index 0000000000..f8789c5123
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/data/mozinfo.json
@@ -0,0 +1 @@
+{"appname": "firefox", "artifact": false, "asan": false, "bin_suffix": "", "bits": 64, "buildapp": "browser", "buildtype_guess": "opt", "cc_type": "clang", "ccov": false, "crashreporter": true, "datareporting": true, "debug": false, "devedition": false, "early_beta_or_earlier": true, "healthreport": true, "mozconfig": "/Users/tarek/Dev/gecko/mozilla-central-opt/browser/config/mozconfig", "nightly_build": true, "normandy": true, "official": false, "os": "mac", "pgo": false, "platform_guess": "macosx64", "processor": "x86_64", "release_or_beta": false, "require_signing": false, "stylo": true, "sync": true, "telemetry": false, "tests_enabled": true, "toolkit": "cocoa", "topsrcdir": "/Users/tarek/Dev/gecko/mozilla-central-opt", "tsan": false, "ubsan": false, "updater": true} \ No newline at end of file
diff --git a/python/mozperftest/mozperftest/tests/data/multiple_transforms_error/test_transformer_1.py b/python/mozperftest/mozperftest/tests/data/multiple_transforms_error/test_transformer_1.py
new file mode 100644
index 0000000000..2cfbc47e28
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/data/multiple_transforms_error/test_transformer_1.py
@@ -0,0 +1,6 @@
+class TestTransformer1(object):
+ def transform(self, data):
+ pass
+
+ def merge(self, sde):
+ pass
diff --git a/python/mozperftest/mozperftest/tests/data/multiple_transforms_error/test_transformer_1_copy.py b/python/mozperftest/mozperftest/tests/data/multiple_transforms_error/test_transformer_1_copy.py
new file mode 100644
index 0000000000..2cfbc47e28
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/data/multiple_transforms_error/test_transformer_1_copy.py
@@ -0,0 +1,6 @@
+class TestTransformer1(object):
+ def transform(self, data):
+ pass
+
+ def merge(self, sde):
+ pass
diff --git a/python/mozperftest/mozperftest/tests/data/perftestetl_plugin/test_transformer_perftestetl_plugin_1.py b/python/mozperftest/mozperftest/tests/data/perftestetl_plugin/test_transformer_perftestetl_plugin_1.py
new file mode 100644
index 0000000000..2cfbc47e28
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/data/perftestetl_plugin/test_transformer_perftestetl_plugin_1.py
@@ -0,0 +1,6 @@
+class TestTransformer1(object):
+ def transform(self, data):
+ pass
+
+ def merge(self, sde):
+ pass
diff --git a/python/mozperftest/mozperftest/tests/data/perftestetl_plugin/test_transformer_perftestetl_plugin_2.py b/python/mozperftest/mozperftest/tests/data/perftestetl_plugin/test_transformer_perftestetl_plugin_2.py
new file mode 100644
index 0000000000..f681095b20
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/data/perftestetl_plugin/test_transformer_perftestetl_plugin_2.py
@@ -0,0 +1,6 @@
+class TestTransformer2(object):
+ def transform(self, data):
+ pass
+
+ def merge(self, sde):
+ pass
diff --git a/python/mozperftest/mozperftest/tests/data/samples/head.js b/python/mozperftest/mozperftest/tests/data/samples/head.js
new file mode 100644
index 0000000000..e29b227f93
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/data/samples/head.js
@@ -0,0 +1,7 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+/* exported require */
+
+"use strict";
+
diff --git a/python/mozperftest/mozperftest/tests/data/samples/perftest_example.js b/python/mozperftest/mozperftest/tests/data/samples/perftest_example.js
new file mode 100644
index 0000000000..52162dedf1
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/data/samples/perftest_example.js
@@ -0,0 +1,46 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+/* eslint-env node */
+"use strict";
+
+var someVar;
+
+someVar = 2;
+
+async function setUp(context) {
+ context.log.info("setUp example!");
+}
+
+async function test(context, commands) {
+ context.log.info("Test with setUp/tearDown example!");
+ await commands.measure.start("https://www.sitespeed.io/");
+ await commands.measure.start("https://www.mozilla.org/en-US/");
+}
+
+async function tearDown(context) {
+ context.log.info("tearDown example!");
+}
+
+module.noexport = {};
+
+module.exports = {
+ setUp,
+ tearDown,
+ test,
+ owner: "Performance Testing Team",
+ name: "Example",
+ description: "The description of the example test.",
+ longDescription: `
+ This is a longer description of the test perhaps including information
+ about how it should be run locally or links to relevant information.
+ `,
+ supportedBrowsers: ["Fenix nightly", "Geckoview_example", "Fennec", "Firefox"],
+ supportedPlatforms: ["Android", "Desktop"],
+ options: {
+ default: {perfherder: true, verbose: false},
+ linux: {perfherder_metrics: [{name:"speed",unit:"bps_lin"}], verbose: true},
+ mac: {perfherder_metrics: [{name:"speed",unit:"bps_mac"}], verbose: true},
+ win: {perfherder_metrics: [{name:"speed",unit:"bps_win"}], verbose: true}
+ }
+};
diff --git a/python/mozperftest/mozperftest/tests/data/samples/test_perftest_WPT_init_file.js b/python/mozperftest/mozperftest/tests/data/samples/test_perftest_WPT_init_file.js
new file mode 100644
index 0000000000..eee6f61234
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/data/samples/test_perftest_WPT_init_file.js
@@ -0,0 +1,113 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+/* eslint-env node */
+"use strict";
+function setUp() {}
+
+function tearDown() {}
+
+function test() {}
+
+module.exports = {
+ setUp,
+ tearDown,
+ test,
+ owner: "Performance Testing Team",
+ name: "webpagetest",
+ description:
+ "Run webpagetest performance pageload tests on Firefox against Alexa top 50 websites",
+ longDescription: `This mozperftest gets webpagetest to run pageload tests on Firefox against the 50 most popular
+ websites and provide data. The full list of data returned from webpagetest: firstContentfulPaint,
+ timeToContentfulPaint, visualComplete90, firstPaint, visualComplete99, visualComplete, SpeedIndex, bytesIn, bytesOut,
+ TTFB, fullyLoadedCPUms, fullyLoadedCPUpct, domElements, domContentLoadedEventStart, domContentLoadedEventEnd,
+ loadEventStart, loadEventEnd`,
+ options: {
+ test_parameters: {
+ location: "ec2-us-east-1",
+ browser: "Firefox",
+ connection: "Cable",
+ timeout_limit: 21600,
+ wait_between_requests: 5,
+ statistics: ["average", "median", "standardDeviation"],
+ label: "",
+ runs: 3,
+ fvonly: 0,
+ private: 0,
+ web10: 0,
+ script: "",
+ block: "",
+ video: 1,
+ tcpdump: 0,
+ noimages: 0,
+ keepua: 1,
+ uastring: "",
+ htmlbody: 0,
+ custom: "",
+ ignoreSSL: 0,
+ appendua: "",
+ injectScript: "",
+ disableAVIF: 0,
+ disableWEBP: 0,
+ disableJXL: 0,
+ },
+ test_list: [
+ "google.com",
+ "youtube.com",
+ "facebook.com",
+ "qq.com",
+ "baidu.com",
+ "sohu.com",
+ "360.cn",
+ "jd.com",
+ "amazon.com",
+ "yahoo.com",
+ "zoom.us",
+ "sina.com.cn",
+ "live.com",
+ "reddit.com",
+ "netflix.com",
+ "microsoft.com",
+ "instagram.com",
+ "panda.tv",
+ "google.com.hk",
+ "csdn.net",
+ "bing.com",
+ "vk.com",
+ "yahoo.co.jp",
+ "twitter.com",
+ "naver.com",
+ "canva.com",
+ "ebay.com",
+ "force.com",
+ "amazon.in",
+ "adobe.com",
+ "aliexpress.com",
+ "linkedin.com",
+ "tianya.cn",
+ "yy.com",
+ "huanqiu.com",
+ "amazon.co.jp",
+ "okezone.com",
+ ],
+ browser_metrics: [
+ "firstContentfulPaint",
+ "timeToContentfulPaint",
+ "visualComplete90",
+ "firstPaint",
+ "visualComplete99",
+ "visualComplete",
+ "SpeedIndex",
+ "bytesIn",
+ "bytesOut",
+ "TTFB",
+ "fullyLoadedCPUms",
+ "fullyLoadedCPUpct",
+ "domElements",
+ "domContentLoadedEventStart",
+ "domContentLoadedEventEnd",
+ "loadEventStart",
+ "loadEventEnd",
+ ],
+ },
+};
diff --git a/python/mozperftest/mozperftest/tests/data/samples/test_perftest_android_startup.js b/python/mozperftest/mozperftest/tests/data/samples/test_perftest_android_startup.js
new file mode 100644
index 0000000000..f704bf67a0
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/data/samples/test_perftest_android_startup.js
@@ -0,0 +1,34 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+/* eslint-env node */
+"use strict";
+
+async function test(context, commands) {}
+
+module.exports = {
+ test,
+ owner: "Performance Team",
+ name: "android-startup",
+ description: "Measures cold process view time",
+ longDescription: `
+ This test launches the appropriate android app, simulating a opening a link through VIEW intent
+ workflow. The application is launched with the intent action
+ android.intent.action.VIEW loading a trivially simple website. The reported
+ metric is the time from process start to navigationStart, reported as processLaunchToNavStart
+ `,
+ usage: `
+ ./mach perftest --flavor mobile-browser --AndroidStartUp testing/performance/perftest_android_startup.js
+ --browsertime-cycles=0 --AndroidStartUp-test-name=cold_view_nav_start --perfherder --hooks
+ testing/performance/hooks_android_startup.py --AndroidStartUp-product=fenix
+ --AndroidStartUp-release-channel=nightly
+ `,
+ options: {
+ test_parameters: {
+ single_date: null, // Dates in YYYY.MM.DD format
+ date_range: [], // 2 Dates in YYYY.MM.DD format the first and last date(inclusive)
+ startup_cache: true,
+ test_cycles: 50,
+ },
+ },
+};
diff --git a/python/mozperftest/mozperftest/tests/data/samples/test_xpcshell.js b/python/mozperftest/mozperftest/tests/data/samples/test_xpcshell.js
new file mode 100644
index 0000000000..5796f23a64
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/data/samples/test_xpcshell.js
@@ -0,0 +1,39 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+
+add_task(async function dummy_test() {
+ /*
+ * Do some test here, get some metrics
+ */
+ var metrics = {"metrics1": 1, "metrics2": 2};
+ info("perfMetrics", metrics);
+ info("perfMetrics", {"metrics3": 3});
+ await true;
+});
+
+var anotherVar = {
+ something: "cool"
+};
+
+var perfMetadata = {
+ owner: "Performance Testing Team",
+ name: "Example",
+ description: "The description of the example test.",
+ longDescription: `
+ This is a longer description of the test perhaps including information
+ about how it should be run locally or links to relevant information.
+ `,
+ supportedBrowsers: ["Firefox"],
+ supportedPlatforms: ["Desktop"],
+ options: {
+ default: {perfherder: true},
+ linux: {perfherder_metrics: [{name:"speed",unit: "bps_lin"}]},
+ mac: {perfherder_metrics: [{name:"speed",unit: "bps_mac"}]},
+ win: {perfherder_metrics: [{name:"speed",unit: "bps_win"}]},
+ wat: {},
+ }
+};
diff --git a/python/mozperftest/mozperftest/tests/data/samples/test_xpcshell_flavor2.js b/python/mozperftest/mozperftest/tests/data/samples/test_xpcshell_flavor2.js
new file mode 100644
index 0000000000..3223f6c603
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/data/samples/test_xpcshell_flavor2.js
@@ -0,0 +1,35 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+var anotherVar = {
+ something: "cool"
+};
+
+var perfMetadata = {
+ owner: "Performance Testing Team",
+ name: "Example",
+ description: "The description of the example test.",
+ longDescription: `
+ This is a longer description of the test perhaps including information
+ about how it should be run locally or links to relevant information.
+ `,
+ supportedBrowsers: ["Firefox"],
+ supportedPlatforms: ["Desktop"],
+ options: {
+ default: {perfherder: true},
+ linux: {perfherder_metrics: [{name:"speed",unit: "bps_lin"}]},
+ mac: {perfherder_metrics: [{name:"speed",unit: "bps_mac"}]},
+ win: {perfherder_metrics: [{name:"speed",unit: "bps_win"}]}
+ }
+};
+
+function run_next_test() {
+ // do something
+}
+
+function run_test() {
+ // do something
+}
diff --git a/python/mozperftest/mozperftest/tests/data/samples/xpcshell.ini b/python/mozperftest/mozperftest/tests/data/samples/xpcshell.ini
new file mode 100644
index 0000000000..47c62f7d15
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/data/samples/xpcshell.ini
@@ -0,0 +1,5 @@
+[DEFAULT]
+head = head.js
+firefox-appdir = browser
+
+[test_xpcshell.js]
diff --git a/python/mozperftest/mozperftest/tests/data/xpcshell b/python/mozperftest/mozperftest/tests/data/xpcshell
new file mode 100644
index 0000000000..792d600548
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/data/xpcshell
@@ -0,0 +1 @@
+#
diff --git a/python/mozperftest/mozperftest/tests/fetched_artifact.zip b/python/mozperftest/mozperftest/tests/fetched_artifact.zip
new file mode 100644
index 0000000000..3f6431896b
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/fetched_artifact.zip
Binary files differ
diff --git a/python/mozperftest/mozperftest/tests/support.py b/python/mozperftest/mozperftest/tests/support.py
new file mode 100644
index 0000000000..4df5823d43
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/support.py
@@ -0,0 +1,120 @@
+import contextlib
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+from pathlib import Path
+from unittest.mock import MagicMock
+
+from mozperftest import utils
+from mozperftest.environment import MachEnvironment
+from mozperftest.hooks import Hooks
+from mozperftest.metadata import Metadata
+from mozperftest.script import ScriptInfo
+
+HERE = Path(__file__).parent
+ROOT = Path(HERE, "..", "..", "..", "..").resolve()
+EXAMPLE_TESTS_DIR = os.path.join(HERE, "data", "samples")
+EXAMPLE_TEST = os.path.join(EXAMPLE_TESTS_DIR, "perftest_example.js")
+EXAMPLE_XPCSHELL_TEST = Path(EXAMPLE_TESTS_DIR, "test_xpcshell.js")
+EXAMPLE_XPCSHELL_TEST2 = Path(EXAMPLE_TESTS_DIR, "test_xpcshell_flavor2.js")
+EXAMPLE_WPT_TEST = Path(EXAMPLE_TESTS_DIR, "test_perftest_WPT_init_file.js")
+BT_DATA = Path(HERE, "data", "browsertime-results", "browsertime.json")
+BT_DATA_VIDEO = Path(HERE, "data", "browsertime-results-video", "browsertime.json")
+DMG = Path(HERE, "data", "firefox.dmg")
+MOZINFO = Path(HERE, "data", "mozinfo.json")
+EXAMPLE_ANDROID_STARTUP_TEST = Path(
+ EXAMPLE_TESTS_DIR, "test_perftest_android_startup.js"
+)
+
+
+@contextlib.contextmanager
+def temp_file(name="temp", content=None):
+ tempdir = tempfile.mkdtemp()
+ path = os.path.join(tempdir, name)
+ if content is not None:
+ with open(path, "w") as f:
+ f.write(content)
+ try:
+ yield path
+ finally:
+ shutil.rmtree(tempdir)
+
+
+def get_running_env(**kwargs):
+ from mozbuild.base import MozbuildObject
+
+ config = MozbuildObject.from_environment()
+ mach_cmd = MagicMock()
+
+ def get_binary_path(*args):
+ return ""
+
+ def run_pip(args):
+ subprocess.check_call(
+ [sys.executable, "-m", "pip"] + args,
+ stderr=subprocess.STDOUT,
+ cwd=config.topsrcdir,
+ universal_newlines=True,
+ )
+
+ mach_cmd.get_binary_path = get_binary_path
+ mach_cmd.topsrcdir = config.topsrcdir
+ mach_cmd.topobjdir = config.topobjdir
+ mach_cmd.distdir = config.distdir
+ mach_cmd.bindir = config.bindir
+ mach_cmd._mach_context = MagicMock()
+ mach_cmd._mach_context.state_dir = tempfile.mkdtemp()
+ mach_cmd.run_process.return_value = 0
+ mach_cmd.virtualenv_manager = MagicMock()
+ mach_cmd.virtualenv_manager.python_path = sys.executable
+ mach_cmd.virtualenv_manager.bin_path = Path(sys.executable).parent
+ mach_cmd.virtualenv_manager._run_pip = run_pip
+
+ mach_args = {
+ "flavor": "desktop-browser",
+ "test_objects": None,
+ "resolve_tests": True,
+ "browsertime-clobber": False,
+ "browsertime-install-url": None,
+ }
+ mach_args.update(kwargs)
+ hooks = Hooks(mach_cmd, mach_args.pop("hooks", None))
+ tests = mach_args.get("tests", [])
+ if len(tests) > 0:
+ script = ScriptInfo(tests[0])
+ else:
+ script = None
+ env = MachEnvironment(mach_cmd, hooks=hooks, **mach_args)
+ metadata = Metadata(mach_cmd, env, "desktop-browser", script)
+ return mach_cmd, metadata, env
+
+
+def requests_content(chunks=None):
+ if chunks is None:
+ chunks = [b"some ", b"content"]
+
+ def _content(*args, **kw):
+ class Resp:
+ def iter_content(self, **kw):
+ for chunk in chunks:
+ yield chunk
+
+ return Resp()
+
+ return _content
+
+
+@contextlib.contextmanager
+def running_on_try(on_try=True):
+ old = utils.ON_TRY
+ utils.ON_TRY = on_try
+ try:
+ if on_try:
+ with utils.temporary_env(MOZ_AUTOMATION="1"):
+ yield
+ else:
+ yield
+ finally:
+ utils.ON_TRY = old
diff --git a/python/mozperftest/mozperftest/tests/test_android.py b/python/mozperftest/mozperftest/tests/test_android.py
new file mode 100644
index 0000000000..881fcb1cd8
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_android.py
@@ -0,0 +1,331 @@
+#!/usr/bin/env python
+import pathlib
+from unittest import mock
+
+import mozunit
+import pytest
+
+from mozperftest.environment import SYSTEM
+from mozperftest.system.android import DeviceError
+from mozperftest.system.android_perf_tuner import PerformanceTuner
+from mozperftest.tests.support import get_running_env, requests_content, temp_file
+from mozperftest.utils import silence
+
+
+class FakeDevice:
+ def __init__(self, **args):
+ self.apps = []
+ self._logger = mock.MagicMock()
+ self._have_su = True
+ self._have_android_su = True
+ self._have_root_shell = True
+ self.is_rooted = True
+
+ def clear_logcat(self, *args, **kwargs):
+ return True
+
+ def shell_output(self, *args, **kwargs):
+ return "A Fake Device"
+
+ def shell_bool(self, *args, **kwargs):
+ return True
+
+ def uninstall_app(self, apk_name):
+ return True
+
+ def install_app(self, apk, replace=True):
+ if apk not in self.apps:
+ self.apps.append(apk)
+
+ def is_app_installed(self, app_name):
+ return True
+
+
+@mock.patch("mozperftest.system.android.ADBLoggedDevice", new=FakeDevice)
+def test_android():
+ args = {
+ "flavor": "mobile-browser",
+ "android-install-apk": ["this.apk"],
+ "android": True,
+ "android-timeout": 30,
+ "android-capture-adb": "stdout",
+ "android-app-name": "org.mozilla.fenix",
+ }
+
+ mach_cmd, metadata, env = get_running_env(**args)
+ system = env.layers[SYSTEM]
+ with system as android, silence(system):
+ android(metadata)
+
+
+@mock.patch("mozperftest.system.android.ADBLoggedDevice")
+def test_android_perf_tuning_rooted(device):
+ # Check to make sure that performance tuning runs
+ # on rooted devices correctly
+ device._have_su = True
+ device._have_android_su = True
+ device._have_root_shell = True
+ device.is_rooted = True
+ with mock.patch(
+ "mozperftest.system.android_perf_tuner.PerformanceTuner.set_kernel_performance_parameters"
+ ) as mockfunc:
+ tuner = PerformanceTuner(device)
+ tuner.tune_performance()
+ mockfunc.assert_called()
+
+
+@mock.patch("mozperftest.system.android.ADBLoggedDevice")
+def test_android_perf_tuning_nonrooted(device):
+ # Check to make sure that performance tuning runs
+ # on non-rooted devices correctly
+ device._have_su = False
+ device._have_android_su = False
+ device._have_root_shell = False
+ device.is_rooted = False
+ with mock.patch(
+ "mozperftest.system.android_perf_tuner.PerformanceTuner.set_kernel_performance_parameters"
+ ) as mockfunc:
+ tuner = PerformanceTuner(device)
+ tuner.tune_performance()
+ mockfunc.assert_not_called()
+
+
+class Device:
+ def __init__(self, name, rooted=True):
+ self.device_name = name
+ self.is_rooted = rooted
+ self.call_counts = 0
+
+ @property
+ def _logger(self):
+ return self
+
+ def noop(self, *args, **kw):
+ pass
+
+ debug = error = info = clear_logcat = noop
+
+ def shell_bool(self, *args, **kw):
+ self.call_counts += 1
+ return True
+
+ def shell_output(self, *args, **kw):
+ self.call_counts += 1
+ return self.device_name
+
+
+def test_android_perf_tuning_all_calls():
+ # Check without mocking PerformanceTuner functions
+ for name in ("Moto G (5)", "Pixel 2", "?"):
+ device = Device(name)
+ tuner = PerformanceTuner(device)
+ tuner.tune_performance()
+ assert device.call_counts > 1
+
+
+@mock.patch("mozperftest.system.android_perf_tuner.PerformanceTuner")
+@mock.patch("mozperftest.system.android.ADBLoggedDevice")
+def test_android_with_perftuning(device, tuner):
+ args = {
+ "flavor": "mobile-browser",
+ "android-install-apk": ["this.apk"],
+ "android": True,
+ "android-timeout": 30,
+ "android-capture-adb": "stdout",
+ "android-app-name": "org.mozilla.fenix",
+ "android-perf-tuning": True,
+ }
+ tuner.return_value = tuner
+
+ mach_cmd, metadata, env = get_running_env(**args)
+ system = env.layers[SYSTEM]
+ with system as android, silence(system):
+ android(metadata)
+
+ # Make sure the tuner was actually called
+ tuner.tune_performance.assert_called()
+
+
+def test_android_failure():
+ # no patching so it'll try for real and fail
+ args = {
+ "flavor": "mobile-browser",
+ "android-install-apk": ["this"],
+ "android": True,
+ "android-timeout": 120,
+ "android-app-name": "org.mozilla.fenix",
+ "android-capture-adb": "stdout",
+ }
+
+ mach_cmd, metadata, env = get_running_env(**args)
+ system = env.layers[SYSTEM]
+ with system as android, silence(system), pytest.raises(DeviceError):
+ android(metadata)
+
+
+@mock.patch(
+ "mozperftest.system.android.AndroidDevice.custom_apk_exists", new=lambda x: False
+)
+@mock.patch("mozperftest.utils.requests.get", new=requests_content())
+@mock.patch("mozperftest.system.android.ADBLoggedDevice")
+def test_android_apk_alias(device):
+ args = {
+ "flavor": "mobile-browser",
+ "android-install-apk": ["fenix_nightly_armeabi_v7a"],
+ "android": True,
+ "android-app-name": "org.mozilla.fenix",
+ "android-capture-adb": "stdout",
+ }
+
+ mach_cmd, metadata, env = get_running_env(**args)
+ system = env.layers[SYSTEM]
+ with system as android, silence(system):
+ android(metadata)
+
+ assert device.mock_calls[1][1][0] == "org.mozilla.fenix"
+ assert device.mock_calls[2][1][0].endswith("target.apk")
+
+
+@mock.patch("mozperftest.utils.requests.get", new=requests_content())
+@mock.patch("mozperftest.system.android.ADBLoggedDevice")
+def test_android_timeout(device):
+ args = {
+ "flavor": "mobile-browser",
+ "android-install-apk": ["gve_nightly_api16"],
+ "android": True,
+ "android-timeout": 60,
+ "android-app-name": "org.mozilla.geckoview_example",
+ "android-capture-adb": "stdout",
+ }
+
+ mach_cmd, metadata, env = get_running_env(**args)
+ system = env.layers[SYSTEM]
+ with system as android, silence(system):
+ android(metadata)
+ options = device.mock_calls[0][-1]
+ assert options["timeout"] == 60
+
+
+@mock.patch("mozperftest.utils.requests.get", new=requests_content())
+def test_android_log_adb():
+ with temp_file() as log_adb:
+ args = {
+ "flavor": "mobile-browser",
+ "android-install-apk": ["gve_nightly_api16"],
+ "android": True,
+ "android-timeout": 60,
+ "android-app-name": "org.mozilla.geckoview_example",
+ "android-capture-adb": log_adb,
+ }
+
+ mach_cmd, metadata, env = get_running_env(**args)
+ system = env.layers[SYSTEM]
+ with system as android, silence(system), pytest.raises(DeviceError):
+ android(metadata)
+ with open(log_adb) as f:
+ assert "DEBUG ADBLoggedDevice" in f.read()
+
+
+@mock.patch("mozperftest.utils.requests.get", new=requests_content())
+@mock.patch("mozperftest.system.android.ADBLoggedDevice")
+def test_android_log_cat(device):
+ with temp_file() as log_cat:
+ args = {
+ "flavor": "mobile-browser",
+ "android-install-apk": ["gve_nightly_api16"],
+ "android": True,
+ "android-timeout": 60,
+ "android-app-name": "org.mozilla.geckoview_example",
+ "android-capture-logcat": log_cat,
+ "android-clear-logcat": True,
+ "android-capture-adb": "stdout",
+ }
+
+ mach_cmd, metadata, env = get_running_env(**args)
+ system = env.layers[SYSTEM]
+ andro = system.layers[1]
+
+ with system as layer, silence(system):
+ andro.device = device
+ andro.device.get_logcat = mock.Mock(result_value=[])
+ layer(metadata)
+
+ andro.device.get_logcat.assert_called()
+ andro.device.clear_logcat.assert_called()
+
+
+@mock.patch("mozperftest.system.android.AndroidDevice.setup", new=mock.MagicMock)
+@mock.patch("mozperftest.system.android.Path")
+@mock.patch("mozperftest.system.android.ADBLoggedDevice", new=FakeDevice)
+def test_android_custom_apk(mozperftest_android_path):
+ args = {
+ "flavor": "mobile-browser",
+ "android": True,
+ }
+
+ with temp_file(name="user_upload.apk", content="") as sample_apk:
+ sample_apk = pathlib.Path(sample_apk)
+ mozperftest_android_path.return_value = sample_apk
+
+ mach_cmd, metadata, env = get_running_env(**args)
+ system = env.layers[SYSTEM]
+ android = system.layers[1]
+
+ with system as _, silence(system):
+ assert android._custom_apk_path is None
+ assert android.custom_apk_exists()
+ assert android.custom_apk_path == sample_apk
+
+ mozperftest_android_path.assert_called_once()
+
+
+@mock.patch("mozperftest.system.android.AndroidDevice.setup", new=mock.MagicMock)
+@mock.patch("mozperftest.system.android.Path.exists")
+@mock.patch("mozperftest.system.android.ADBLoggedDevice", new=FakeDevice)
+def test_android_custom_apk_nonexistent(path_exists):
+ args = {
+ "flavor": "mobile-browser",
+ "android": True,
+ }
+
+ path_exists.return_value = False
+
+ mach_cmd, metadata, env = get_running_env(**args)
+ system = env.layers[SYSTEM]
+ android = system.layers[1]
+
+ with system as _, silence(system):
+ assert android._custom_apk_path is None
+ assert not android.custom_apk_exists()
+ assert android.custom_apk_path is None
+
+ path_exists.assert_called()
+
+
+@mock.patch("mozperftest.system.android.Path")
+@mock.patch("mozperftest.system.android.ADBLoggedDevice", new=FakeDevice)
+def test_android_setup_custom_apk(mozperftest_android_path):
+ args = {
+ "flavor": "mobile-browser",
+ "android": True,
+ }
+
+ with temp_file(name="user_upload.apk", content="") as sample_apk:
+ sample_apk = pathlib.Path(sample_apk)
+ mozperftest_android_path.return_value = sample_apk
+
+ mach_cmd, metadata, env = get_running_env(**args)
+ system = env.layers[SYSTEM]
+ android = system.layers[1]
+
+ with system as _, silence(system):
+ # The custom apk should be found immediately, and it
+ # should replace any --android-install-apk settings
+ assert android._custom_apk_path == sample_apk
+ assert env.get_arg("android-install-apk") == [sample_apk]
+
+ mozperftest_android_path.assert_called_once()
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozperftest/mozperftest/tests/test_android_startup.py b/python/mozperftest/mozperftest/tests/test_android_startup.py
new file mode 100644
index 0000000000..9620a7d901
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_android_startup.py
@@ -0,0 +1,285 @@
+import copy
+import json
+import pathlib
+import random
+import time
+from datetime import date
+from unittest import mock
+
+import mozunit
+import pytest
+import requests
+
+from mozperftest.system import android_startup
+from mozperftest.system.android_startup import (
+ AndroidStartUpInstallError,
+ AndroidStartUpMatchingError,
+ AndroidStartUpUnknownTestError,
+)
+from mozperftest.tests.support import (
+ EXAMPLE_ANDROID_STARTUP_TEST,
+ get_running_env,
+ temp_file,
+)
+
+SAMPLE_APK_METADATA = {
+ "name": "fenix_nightly_armeabi-v7a_2022_09_27.apk",
+ "date": date(2022, 9, 27),
+ "commit": "",
+ "architecture": "armeabi-v7a",
+ "product": "fenix",
+}
+
+ARGS = {
+ "AndroidStartUp-test-name": "cold_view_nav_start",
+ "AndroidStartUp-product": "fenix",
+ "AndroidStartUp-release-channel": "nightly",
+ "apk_metadata": SAMPLE_APK_METADATA,
+ "test-date": "2023.01.01",
+}
+
+
+class FakeDevice:
+ def __init__(self, **kwargs):
+ self.name = ""
+ pass
+
+ def uninstall_app(self, app):
+ pass
+
+ def install_app(self, app_name):
+ return app_name
+
+ def is_app_installed(self, name):
+ self.name = name
+ if name == "is_app_installed_fail":
+ return False
+ else:
+ return True
+
+ def stop_application(self, package_id):
+ pass
+
+ def shell(self, cmd):
+ pass
+
+ def shell_output(self, cmd):
+ if cmd == "logcat -d":
+ return (
+ "ActivityManager: Start proc 23943:org.mozilla.fenix/u0a283 \n"
+ "ActivityManager: Start proc 23942:org.mozilla.fenix/u0a283 \n"
+ "11-23 14:10:13.391 13135 13135 I "
+ "GeckoSession: handleMessage GeckoView:PageStart uri=\n"
+ "11-23 14:10:13.391 13135 13135 I "
+ "GeckoSession: handleMessage GeckoView:PageStart uri="
+ )
+ if self.name == "name_for_intent_not_2_lines":
+ return "3 \n lines \n not 2"
+ elif self.name == "name_for_multiple_Totaltime_strings":
+ return "2 lines but \n no TotalTime"
+ elif self.name == "name_for_single_total_time":
+ return "TotalTime: 123 \n test"
+
+
+def setup_metadata(metadata, **kwargs):
+ new_metadata = copy.copy(metadata)
+ for key, value in kwargs.items():
+ new_metadata[key] = value
+ return new_metadata
+
+
+def running_env(**kw):
+ return get_running_env(flavor="mobile-browser", **kw)
+
+
+def init_mocked_request(status_code, **kwargs):
+ mock_data = {}
+ for key, value in kwargs.items():
+ mock_data["data"][key] = value
+ mock_request = requests.Response()
+ mock_request.status_code = status_code
+ mock_request._content = json.dumps(mock_data).encode("utf-8")
+ return mock_request
+
+
+@mock.patch(
+ "mozperftest.system.android.AndroidDevice.custom_apk_exists", new=lambda x: False
+)
+@mock.patch(
+ "mozdevice.ADBDevice",
+ new=FakeDevice,
+)
+@mock.patch("requests.get", return_value=init_mocked_request(200))
+@mock.patch("time.sleep", return_value=time.sleep(0))
+def test_install_of_nightly_failed(*mocked):
+ SAMPLE_APK_METADATA["name"] = "is_app_installed_fail"
+ ARGS["apk_metadata"] = SAMPLE_APK_METADATA
+ mach_cmd, metadata, env = running_env(
+ tests=[str(EXAMPLE_ANDROID_STARTUP_TEST)], **ARGS
+ )
+ test = android_startup.AndroidStartUp(env, mach_cmd)
+ with pytest.raises(AndroidStartUpInstallError):
+ test.run(metadata)
+ SAMPLE_APK_METADATA["name"] = "fenix_nightly_armeabi-v7a_2022_09_27.apk"
+ ARGS["apk_metadata"] = SAMPLE_APK_METADATA
+ pass
+
+
+@mock.patch(
+ "mozdevice.ADBDevice",
+ new=FakeDevice,
+)
+@mock.patch("time.sleep", return_value=time.sleep(0))
+def test_invalid_test_name(*mocked):
+ ARGS["AndroidStartUp-test-name"] = "BAD TEST NAME"
+ mach_cmd, metadata, env = running_env(
+ tests=[str(EXAMPLE_ANDROID_STARTUP_TEST)], **ARGS
+ )
+ test = android_startup.AndroidStartUp(env, mach_cmd)
+ with pytest.raises(AndroidStartUpUnknownTestError):
+ test.run(metadata)
+ ARGS["AndroidStartUp-test-name"] = "cold_main_first_frame"
+ pass
+
+
+@mock.patch(
+ "mozperftest.system.android.AndroidDevice.custom_apk_exists", new=lambda x: False
+)
+@mock.patch(
+ "mozdevice.ADBDevice",
+ new=FakeDevice,
+)
+@mock.patch("time.sleep", return_value=time.sleep(0))
+def test_multiple_matching_lines(*mocked):
+ SAMPLE_APK_METADATA["name"] = "name_for_intent_not_2_lines"
+ ARGS["apk_metadata"] = SAMPLE_APK_METADATA
+ mach_cmd, metadata, env = running_env(
+ tests=[str(EXAMPLE_ANDROID_STARTUP_TEST)], **ARGS
+ )
+ test = android_startup.AndroidStartUp(env, mach_cmd)
+ with pytest.raises(AndroidStartUpMatchingError):
+ test.run(metadata)
+ SAMPLE_APK_METADATA["name"] = "fenix_nightly_armeabi-v7a_2022_09_27.apk"
+ ARGS["apk_metadata"] = SAMPLE_APK_METADATA
+ pass
+
+
+@mock.patch(
+ "mozperftest.system.android.AndroidDevice.custom_apk_exists", new=lambda x: False
+)
+@mock.patch(
+ "mozdevice.ADBDevice",
+ new=FakeDevice,
+)
+@mock.patch("time.sleep", return_value=time.sleep(0))
+def test_multiple_total_time_prefix(*mocked):
+ SAMPLE_APK_METADATA["name"] = "name_for_multiple_Totaltime_strings"
+ ARGS["apk_metadata"] = SAMPLE_APK_METADATA
+ mach_cmd, metadata, env = running_env(
+ tests=[str(EXAMPLE_ANDROID_STARTUP_TEST)], **ARGS
+ )
+ test = android_startup.AndroidStartUp(env, mach_cmd)
+ with pytest.raises(AndroidStartUpMatchingError):
+ test.run(metadata)
+ SAMPLE_APK_METADATA["name"] = "fenix_nightly_armeabi-v7a_2022_09_27.apk"
+ ARGS["apk_metadata"] = SAMPLE_APK_METADATA
+ pass
+
+
+@mock.patch(
+ "mozperftest.system.android.AndroidDevice.custom_apk_exists", new=lambda x: False
+)
+@mock.patch(
+ "mozdevice.ADBDevice",
+ new=FakeDevice,
+)
+@mock.patch("time.sleep", return_value=time.sleep(0))
+def test_multiple_start_proc_lines(*mocked):
+ SAMPLE_APK_METADATA["name"] = "name_for_multiple_Totaltime_strings"
+ ARGS["apk_metadata"] = SAMPLE_APK_METADATA
+ ARGS["apk_metadata"] = SAMPLE_APK_METADATA
+ mach_cmd, metadata, env = running_env(
+ tests=[str(EXAMPLE_ANDROID_STARTUP_TEST)], **ARGS
+ )
+ test = android_startup.AndroidStartUp(env, mach_cmd)
+ with pytest.raises(AndroidStartUpMatchingError):
+ test.run(metadata)
+ SAMPLE_APK_METADATA["name"] = "fenix_nightly_armeabi-v7a_2022_09_27.apk"
+ ARGS["apk_metadata"] = SAMPLE_APK_METADATA
+ pass
+
+
+@mock.patch(
+ "mozperftest.system.android.AndroidDevice.custom_apk_exists", new=lambda x: False
+)
+@mock.patch(
+ "mozdevice.ADBDevice",
+ new=FakeDevice,
+)
+@mock.patch("time.sleep", return_value=time.sleep(0))
+@mock.patch(
+ "mozperftest.system.android_startup.AndroidStartUp.get_measurement",
+ return_value=random.randint(500, 1000),
+)
+def test_perfherder_layer(*mocked):
+ SAMPLE_APK_METADATA["name"] = "name_for_multiple_Totaltime_strings"
+ ARGS["apk_metadata"] = SAMPLE_APK_METADATA
+ mach_cmd, metadata, env = running_env(
+ tests=[str(EXAMPLE_ANDROID_STARTUP_TEST)], **ARGS
+ )
+ test = android_startup.AndroidStartUp(env, mach_cmd)
+ test.run(metadata)
+
+
+@mock.patch("mozperftest.system.android.Path")
+@mock.patch(
+ "mozdevice.ADBDevice",
+ new=FakeDevice,
+)
+@mock.patch("time.sleep", return_value=time.sleep(0))
+@mock.patch(
+ "mozperftest.system.android_startup.AndroidStartUp.get_measurement",
+ return_value=random.randint(500, 1000),
+)
+def test_custom_apk_startup(get_measurement_mock, time_sleep_mock, path_mock):
+ SAMPLE_APK_METADATA["name"] = "name_for_multiple_Totaltime_strings"
+ ARGS["apk_metadata"] = SAMPLE_APK_METADATA
+ mach_cmd, metadata, env = running_env(
+ tests=[str(EXAMPLE_ANDROID_STARTUP_TEST)], **ARGS
+ )
+
+ with temp_file(name="user_upload.apk", content="") as sample_apk:
+ sample_apk = pathlib.Path(sample_apk)
+ path_mock.return_value = sample_apk
+
+ with mock.patch(
+ "mozperftest.system.android_startup.AndroidStartUp.run_tests"
+ ) as _:
+ test = android_startup.AndroidStartUp(env, mach_cmd)
+ test.run_tests = lambda: True
+ test.package_id = "FakeID"
+ assert test.run_performance_analysis(SAMPLE_APK_METADATA)
+
+
+@mock.patch(
+ "mozperftest.system.android.AndroidDevice.custom_apk_exists", new=lambda x: False
+)
+@mock.patch(
+ "mozdevice.ADBDevice",
+ new=FakeDevice,
+)
+@mock.patch("time.sleep", return_value=time.sleep(0))
+def test_get_measurement_from_nav_start_logcat_match_error(*mocked):
+ SAMPLE_APK_METADATA["name"] = "name_for_single_total_time"
+ ARGS["apk_metadata"] = SAMPLE_APK_METADATA
+ ARGS["AndroidStartUp-test-name"] = "cold_view_nav_start"
+ mach_cmd, metadata, env = running_env(
+ tests=[str(EXAMPLE_ANDROID_STARTUP_TEST)], **ARGS
+ )
+ test = android_startup.AndroidStartUp(env, mach_cmd)
+ with pytest.raises(AndroidStartUpMatchingError):
+ test.run(metadata)
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozperftest/mozperftest/tests/test_androidlog.py b/python/mozperftest/mozperftest/tests/test_androidlog.py
new file mode 100644
index 0000000000..b7aa56b704
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_androidlog.py
@@ -0,0 +1,81 @@
+#!/usr/bin/env python
+import pathlib
+from unittest import mock
+
+import mozunit
+
+from mozperftest.environment import METRICS, SYSTEM, TEST
+from mozperftest.tests.support import EXAMPLE_TEST, get_running_env, temp_file
+from mozperftest.utils import temp_dir
+
+HERE = pathlib.Path(__file__).parent
+LOGCAT = HERE / "data" / "logcat"
+
+
+def fetch(self, url):
+ return str(HERE / "fetched_artifact.zip")
+
+
+class FakeDevice:
+ def __init__(self, **args):
+ self.apps = []
+
+ def uninstall_app(self, apk_name):
+ return True
+
+ def install_app(self, apk, replace=True):
+ if apk not in self.apps:
+ self.apps.append(apk)
+
+ def is_app_installed(self, app_name):
+ return True
+
+ def get_logcat(self):
+ with LOGCAT.open() as f:
+ for line in f:
+ yield line
+
+
+@mock.patch("mozperftest.test.browsertime.runner.install_package")
+@mock.patch(
+ "mozperftest.test.noderunner.NodeRunner.verify_node_install", new=lambda x: True
+)
+@mock.patch("mozbuild.artifact_cache.ArtifactCache.fetch", new=fetch)
+@mock.patch(
+ "mozperftest.test.browsertime.runner.BrowsertimeRunner._setup_node_packages",
+ new=lambda x, y: None,
+)
+@mock.patch("mozperftest.system.android.ADBLoggedDevice", new=FakeDevice)
+def test_android_log(*mocked):
+ with temp_file() as logcat, temp_dir() as output:
+ args = {
+ "flavor": "mobile-browser",
+ "android-install-apk": ["this.apk"],
+ "android": True,
+ "console": True,
+ "android-timeout": 30,
+ "android-capture-adb": "stdout",
+ "android-capture-logcat": logcat,
+ "android-app-name": "org.mozilla.fenix",
+ "androidlog": True,
+ "output": output,
+ "browsertime-no-window-recorder": False,
+ "browsertime-viewport-size": "1234x567",
+ "tests": [EXAMPLE_TEST],
+ }
+
+ mach_cmd, metadata, env = get_running_env(**args)
+
+ with env.layers[SYSTEM] as sys, env.layers[TEST] as andro:
+ metadata = andro(sys(metadata))
+
+ # we want to drop the first result
+ metadata._results = metadata._results[1:]
+ with env.layers[METRICS] as metrics:
+ metadata = metrics(metadata)
+
+ assert pathlib.Path(output, "LogCatstd-output.json").exists()
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozperftest/mozperftest/tests/test_argparser.py b/python/mozperftest/mozperftest/tests/test_argparser.py
new file mode 100644
index 0000000000..b65a0809f8
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_argparser.py
@@ -0,0 +1,160 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from datetime import date
+
+import mozunit
+import pytest
+
+from mozperftest.argparser import (
+ Options,
+ PerftestArgumentParser,
+ PerftestToolsArgumentParser,
+)
+
+
+def test_argparser():
+ parser = PerftestArgumentParser()
+ args = ["test_one.js"]
+ res = parser.parse_args(args)
+ assert res.tests == ["test_one.js"]
+
+
+def test_argparser_defaults():
+ parser = PerftestArgumentParser()
+ args = ["test_one.js"]
+ res = parser.parse_args(args)
+ assert res.console_simplify_exclude == ["statistics"]
+
+
+def test_options():
+ assert Options.args["--proxy"]["help"] == "Activates the proxy layer"
+ assert Options.args["--no-browsertime"]["help"] == (
+ "Deactivates the " "browsertime layer"
+ )
+
+
+def test_layer_option():
+ parser = PerftestArgumentParser()
+ assert parser.parse_args(["--notebook-metrics"]) == parser.parse_args(
+ ["--notebook-metrics", "--notebook"]
+ )
+ assert parser.parse_known_args(["--notebook-metrics"]) == parser.parse_known_args(
+ ["--notebook-metrics", "--notebook"]
+ )
+
+
+def test_bad_test_date():
+ parser = PerftestArgumentParser()
+ args = ["test_one.js", "--test-date", "bleh"]
+ with pytest.raises(SystemExit):
+ parser.parse_args(args)
+
+
+def test_test_date_today():
+ parser = PerftestArgumentParser()
+ args = ["test_one.js", "--test-date", "today"]
+ res = parser.parse_args(args)
+ assert res.test_date == date.today().strftime("%Y.%m.%d")
+
+
+def test_perfherder_metrics():
+
+ parser = PerftestArgumentParser()
+ args = [
+ "test_one.js",
+ "--perfherder-metrics",
+ "name:foo,unit:ms,alertThreshold:2",
+ "name:baz,unit:count,alertThreshold:2,lowerIsBetter:false",
+ ]
+
+ res = parser.parse_args(args)
+ assert res.perfherder_metrics[0]["name"] == "foo"
+ assert res.perfherder_metrics[1]["alertThreshold"] == 2
+
+ args = [
+ "test_one.js",
+ "--perfherder-metrics",
+ "name:foo,unit:ms,alertThreshold:2",
+ "name:baz,UNKNOWN:count,alertThreshold:2,lowerIsBetter:false",
+ ]
+
+ with pytest.raises(SystemExit):
+ parser.parse_args(args)
+
+ args = [
+ "test_one.js",
+ "--perfherder-metrics",
+ "name:foo,unit:ms,alertThreshold:2",
+ "namemalformedbaz,alertThreshold:2,lowerIsBetter:false",
+ ]
+
+ with pytest.raises(SystemExit):
+ parser.parse_args(args)
+
+ # missing the name!
+ args = [
+ "test_one.js",
+ "--perfherder-metrics",
+ "name:foo,unit:ms,alertThreshold:2",
+ "alertThreshold:2,lowerIsBetter:false",
+ ]
+
+ with pytest.raises(SystemExit):
+ parser.parse_args(args)
+
+ # still supporting just plain names
+ args = [
+ "test_one.js",
+ "--perfherder-metrics",
+ "name:foo,unit:euros,alertThreshold:2",
+ "baz",
+ ]
+
+ res = parser.parse_args(args)
+ assert res.perfherder_metrics[1]["name"] == "baz"
+ assert res.perfherder_metrics[0]["name"] == "foo"
+ assert res.perfherder_metrics[0]["unit"] == "euros"
+
+
+def test_tools_argparser_bad_tool():
+ with pytest.raises(SystemExit):
+ PerftestToolsArgumentParser()
+
+
+def test_tools_bad_argparser():
+ PerftestToolsArgumentParser.tool = "side-by-side"
+ parser = PerftestToolsArgumentParser()
+ args = [
+ "-t",
+ "browsertime-first-install-firefox-welcome",
+ "--base-platform",
+ "test-linux1804-64-shippable-qr",
+ ]
+ with pytest.raises(SystemExit):
+ parser.parse_args(args)
+
+
+def test_tools_argparser():
+ PerftestToolsArgumentParser.tool = "side-by-side"
+ parser = PerftestToolsArgumentParser()
+ args = [
+ "-t",
+ "browsertime-first-install-firefox-welcome",
+ "--base-platform",
+ "test-linux1804-64-shippable-qr",
+ "--base-revision",
+ "438092d03ac4b9c36b52ba455da446afc7e14213",
+ "--new-revision",
+ "29943068938aa9e94955dbe13c2e4c254553e4ce",
+ ]
+ res = parser.parse_args(args)
+ assert res.test_name == "browsertime-first-install-firefox-welcome"
+ assert res.platform == "test-linux1804-64-shippable-qr"
+ assert res.base_revision == "438092d03ac4b9c36b52ba455da446afc7e14213"
+ assert res.new_revision == "29943068938aa9e94955dbe13c2e4c254553e4ce"
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozperftest/mozperftest/tests/test_browsertime.py b/python/mozperftest/mozperftest/tests/test_browsertime.py
new file mode 100644
index 0000000000..e0cd298da7
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_browsertime.py
@@ -0,0 +1,364 @@
+#!/usr/bin/env python
+import os
+import pathlib
+import random
+import shutil
+import string
+from unittest import mock
+
+import mozunit
+import pytest
+
+from mozperftest.environment import SYSTEM, TEST
+from mozperftest.test.browsertime import add_options
+from mozperftest.test.browsertime.runner import (
+ NodeException,
+ extract_browser_name,
+ matches,
+)
+from mozperftest.tests.support import EXAMPLE_TEST, get_running_env
+from mozperftest.utils import silence, temporary_env
+
+HERE = os.path.dirname(__file__)
+
+# Combine these dictionaries as required for mocking the
+# Browsertime installation related methods
+BTIME_PKG_DEP = {
+ "devDependencies": {"browsertime": "89771a1d6be54114db190427dbc281582cba3d47"}
+}
+BTIME_PKG_NO_INSTALL = {
+ "packages": {
+ "node_modules/browsertime": {
+ "resolved": (
+ "browsertime@https://github.com/sitespeedio/browsertime"
+ "/tarball/89771a1d6be54114db190427dbc281582cba3d47"
+ )
+ }
+ }
+}
+BTIME_PKG_REINSTALL = {
+ "packages": {
+ "node_modules/browsertime": {
+ "resolved": (
+ "browsertime@https://github.com/sitespeedio/browsertime"
+ "/tarball/98747854be54114db190427dbc281582cba3d47"
+ )
+ }
+ }
+}
+
+
+def fetch(self, url):
+ return os.path.join(HERE, "fetched_artifact.zip")
+
+
+def mocked_jsonload(val):
+ return val.__iter__.return_value
+
+
+def build_mock_open(files_data):
+ mocked_opens = []
+
+ for data in files_data:
+ mocked_file = mock.MagicMock()
+ mocked_file.__enter__.return_value.__iter__.return_value = data
+ mocked_opens.append(mocked_file)
+
+ m = mock.mock_open()
+ m.side_effect = mocked_opens
+ return m
+
+
+@mock.patch("mozperftest.test.browsertime.runner.install_package")
+@mock.patch(
+ "mozperftest.test.noderunner.NodeRunner.verify_node_install", new=lambda x: True
+)
+@mock.patch("mozbuild.artifact_cache.ArtifactCache.fetch", new=fetch)
+@mock.patch(
+ "mozperftest.test.browsertime.runner.BrowsertimeRunner._setup_node_packages",
+ new=lambda x, y: None,
+)
+def test_browser(*mocked):
+ mach_cmd, metadata, env = get_running_env(
+ android=True,
+ android_app_name="something",
+ browsertime_geckodriver="GECKODRIVER",
+ browsertime_iterations=1,
+ browsertime_extra_options="one=1,two=2",
+ tests=[EXAMPLE_TEST],
+ browsertime_no_window_recorder=False,
+ browsertime_viewport_size="1234x567",
+ )
+
+ sys = env.layers[SYSTEM]
+ browser = env.layers[TEST]
+ try:
+ with sys as s, browser as b, silence():
+ b(s(metadata))
+ finally:
+ shutil.rmtree(mach_cmd._mach_context.state_dir)
+ assert mach_cmd.run_process.call_count == 1
+
+ # Make sure all arguments are of type str
+ for option in mach_cmd.run_process.call_args[0][0]:
+ assert isinstance(option, str)
+
+ cmd = " ".join(mach_cmd.run_process.call_args[0][0])
+ assert EXAMPLE_TEST in cmd
+ assert "--firefox.geckodriverPath GECKODRIVER" in cmd
+ assert "--one 1" in cmd
+ assert "--two 2" in cmd
+
+ results = metadata.get_results()
+ assert len(results) == 1
+ assert set(list(results[0].keys())) - set(["name", "results"]) == set()
+ assert results[0]["name"] == "Example"
+
+
+@mock.patch(
+ "mozperftest.test.browsertime.runner.BrowsertimeRunner.browsertime_js",
+ new=pathlib.Path("doesn't-exist"),
+)
+@mock.patch(
+ "mozperftest.test.browsertime.runner.BrowsertimeRunner.visualmetrics_py",
+ new=pathlib.Path("doesn't-exist-either"),
+)
+def test_browsertime_not_existing():
+ _, _, env = get_running_env(
+ android=True,
+ android_app_name="something",
+ browsertime_geckodriver="GECKODRIVER",
+ browsertime_iterations=1,
+ browsertime_extra_options="one=1,two=2",
+ tests=[EXAMPLE_TEST],
+ )
+ browser = env.layers[TEST]
+ btime_layer = browser.layers[0]
+ assert btime_layer._should_install()
+
+
+@mock.patch(
+ "mozperftest.test.browsertime.runner.pathlib.Path.exists", new=lambda x: True
+)
+def test_browsertime_no_reinstall():
+ _, _, env = get_running_env(
+ android=True,
+ android_app_name="something",
+ browsertime_geckodriver="GECKODRIVER",
+ browsertime_iterations=1,
+ browsertime_extra_options="one=1,two=2",
+ tests=[EXAMPLE_TEST],
+ )
+
+ with mock.patch(
+ "mozperftest.test.browsertime.runner.pathlib.Path.open",
+ build_mock_open([BTIME_PKG_DEP, BTIME_PKG_NO_INSTALL]),
+ ), mock.patch("mozperftest.test.browsertime.runner.json.load", new=mocked_jsonload):
+ browser = env.layers[TEST]
+ btime_layer = browser.layers[0]
+ assert not btime_layer._should_install()
+
+
+@mock.patch(
+ "mozperftest.test.browsertime.runner.pathlib.Path.exists", new=lambda x: True
+)
+def test_browsertime_should_reinstall():
+ _, _, env = get_running_env(
+ android=True,
+ android_app_name="something",
+ browsertime_geckodriver="GECKODRIVER",
+ browsertime_iterations=1,
+ browsertime_extra_options="one=1,two=2",
+ tests=[EXAMPLE_TEST],
+ )
+
+ with mock.patch(
+ "mozperftest.test.browsertime.runner.pathlib.Path.open",
+ build_mock_open([BTIME_PKG_DEP, BTIME_PKG_REINSTALL]),
+ ), mock.patch("mozperftest.test.browsertime.runner.json.load", new=mocked_jsonload):
+ browser = env.layers[TEST]
+ btime_layer = browser.layers[0]
+ assert btime_layer._should_install()
+
+
+@mock.patch("mozperftest.test.browsertime.runner.install_package")
+@mock.patch(
+ "mozperftest.test.noderunner.NodeRunner.verify_node_install", new=lambda x: True
+)
+@mock.patch("mozbuild.artifact_cache.ArtifactCache.fetch", new=fetch)
+@mock.patch(
+ "mozperftest.test.browsertime.runner.BrowsertimeRunner._setup_node_packages",
+ new=lambda x, y: None,
+)
+def test_browser_failed(*mocked):
+ mach_cmd, metadata, env = get_running_env(
+ android=True,
+ android_app_name="something",
+ browsertime_geckodriver="GECKODRIVER",
+ browsertime_iterations=1,
+ browsertime_extra_options="one=1,two=2",
+ tests=[EXAMPLE_TEST],
+ browsertime_no_window_recorder=False,
+ browsertime_viewport_size="1234x567",
+ )
+ # set the return value to 1 to simulate a node failure
+ mach_cmd.run_process.return_value = 1
+ browser = env.layers[TEST]
+ sys = env.layers[SYSTEM]
+
+ with sys as s, browser as b, silence(), pytest.raises(NodeException):
+ b(s(metadata))
+
+
+@mock.patch("mozperftest.test.browsertime.runner.install_package")
+@mock.patch(
+ "mozperftest.test.noderunner.NodeRunner.verify_node_install", new=lambda x: True
+)
+@mock.patch("mozbuild.artifact_cache.ArtifactCache.fetch", new=fetch)
+@mock.patch(
+ "mozperftest.test.browsertime.runner.BrowsertimeRunner._setup_node_packages",
+ new=lambda x, y: None,
+)
+def test_browser_desktop(*mocked):
+ mach_cmd, metadata, env = get_running_env(
+ browsertime_iterations=1,
+ browsertime_extra_options="one=1,two=2",
+ tests=[EXAMPLE_TEST],
+ browsertime_no_window_recorder=False,
+ browsertime_viewport_size="1234x567",
+ )
+ browser = env.layers[TEST]
+ sys = env.layers[SYSTEM]
+
+ try:
+ with sys as s, browser as b, silence():
+ # just checking that the setup_helper property gets
+ # correctly initialized
+ browsertime = browser.layers[-1]
+ assert browsertime.setup_helper is not None
+ helper = browsertime.setup_helper
+ assert browsertime.setup_helper is helper
+
+ b(s(metadata))
+ finally:
+ shutil.rmtree(mach_cmd._mach_context.state_dir)
+
+ assert mach_cmd.run_process.call_count == 1
+ cmd = " ".join(mach_cmd.run_process.call_args[0][0])
+ # check that --firefox.binaryPath is set automatically
+ assert "--firefox.binaryPath" in cmd
+
+
+@mock.patch("mozperftest.test.browsertime.runner.install_package")
+@mock.patch(
+ "mozperftest.test.noderunner.NodeRunner.verify_node_install", new=lambda x: True
+)
+@mock.patch("mozbuild.artifact_cache.ArtifactCache.fetch", new=fetch)
+@mock.patch(
+ "mozperftest.test.browsertime.runner.BrowsertimeRunner._setup_node_packages",
+ new=lambda x, y: None,
+)
+def test_existing_results(*mocked):
+ mach_cmd, metadata, env = get_running_env(
+ browsertime_existing_results="/some/path",
+ tests=[EXAMPLE_TEST],
+ )
+ browser = env.layers[TEST]
+ sys = env.layers[SYSTEM]
+
+ try:
+ with sys as s, browser as b, silence():
+ # just checking that the setup_helper property gets
+ # correctly initialized
+ browsertime = browser.layers[-1]
+ assert browsertime.setup_helper is not None
+ helper = browsertime.setup_helper
+ assert browsertime.setup_helper is helper
+
+ m = b(s(metadata))
+ results = m.get_results()
+ assert len(results) == 1
+ assert results[0]["results"] == "/some/path"
+ assert results[0]["name"] == "Example"
+ finally:
+ shutil.rmtree(mach_cmd._mach_context.state_dir)
+
+ assert mach_cmd.run_process.call_count == 0
+
+
+def test_add_options():
+ mach_cmd, metadata, env = get_running_env()
+ options = [("one", 1), ("two", 2)]
+ add_options(env, options)
+ extra = env.get_arg("browsertime-extra-options")
+ assert "one=1" in extra
+ assert "two=2" in extra
+
+
+@mock.patch("mozperftest.test.browsertime.runner.install_package")
+@mock.patch(
+ "mozperftest.test.noderunner.NodeRunner.verify_node_install", new=lambda x: True
+)
+@mock.patch("mozbuild.artifact_cache.ArtifactCache.fetch", new=fetch)
+@mock.patch("mozperftest.test.browsertime.runner.BrowsertimeRunner.setup_helper")
+def test_install_url(*mocked):
+ url = "https://here/tarball/" + "".join(
+ [random.choice(string.hexdigits[:-6]) for c in range(40)]
+ )
+ mach, metadata, env = get_running_env(
+ browsertime_install_url=url,
+ tests=[EXAMPLE_TEST],
+ browsertime_no_window_recorder=False,
+ browsertime_viewport_size="1234x567",
+ )
+ browser = env.layers[TEST]
+ sys = env.layers[SYSTEM]
+
+ try:
+ with sys as s, temporary_env(MOZ_AUTOMATION="1"), browser as b, silence():
+ b(s(metadata))
+ finally:
+ shutil.rmtree(mach._mach_context.state_dir)
+
+ assert mach.run_process.call_count == 1
+
+
+@mock.patch("mozperftest.test.browsertime.runner.install_package")
+@mock.patch(
+ "mozperftest.test.noderunner.NodeRunner.verify_node_install", new=lambda x: True
+)
+@mock.patch("mozbuild.artifact_cache.ArtifactCache.fetch", new=fetch)
+@mock.patch(
+ "mozperftest.test.browsertime.runner.BrowsertimeRunner._setup_node_packages",
+ new=lambda x, y: None,
+)
+def test_install_url_bad(*mocked):
+ mach, metadata, env = get_running_env(
+ browsertime_install_url="meh",
+ tests=[EXAMPLE_TEST],
+ )
+ browser = env.layers[TEST]
+ sys = env.layers[SYSTEM]
+
+ with pytest.raises(ValueError):
+ try:
+ with sys as s, browser as b, silence():
+ b(s(metadata))
+ finally:
+ shutil.rmtree(mach._mach_context.state_dir)
+
+
+def test_matches():
+ args = ["arg1=1", "--arg2=value2"]
+
+ assert matches(args, "arg1")
+ assert not matches(args, "arg3")
+
+
+def test_extract_browser_name():
+ args = ["arg1=1", "--arg2=value2", "--browser=me", "--zome"]
+ assert extract_browser_name(args) == "me"
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozperftest/mozperftest/tests/test_change_detector.py b/python/mozperftest/mozperftest/tests/test_change_detector.py
new file mode 100644
index 0000000000..ee9fa5fa0f
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_change_detector.py
@@ -0,0 +1,113 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import pathlib
+from unittest import mock
+
+import mozunit
+import pytest
+
+from mozperftest.tests.support import temp_file
+from mozperftest.tools import PerformanceChangeDetected, run_change_detector
+
+
+def test_change_detector_basic(kwargs=None, return_value=({}, {})):
+ mocked_detector = mock.MagicMock()
+ mocked_detector_module = mock.MagicMock()
+ mocked_detector_module.ChangeDetector = mocked_detector
+
+ with mock.patch.dict(
+ "sys.modules",
+ {
+ "mozperftest_tools.regression_detector": mocked_detector_module,
+ },
+ ):
+ mocked_detector.return_value.detect_changes.return_value = return_value
+
+ with temp_file() as f:
+ parent_dir = pathlib.Path(f).parent
+
+ if kwargs is None:
+ kwargs = {
+ "test_name": "browsertime-test",
+ "new_test_name": None,
+ "platform": "test-platform/opt",
+ "new_platform": None,
+ "base_branch": "try",
+ "new_branch": "try",
+ "base_revision": "99",
+ "new_revision": "99",
+ }
+
+ run_change_detector(parent_dir, kwargs)
+
+ mocked_detector.return_value.detect_changes.assert_called()
+
+ return mocked_detector_module
+
+
+def test_change_detector_with_task_name():
+ test_change_detector_basic(
+ {
+ "task_names": ["test-platform/opt-browsertime-test"],
+ "new_test_name": None,
+ "platform": None,
+ "new_platform": None,
+ "base_branch": "try",
+ "new_branch": "try",
+ "base_revision": "99",
+ "new_revision": "99",
+ }
+ )
+
+
+def test_change_detector_option_failure():
+ with pytest.raises(Exception):
+ test_change_detector_basic(
+ {
+ "test_name": None,
+ "new_test_name": None,
+ "platform": "test-platform/opt",
+ "new_platform": None,
+ "base_branch": "try",
+ "new_branch": "try",
+ "base_revision": "99",
+ "new_revision": "99",
+ }
+ )
+
+ with pytest.raises(Exception):
+ test_change_detector_basic(
+ {
+ "test_name": "browsertime-test",
+ "new_test_name": None,
+ "platform": None,
+ "new_platform": None,
+ "base_branch": "try",
+ "new_branch": "try",
+ "base_revision": "99",
+ "new_revision": "99",
+ }
+ )
+
+
+def test_change_detector_with_detection():
+ with pytest.raises(PerformanceChangeDetected):
+ test_change_detector_basic(
+ {
+ "task_names": ["test-platform/opt-browsertime-test"],
+ "new_test_name": None,
+ "platform": None,
+ "new_platform": None,
+ "base_branch": "try",
+ "new_branch": "try",
+ "base_revision": "99",
+ "new_revision": "99",
+ },
+ (["detection"], {"warm": {"metric": {"detection": [99]}}, "cold": {}}),
+ )
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozperftest/mozperftest/tests/test_consoleoutput.py b/python/mozperftest/mozperftest/tests/test_consoleoutput.py
new file mode 100644
index 0000000000..e64cb82430
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_consoleoutput.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+from unittest import mock
+
+import mozunit
+
+from mozperftest.environment import METRICS
+from mozperftest.tests.support import BT_DATA, EXAMPLE_TEST, get_running_env
+from mozperftest.utils import silence, temp_dir
+
+
+@mock.patch("mozperftest.metrics.common.validate_intermediate_results")
+def test_console_output(*mocked):
+ with temp_dir() as tempdir:
+ options = {
+ "console-prefix": "",
+ "console": True,
+ "output": tempdir,
+ }
+ mach_cmd, metadata, env = get_running_env(**options)
+ runs = []
+
+ def _run_process(*args, **kw):
+ runs.append((args, kw))
+
+ mach_cmd.run_process = _run_process
+ metrics = env.layers[METRICS]
+ env.set_arg("tests", [EXAMPLE_TEST])
+ res = {"name": "name", "results": [str(BT_DATA)]}
+ metadata.add_result(res)
+
+ with metrics as console, silence():
+ console(metadata)
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozperftest/mozperftest/tests/test_constant.py b/python/mozperftest/mozperftest/tests/test_constant.py
new file mode 100644
index 0000000000..99821f9680
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_constant.py
@@ -0,0 +1,13 @@
+import mozunit
+
+from mozperftest.metrics.notebook.constant import Constant
+from mozperftest.metrics.notebook.transforms.single_json import SingleJsonRetriever
+
+
+def test_predefined_transformers():
+ tfms = Constant().predefined_transformers
+ assert SingleJsonRetriever.__name__ in tfms
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozperftest/mozperftest/tests/test_environment.py b/python/mozperftest/mozperftest/tests/test_environment.py
new file mode 100644
index 0000000000..556c7eec85
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_environment.py
@@ -0,0 +1,158 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from pathlib import Path
+from unittest import mock
+
+import mozunit
+import pytest
+
+from mozperftest.environment import MachEnvironment
+from mozperftest.hooks import Hooks
+from mozperftest.layers import Layer
+from mozperftest.tests.support import get_running_env, requests_content
+
+HERE = Path(__file__).parent.resolve()
+
+
+def _get_env(hooks_path):
+ return MachEnvironment(mock.MagicMock(), hooks=Hooks(mock.MagicMock(), hooks_path))
+
+
+def test_run_hooks():
+ env = _get_env(Path(HERE, "data", "hook.py"))
+ assert env.hooks.run("doit", env) == "OK"
+
+
+def test_bad_hooks():
+ with pytest.raises(IOError):
+ _get_env("Idontexists")
+
+
+doit = [b"def doit(*args, **kw):\n", b" return 'OK'\n"]
+
+
+@mock.patch("mozperftest.utils.requests.get", requests_content(doit))
+def test_run_hooks_url():
+ env = _get_env("http://somewhere/hooks.py")
+ assert env.hooks.run("doit", env) == "OK"
+
+
+def test_layers():
+ env = MachEnvironment(mock.MagicMock())
+ assert env.get_layer("browsertime").name == "browsertime"
+
+
+def test_context():
+ mach, metadata, env = get_running_env()
+ env.layers = [mock.MagicMock(), mock.MagicMock(), mock.MagicMock()]
+ with env:
+ env.run(metadata)
+
+
+class FailureException(Exception):
+ pass
+
+
+class Failure(Layer):
+ user_exception = True
+
+ def run(self, metadata):
+ raise FailureException()
+
+
+def create_mock():
+ m = mock.Mock()
+
+ # need to manually set those
+ def enter(self):
+ self.setup()
+ return self
+
+ def exit(self, type, value, traceback):
+ self.teardown()
+
+ m.__enter__ = enter
+ m.__exit__ = exit
+ m.__call__ = mock.Mock()
+ return m
+
+
+def test_exception_return():
+ # the last layer is not called, the error is swallowed
+ hooks = str(Path(HERE, "data", "hook.py"))
+ mach, metadata, env = get_running_env(hooks=hooks)
+ last_layer = create_mock()
+ env.layers = [create_mock(), Failure(env, mach), last_layer]
+ with env:
+ env.run(metadata)
+ last_layer.assert_not_called()
+
+
+def test_exception_resume():
+ # the last layer is called, the error is swallowed
+ hooks = str(Path(HERE, "data", "hook_resume.py"))
+ mach, metadata, env = get_running_env(hooks=hooks)
+ last_layer = create_mock()
+ env.layers = [create_mock(), Failure(env, mach), last_layer]
+ with env:
+ env.run(metadata)
+ last_layer.assert_called()
+
+
+def test_exception_no_user_exception():
+ # the last layer is called, the error is raised
+ # because user_exception = False
+ hooks = str(Path(HERE, "data", "hook_resume.py"))
+ mach, metadata, env = get_running_env(hooks=hooks)
+ last_layer = create_mock()
+ f = Failure(env, mach)
+ f.user_exception = False
+ env.layers = [create_mock(), f, last_layer]
+ with env, pytest.raises(FailureException):
+ env.run(metadata)
+    last_layer.__call__.assert_not_called()
+
+
+def test_exception_raised():
+ # the error is raised
+ hooks = str(Path(HERE, "data", "hook_raises.py"))
+ mach, metadata, env = get_running_env(hooks=hooks)
+ last_layer = create_mock()
+ env.layers = [create_mock(), Failure(env, mach), last_layer]
+ with env, pytest.raises(FailureException):
+ env.run(metadata)
+ last_layer.__call__.assert_not_called()
+
+
+def test_metrics_last():
+ mach, metadata, env = get_running_env()
+
+ system = create_mock()
+ browser = create_mock()
+
+ # Check that the metrics layer is entered after
+ # other have finished and that the other layers
+ # were only called once
+ class M:
+ def __enter__(self):
+ system.setup.assert_called_once()
+ browser.setup.assert_called_once()
+ system.teardown.assert_called_once()
+ browser.teardown.assert_called_once()
+ return self
+
+ def __exit__(self, *args, **kw):
+ return
+
+ def __call__(self, metadata):
+ return
+
+ env.layers = [system, browser, M()]
+ with env:
+ env.run(metadata)
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozperftest/mozperftest/tests/test_fzf.py b/python/mozperftest/mozperftest/tests/test_fzf.py
new file mode 100644
index 0000000000..7fdd9a87a4
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_fzf.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python
+import json
+from pathlib import Path
+from unittest import mock
+
+import mozunit
+
+from mozperftest.fzf.fzf import select
+from mozperftest.fzf.preview import main
+from mozperftest.tests.support import EXAMPLE_TEST, temp_file
+from mozperftest.utils import silence
+
+
+class Fzf:
+ def __init__(self, cmd, *args, **kw):
+ self.cmd = cmd
+
+ def communicate(self, *args):
+ return "query\n" + args[0], "stderr"
+
+
+def fzf_executable(*args):
+ return None if len(args) == 2 else "fzf"
+
+
+@mock.patch("subprocess.Popen", new=Fzf)
+@mock.patch("mozperftest.fzf.fzf.find_executable", new=fzf_executable)
+def test_select(*mocked):
+ test_objects = [{"path": EXAMPLE_TEST}]
+ selection = select(test_objects)
+ assert len(selection) == 1
+
+
+@mock.patch("subprocess.Popen", new=Fzf)
+@mock.patch("mozperftest.fzf.fzf.find_executable", new=fzf_executable)
+def test_find_fzf_executable(*mocked):
+ test_objects = [{"path": EXAMPLE_TEST}]
+ selection = select(test_objects)
+ assert len(selection) == 1
+
+
+def test_preview():
+ content = Path(EXAMPLE_TEST)
+ line = f"[bt][sometag] {content.name} in {content.parent}"
+ test_objects = [{"path": str(content)}]
+ cache = Path(Path.home(), ".mozbuild", ".perftestfuzzy")
+ with cache.open("w") as f:
+ f.write(json.dumps(test_objects))
+
+ with temp_file(content=str(line)) as tasklist, silence() as out:
+ main(args=["-t", tasklist])
+
+ stdout, __ = out
+ stdout.seek(0)
+ assert ":owner: Performance Testing Team" in stdout.read()
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozperftest/mozperftest/tests/test_influx.py b/python/mozperftest/mozperftest/tests/test_influx.py
new file mode 100644
index 0000000000..5e5db67eb8
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_influx.py
@@ -0,0 +1,121 @@
+#!/usr/bin/env python
+import json
+import re
+import sys
+
+import mozunit
+import pytest
+import responses
+
+from mozperftest.metrics.perfboard.influx import Influx
+from mozperftest.tests.support import (
+ BT_DATA,
+ EXAMPLE_TEST,
+ get_running_env,
+ running_on_try,
+)
+from mozperftest.utils import ON_TRY, temp_dir
+
+
+def mocks():
+ # mocking the Influx service
+ responses.add(
+ responses.GET,
+ "http://influxdb/ping",
+ body=json.dumps({"version": "1"}),
+ headers={"x-influxdb-version": "1"},
+ status=204,
+ )
+
+ responses.add(
+ responses.POST,
+ "http://influxdb/write",
+ body=json.dumps({"version": "1"}),
+ headers={"x-influxdb-version": "1"},
+ status=204,
+ )
+
+ responses.add(
+ responses.GET,
+ "http://grafana/api/search?tag=component",
+ body=json.dumps([]),
+ status=200,
+ )
+
+ responses.add(
+ responses.POST,
+ "http://grafana/api/dashboards/db",
+ body=json.dumps({"uid": "id"}),
+ status=200,
+ )
+
+ responses.add(
+ responses.GET,
+ "http://grafana/api/dashboards/uid/id",
+ body=json.dumps({"dashboard": {"panels": []}}),
+ status=200,
+ )
+
+ responses.add(
+ responses.GET,
+ re.compile(
+ "https://firefox-ci-tc.services.mozilla.com/secrets/*|"
+ "http://taskcluster/secrets/*"
+ ),
+ body=json.dumps(
+ {
+ "secret": {
+ "influx_host": "influxdb",
+ "influx_port": 0,
+ "influx_user": "admin",
+ "influx_password": "pass",
+ "influx_db": "db",
+ "grafana_key": "xxx",
+ "grafana_host": "grafana",
+ "grafana_port": 0,
+ }
+ }
+ ),
+ status=200,
+ )
+
+
+@responses.activate
+@pytest.mark.parametrize("on_try", [True, False])
+def test_influx_service(on_try):
+ if ON_TRY and sys.platform == "darwin":
+ # macos slave in the CI are restricted
+ return
+
+ mocks()
+ with running_on_try(on_try), temp_dir() as output:
+ args = {
+ "verbose": True,
+ "output": output,
+ "perfboard-influx-password": "xxx",
+ "perfboard-grafana-key": "xxx",
+ "perfboard-grafana-host": "grafana",
+ "perfboard-influx-port": 0,
+ "perfboard-influx-host": "influxdb",
+ "tests": [EXAMPLE_TEST],
+ }
+
+ mach_cmd, metadata, env = get_running_env(**args)
+ metadata.add_result({"results": str(BT_DATA), "name": "browsertime"})
+ layer = Influx(env, mach_cmd)
+ layer.setup()
+ try:
+ metadata = layer.run(metadata)
+ finally:
+ layer.teardown()
+
+ index = on_try and 2 or 1
+ sent_data = responses.calls[index].request.body.split(b"\n")
+ fields = [line.split(b",")[0].strip() for line in sent_data]
+ assert b"rumspeedindex" in fields
+
+ responses.reset()
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozperftest/mozperftest/tests/test_ir_schema.py b/python/mozperftest/mozperftest/tests/test_ir_schema.py
new file mode 100644
index 0000000000..41f3ad7804
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_ir_schema.py
@@ -0,0 +1,103 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import mozunit
+import pytest
+from jsonschema.exceptions import ValidationError
+
+from mozperftest.metrics.utils import validate_intermediate_results
+
+
+def test_results_with_directory():
+ test_result = {"results": "path-to-results", "name": "the-name"}
+ validate_intermediate_results(test_result)
+
+
+def test_results_with_measurements():
+ test_result = {
+ "results": [
+ {"name": "metric-1", "values": [0, 1, 1, 0]},
+ {"name": "metric-2", "values": [0, 1, 1, 0]},
+ ],
+ "name": "the-name",
+ }
+ validate_intermediate_results(test_result)
+
+
+def test_results_with_suite_perfherder_options():
+ test_result = {
+ "results": [
+ {"name": "metric-1", "values": [0, 1, 1, 0]},
+ {"name": "metric-2", "values": [0, 1, 1, 0]},
+ ],
+ "name": "the-name",
+ "extraOptions": ["an-extra-option"],
+ "value": 9000,
+ }
+ validate_intermediate_results(test_result)
+
+
+def test_results_with_subtest_perfherder_options():
+ test_result = {
+ "results": [
+ {"name": "metric-1", "shouldAlert": True, "values": [0, 1, 1, 0]},
+ {"name": "metric-2", "alertThreshold": 1.0, "values": [0, 1, 1, 0]},
+ ],
+ "name": "the-name",
+ "extraOptions": ["an-extra-option"],
+ "value": 9000,
+ }
+ validate_intermediate_results(test_result)
+
+
+def test_results_with_bad_suite_property():
+ test_result = {
+ "results": "path-to-results",
+ "name": "the-name",
+ "I'll cause a failure,": "an expected failure",
+ }
+ with pytest.raises(ValidationError):
+ validate_intermediate_results(test_result)
+
+
+def test_results_with_bad_subtest_property():
+ test_result = {
+ "results": [
+ # Error is in "shouldalert", it should be "shouldAlert"
+ {"name": "metric-1", "shouldalert": True, "values": [0, 1, 1, 0]},
+ {"name": "metric-2", "alertThreshold": 1.0, "values": [0, 1, 1, 0]},
+ ],
+ "name": "the-name",
+ "extraOptions": ["an-extra-option"],
+ "value": 9000,
+ }
+ with pytest.raises(ValidationError):
+ validate_intermediate_results(test_result)
+
+
+def test_results_with_missing_suite_property():
+ test_result = {
+ # Missing "results"
+ "name": "the-name"
+ }
+ with pytest.raises(ValidationError):
+ validate_intermediate_results(test_result)
+
+
+def test_results_with_missing_subtest_property():
+ test_result = {
+ "results": [
+ # Missing "values"
+ {"name": "metric-2", "alertThreshold": 1.0}
+ ],
+ "name": "the-name",
+ "extraOptions": ["an-extra-option"],
+ "value": 9000,
+ }
+ with pytest.raises(ValidationError):
+ validate_intermediate_results(test_result)
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozperftest/mozperftest/tests/test_layers.py b/python/mozperftest/mozperftest/tests/test_layers.py
new file mode 100644
index 0000000000..c29b9ef2c0
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_layers.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python
+from unittest.mock import MagicMock
+
+import mozunit
+import pytest
+
+from mozperftest.environment import MachEnvironment
+from mozperftest.layers import Layer, Layers
+
+
+class _TestLayer(Layer):
+ name = "test"
+ activated = True
+ called = 0
+ arguments = {"--arg1": {"type": str, "default": "xxx", "help": "arg1"}}
+
+ def setup(self):
+ self.called += 1
+
+ def teardown(self):
+ self.called += 1
+
+
+class _TestLayer2(_TestLayer):
+ name = "test2"
+ activated = True
+ arguments = {"arg2": {"type": str, "default": "xxx", "help": "arg2"}}
+
+
+class _TestLayer3(_TestLayer):
+ name = "test3"
+ activated = True
+
+
+def test_layer():
+ mach = MagicMock()
+ env = MachEnvironment(mach, test=True, test_arg1="ok")
+
+ with _TestLayer(env, mach) as layer:
+ layer.info("info")
+ layer.warning("warning")
+ layer.debug("debug")
+ assert layer.get_arg("test")
+ assert layer.get_arg("arg1") == "ok"
+ assert layer.get_arg("test-arg1") == "ok"
+ layer.set_arg("arg1", "two")
+ assert layer.get_arg("test-arg1") == "two"
+ layer.set_arg("test-arg1", 1)
+ assert layer.get_arg("test-arg1") == 1
+ with pytest.raises(KeyError):
+ layer.set_arg("another", 1)
+
+ layer(object())
+
+ assert layer.called == 2
+
+
+def test_layers():
+ mach = MagicMock()
+ factories = [_TestLayer, _TestLayer2, _TestLayer3]
+ env = MachEnvironment(
+ mach, no_test3=True, test_arg1="ok", test2=True, test2_arg2="2"
+ )
+
+ with Layers(env, mach, factories) as layers:
+ # layer3 was deactivated with test3=False
+ assert len(layers.layers) == 2
+ layers.info("info")
+ layers.debug("debug")
+ layers.warning("warning")
+ assert layers.get_arg("--test2")
+ assert layers.get_arg("test-arg1") == "ok"
+ layers.set_arg("test-arg1", "two")
+ assert layers.get_arg("test-arg1") == "two"
+ layers.set_arg("--test-arg1", 1)
+ assert layers.get_arg("test-arg1") == 1
+ assert layers.get_layer("test2").name == "test2"
+ assert layers.get_layer("test3") is None
+ assert layers.name == "test + test2"
+ with pytest.raises(KeyError):
+ layers.set_arg("another", 1)
+
+ for layer in layers:
+ assert layer.called == 2
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozperftest/mozperftest/tests/test_logcat_transformer.py b/python/mozperftest/mozperftest/tests/test_logcat_transformer.py
new file mode 100644
index 0000000000..8f94caedfc
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_logcat_transformer.py
@@ -0,0 +1,125 @@
+#!/usr/bin/env python
+import mozunit
+import pytest
+
+from mozperftest.metrics.exceptions import (
+ NotebookTransformError,
+ NotebookTransformOptionsError,
+)
+from mozperftest.metrics.notebook.transforms.logcattime import LogCatTimeTransformer
+from mozperftest.tests.support import HERE
+
+
+@pytest.fixture(scope="session", autouse=True)
+def tfm():
+ yield LogCatTimeTransformer()
+
+
+@pytest.fixture(scope="session", autouse=True)
+def logcat_data(tfm):
+ data = tfm.open_data(str(HERE / "data" / "home_activity.txt"))
+ assert data
+ yield data
+
+
+def test_logcat_transform_two_regex(tfm, logcat_data):
+ restart = r".*Activity.*Manager.*START.*org\.mozilla\.fennec_aurora/org\.mozilla\.fenix\.HomeActivity.*" # noqa
+ reend = r".*Displayed.*org\.mozilla\.fennec_aurora.*"
+ opts = {
+ "first-timestamp": restart,
+ "second-timestamp": reend,
+ "transform-subtest-name": "HANOOBish",
+ }
+
+ actual_result = tfm.transform(logcat_data, **opts)
+ expected_result = [
+ {
+ "data": [
+ {"value": 1782.0, "xaxis": 0},
+ {"value": 1375.0, "xaxis": 1},
+ {"value": 1497.0, "xaxis": 2},
+ ],
+ "subtest": "HANOOBish",
+ }
+ ]
+ assert actual_result == expected_result
+
+ # We should get the same results back from merge
+ # since we are working with only one file
+ merged = tfm.merge(actual_result)
+ assert merged == expected_result
+
+
+def test_logcat_transform_one_regex(tfm, logcat_data):
+ def processor(groups):
+ """Parses the time from a displayed time string into milliseconds."""
+ return (float(groups[0]) * 1000) + float(groups[1])
+
+ re_w_group = r".*Displayed.*org\.mozilla\.fennec_aurora.*\+([\d]+)s([\d]+)ms.*"
+ opts = {
+ "first-timestamp": re_w_group,
+ "processor": processor,
+ "transform-subtest-name": "TimeToDisplayed",
+ }
+
+ actual_result = tfm.transform(logcat_data, **opts)
+ expected_result = [
+ {
+ "data": [
+ {"value": 1743.0, "xaxis": 0},
+ {"value": 1325.0, "xaxis": 1},
+ {"value": 1462.0, "xaxis": 2},
+ ],
+ "subtest": "TimeToDisplayed",
+ }
+ ]
+ assert actual_result == expected_result
+
+
+def test_logcat_transform_no_processor(tfm, logcat_data):
+ re_w_group = r".*Displayed.*org\.mozilla\.fennec_aurora.*\+([\d]+)s([\d]+)ms.*"
+ opts = {
+ "first-timestamp": re_w_group,
+ "transform-subtest-name": "TimeToDisplayed",
+ }
+
+ actual_result = tfm.transform(logcat_data, **opts)
+ expected_result = [
+ {
+ "data": [
+ {"value": 1.0, "xaxis": 0},
+ {"value": 1.0, "xaxis": 1},
+ {"value": 1.0, "xaxis": 2},
+ ],
+ "subtest": "TimeToDisplayed",
+ }
+ ]
+ assert actual_result == expected_result
+
+
+def test_logcat_transform_no_groups(tfm, logcat_data):
+ re_w_group = r".*Displayed.*org\.mozilla\.fennec_aurora.*"
+ opts = {
+ "first-timestamp": re_w_group,
+ "transform-subtest-name": "TimeToDisplayed",
+ }
+
+ with pytest.raises(NotebookTransformOptionsError):
+ tfm.transform(logcat_data, **opts)
+
+
+def test_logcat_transform_too_many_groups(tfm, logcat_data):
+ restart = r".*Activity.*Manager.*START.*org\.mozilla\.fennec_aurora/org\.mozilla\.fenix\.HomeActivity.*" # noqa
+ reend = r".*Displayed.*org\.mozilla\.fennec_aurora.*\+([\d]+)s([\d]+)ms.*"
+ opts = {
+ "first-timestamp": restart,
+ "second-timestamp": reend,
+ "transform-subtest-name": "HANOOBish",
+ }
+
+ with pytest.raises(NotebookTransformError):
+ tfm.transform(logcat_data, **opts)
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozperftest/mozperftest/tests/test_mach_commands.py b/python/mozperftest/mozperftest/tests/test_mach_commands.py
new file mode 100644
index 0000000000..d677ac52bd
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_mach_commands.py
@@ -0,0 +1,331 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import shutil
+import sys
+import tempfile
+from contextlib import contextmanager
+from pathlib import Path
+from unittest import mock
+
+import mozunit
+import pytest
+from mach.registrar import Registrar
+
+Registrar.categories = {"testing": []}
+Registrar.commands_by_category = {"testing": set()}
+
+from mozbuild.base import MachCommandBase # noqa
+
+import mozperftest.mach_commands # noqa
+from mozperftest.environment import MachEnvironment # noqa
+from mozperftest.tests.support import EXAMPLE_TEST, ROOT, running_on_try # noqa
+from mozperftest.utils import silence, temporary_env # noqa
+
+ITERATION_HOOKS = Path(__file__).parent / "data" / "hooks_iteration.py"
+STATE_HOOKS = Path(__file__).parent / "data" / "hooks_state.py"
+
+
+class _TestMachEnvironment(MachEnvironment):
+ def __init__(self, mach_cmd, flavor="desktop-browser", hooks=None, **kwargs):
+ MachEnvironment.__init__(self, mach_cmd, flavor, hooks, **kwargs)
+ self.runs = 0
+
+ def run(self, metadata):
+ self.runs += 1
+ return metadata
+
+ def __enter__(self):
+ pass
+
+ def __exit__(self, type, value, traceback):
+ pass
+
+
+@contextmanager
+def _get_command(command=mozperftest.mach_commands.run_perftest):
+ from mozbuild.base import MozbuildObject
+
+ from mozperftest.argparser import PerftestArgumentParser
+
+ config = MozbuildObject.from_environment()
+
+ class context:
+ topdir = config.topobjdir
+ cwd = os.getcwd()
+ settings = {}
+ log_manager = mock.Mock()
+ state_dir = tempfile.mkdtemp()
+
+ # used to make arguments passed by the test as
+ # being set by the user.
+ def _run_perftest(func):
+ def _run(command_context, **kwargs):
+ parser.set_by_user = list(kwargs.keys())
+ return func(command_context, **kwargs)
+
+ return _run
+
+ try:
+ command_context = MachCommandBase(context())
+
+ if command == mozperftest.mach_commands.run_perftest:
+ parser = PerftestArgumentParser()
+ command = _run_perftest(command)
+
+ with mock.patch("mozperftest.mach_commands.get_parser", new=lambda: parser):
+ yield command, command_context
+ finally:
+ shutil.rmtree(context.state_dir)
+
+
+@contextmanager
+def _get_tools_command(tool="side-by-side"):
+ from mozbuild.base import MozbuildObject
+
+ config = MozbuildObject.from_environment()
+
+ class context:
+ topdir = config.topobjdir
+ cwd = os.getcwd()
+ settings = {}
+ log_manager = mock.Mock()
+ state_dir = tempfile.mkdtemp()
+
+ # used to make arguments passed by the test as
+ # being set by the user.
+ def _run_tool(func):
+ def _run(command_context, **kwargs):
+ parser.set_by_user = list(kwargs.keys())
+ return func(command_context, **kwargs)
+
+ return _run
+
+ try:
+ command_context = MachCommandBase(context())
+
+ command = _run_tool(mozperftest.mach_commands.run_side_by_side)
+ parser = mozperftest.mach_commands.get_perftest_tools_parser(tool)
+
+ with mock.patch(
+ "mozperftest.mach_commands.get_perftest_tools_parser", new=lambda: parser
+ ):
+ yield command, command_context
+ finally:
+ shutil.rmtree(context.state_dir)
+
+
+@mock.patch("mozperftest.MachEnvironment", new=_TestMachEnvironment)
+@mock.patch("mozbuild.base.MachCommandBase.activate_virtualenv")
+def test_command(mocked_func):
+ with _get_command() as (cmd, command_context), silence(command_context):
+ cmd(command_context, tests=[EXAMPLE_TEST], flavor="desktop-browser")
+
+
+@mock.patch("mozperftest.MachEnvironment")
+@mock.patch("mozbuild.base.MachCommandBase.activate_virtualenv")
+def test_command_iterations(venv, env):
+ kwargs = {
+ "tests": [EXAMPLE_TEST],
+ "hooks": ITERATION_HOOKS,
+ "flavor": "desktop-browser",
+ }
+ with _get_command() as (cmd, command_context), silence(command_context):
+ cmd(command_context, **kwargs)
+ # the hook changes the iteration value to 5.
+ # each iteration generates 5 calls, so we want to see 25
+ assert len(env.mock_calls) == 25
+
+
+@mock.patch("mozperftest.MachEnvironment")
+@mock.patch("mozbuild.base.MachCommandBase.activate_virtualenv")
+def test_hooks_state(venv, env):
+ kwargs = {
+ "tests": [EXAMPLE_TEST],
+ "hooks": STATE_HOOKS,
+ "flavor": "desktop-browser",
+ }
+ with _get_command() as (cmd, command_context), silence(command_context):
+ cmd(command_context, **kwargs)
+
+
+@mock.patch("mozperftest.MachEnvironment", new=_TestMachEnvironment)
+@mock.patch("mozbuild.base.MachCommandBase.activate_virtualenv")
+@mock.patch("tryselect.push.push_to_try")
+def test_push_command(push_to_try, venv):
+ with _get_command() as (cmd, command_context), silence(command_context):
+ cmd(
+ command_context,
+ tests=[EXAMPLE_TEST],
+ flavor="desktop-browser",
+ push_to_try=True,
+ try_platform="g5",
+ )
+ push_to_try.assert_called()
+ # XXX add assertions
+
+
@mock.patch("mozperftest.MachEnvironment", new=_TestMachEnvironment)
@mock.patch("mozbuild.base.MachCommandBase.activate_virtualenv")
@mock.patch("tryselect.push.push_to_try")
def test_push_command_unknown_platforms(push_to_try, venv):
    """An unrecognized try platform must abort with NotImplementedError."""
    # full stop when a platform is unknown
    with _get_command() as (cmd, command_context):
        with pytest.raises(NotImplementedError):
            cmd(
                command_context,
                tests=[EXAMPLE_TEST],
                flavor="desktop-browser",
                push_to_try=True,
                try_platform=["solaris", "linux", "mac"],
            )
+
+
@mock.patch("mozperftest.MachEnvironment", new=_TestMachEnvironment)
@mock.patch("mozbuild.base.MachCommandBase.activate_virtualenv")
@mock.patch("tryselect.push.push_to_try")
def test_push_command_several_platforms(push_to_try, venv):
    """Several try platforms schedule one browsertime task per platform."""
    with running_on_try(False):
        with _get_command() as (cmd, command_context):
            cmd(
                command_context,
                tests=[EXAMPLE_TEST],
                flavor="desktop-browser",
                push_to_try=True,
                try_platform=["linux", "mac"],
            )
            push_to_try.assert_called()
            _, args, kwargs = push_to_try.mock_calls[0]
            params = kwargs["try_task_config"]["parameters"]["try_task_config"]
            scheduled = params["tasks"]
            assert "perftest-linux-try-browsertime" in scheduled
            assert "perftest-macosx-try-browsertime" in scheduled
+
+
@mock.patch("mozperftest.MachEnvironment", new=_TestMachEnvironment)
@mock.patch("mozbuild.base.MachCommandBase.activate_virtualenv")
def test_doc_flavor(mocked_func):
    """Smoke test: the documentation ("doc") flavor runs without error."""
    with _get_command() as (cmd, command_context):
        with silence(command_context):
            cmd(command_context, flavor="doc", tests=[EXAMPLE_TEST])
+
+
@mock.patch("mozperftest.MachEnvironment", new=_TestMachEnvironment)
@mock.patch("mozbuild.base.MachCommandBase.activate_virtualenv")
@mock.patch("mozperftest.utils.run_script")
def test_test_runner(*mocked):
    """The `run_tests` entry point accepts a test path when off try."""
    from mozperftest.mach_commands import run_tests

    with running_on_try(False):
        with _get_command(run_tests) as (cmd, command_context):
            cmd(command_context, verbose=True, tests=[EXAMPLE_TEST])
+
+
@mock.patch("mozperftest.MachEnvironment", new=_TestMachEnvironment)
@mock.patch("mozbuild.base.MachCommandBase.activate_virtualenv")
@mock.patch("mozperftest.utils.run_python_script")
def test_test_runner_on_try(*mocked):
    """On try, `run_tests` goes through the paths parser code path."""
    from mozperftest.mach_commands import run_tests

    # simulating on try to run the paths parser
    with running_on_try():
        with _get_command(run_tests) as (cmd, command_context):
            cmd(command_context, tests=[EXAMPLE_TEST])
+
+
@mock.patch("mozperftest.MachEnvironment", new=_TestMachEnvironment)
@mock.patch("mozbuild.base.MachCommandBase.activate_virtualenv")
@mock.patch("mozperftest.utils.run_script")
def test_test_runner_coverage(*mocked):
    """`run_tests` must still work when coverage cannot be imported."""
    from mozperftest.mach_commands import run_tests

    # simulating with coverage not installed: empty the import machinery
    with running_on_try(False), _get_command(run_tests) as (cmd, command_context):
        saved = list(sys.meta_path)
        sys.meta_path = []
        try:
            cmd(command_context, tests=[EXAMPLE_TEST])
        finally:
            sys.meta_path = saved
+
+
def fzf_selection(*args):
    """Fake fzf selector: return a one-entry selection for the last candidate.

    Returns an empty selection when no candidates are provided.
    """
    try:
        selected = args[-1][-1]["path"]
    except IndexError:
        return []

    relative = Path(selected.replace(str(ROOT), ""))
    return ["[bt][sometag] {} in {}".format(relative.name, relative.parent)]
+
+
def resolve_tests(tests=None):
    """Build a stub test resolver that always returns *tests*.

    When *tests* is None, a single entry pointing at EXAMPLE_TEST is used.
    """
    resolved = [{"path": str(EXAMPLE_TEST)}] if tests is None else tests

    def _resolve(*args, **kw):
        return resolved

    return _resolve
+
+
@mock.patch("mozperftest.MachEnvironment", new=_TestMachEnvironment)
@mock.patch("mozbuild.base.MachCommandBase.activate_virtualenv")
@mock.patch("mozperftest.fzf.fzf.select", new=fzf_selection)
@mock.patch("moztest.resolve.TestResolver.resolve_tests", new=resolve_tests())
def test_fzf_flavor(*mocked):
    """Selecting a test through the fzf interface runs it."""
    with running_on_try(False):
        with _get_command() as (cmd, command_context):
            cmd(command_context, flavor="desktop-browser")
+
+
@mock.patch("mozperftest.MachEnvironment", new=_TestMachEnvironment)
@mock.patch("mozbuild.base.MachCommandBase.activate_virtualenv")
@mock.patch("mozperftest.fzf.fzf.select", new=fzf_selection)
@mock.patch("moztest.resolve.TestResolver.resolve_tests", new=resolve_tests([]))
def test_fzf_nothing_selected(*mocked):
    """An empty fzf selection must not crash the command."""
    with running_on_try(False):
        with _get_command() as (cmd, command_context):
            with silence():
                cmd(command_context, flavor="desktop-browser")
+
+
@mock.patch("mozperftest.MachEnvironment", new=_TestMachEnvironment)
@mock.patch("mozbuild.base.MachCommandBase.activate_virtualenv")
@mock.patch("mozperftest.utils.run_python_script")
@mock.patch("mozperftest.utils.install_package")
def test_side_by_side(mock1, mock2, mock3, patched_mozperftest_tools):
    """The side-by-side tool command delegates to mozperftest-tools."""
    with mock.patch("mozperftest.utils.create_path", return_value="fake_path"):
        with mock.patch(
            "mozperftest.runner._create_artifacts_dir", return_value="fake_path"
        ):
            with mock.patch(
                "mozperftest.runner._save_params", return_value="fake_path"
            ):
                with _get_tools_command() as (cmd, command_context):
                    with silence(command_context):
                        cmd(command_context)
                        patched_mozperftest_tools.run.assert_called()
+
+
@mock.patch("mozperftest.MachEnvironment", new=_TestMachEnvironment)
@mock.patch("mozbuild.base.MachCommandBase.activate_virtualenv")
@mock.patch("mozperftest.utils.run_python_script")
@mock.patch("mozperftest.utils.install_package")
def test_change_detector(mock1, mock2, mock3, patched_mozperftest_tools):
    """The change-detector tool command delegates to mozperftest-tools."""
    with mock.patch("mozperftest.utils.create_path", return_value="fake_path"):
        with mock.patch(
            "mozperftest.runner._create_artifacts_dir", return_value="fake_path"
        ):
            with mock.patch(
                "mozperftest.runner._save_params", return_value="fake_path"
            ):
                with _get_tools_command(tool="change-detector") as (
                    cmd,
                    command_context,
                ):
                    with silence(command_context):
                        cmd(command_context)
                        patched_mozperftest_tools.run.assert_called()
+
+
# Allow running this test module directly with python.
if __name__ == "__main__":
    mozunit.main()
diff --git a/python/mozperftest/mozperftest/tests/test_macos.py b/python/mozperftest/mozperftest/tests/test_macos.py
new file mode 100644
index 0000000000..6999f4792d
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_macos.py
@@ -0,0 +1,94 @@
+#!/usr/bin/env python
+import os
+import platform
+import subprocess
+from pathlib import Path
+from unittest import mock
+
+import mozunit
+import pytest
+
+from mozperftest.system.macos import MacosDevice
+from mozperftest.tests.support import DMG, get_running_env
+
+
def run_proc(*args, **kw):
    """Fake for MacosDevice._run_process.

    Emulates `hdiutil attach` by creating a firefox.app bundle containing a
    stub `firefox` binary under the mount point (args[0][4]); every other
    command is a no-op.
    """
    command = args[0]
    if command[1] != "attach":
        return
    mount_point = command[4]
    bindir = Path(mount_point) / "firefox.app" / "Contents" / "MacOS"
    os.makedirs(str(bindir))
    (bindir / "firefox").write_text("OK")
+
+
def mock_calls(test):
    """Decorator: on macOS run *test* against the real `hdiutil`; on every
    other platform substitute MacosDevice._run_process with the run_proc fake.

    Bug fix: `wrapped` was defined but never returned, so on non-Darwin
    platforms the decorated test became None and was silently never run.
    """
    # on macOS we don't mock the system calls
    # so we're mounting for real using hdiutil
    if platform.system() == "Darwin":
        return test

    # on other platforms, we're using run_proc
    @mock.patch("mozperftest.system.macos.MacosDevice._run_process", new=run_proc)
    def wrapped(*args, **kw):
        return test(*args, **kw)

    return wrapped
+
+
@mock_calls
def test_mount_dmg():
    """Mounting a DMG rewrites browsertime-binary to the extracted app binary."""
    mach_cmd, metadata, env = get_running_env(browsertime_binary=str(DMG))
    device = MacosDevice(env, mach_cmd)
    try:
        device.run(metadata)
    finally:
        device.teardown()

    expected = Path(DMG.parent, "firefox", "Contents", "MacOS", "firefox")
    assert env.get_arg("browsertime-binary") == str(expected)
+
+
def run_fail(cmd):
    """Build a fake _run_process that raises CalledProcessError for any
    command line containing *cmd*, delegating everything else to run_proc."""

    def _run_fail(self, args):
        joined = " ".join(args)
        if cmd in joined:
            raise subprocess.CalledProcessError(returncode=2, cmd=joined)
        run_proc(args)

    return _run_fail
+
+
@mock.patch("mozperftest.system.macos.MacosDevice._run_process", new=run_fail("attach"))
def test_attach_fails():
    """A failing `hdiutil attach` must propagate CalledProcessError."""
    mach_cmd, metadata, env = get_running_env(browsertime_binary=str(DMG))
    device = MacosDevice(env, mach_cmd)

    with pytest.raises(subprocess.CalledProcessError):
        try:
            device.run(metadata)
        finally:
            device.teardown()
+
+
@mock.patch("mozperftest.system.macos.MacosDevice._run_process", new=run_fail("detach"))
def test_detach_fails():
    """A failing `hdiutil detach` is swallowed; the binary is still extracted."""
    mach_cmd, metadata, env = get_running_env(browsertime_binary=str(DMG))
    device = MacosDevice(env, mach_cmd)
    # detaching will be swallowed
    try:
        device.run(metadata)
    finally:
        device.teardown()

    expected = Path(DMG.parent, "firefox", "Contents", "MacOS", "firefox")
    assert env.get_arg("browsertime-binary") == str(expected)
+
+
def test_no_op():
    """A browsertime binary that is not a DMG passes through untouched."""
    mach_cmd, metadata, env = get_running_env(browsertime_binary="notadmg")
    MacosDevice(env, mach_cmd).run(metadata)
+
+
# Allow running this test module directly with python.
if __name__ == "__main__":
    mozunit.main()
diff --git a/python/mozperftest/mozperftest/tests/test_metrics_utils.py b/python/mozperftest/mozperftest/tests/test_metrics_utils.py
new file mode 100644
index 0000000000..0e6bdd71ac
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_metrics_utils.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import json
+
+import mozunit
+import pytest
+
+from mozperftest.metrics.utils import metric_fields, open_file
+from mozperftest.tests.support import temp_file
+
+
def test_open_file():
    """open_file parses .json files and returns raw text for anything else."""
    payload = json.dumps({"1": 2})

    with temp_file(name="data.json", content=payload) as f:
        assert open_file(f) == {"1": 2}

    with temp_file(name="data.txt", content="yeah") as f:
        assert open_file(f) == "yeah"
+
+
def test_metric_fields_old_format():
    """A bare metric name expands to a single-field dict."""
    expected = {"name": "firstPaint"}
    assert metric_fields("firstPaint") == expected
+
+
@pytest.mark.parametrize(
    "metrics, expected",
    [
        ("name:foo,extraOptions:bar", {"name": "foo", "extraOptions": "bar"}),
        ("name:foo", {"name": "foo"}),
    ],
)
def test_metric_fields_simple(metrics, expected):
    """Simple key:value metric strings parse into flat dicts."""
    assert metric_fields(metrics) == expected
+
+
@pytest.mark.parametrize(
    "metrics, expected",
    [
        [
            "name:foo,extraOptions:['1', '2', '3', 2]",
            {"name": "foo", "extraOptions": ["1", "2", "3", 2]},
        ],
        [
            """name:foo,extraOptions:['1', '2', '3', 2, "3", "hello,world"] """,
            {"name": "foo", "extraOptions": ["1", "2", "3", 2, "3", "hello,world"]},
        ],
        [
            """name:foo,extraOptions:['1', '2', '3', 2, "3", "hello,world"],"""
            """alertThreshold:['1',2,"hello"] """,
            {
                "name": "foo",
                "extraOptions": ["1", "2", "3", 2, "3", "hello,world"],
                "alertThreshold": ["1", 2, "hello"],
            },
        ],
        [
            """name:foo,extraOptions:['1', '2', '3', 2, "3", "hello,world"],"""
            """value:foo,alertThreshold:['1',2,"hello"],framework:99 """,
            {
                "name": "foo",
                "extraOptions": ["1", "2", "3", 2, "3", "hello,world"],
                "alertThreshold": ["1", 2, "hello"],
                "value": "foo",
                "framework": 99,
            },
        ],
    ],
)
def test_metric_fields_complex(metrics, expected):
    """Quoted list values and multiple fields parse into typed dicts."""
    assert metric_fields(metrics) == expected
+
+
@pytest.mark.parametrize(
    "metrics",
    [
        # both inputs contain a nested list inside shouldAlert, which the
        # parser is expected to reject
        """name:foo,extraOptions:['1', '2', '3', 2, "3", "hello,world"],"""
        """value:foo,alertThreshold:['1',2,"hello"],framework:99,"""
        """shouldAlert:[99,100,["hello", "world"],0] """,
        """name:foo,extraOptions:['1', '2', '3', 2, "3", "hello,world"],"""
        """value:foo,alertThreshold:['1',2,"hello"],framework:99,"""
        """shouldAlert:[99,100,["hello:", "world:"],0] """,
    ],
)
def test_metric_fields_complex_failures(metrics):
    """Metric strings with nested list values must raise."""
    with pytest.raises(Exception):
        metric_fields(metrics)
+
+
# Allow running this test module directly with python.
if __name__ == "__main__":
    mozunit.main()
diff --git a/python/mozperftest/mozperftest/tests/test_notebookupload.py b/python/mozperftest/mozperftest/tests/test_notebookupload.py
new file mode 100644
index 0000000000..9defae8ad2
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_notebookupload.py
@@ -0,0 +1,121 @@
+#!/usr/bin/env python
+from unittest import mock
+
+import mozunit
+import pytest
+
+from mozperftest.environment import METRICS
+from mozperftest.metrics.utils import metric_fields
+from mozperftest.tests.support import BT_DATA, EXAMPLE_TEST, get_running_env, temp_file
+from mozperftest.utils import silence
+
+
def setup_env(options):
    """Build a running test environment for the metrics layer.

    Stubs out `run_process` so nothing is spawned, registers EXAMPLE_TEST and
    a single browsertime result, then returns (metrics layer, metadata, env).
    """
    mach_cmd, metadata, env = get_running_env(**options)
    captured = []

    def _run_process(*args, **kw):
        captured.append((args, kw))

    mach_cmd.run_process = _run_process
    metrics = env.layers[METRICS]
    env.set_arg("tests", [EXAMPLE_TEST])
    metadata.add_result({"name": "browsertime", "results": str(BT_DATA)})
    return metrics, metadata, env
+
+
@pytest.mark.parametrize("no_filter", [True, False])
@mock.patch("mozperftest.metrics.notebookupload.PerftestNotebook")
def test_notebookupload_with_filter(notebook, no_filter):
    """Data sent to the notebook is numeric unless string analysis is on.

    With `notebook-analyze-strings` enabled, raw string values pass through;
    otherwise every data point must already be an int or a float.
    """
    options = {
        "notebook-metrics": [],
        "notebook-prefix": "",
        "notebook": True,
        "notebook-analysis": ["scatterplot"],
        "notebook-analyze-strings": no_filter,
    }

    metrics, metadata, env = setup_env(options)

    with temp_file() as output:
        env.set_arg("output", output)
        with metrics as m, silence():
            m(metadata)

    if no_filter:
        args, kwargs = notebook.call_args_list[0]
        # `is` instead of `==` for exact-type comparison (flake8 E721);
        # semantics are unchanged since type() returns the class object
        assert type(kwargs["data"][0]["data"][0]["value"]) is str
    else:
        for call in notebook.call_args_list:
            args, kwargs = call
            for a in args:
                for data_dict in a:
                    for data in data_dict["data"]:
                        assert type(data["value"]) in (int, float)

    notebook.assert_has_calls(
        [mock.call().post_to_iodide(["scatterplot"], start_local_server=True)]
    )
+
+
@pytest.mark.parametrize("stats", [False, True])
@mock.patch("mozperftest.metrics.notebookupload.PerftestNotebook")
def test_compare_to_success(notebook, stats):
    """notebook-compare-to merges the current run with stored results."""
    options = {
        "notebook-metrics": [metric_fields("firstPaint")],
        "notebook-prefix": "",
        "notebook-analysis": [],
        "notebook": True,
        "notebook-compare-to": [str(BT_DATA.parent)],
        "notebook-stats": stats,
    }

    metrics, metadata, env = setup_env(options)

    with temp_file() as output:
        env.set_arg("output", output)
        with metrics as m, silence():
            m(metadata)

    args, kwargs = notebook.call_args_list[0]

    if not stats:
        # without stats: exactly one "newest run" entry plus the stored one
        assert len(kwargs["data"]) == 2
        assert kwargs["data"][0]["name"] == "browsertime- newest run"
        assert kwargs["data"][1]["name"] == "browsertime-results"
    else:
        assert any("statistics" in element["subtest"] for element in kwargs["data"])

    notebook.assert_has_calls(
        [mock.call().post_to_iodide(["compare"], start_local_server=True)]
    )
+
+
@pytest.mark.parametrize("filepath", ["invalidPath", str(BT_DATA)])
@mock.patch("mozperftest.metrics.notebookupload.PerftestNotebook")
def test_compare_to_invalid_parameter(notebook, filepath):
    """notebook-compare-to must point at an existing directory."""
    options = {
        "notebook-metrics": [metric_fields("firstPaint")],
        "notebook-prefix": "",
        "notebook-analysis": [],
        "notebook": True,
        "notebook-compare-to": [filepath],
    }

    metrics, metadata, env = setup_env(options)

    with pytest.raises(Exception) as excinfo:
        with temp_file() as output:
            env.set_arg("output", output)
            with metrics as m:
                with silence():
                    m(metadata)

    if filepath == "invalidPath":
        assert "does not exist" in str(excinfo.value)
    else:
        assert "not a directory" in str(excinfo.value)
+
+
# Allow running this test module directly with python.
if __name__ == "__main__":
    mozunit.main()
diff --git a/python/mozperftest/mozperftest/tests/test_perfherder.py b/python/mozperftest/mozperftest/tests/test_perfherder.py
new file mode 100644
index 0000000000..7093d6d701
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_perfherder.py
@@ -0,0 +1,620 @@
+#!/usr/bin/env python
+import json
+import pathlib
+
+import jsonschema
+import mozunit
+import pytest
+
+from mozperftest.environment import METRICS
+from mozperftest.metrics.exceptions import PerfherderValidDataError
+from mozperftest.metrics.notebook.transforms.single_json import SingleJsonRetriever
+from mozperftest.metrics.utils import metric_fields
+from mozperftest.tests.support import (
+ BT_DATA,
+ EXAMPLE_TEST,
+ HERE,
+ get_running_env,
+ temp_file,
+)
+from mozperftest.utils import silence, temp_dir
+
+
class PerfherderTransformer(SingleJsonRetriever):
    """Used for testing the summarization transforms."""

    def summary(self, suite):
        # constant suite-level summary; asserted in test_perfherder_transforms
        return 0

    def subtest_summary(self, subtest):
        # constant subtest-level summary; asserted in test_perfherder_transforms
        return -1
+
+
def setup_env(options):
    """Build a running test environment for the metrics layer.

    Stubs out `run_process`, registers EXAMPLE_TEST and one browsertime
    result, then returns (metrics layer, metadata, env).
    """
    mach_cmd, metadata, env = get_running_env(**options)
    captured = []

    def _run_process(*args, **kw):
        captured.append((args, kw))

    mach_cmd.run_process = _run_process
    metrics = env.layers[METRICS]
    env.set_arg("tests", [EXAMPLE_TEST])
    metadata.add_result({"name": "browsertime", "results": str(BT_DATA)})
    return metrics, metadata, env
+
+
def test_perfherder():
    """End-to-end perfherder output: metadata, suite shape and metric filter."""
    options = {
        "perfherder": True,
        "perfherder-stats": True,
        "perfherder-prefix": "",
        "perfherder-metrics": [metric_fields("firstPaint")],
        "perfherder-timestamp": 1.0,
    }

    metrics, metadata, env = setup_env(options)

    with temp_file() as output:
        env.set_arg("output", output)
        with metrics as m, silence():
            m(metadata)
        output_file = metadata.get_output()
        with open(output_file) as f:
            output = json.loads(f.read())

    # Check some metadata
    assert output["application"]["name"] == "firefox"
    assert output["framework"]["name"] == "mozperftest"
    assert output["pushTimestamp"] == 1.0

    # Check some numbers in our data
    assert len(output["suites"]) == 1
    assert len(output["suites"][0]["subtests"]) == 10
    assert not any("value" in suite for suite in output["suites"])

    # Check if only firstPaint metrics were obtained
    for subtest in output["suites"][0]["subtests"]:
        assert "firstPaint" in subtest["name"]
+
+
def test_perfherder_simple_names():
    """Metric names are simplified while excluded statistics keep full names."""
    options = {
        "perfherder": True,
        "perfherder-stats": True,
        "perfherder-prefix": "",
        "perfherder-metrics": [metric_fields("firstPaint"), metric_fields("resource")],
        "perfherder-simplify-names": True,
        "perfherder-simplify-exclude": ["statistics"],
    }

    metrics, metadata, env = setup_env(options)

    with temp_file() as output:
        env.set_arg("output", output)
        with metrics as m, silence():
            m(metadata)
        output_file = metadata.get_output()
        with open(output_file) as f:
            output = json.loads(f.read())

    # Check some metadata
    assert output["application"]["name"] == "firefox"
    assert output["framework"]["name"] == "mozperftest"

    # Check some numbers in our data
    assert len(output["suites"]) == 1
    assert "value" not in output["suites"][0]
    assert any(r > 0 for r in output["suites"][0]["subtests"][0]["replicates"])

    # Check if only firstPaint/resource metrics were obtained and
    # that simplifications occurred
    assert all(
        [
            "firstPaint" in subtest["name"]
            or "duration" in subtest["name"]
            or "count" in subtest["name"]
            for subtest in output["suites"][0]["subtests"]
        ]
    )

    found_all = {"firstPaint": False, "count": False, "duration": False}
    for subtest in output["suites"][0]["subtests"]:
        if subtest["name"] in found_all:
            found_all[subtest["name"]] = True
            continue
        assert any([name in subtest["name"] for name in found_all.keys()])
        # Statistics are not simplified so any metric that isn't
        # in the list of known metrics must be a statistic
        assert "statistics" in subtest["name"]

    for entry, value in found_all.items():
        assert found_all[entry], f"Failed finding metric simplification for {entry}"

    # Statistics are not simplified by default
    assert (
        len(
            [
                subtest
                for subtest in output["suites"][0]["subtests"]
                if "statistics" in subtest["name"]
            ]
        )
        == 27
    )
    assert (
        len(
            [
                subtest
                for subtest in output["suites"][0]["subtests"]
                if "statistics" not in subtest["name"]
            ]
        )
        == 3
    )
+
+
def test_perfherder_names_simplified_with_no_exclusions():
    """Without an exclusion list, statistics names get simplified too."""
    options = {
        "perfherder": True,
        "perfherder-stats": True,
        "perfherder-prefix": "",
        "perfherder-metrics": [metric_fields("firstPaint"), metric_fields("resource")],
        "perfherder-simplify-names": True,
    }

    metrics, metadata, env = setup_env(options)

    with temp_file() as output:
        env.set_arg("output", output)
        with metrics as m, silence():
            m(metadata)
        output_file = metadata.get_output()
        with open(output_file) as f:
            output = json.loads(f.read())

    # Check some metadata
    assert output["application"]["name"] == "firefox"
    assert output["framework"]["name"] == "mozperftest"

    # Check some numbers in our data
    assert len(output["suites"]) == 1
    assert "value" not in output["suites"][0]
    assert any(r > 0 for r in output["suites"][0]["subtests"][0]["replicates"])

    # In this case, some metrics will be called "median", "mean", etc.
    # since those are the simplifications of the first statistics entries
    # that were found.
    assert not all(
        [
            "firstPaint" in subtest["name"]
            or "duration" in subtest["name"]
            or "count" in subtest["name"]
            for subtest in output["suites"][0]["subtests"]
        ]
    )

    found_all = {"firstPaint": False, "count": False, "duration": False}
    for subtest in output["suites"][0]["subtests"]:
        if subtest["name"] in found_all:
            found_all[subtest["name"]] = True
            continue

    for entry, value in found_all.items():
        assert found_all[entry], f"Failed finding metric simplification for {entry}"

    # Only a portion of the metrics should still have statistics in
    # their name due to a naming conflict that only emits a warning
    assert (
        len(
            [
                subtest
                for subtest in output["suites"][0]["subtests"]
                if "statistics" in subtest["name"]
            ]
        )
        == 18
    )
    assert (
        len(
            [
                subtest
                for subtest in output["suites"][0]["subtests"]
                if "statistics" not in subtest["name"]
            ]
        )
        == 12
    )
+
+
def test_perfherder_with_extra_options():
    """extraOptions from every metric are merged into the suite options."""
    options = {
        "perfherder": True,
        "perfherder-stats": True,
        "perfherder-prefix": "",
        "perfherder-metrics": [
            metric_fields("name:firstPaint,extraOptions:['option']"),
            metric_fields("name:resource,extraOptions:['second-option']"),
        ],
    }

    metrics, metadata, env = setup_env(options)

    with temp_file() as output:
        env.set_arg("output", output)
        with metrics as m, silence():
            m(metadata)
        results_path = metadata.get_output()
        with open(results_path) as f:
            output = json.load(f)

    assert len(output["suites"]) == 1
    suite_options = output["suites"][0]["extraOptions"]
    assert sorted(suite_options) == sorted(["option", "second-option"])
+
+
def test_perfherder_with_alerting():
    """shouldAlert is applied only to the metrics that request it."""
    options = {
        "perfherder": True,
        "perfherder-stats": True,
        "perfherder-prefix": "",
        "perfherder-metrics": [
            metric_fields("name:firstPaint,extraOptions:['option']"),
            metric_fields("name:resource,shouldAlert:True"),
        ],
    }

    metrics, metadata, env = setup_env(options)

    with temp_file() as output:
        env.set_arg("output", output)
        with metrics as m, silence():
            m(metadata)
        output_file = metadata.get_output()
        with open(output_file) as f:
            output = json.loads(f.read())

    assert len(output["suites"]) == 1
    assert sorted(output["suites"][0]["extraOptions"]) == sorted(["option"])
    # resource subtests requested alerting, firstPaint ones did not
    assert all(
        [
            subtest["shouldAlert"]
            for subtest in output["suites"][0]["subtests"]
            if "resource" in subtest["name"]
        ]
    )
    assert not all(
        [
            subtest["shouldAlert"]
            for subtest in output["suites"][0]["subtests"]
            if "firstPaint" in subtest["name"]
        ]
    )
+
+
def test_perfherder_with_subunits():
    """A per-metric `unit` overrides the default ms unit on its subtests."""
    options = {
        "perfherder": True,
        "perfherder-stats": True,
        "perfherder-prefix": "",
        "perfherder-metrics": [
            metric_fields("name:firstPaint,extraOptions:['option']"),
            metric_fields("name:resource,shouldAlert:True,unit:a-unit"),
        ],
    }

    metrics, metadata, env = setup_env(options)

    with temp_file() as output:
        env.set_arg("output", output)
        with metrics as m, silence():
            m(metadata)
        output_file = metadata.get_output()
        with open(output_file) as f:
            output = json.loads(f.read())

    assert len(output["suites"]) == 1
    assert all(
        [
            subtest["unit"] == "a-unit"
            for subtest in output["suites"][0]["subtests"]
            if "resource" in subtest["name"]
        ]
    )
    assert all(
        [
            subtest["unit"] == "ms"
            for subtest in output["suites"][0]["subtests"]
            if "firstPaint" in subtest["name"]
        ]
    )
+
+
def test_perfherder_with_supraunits():
    """A suite-level metric (browsertime) sets the suite and default unit."""
    options = {
        "perfherder": True,
        "perfherder-stats": True,
        "perfherder-prefix": "",
        "perfherder-metrics": [
            metric_fields("name:browsertime,unit:new-unit"),
            metric_fields("name:firstPaint,extraOptions:['option']"),
            metric_fields("name:resource,shouldAlert:True,unit:a-unit"),
        ],
    }

    metrics, metadata, env = setup_env(options)

    with temp_file() as output:
        env.set_arg("output", output)
        with metrics as m, silence():
            m(metadata)
        output_file = metadata.get_output()
        with open(output_file) as f:
            output = json.loads(f.read())

    assert len(output["suites"]) == 1
    assert output["suites"][0]["unit"] == "new-unit"
    assert all(
        [
            subtest["unit"] == "a-unit"
            for subtest in output["suites"][0]["subtests"]
            if "resource" in subtest["name"]
        ]
    )
    assert all(
        [
            subtest["unit"] == "new-unit"
            for subtest in output["suites"][0]["subtests"]
            if "firstPaint" in subtest["name"]
        ]
    )
+
+
def test_perfherder_transforms():
    """A custom transformer controls suite and subtest summary values."""
    options = {
        "perfherder": True,
        "perfherder-stats": True,
        "perfherder-prefix": "",
        "perfherder-metrics": [metric_fields("name:firstPaint")],
        "perfherder-transformer": "mozperftest.tests.test_perfherder:PerfherderTransformer",
    }

    metrics, metadata, env = setup_env(options)

    with temp_file() as output:
        env.set_arg("output", output)
        with metrics as m, silence():
            m(metadata)
        results_path = metadata.get_output()
        with open(results_path) as f:
            output = json.load(f)

    assert len(output["suites"]) == 1
    suite = output["suites"][0]
    assert suite["unit"] == "ms"
    assert all(subtest["value"] == -1 for subtest in suite["subtests"])
    assert "value" in suite
    assert suite["value"] == 0
+
+
def test_perfherder_logcat():
    """A logcat results file is transformed into a TimeToDisplayed subtest."""
    options = {
        "perfherder": True,
        "perfherder-prefix": "",
        "perfherder-metrics": [metric_fields("TimeToDisplayed")],
    }

    metrics, metadata, env = setup_env(options)
    metadata.clear_results()

    def processor(groups):
        """Parses the time from a displayed time string into milliseconds."""
        return (float(groups[0]) * 1000) + float(groups[1])

    re_w_group = r".*Displayed.*org\.mozilla\.fennec_aurora.*\+([\d]+)s([\d]+)ms.*"
    metadata.add_result(
        {
            "results": str(HERE / "data" / "home_activity.txt"),
            "transformer": "LogCatTimeTransformer",
            "transformer-options": {
                "first-timestamp": re_w_group,
                "processor": processor,
                "transform-subtest-name": "TimeToDisplayed",
            },
            "name": "LogCat",
        }
    )

    with temp_file() as output:
        env.set_arg("output", output)
        # silence() is deliberately not applied here
        with metrics as m:
            m(metadata)
        output_file = metadata.get_output()
        with open(output_file) as f:
            output = json.loads(f.read())

    # Check some metadata
    assert output["application"]["name"] == "firefox"
    assert output["framework"]["name"] == "mozperftest"

    # Check some numbers in our data
    assert len(output["suites"]) == 1
    assert len(output["suites"][0]["subtests"]) == 1
    assert "value" not in output["suites"][0]
    assert any(r > 0 for r in output["suites"][0]["subtests"][0]["replicates"])

    # Check if only the TimeToDisplayed metric was obtained
    for subtest in output["suites"][0]["subtests"]:
        assert "TimeToDisplayed" in subtest["name"]
+
+
def test_perfherder_validation_failure():
    """Without metric filtering, raw browsertime timestamps break the schema."""
    options = {"perfherder": True, "perfherder-prefix": ""}

    metrics, metadata, env = setup_env(options)

    # Perfherder schema has limits on min/max data values. Having
    # no metrics in the options will cause a failure because of the
    # timestamps that are picked up from browsertime.
    with pytest.raises(jsonschema.ValidationError):
        with temp_dir() as output:
            env.set_arg("output", output)
            with metrics as m:
                with silence():
                    m(metadata)
+
+
def test_perfherder_missing_data_failure():
    """A results file without usable data raises PerfherderValidDataError."""
    options = {"perfherder": True, "perfherder-prefix": ""}

    metrics, metadata, env = setup_env(options)
    metadata.clear_results()

    with temp_dir() as tmpdir:
        bad_data = pathlib.Path(tmpdir, "baddata.json")
        with bad_data.open("w") as f:
            json.dump({"bad data": "here"}, f)

        metadata.add_result({"results": str(bad_data), "name": "browsertime"})

        with pytest.raises(PerfherderValidDataError):
            with temp_file() as output:
                env.set_arg("output", output)
                with metrics as m:
                    with silence():
                        m(metadata)
+
+
def test_perfherder_metrics_filtering():
    """When no subtest matches the filter, no perfherder file is written."""
    options = {
        "perfherder": True,
        "perfherder-prefix": "",
        "perfherder-metrics": [metric_fields("I shouldn't match a metric")],
    }

    metrics, metadata, env = setup_env(options)
    metadata.clear_results()

    with temp_dir() as tmpdir:
        no_data = pathlib.Path(tmpdir, "nodata.json")
        with no_data.open("w") as f:
            json.dump({}, f)

        metadata.add_result({"results": str(no_data), "name": "browsertime"})

        with temp_dir() as output:
            env.set_arg("output", output)
            with metrics as m:
                with silence():
                    m(metadata)

            assert not pathlib.Path(output, "perfherder-data.json").exists()
+
+
def test_perfherder_exlude_stats():
    """Without perfherder-stats, only the raw firstPaint subtest is emitted.

    NOTE(review): "exlude" in the test name is a typo for "exclude"; kept
    as-is so external test selection (e.g. pytest -k filters) keeps working.
    """
    options = {
        "perfherder": True,
        "perfherder-prefix": "",
        "perfherder-metrics": [metric_fields("firstPaint")],
    }

    metrics, metadata, env = setup_env(options)

    with temp_file() as output:
        env.set_arg("output", output)
        with metrics as m, silence():
            m(metadata)
        output_file = metadata.get_output()
        with open(output_file) as f:
            output = json.loads(f.read())

    # Check some numbers in our data
    assert len(output["suites"]) == 1
    assert len(output["suites"][0]["subtests"]) == 1
    assert "value" not in output["suites"][0]
    assert any(r > 0 for r in output["suites"][0]["subtests"][0]["replicates"])

    # Check if only firstPaint metric was obtained with 2 replicates
    assert len(output["suites"][0]["subtests"][0]["replicates"]) == 2
    assert (
        "browserScripts.timings.firstPaint"
        == output["suites"][0]["subtests"][0]["name"]
    )
+
+
def test_perfherder_app_name():
    """perfherder-app overrides the application name in the output."""
    options = {
        "perfherder": True,
        "perfherder-prefix": "",
        "perfherder-app": "fenix",
        "perfherder-metrics": [metric_fields("firstPaint")],
    }

    metrics, metadata, env = setup_env(options)

    with temp_file() as output:
        env.set_arg("output", output)
        with metrics as m, silence():
            m(metadata)
        results_path = metadata.get_output()
        with open(results_path) as f:
            output = json.load(f)

    # Make sure that application setting is correct
    app = output["application"]
    assert app["name"] == "fenix"
    assert "version" not in app
+
+
def test_perfherder_split_by():
    """perfherder-split-by creates one subtest per distinct field value."""
    options = {
        "perfherder": True,
        "perfherder-prefix": "",
        "perfherder-app": "fenix",
        "perfherder-metrics": [metric_fields("firstPaint")],
        "perfherder-split-by": "browserScripts.pageinfo.url",
    }

    metrics, metadata, env = setup_env(options)

    with temp_file() as output:
        env.set_arg("output", output)
        with metrics as m, silence():
            m(metadata)
        output_file = metadata.get_output()
        with open(output_file) as f:
            output = json.loads(f.read())

    # Sanity check
    assert len(output["suites"]) == 1

    # We should have 2 subtests (1 per URL)
    assert len(output["suites"][0]["subtests"]) == 2

    # Check to make sure that they were properly split
    names = [subtest["name"] for subtest in output["suites"][0]["subtests"]]
    assert sorted(names) == [
        "browserScripts.timings.firstPaint https://www.mozilla.org/en-US/",
        "browserScripts.timings.firstPaint https://www.sitespeed.io/",
    ]
    for i in range(2):
        assert len(output["suites"][0]["subtests"][i]["replicates"]) == 1
+
+
def test_perfherder_bad_app_name():
    """An app name outside the schema's choices fails output validation."""
    options = {
        "perfherder": True,
        "perfherder-prefix": "",
        "perfherder-app": "this is not an app",
        "perfherder-metrics": [metric_fields("firstPaint")],
    }

    metrics, metadata, env = setup_env(options)

    # This will raise an error because the options method
    # we use in tests skips the `choices` checks.
    with pytest.raises(jsonschema.ValidationError):
        with temp_file() as output:
            env.set_arg("output", output)
            with metrics as m:
                with silence():
                    m(metadata)
+
+
# Allow running this test module directly with python.
if __name__ == "__main__":
    mozunit.main()
diff --git a/python/mozperftest/mozperftest/tests/test_perftestetl.py b/python/mozperftest/mozperftest/tests/test_perftestetl.py
new file mode 100644
index 0000000000..2c08179293
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_perftestetl.py
@@ -0,0 +1,106 @@
+import json
+import pathlib
+
+import mozunit
+import pytest
+
+from mozperftest.metrics.notebook.constant import Constant
+from mozperftest.metrics.notebook.transformer import Transformer
+from mozperftest.metrics.notebook.transforms.single_json import SingleJsonRetriever
+
+
+def test_init(ptetls):
+ for ptetl in ptetls.values():
+ assert isinstance(ptetl.fmt_data, dict)
+ assert isinstance(ptetl.file_groups, dict)
+ assert isinstance(ptetl.config, dict)
+ assert isinstance(ptetl.sort_files, bool)
+ assert isinstance(ptetl.const, Constant)
+ assert isinstance(ptetl.transformer, Transformer)
+
+
+def test_parse_file_grouping(ptetls):
+ def _check_files_created(ptetl, expected_files):
+ actual_files = set(ptetl.parse_file_grouping(expected_files))
+ expected_files = set(expected_files)
+
+ # Check all parsed files are regular files.
+ assert all([pathlib.Path(file).is_file for file in actual_files])
+ # Check parse_file_grouping function returns correct result.
+ assert actual_files - expected_files == set()
+
+ # If file_grouping is a list of files.
+ ptetl = ptetls["ptetl_list"]
+ expected_files = ptetl.file_groups["group_1"]
+ _check_files_created(ptetl, expected_files)
+
+ # If file_grouping is a directory string.
+ ptetl = ptetls["ptetl_str"]
+ expected_path = ptetl.file_groups["group_1"]
+ expected_files = [
+ f.resolve().as_posix() for f in pathlib.Path(expected_path).iterdir()
+ ]
+ _check_files_created(ptetl, expected_files)
+
+
+def test_process(ptetls, files):
+ # Temporary resource files.
+ files, output = files["resources"], files["output"]
+ file_1 = files["file_1"]
+ file_2 = files["file_2"]
+
+ # Create expected output.
+ expected_output = [
+ {
+ "data": [
+ {"value": 101, "xaxis": 1, "file": file_1},
+ {"value": 102, "xaxis": 1, "file": file_1},
+ {"value": 103, "xaxis": 1, "file": file_1},
+ {"value": 201, "xaxis": 2, "file": file_2},
+ {"value": 202, "xaxis": 2, "file": file_2},
+ {"value": 203, "xaxis": 2, "file": file_2},
+ ],
+ "name": "group_1",
+ "subtest": "browserScripts.timings.firstPaint",
+ }
+ ]
+
+ ptetl = ptetls["ptetl_str"]
+
+ # Set a custom transformer.
+ ptetl.transformer = Transformer([], SingleJsonRetriever())
+
+ # Create expected result.
+ expected_result = {
+ "data": expected_output,
+ "file-output": output,
+ }
+
+ # Check return value.
+ actual_result = ptetl.process()
+ assert actual_result == expected_result
+
+ # Check output file.
+ with pathlib.Path(output).open() as f:
+ actual_output = json.load(f)
+
+ assert expected_output == actual_output
+
+
+def test_process_fail_artifact_downloading(ptetls, files):
+ ptetl = ptetls["ptetl_list"]
+ ptetl.file_groups = {"group-name": {"artifact_downloader_setting": False}}
+
+ # Set a custom transformer.
+ ptetl.transformer = Transformer([], SingleJsonRetriever())
+ with pytest.raises(Exception) as exc_info:
+ ptetl.process()
+
+ assert (
+ str(exc_info.value)
+ == "Artifact downloader tooling is disabled for the time being."
+ )
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozperftest/mozperftest/tests/test_perftestnotebook.py b/python/mozperftest/mozperftest/tests/test_perftestnotebook.py
new file mode 100644
index 0000000000..55c3acabe1
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_perftestnotebook.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python
+from pathlib import Path
+from unittest import mock
+
+import mozunit
+import pytest
+
+from mozperftest.metrics.notebook.constant import Constant
+
+
+def test_init(ptnb, standarized_data):
+ assert isinstance(ptnb.data, dict)
+ assert isinstance(ptnb.const, Constant)
+
+
+def test_get_notebook_section(ptnb):
+ func = "scatterplot"
+ with (ptnb.const.here / "notebook-sections" / func).open() as f:
+ assert ptnb.get_notebook_section(func) == f.read()
+
+
+def test_get_notebook_section_unknown_analysis(ptnb):
+ func = "unknown"
+ assert ptnb.get_notebook_section(func) == ""
+
+
+@pytest.mark.parametrize("analysis", [["scatterplot"], None])
+def test_post_to_iodide(ptnb, standarized_data, analysis):
+
+ opener = mock.mock_open()
+
+ def mocked_open(self, *args, **kwargs):
+ return opener(self, *args, **kwargs)
+
+ with mock.patch.object(Path, "open", mocked_open), mock.patch(
+ "mozperftest.metrics.notebook.perftestnotebook.webbrowser.open_new_tab"
+ ) as browser, mock.patch(
+ "mozperftest.metrics.notebook.perftestnotebook.HTTPServer"
+ ) as server:
+ ptnb.post_to_iodide(analysis=analysis)
+
+ list_of_calls = opener.mock_calls
+
+ header_path = ptnb.const.here / "notebook-sections" / "header"
+ assert mock.call(header_path) in list_of_calls
+ index1 = list_of_calls.index(mock.call(header_path))
+ assert list_of_calls[index1 + 2] == mock.call().read()
+
+ template_upload_file_path = ptnb.const.here / "template_upload_file.html"
+ assert mock.call(template_upload_file_path) in list_of_calls
+ index2 = list_of_calls.index(mock.call(template_upload_file_path))
+ assert list_of_calls[index2 + 2] == mock.call().read()
+
+ upload_file_path = ptnb.const.here / "upload_file.html"
+ assert mock.call(upload_file_path, "w") in list_of_calls
+ index3 = list_of_calls.index(mock.call(upload_file_path, "w"))
+ assert list_of_calls[index3 + 2] == mock.call().write("")
+
+ assert index1 < index2 < index3
+
+ if analysis:
+ section_path = ptnb.const.here / "notebook-sections" / analysis[0]
+ assert mock.call(section_path) in list_of_calls
+ index4 = list_of_calls.index(mock.call(section_path))
+ assert index1 < index4 < index2
+ else:
+ assert list_of_calls.count(mock.call().__enter__()) == 3
+
+ browser.assert_called_with(str(upload_file_path))
+ server.assert_has_calls(
+ [mock.call().serve_forever(), mock.call().server_close()]
+ )
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozperftest/mozperftest/tests/test_pingserver.py b/python/mozperftest/mozperftest/tests/test_pingserver.py
new file mode 100644
index 0000000000..aa485f3a19
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_pingserver.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python
+import json
+import sys
+from pathlib import Path
+
+import mozunit
+import requests
+
+from mozperftest.system.pingserver import PingServer
+from mozperftest.tests.support import get_running_env
+from mozperftest.utils import ON_TRY, temp_dir
+
+
+def test_ping_server():
+ if ON_TRY and sys.platform == "darwin":
+ # macos slave in the CI are restricted
+ return
+ ping_data = {"some": "data"}
+ with temp_dir() as output:
+ args = {"verbose": True, "output": output}
+ mach_cmd, metadata, env = get_running_env(**args)
+ layer = PingServer(env, mach_cmd)
+ layer.setup()
+ try:
+ metadata = layer.run(metadata)
+ # simulates a ping
+ requests.post(
+ layer.endpoint + "/submit/something", data=json.dumps(ping_data)
+ )
+ finally:
+ layer.teardown()
+
+ with Path(output, "telemetry.json").open() as f:
+ assert json.loads(f.read()) == [ping_data]
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozperftest/mozperftest/tests/test_profile.py b/python/mozperftest/mozperftest/tests/test_profile.py
new file mode 100644
index 0000000000..fc0bf76eb8
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_profile.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import os
+import tempfile
+from unittest import mock
+
+import mozunit
+
+from mozperftest.system.profile import Profile, ProfileNotFoundError
+from mozperftest.tests.support import get_running_env
+
+
+def test_profile():
+ mach_cmd, metadata, env = get_running_env()
+
+ with Profile(env, mach_cmd) as profile:
+ profile(metadata)
+ profile_dir = env.get_arg("profile-directory")
+ assert os.path.exists(profile_dir)
+
+ assert not os.path.exists(profile_dir)
+
+
+CALLS = [0]
+
+
+def _return_profile(*args, **kw):
+ if CALLS[0] == 0:
+ CALLS[0] = 1
+ raise ProfileNotFoundError()
+
+ tempdir = tempfile.mkdtemp()
+
+ return tempdir
+
+
+@mock.patch("mozperftest.system.profile.get_profile", new=_return_profile)
+def test_conditionedprofile():
+ mach_cmd, metadata, env = get_running_env(profile_conditioned=True)
+
+ with Profile(env, mach_cmd) as profile:
+ profile(metadata)
+ profile_dir = env.get_arg("profile-directory")
+ assert os.path.exists(profile_dir)
+
+ assert not os.path.exists(profile_dir)
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozperftest/mozperftest/tests/test_proxy.py b/python/mozperftest/mozperftest/tests/test_proxy.py
new file mode 100644
index 0000000000..bfe1220e2a
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_proxy.py
@@ -0,0 +1,231 @@
+#!/usr/bin/env python
+import json
+import os
+import shutil
+import tempfile
+from unittest import mock
+
+import mozunit
+import pytest
+
+from mozperftest.environment import SYSTEM
+from mozperftest.system.proxy import OutputHandler
+from mozperftest.tests.support import get_running_env
+from mozperftest.utils import silence
+
+here = os.path.abspath(os.path.dirname(__file__))
+example_dump = os.path.join(here, "..", "system", "example.zip")
+
+
+class FakeOutputHandler:
+ def finished(self):
+ pass
+
+ def wait_for_port(self):
+ return 1234
+
+
+class FakeOutputHandlerFail:
+ def finished(self):
+ pass
+
+ def wait_for_port(self):
+ return None
+
+
+class ProcHandler:
+ def __init__(self, *args, **kw):
+ self.args = args
+ self.kw = kw
+ self.pid = 1234
+
+ def wait(self, *args):
+ return
+
+ run = wait
+
+ @property
+ def proc(self):
+ return self
+
+
+class ProcHandlerError:
+ def __init__(self, *args, **kw):
+ self.args = args
+ self.kw = kw
+ self.pid = 1234
+
+ def wait(self, *args):
+ return 1
+
+ run = wait
+
+ @property
+ def proc(self):
+ return self
+
+
+class FakeDevice:
+ def create_socket_connection(self, direction, local, remote):
+ return "A Fake socket"
+
+
+def running_env():
+ return get_running_env(proxy=True)
+
+
+def mock_download_file(url, dest):
+ shutil.copyfile(example_dump, dest)
+
+
+@mock.patch("mozperftest.system.proxy.download_file", mock_download_file)
+@mock.patch("mozperftest.system.proxy.ProcessHandler", new=ProcHandler)
+@mock.patch("mozperftest.system.proxy.OutputHandler", new=FakeOutputHandlerFail)
+@mock.patch("os.kill")
+def test_port_error(killer):
+ mach_cmd, metadata, env = running_env()
+ system = env.layers[SYSTEM]
+ with tempfile.TemporaryDirectory() as tmpdir:
+ recording = os.path.join(tmpdir, "recording.zip")
+ env.set_arg("proxy-mode", "record")
+ env.set_arg("proxy-file", recording)
+
+ with system as proxy, pytest.raises(ValueError) as excinfo, silence():
+ proxy(metadata)
+ assert "Unable to retrieve the port number from mozproxy" in str(excinfo.value)
+
+
+@mock.patch("mozperftest.system.proxy.download_file", mock_download_file)
+@mock.patch("mozperftest.system.proxy.ProcessHandler", new=ProcHandlerError)
+@mock.patch("mozperftest.system.proxy.OutputHandler", new=FakeOutputHandler)
+@mock.patch("os.kill")
+def test_proxy_error(killer):
+ mach_cmd, metadata, env = running_env()
+ system = env.layers[SYSTEM]
+ with tempfile.TemporaryDirectory() as tmpdir:
+ recording = os.path.join(tmpdir, "recording.zip")
+ env.set_arg("proxy-mode", "record")
+ env.set_arg("proxy-file", recording)
+
+ with pytest.raises(ValueError) as excinfo:
+ with system as proxy, silence():
+ proxy(metadata)
+ assert "mozproxy terminated early with return code 1" in str(excinfo.value)
+
+
+@mock.patch("mozperftest.system.proxy.download_file", mock_download_file)
+@mock.patch("mozperftest.system.proxy.ProcessHandler", new=ProcHandler)
+@mock.patch("mozperftest.system.proxy.OutputHandler", new=FakeOutputHandler)
+@mock.patch("os.kill")
+def test_playback_no_file(killer):
+ mach_cmd, metadata, env = running_env()
+ system = env.layers[SYSTEM]
+ env.set_arg("proxy-mode", "playback")
+
+ with system as proxy, pytest.raises(ValueError) as excinfo, silence():
+ proxy(metadata)
+ assert "Proxy file not provided!!" in str(excinfo.value)
+
+
+@mock.patch("mozperftest.system.proxy.download_file", mock_download_file)
+@mock.patch("mozperftest.system.proxy.ProcessHandler", new=ProcHandler)
+@mock.patch("mozperftest.system.proxy.OutputHandler", new=FakeOutputHandler)
+@mock.patch("os.kill")
+def test_playback_no_mode(killer):
+ mach_cmd, metadata, env = running_env()
+ system = env.layers[SYSTEM]
+ env.set_arg("proxy-file", example_dump)
+
+ with system as proxy, pytest.raises(ValueError) as excinfo, silence():
+ proxy(metadata)
+ assert "Proxy mode not provided please provide proxy mode" in str(excinfo.value)
+
+
+@mock.patch("mozperftest.system.proxy.OutputHandler", new=FakeOutputHandler)
+@mock.patch("mozperftest.system.proxy.ProcessHandler", new=ProcHandler)
+@mock.patch("mozperftest.system.proxy.ADBDevice", new=FakeDevice)
+@mock.patch("os.kill")
+def test_android_proxy(killer):
+ mach_cmd, metadata, env = running_env()
+ metadata.flavor = "mobile-browser"
+ system = env.layers[SYSTEM]
+ env.set_arg("proxy-mode", "playback")
+ env.set_arg("proxy-file", example_dump)
+
+ with system as proxy, silence():
+ proxy(metadata)
+
+ browser_prefs = metadata.get_options("browser_prefs")
+ assert browser_prefs["network.proxy.http_port"] == 1234
+
+
+@mock.patch("mozperftest.system.proxy.OutputHandler", new=FakeOutputHandler)
+@mock.patch("mozperftest.system.proxy.ProcessHandler", new=ProcHandler)
+@mock.patch("os.kill")
+def test_replay(killer):
+ mach_cmd, metadata, env = running_env()
+ system = env.layers[SYSTEM]
+ env.set_arg("proxy-mode", "playback")
+ env.set_arg("proxy-file", example_dump)
+
+ with system as proxy, silence():
+ proxy(metadata)
+
+ browser_prefs = metadata.get_options("browser_prefs")
+ assert browser_prefs["network.proxy.http_port"] == 1234
+
+
+@mock.patch("mozperftest.system.proxy.download_file", mock_download_file)
+@mock.patch("mozperftest.system.proxy.ProcessHandler", new=ProcHandler)
+@mock.patch("mozperftest.system.proxy.OutputHandler", new=FakeOutputHandler)
+@mock.patch("os.kill")
+def test_replay_url(killer):
+ mach_cmd, metadata, env = running_env()
+ system = env.layers[SYSTEM]
+ env.set_arg("proxy-mode", "playback")
+ env.set_arg("proxy-file", "http://example.dump")
+
+ with system as proxy, silence():
+ proxy(metadata)
+
+ browser_prefs = metadata.get_options("browser_prefs")
+ assert browser_prefs["network.proxy.http_port"] == 1234
+
+
+@mock.patch("mozperftest.system.proxy.download_file", mock_download_file)
+@mock.patch("mozperftest.system.proxy.ProcessHandler", new=ProcHandler)
+@mock.patch("mozperftest.system.proxy.OutputHandler", new=FakeOutputHandler)
+@mock.patch("os.kill")
+def test_record(killer):
+ mach_cmd, metadata, env = running_env()
+ system = env.layers[SYSTEM]
+ with tempfile.TemporaryDirectory() as tmpdir:
+ recording = os.path.join(tmpdir, "recording.zip")
+ env.set_arg("proxy-mode", "record")
+ env.set_arg("proxy-file", recording)
+
+ with system as proxy, silence():
+ proxy(metadata)
+
+ browser_prefs = metadata.get_options("browser_prefs")
+ assert browser_prefs["network.proxy.http_port"] == 1234
+
+
+@mock.patch("mozperftest.system.proxy.LOG")
+def test_output_handler(logged):
+ hdlr = OutputHandler()
+
+ hdlr(b"")
+ hdlr(b"simple line")
+ hdlr(json.dumps({"not": "expected data"}).encode())
+
+ hdlr.finished()
+ assert hdlr.wait_for_port() is None
+
+ # this catches the port
+ hdlr(json.dumps({"action": "", "message": "Proxy running on port 1234"}).encode())
+ assert hdlr.wait_for_port() == 1234
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozperftest/mozperftest/tests/test_runner.py b/python/mozperftest/mozperftest/tests/test_runner.py
new file mode 100644
index 0000000000..a6581757dd
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_runner.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from unittest import mock
+
+import mozunit
+import pytest
+from mozbuild.base import MachCommandBase # noqa
+
+from mozperftest.runner import main
+from mozperftest.utils import silence
+
+
+def test_main():
+ with pytest.raises(SystemExit), silence():
+ main(["--help"])
+
+
+def test_tools():
+ with mock.patch(
+ "mozperftest.runner._activate_mach_virtualenv", return_value="fake_path"
+ ) as _:
+ with pytest.raises(SystemExit), silence():
+ main(["tools"])
+
+
+@mock.patch("mozperftest.PerftestToolsArgumentParser")
+def test_side_by_side(arg, patched_mozperftest_tools):
+ with mock.patch(
+ "mozperftest.runner._activate_mach_virtualenv", return_value="fake_path"
+ ) as _, mock.patch(
+ "mozperftest.runner._create_artifacts_dir", return_value="fake_path"
+ ) as _, mock.patch(
+ "mozperftest.runner._save_params", return_value="fake_path"
+ ) as _:
+ main(
+ [
+ "tools",
+ "side-by-side",
+ "-t",
+ "fake-test-name",
+ ]
+ )
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozperftest/mozperftest/tests/test_script.py b/python/mozperftest/mozperftest/tests/test_script.py
new file mode 100644
index 0000000000..3947646fdb
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_script.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import mozunit
+import pytest
+
+from mozperftest.script import (
+ BadOptionTypeError,
+ MissingFieldError,
+ ParseError,
+ ScriptInfo,
+ ScriptType,
+)
+from mozperftest.tests.support import (
+ EXAMPLE_TEST,
+ EXAMPLE_XPCSHELL_TEST,
+ EXAMPLE_XPCSHELL_TEST2,
+ HERE,
+)
+
+
+def check_options(info):
+ assert info["options"]["default"]["perfherder"]
+ assert info["options"]["linux"]["perfherder_metrics"] == [
+ {"name": "speed", "unit": "bps_lin"}
+ ]
+ assert info["options"]["win"]["perfherder_metrics"] == [
+ {"name": "speed", "unit": "bps_win"}
+ ]
+ assert info["options"]["mac"]["perfherder_metrics"] == [
+ {"name": "speed", "unit": "bps_mac"}
+ ]
+
+
+def test_scriptinfo_bt():
+ info = ScriptInfo(EXAMPLE_TEST)
+ assert info["author"] == "N/A"
+ display = str(info)
+ assert "The description of the example test." in display
+ assert info.script_type == ScriptType.browsertime
+ check_options(info)
+
+
+@pytest.mark.parametrize("script", [EXAMPLE_XPCSHELL_TEST, EXAMPLE_XPCSHELL_TEST2])
+def test_scriptinfo_xpcshell(script):
+ info = ScriptInfo(script)
+ assert info["author"] == "N/A"
+
+ display = str(info)
+ assert "The description of the example test." in display
+ assert info.script_type == ScriptType.xpcshell
+ check_options(info)
+
+
+def test_scriptinfo_failure():
+ bad_example = HERE / "data" / "failing-samples" / "perftest_doc_failure_example.js"
+ with pytest.raises(MissingFieldError):
+ ScriptInfo(bad_example)
+
+
+def test_parserror():
+ exc = Exception("original")
+ error = ParseError("script", exc)
+ assert error.exception is exc
+ assert "original" in str(error)
+
+
+def test_update_args():
+ args = {"perfherder_metrics": [{"name": "yey"}]}
+ info = ScriptInfo(EXAMPLE_TEST)
+ new_args = info.update_args(**args)
+
+ # arguments should not be overriden
+ assert new_args["perfherder_metrics"] == [{"name": "yey"}]
+
+ # arguments in platform-specific options should
+ # override default options
+ assert new_args["verbose"]
+
+
+def test_update_args_metrics_list_failure():
+ args = {"perfherder_metrics": "yey"}
+ info = ScriptInfo(EXAMPLE_TEST)
+
+ with pytest.raises(BadOptionTypeError):
+ info.update_args(**args)
+
+
+def test_update_args_metrics_json_failure():
+ args = {"perfherder_metrics": ["yey"]}
+ info = ScriptInfo(EXAMPLE_TEST)
+
+ with pytest.raises(BadOptionTypeError):
+ info.update_args(**args)
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozperftest/mozperftest/tests/test_single_json_transformer.py b/python/mozperftest/mozperftest/tests/test_single_json_transformer.py
new file mode 100644
index 0000000000..9509948760
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_single_json_transformer.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python
+import mozunit
+
+from mozperftest.metrics.notebook.transformer import Transformer
+from mozperftest.metrics.notebook.transforms.single_json import SingleJsonRetriever
+
+
+def test_transform(data):
+ tfm = SingleJsonRetriever()
+
+ expected_result = [
+ {
+ "data": [
+ {"value": 101, "xaxis": 1},
+ {"value": 102, "xaxis": 1},
+ {"value": 103, "xaxis": 1},
+ ],
+ "subtest": "browserScripts.timings.firstPaint",
+ }
+ ]
+
+ actual_result = tfm.transform(data["data_1"])
+
+ assert actual_result == expected_result
+
+
+def test_merge(data):
+ tfm = SingleJsonRetriever()
+ sde = tfm.transform(data["data_1"])
+ sde.extend(tfm.transform(data["data_2"]))
+
+ expected_result = [
+ {
+ "data": [
+ {"value": 101, "xaxis": 1},
+ {"value": 102, "xaxis": 1},
+ {"value": 103, "xaxis": 1},
+ {"value": 201, "xaxis": 2},
+ {"value": 202, "xaxis": 2},
+ {"value": 203, "xaxis": 2},
+ ],
+ "subtest": "browserScripts.timings.firstPaint",
+ }
+ ]
+
+ actual_result = tfm.merge(sde)
+
+ assert actual_result == expected_result
+
+
+def test_process(files):
+ files = files["resources"]
+ file_1 = files["file_1"]
+ file_2 = files["file_2"]
+
+ tfm = Transformer([], SingleJsonRetriever())
+ tfm.files = [file_1, file_2]
+
+ expected_result = [
+ {
+ "data": [
+ {"value": 101, "xaxis": 1, "file": file_1},
+ {"value": 102, "xaxis": 1, "file": file_1},
+ {"value": 103, "xaxis": 1, "file": file_1},
+ {"value": 201, "xaxis": 2, "file": file_2},
+ {"value": 202, "xaxis": 2, "file": file_2},
+ {"value": 203, "xaxis": 2, "file": file_2},
+ ],
+ "subtest": "browserScripts.timings.firstPaint",
+ "name": "group_1",
+ }
+ ]
+
+ actual_result = tfm.process("group_1")
+
+ assert actual_result == expected_result
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozperftest/mozperftest/tests/test_transformer.py b/python/mozperftest/mozperftest/tests/test_transformer.py
new file mode 100644
index 0000000000..b5323eabe6
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_transformer.py
@@ -0,0 +1,161 @@
+import mozunit
+import pytest
+from jsonschema import ValidationError
+
+from mozperftest.metrics.exceptions import (
+ NotebookDuplicateTransformsError,
+ NotebookInvalidPathError,
+ NotebookInvalidTransformError,
+)
+from mozperftest.metrics.notebook.transformer import (
+ Transformer,
+ get_transformer,
+ get_transformers,
+)
+from mozperftest.tests.data.perftestetl_plugin import (
+ test_transformer_perftestetl_plugin_1,
+ test_transformer_perftestetl_plugin_2,
+)
+from mozperftest.tests.support import HERE, get_running_env
+
+_, metadata, _ = get_running_env()
+prefix = "PerftestNotebook"
+
+
+def test_init_failure():
+ class TempClass(object):
+ def temp_fun():
+ return 1
+
+ with pytest.raises(NotebookInvalidTransformError):
+ Transformer(custom_transformer=TempClass())
+
+
+def test_files_getter(files):
+ files = files["resources"]
+ assert files == Transformer(files, logger=metadata, prefix=prefix).files
+
+
+def test_files_setter(files):
+ files = files["resources"]
+ files = list(files.values())
+ tfm = Transformer(logger=metadata, prefix=prefix)
+ tfm.files = files
+ assert files == tfm.files
+
+
+def test_files_setter_failure():
+ tfm = Transformer(logger=metadata, prefix=prefix)
+ tfm.files = "fail"
+ assert not tfm.files
+
+
+def test_open_data(data, files):
+ tfm = Transformer(logger=metadata, prefix=prefix)
+
+ files = files["resources"]
+ json_1 = files["file_1"]
+ json_2 = files["file_2"]
+ txt_3 = files["file_3"]
+
+ # If a json file is open.
+ assert data["data_1"] == tfm.open_data(json_1)
+ assert data["data_2"] == tfm.open_data(json_2)
+ # If an other type file is open.
+ assert [str(data["data_3"])] == tfm.open_data(txt_3)
+
+ # Test failure
+ with pytest.raises(Exception):
+ tfm.open_data("fail")
+
+
+def test_jsonschema_valitate_failure(files):
+ class BadTransformer:
+ def transform(self, data):
+ return {"bad data": "bad data"}
+
+ def merge(self, sde):
+ return {"bad data": "bad data"}
+
+ files = files["resources"]
+ file_1 = files["file_1"]
+ file_2 = files["file_2"]
+
+ tfm = Transformer([], BadTransformer(), logger=metadata, prefix=prefix)
+ tfm.files = [file_1, file_2]
+ with pytest.raises(ValidationError):
+ tfm.process("name")
+
+
+def test_get_transformer():
+ path_1 = (
+ HERE
+ / "data"
+ / "perftestetl_plugin"
+ / "test_transformer_perftestetl_plugin_1.py"
+ )
+ assert (
+ get_transformer(path_1.as_posix()).__name__
+ == test_transformer_perftestetl_plugin_1.TestTransformer1.__name__
+ )
+
+ path_2 = (
+ "mozperftest.tests.data.perftestetl_plugin."
+ + "test_transformer_perftestetl_plugin_2:TestTransformer2"
+ )
+ assert (
+ get_transformer(path_2).__name__
+ == test_transformer_perftestetl_plugin_2.TestTransformer2.__name__
+ )
+
+
+def test_get_transformer_failure():
+ path_1 = HERE / "data" / "does-not-exist.py"
+ with pytest.raises(NotebookInvalidPathError):
+ get_transformer(path_1.as_posix())
+
+ path_2 = HERE / "data" / "does-not-exist"
+ with pytest.raises(ImportError):
+ get_transformer(path_2.as_posix())
+
+ path_3 = (
+ "mozperftest.tests.data.perftestetl_plugin."
+ + "test_transformer_perftestetl_plugin_2:TestTransformer3"
+ )
+ with pytest.raises(ImportError):
+ get_transformer(path_3)
+
+ path_4 = (
+ "mozperftest.tests.data.perftestetl_plugin."
+ + "test_transformer_perftestetl_plugin_3:TestTransformer3"
+ )
+ with pytest.raises(ImportError):
+ get_transformer(path_4)
+
+ with pytest.raises(NotebookInvalidTransformError):
+ get_transformer(__file__)
+
+
+def test_get_transformers():
+ dirpath = HERE / "data" / "perftestetl_plugin"
+ tfms = get_transformers(dirpath)
+ assert test_transformer_perftestetl_plugin_1.TestTransformer1.__name__ in tfms
+ assert test_transformer_perftestetl_plugin_2.TestTransformer2.__name__ in tfms
+
+
+def test_get_transformers_failure():
+ dirpath = HERE / "data" / "does-not-exist"
+ with pytest.raises(NotebookInvalidPathError):
+ get_transformers(dirpath)
+
+ dirpath = HERE / "data" / "perftestetl_plugin" / "test_transformer_1.py"
+ with pytest.raises(NotebookInvalidPathError):
+ get_transformers(dirpath)
+
+ dirpath = HERE / "data" / "multiple_transforms_error"
+ with pytest.raises(NotebookDuplicateTransformsError):
+ get_transformers(dirpath)
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozperftest/mozperftest/tests/test_utils.py b/python/mozperftest/mozperftest/tests/test_utils.py
new file mode 100644
index 0000000000..75c4a431a6
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_utils.py
@@ -0,0 +1,233 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import shutil
+import sys
+from datetime import date, timedelta
+from pathlib import Path
+from subprocess import CalledProcessError
+from unittest import mock
+
+import mozunit
+import pytest
+
+from mozperftest.tests.support import EXAMPLE_TESTS_DIR, requests_content, temp_file
+from mozperftest.utils import (
+ build_test_list,
+ checkout_python_script,
+ convert_day,
+ create_path,
+ download_file,
+ get_multi_tasks_url,
+ get_output_dir,
+ get_revision_namespace_url,
+ host_platform,
+ install_package,
+ load_class,
+ silence,
+)
+
+
+def test_silence():
+ with silence():
+ print("HIDDEN")
+
+
+def test_host_platform():
+ plat = host_platform()
+
+ # a bit useless... :)
+ if sys.platform.startswith("darwin"):
+ assert plat == "darwin"
+ else:
+ if sys.maxsize > 2 ** 32:
+ assert "64" in plat
+ else:
+ assert "64" not in plat
+
+
+def get_raise(*args, **kw):
+ raise Exception()
+
+
+@mock.patch("mozperftest.utils.requests.get", new=get_raise)
+def test_download_file_fails():
+ with temp_file() as target, silence(), pytest.raises(Exception):
+ download_file("http://I don't exist", Path(target), retry_sleep=0.1)
+
+
+@mock.patch("mozperftest.utils.requests.get", new=requests_content())
+def test_download_file_success():
+ with temp_file() as target:
+ download_file("http://content", Path(target), retry_sleep=0.1)
+ with open(target) as f:
+ assert f.read() == "some content"
+
+
+def _req(package):
+ class Req:
+ location = "nowhere"
+
+ @property
+ def satisfied_by(self):
+ return self
+
+ def check_if_exists(self, **kw):
+ pass
+
+ return Req()
+
+
+@mock.patch("pip._internal.req.constructors.install_req_from_line", new=_req)
+def test_install_package():
+ vem = mock.Mock()
+ vem.bin_path = "someplace"
+ with mock.patch("subprocess.check_call") as mock_check_call:
+ assert install_package(vem, "foo")
+ mock_check_call.assert_called_once_with(
+ [
+ vem.python_path,
+ "-m",
+ "pip",
+ "install",
+ "foo",
+ ]
+ )
+
+
+@mock.patch("pip._internal.req.constructors.install_req_from_line", new=_req)
+def test_install_package_failures():
+ vem = mock.Mock()
+ vem.bin_path = "someplace"
+
+ def check_call(*args):
+ raise CalledProcessError(1, "")
+
+ with pytest.raises(CalledProcessError):
+ with mock.patch("subprocess.check_call", new=check_call):
+ install_package(vem, "foo")
+
+ # we can also absorb the error, and just return False
+ assert not install_package(vem, "foo", ignore_failure=True)
+
+
+@mock.patch("mozperftest.utils.requests.get", requests_content())
+def test_build_test_list():
+ tests = [EXAMPLE_TESTS_DIR, "https://some/location/perftest_one.js"]
+ try:
+ files, tmp_dir = build_test_list(tests)
+ assert len(files) == 2
+ finally:
+ shutil.rmtree(tmp_dir)
+
+
+def test_convert_day():
+ day = "2020.06.08"
+ assert convert_day(day) == day
+ with pytest.raises(ValueError):
+ convert_day("2020-06-08")
+ today = date.today()
+ assert convert_day("today"), today.strftime("%Y.%m.%d")
+ yesterday = today - timedelta(days=1)
+ assert convert_day("yesterday") == yesterday.strftime("%Y.%m.%d")
+
+
+def test_revision_namespace_url():
+ route = "FakeBuildRoute"
+ day = "2020.06.08"
+ buildurl = get_revision_namespace_url(route, day=day)
+ assert day in buildurl and route in buildurl
+ assert buildurl.endswith(".revision")
+
+
+def test_multibuild_url():
+ route = "FakeBuildRoute"
+ day = "2020.06.08"
+ revision = "deadbeef"
+ buildurl = get_multi_tasks_url(route, revision, day=day)
+ assert all(item in buildurl for item in (route, day, revision))
+
+ with mock.patch("mozperftest.utils.date") as mockeddate:
+ mockeddate.today.return_value = mockeddate
+ mockeddate.strftime.return_value = "2020.07.09"
+ buildurl = get_multi_tasks_url(route, revision, day="today")
+ assert "2020.07.09" in buildurl and route in buildurl
+
+ with mock.patch("mozperftest.utils.timedelta"):
+ mockeddate.__sub__.return_value = mockeddate
+ mockeddate.strftime.return_value = "2020.08.09"
+ buildurl = get_multi_tasks_url(route, revision)
+ assert "2020.08.09" in buildurl and route in buildurl
+
+
+class ImportMe:
+ pass
+
+
+def test_load_class():
+
+ with pytest.raises(ImportError):
+ load_class("notimportable")
+
+ with pytest.raises(ImportError):
+ load_class("notim:por:table")
+
+ with pytest.raises(ImportError):
+ load_class("notim:portable")
+
+ with pytest.raises(ImportError):
+ load_class("mozperftest.tests.test_utils:NOEXIST")
+
+ klass = load_class("mozperftest.tests.test_utils:ImportMe")
+ assert klass is ImportMe
+
+
+class _Venv:
+ python_path = sys.executable
+
+
+def test_checkout_python_script():
+ with silence() as captured:
+ assert checkout_python_script(_Venv(), "lib2to3", ["--help"])
+
+ stdout, stderr = captured
+ stdout.seek(0)
+ assert stdout.read() == "=> lib2to3 [OK]\n"
+
+
+def test_run_python_script_failed():
+ with silence() as captured:
+ assert not checkout_python_script(_Venv(), "nothing")
+
+ stdout, stderr = captured
+ stdout.seek(0)
+ assert stdout.read().endswith("[FAILED]\n")
+
+
+def test_get_output_dir():
+ with temp_file() as temp_dir:
+ output_dir = get_output_dir(temp_dir)
+ assert output_dir.exists()
+ assert output_dir.is_dir()
+
+ output_dir = get_output_dir(output=temp_dir, folder="artifacts")
+ assert output_dir.exists()
+ assert output_dir.is_dir()
+ assert "artifacts" == output_dir.parts[-1]
+
+
+def test_create_path():
+ path = Path("path/doesnt/exist").resolve()
+ if path.exists():
+ shutil.rmtree(path.parent.parent)
+ try:
+ path = create_path(path)
+
+ assert path.exists()
+ finally:
+ shutil.rmtree(path.parent.parent)
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozperftest/mozperftest/tests/test_visualmetrics.py b/python/mozperftest/mozperftest/tests/test_visualmetrics.py
new file mode 100644
index 0000000000..6a017c00e3
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_visualmetrics.py
@@ -0,0 +1,103 @@
+#!/usr/bin/env python
+import json
+import os
+from unittest import mock
+
+import mozunit
+import pytest
+
+from mozperftest.environment import METRICS
+from mozperftest.tests.support import (
+ BT_DATA_VIDEO,
+ EXAMPLE_TEST,
+ get_running_env,
+ temp_file,
+)
+
+VM_RES = {
+ "SpeedIndex": 1031,
+ "FirstVisualChange": 533,
+ "LastVisualChange": 3166,
+ "VisualProgress": (
+ "0=0, 533=63, 700=63, 733=63, 900=63, 933=63, 1233=54,"
+ "1333=54, 1366=56, 1500=56, 1633=96, 1800=96, 1933=96,"
+ "2133=96, 2200=96, 2366=96, 2533=96, 2566=96, 2600=96,"
+ "2733=96, 2833=96, 2933=96, 3000=96, 3133=96,3166=100"
+ ),
+ "videoRecordingStart": 0,
+}
+
+
+def get_res(*args, **kw):
+ return json.dumps(VM_RES)
+
+
+def mocked_executable():
+ return ("ok", "ok")
+
+
+@mock.patch("mozperftest.test.browsertime.runner.install_package")
+@mock.patch(
+ "mozperftest.test.noderunner.NodeRunner.verify_node_install", new=lambda x: True
+)
+@mock.patch(
+ "mozperftest.test.browsertime.runner.BrowsertimeRunner._setup_node_packages",
+ new=lambda x, y: None,
+)
+@mock.patch("mozperftest.metrics.visualmetrics.which", new=lambda path: "ok")
+@mock.patch("mozbuild.nodeutil.find_node_executable", new=mocked_executable)
+@mock.patch("subprocess.check_output", new=get_res)
+def test_visual_metrics(device):
+ os.environ["VISUALMETRICS_PY"] = ""
+ mach_cmd, metadata, env = get_running_env(
+ visualmetrics=True,
+ perfherder=True,
+ verbose=True,
+ tests=[EXAMPLE_TEST],
+ )
+ metrics = env.layers[METRICS]
+
+ metadata.add_result({"results": str(BT_DATA_VIDEO.parent), "name": "browsertime"})
+
+ with temp_file() as output:
+ env.set_arg("output", output)
+ with metrics as m:
+ metadata = m(metadata)
+
+ output_file = metadata.get_output()
+ with open(output_file) as f:
+ output = json.loads(f.read())
+
+ # Check some metadata
+ assert output["application"]["name"] == "firefox"
+ visual_metrics = [i["name"] for i in output["suites"][1]["subtests"]]
+ assert "VisualProgress96" in visual_metrics
+
+
+@mock.patch("mozperftest.test.browsertime.runner.install_package")
+@mock.patch(
+ "mozperftest.test.noderunner.NodeRunner.verify_node_install", new=lambda x: True
+)
+@mock.patch(
+ "mozperftest.test.browsertime.runner.BrowsertimeRunner._setup_node_packages",
+ new=lambda x, y: None,
+)
+@mock.patch("mozperftest.metrics.visualmetrics.which", new=lambda path: None)
+def test_visual_metrics_no_ffmpeg(device):
+ os.environ["VISUALMETRICS_PY"] = ""
+ mach_cmd, metadata, env = get_running_env(
+ visualmetrics=True,
+ perfherder=True,
+ verbose=True,
+ tests=[EXAMPLE_TEST],
+ )
+ metrics = env.layers[METRICS]
+ metadata.add_result({"results": str(BT_DATA_VIDEO.parent), "name": "browsertime"})
+
+ with pytest.raises(FileNotFoundError):
+ with metrics as m:
+ metadata = m(metadata)
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozperftest/mozperftest/tests/test_visualtools.py b/python/mozperftest/mozperftest/tests/test_visualtools.py
new file mode 100644
index 0000000000..9e5af583d4
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_visualtools.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python
+import os
+from unittest import mock
+
+import mozunit
+import pytest
+
+from mozperftest.test.browsertime.visualtools import get_dependencies, xvfb
+from mozperftest.utils import temporary_env
+
+
+@mock.patch(
+ "mozperftest.test.browsertime.visualtools.find_executable", new=lambda name: "Xvfb"
+)
+def test_xvfb(*mocked):
+ with temporary_env(DISPLAY="ME"):
+ with mock.patch("subprocess.Popen") as mocked, xvfb():
+ mocked.assert_called()
+ assert os.environ["DISPLAY"] == "ME"
+
+
+@mock.patch(
+ "mozperftest.test.browsertime.visualtools.find_executable", new=lambda name: "Xvfb"
+)
+def test_xvfb_env(*mocked):
+ with temporary_env(DISPLAY=None):
+ with mock.patch("subprocess.Popen") as mocked, xvfb():
+ mocked.assert_called()
+ assert "DISPLAY" not in os.environ
+
+
+@mock.patch(
+ "mozperftest.test.browsertime.visualtools.find_executable", new=lambda name: None
+)
+def test_xvfb_none(*mocked):
+ with pytest.raises(FileNotFoundError), xvfb():
+ pass
+
+
+def test_get_dependencies():
+ # Making sure we get a list on all supported platforms.
+ # If we miss one, this raises a KeyError.
+ get_dependencies()
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozperftest/mozperftest/tests/test_webpagetest.py b/python/mozperftest/mozperftest/tests/test_webpagetest.py
new file mode 100644
index 0000000000..1c2c51ca0b
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_webpagetest.py
@@ -0,0 +1,271 @@
+import json
+import random
+from unittest import mock
+
+import pytest
+import requests
+
+import mozperftest.test.webpagetest as webpagetest
+from mozperftest.test.webpagetest import (
+ ACCEPTED_STATISTICS,
+ WPT_API_EXPIRED_MESSAGE,
+ WPTBrowserSelectionError,
+ WPTDataProcessingError,
+ WPTExpiredAPIKeyError,
+ WPTInvalidConnectionSelection,
+ WPTInvalidStatisticsError,
+ WPTInvalidURLError,
+ WPTLocationSelectionError,
+ WPTTimeOutError,
+)
+from mozperftest.tests.support import EXAMPLE_WPT_TEST, get_running_env
+
+WPT_METRICS = [
+ "firstContentfulPaint",
+ "timeToContentfulPaint",
+ "visualComplete90",
+ "firstPaint",
+ "visualComplete99",
+ "visualComplete",
+ "SpeedIndex",
+ "bytesIn",
+ "bytesOut",
+ "TTFB",
+ "fullyLoadedCPUms",
+ "fullyLoadedCPUpct",
+ "domElements",
+ "domContentLoadedEventStart",
+ "domContentLoadedEventEnd",
+ "loadEventStart",
+ "loadEventEnd",
+]
+
+
+class WPTTests:
+ def __init__(self, log):
+ self.log = log
+
+ def runTests(self, args):
+ return True
+
+
+def running_env(**kw):
+ return get_running_env(flavor="webpagetest", **kw)
+
+
+def init_placeholder_wpt_data(fvonly=False, invalid_results=False):
+ views = {"firstView": {}}
+ if not fvonly:
+ views["repeatView"] = {}
+ placeholder_data = {
+ "data": {
+ "summary": "websitelink.com",
+ "location": "ec2-us-east-1:Firefox",
+ "testRuns": 3,
+ "successfulFVRuns": 3,
+ "successfulRVRuns": 3,
+ "fvonly": fvonly,
+ "average": views,
+ "standardDeviation": views,
+ "median": views,
+ "runs": {"1": {"firstView": {"browserVersion": 101.1}}},
+ "url": "testurl.ca",
+ },
+ "webPagetestVersion": 21.0,
+ }
+ exclude_metrics = 0 if not invalid_results else 2
+ for metric in WPT_METRICS[exclude_metrics:]:
+ for view in views:
+ for stat in ACCEPTED_STATISTICS:
+ placeholder_data["data"][stat][view][metric] = random.randint(0, 10000)
+ placeholder_data["data"][stat][view][metric] = random.randint(0, 10000)
+ placeholder_data["data"][stat][view][metric] = random.randint(0, 10000)
+ return placeholder_data
+
+
+def init_mocked_request(
+ status_code, WPT_test_status_code=200, WPT_test_status_text="Ok", **kwargs
+):
+ mock_data = {
+ "data": {
+ "ec2-us-east-1": {"PendingTests": {"Queued": 3}, "Label": "California"},
+ "jsonUrl": "mock_test.com",
+ "summary": "Just a pageload test",
+ "url": "testurl.ca",
+ "remaining": 2000,
+ },
+ "statusCode": WPT_test_status_code,
+ "statusText": WPT_test_status_text,
+ }
+ for key, value in kwargs.items():
+ mock_data["data"][key] = value
+ mock_request = requests.Response()
+ mock_request.status_code = status_code
+ mock_request._content = json.dumps(mock_data).encode("utf-8")
+ return mock_request
+
+
+@mock.patch(
+ "mozperftest.test.webpagetest.WebPageTest.get_WPT_results",
+ return_value=init_placeholder_wpt_data(),
+)
+@mock.patch("mozperftest.utils.get_tc_secret", return_value={"wpt_key": "fake_key"})
+@mock.patch(
+ "mozperftest.test.webpagetest.WebPageTest.location_queue", return_value=None
+)
+@mock.patch("requests.get", return_value=init_mocked_request(200))
+@mock.patch("mozperftest.test.webpagetest.WPT_KEY_FILE", "tests/data/WPT_fakekey.txt")
+def test_webpagetest_no_issues_mocked_results(*mocked):
+ mach_cmd, metadata, env = running_env(tests=[str(EXAMPLE_WPT_TEST)])
+ test = webpagetest.WebPageTest(env, mach_cmd)
+ metadata.script["options"]["test_parameters"]["wait_between_requests"] = 1
+ metadata.script["options"]["test_parameters"]["first_view_only"] = 0
+ metadata.script["options"]["test_parameters"]["test_list"] = ["google.ca"]
+ test.run(metadata)
+
+
+@mock.patch("mozperftest.utils.get_tc_secret", return_value={"wpt_key": "fake_key"})
+@mock.patch(
+ "mozperftest.test.webpagetest.WebPageTest.location_queue", return_value=None
+)
+@mock.patch("requests.get", return_value=init_mocked_request(200))
+@mock.patch("mozperftest.test.webpagetest.WPT_KEY_FILE", "tests/data/WPT_fakekey.txt")
+def test_webpagetest_test_invalid_browser(*mocked):
+ mach_cmd, metadata, env = running_env(tests=[str(EXAMPLE_WPT_TEST)])
+ metadata.script["options"]["test_parameters"]["browser"] = "Invalid Browser"
+ test = webpagetest.WebPageTest(env, mach_cmd)
+ with pytest.raises(WPTBrowserSelectionError):
+ test.run(metadata)
+
+
+@mock.patch("mozperftest.utils.get_tc_secret", return_value={"wpt_key": "fake_key"})
+@mock.patch(
+ "mozperftest.test.webpagetest.WebPageTest.location_queue", return_value=None
+)
+@mock.patch("requests.get", return_value=init_mocked_request(200))
+@mock.patch("mozperftest.test.webpagetest.WPT_KEY_FILE", "tests/data/WPT_fakekey.txt")
+def test_webpagetest_test_invalid_connection(*mocked):
+ mach_cmd, metadata, env = running_env(tests=[str(EXAMPLE_WPT_TEST)])
+ test = webpagetest.WebPageTest(env, mach_cmd)
+ metadata.script["options"]["test_parameters"]["connection"] = "Invalid Connection"
+ with pytest.raises(WPTInvalidConnectionSelection):
+ test.run(metadata)
+
+
+@mock.patch("mozperftest.utils.get_tc_secret", return_value={"wpt_key": "fake_key"})
+@mock.patch(
+ "mozperftest.test.webpagetest.WebPageTest.location_queue", return_value=None
+)
+@mock.patch("requests.get", return_value=init_mocked_request(200))
+@mock.patch("mozperftest.test.webpagetest.WPT_KEY_FILE", "tests/data/WPT_fakekey.txt")
+def test_webpagetest_test_invalid_url(*mocked):
+ mach_cmd, metadata, env = running_env(tests=[str(EXAMPLE_WPT_TEST)])
+ test = webpagetest.WebPageTest(env, mach_cmd)
+ metadata.script["options"]["test_list"] = ["InvalidUrl"]
+ with pytest.raises(WPTInvalidURLError):
+ test.run(metadata)
+
+
+@mock.patch("mozperftest.utils.get_tc_secret", return_value={"wpt_key": "fake_key"})
+@mock.patch(
+ "mozperftest.test.webpagetest.WebPageTest.location_queue", return_value=None
+)
+@mock.patch("requests.get", return_value=init_mocked_request(200))
+@mock.patch("mozperftest.test.webpagetest.WPT_KEY_FILE", "tests/data/WPT_fakekey.txt")
+def test_webpagetest_test_invalid_statistic(*mocked):
+ mach_cmd, metadata, env = running_env(tests=[str(EXAMPLE_WPT_TEST)])
+ test = webpagetest.WebPageTest(env, mach_cmd)
+ metadata.script["options"]["test_parameters"]["statistics"] = ["Invalid Statistic"]
+ with pytest.raises(WPTInvalidStatisticsError):
+ test.run(metadata)
+ assert True
+
+
+@mock.patch("requests.get", return_value=init_mocked_request(200))
+@mock.patch("mozperftest.utils.get_tc_secret", return_value={"wpt_key": "fake_key"})
+@mock.patch(
+ "mozperftest.test.webpagetest.WebPageTest.request_with_timeout",
+ return_value={"data": {}},
+)
+@mock.patch("mozperftest.test.webpagetest.WPT_KEY_FILE", "tests/data/WPT_fakekey.txt")
+def test_webpagetest_test_invalid_location(*mocked):
+ mach_cmd, metadata, env = running_env(tests=[str(EXAMPLE_WPT_TEST)])
+ test = webpagetest.WebPageTest(env, mach_cmd)
+ metadata.script["options"]["test_parameters"]["location"] = "Invalid Location"
+ with pytest.raises(WPTLocationSelectionError):
+ test.run(metadata)
+
+
+@mock.patch("requests.get", return_value=init_mocked_request(200, 101))
+@mock.patch("mozperftest.utils.get_tc_secret", return_value={"wpt_key": "fake_key"})
+@mock.patch(
+ "mozperftest.test.webpagetest.WebPageTest.location_queue", return_value=None
+)
+@mock.patch("mozperftest.test.webpagetest.WPT_KEY_FILE", "tests/data/WPT_fakekey.txt")
+def test_webpagetest_test_timeout(*mocked):
+ mach_cmd, metadata, env = running_env(tests=[str(EXAMPLE_WPT_TEST)])
+ test = webpagetest.WebPageTest(env, mach_cmd)
+ metadata.script["options"]["test_parameters"]["timeout_limit"] = 2
+ metadata.script["options"]["test_parameters"]["wait_between_requests"] = 1
+ with pytest.raises(WPTTimeOutError):
+ test.run(metadata)
+ assert True
+
+
+@mock.patch(
+ "requests.get",
+ return_value=init_mocked_request(
+ 200, testRuns=3, successfulFVRuns=3, fvonly=True, location="BadLocation"
+ ),
+)
+@mock.patch("mozperftest.utils.get_tc_secret", return_value={"wpt_key": "fake_key"})
+@mock.patch(
+ "mozperftest.test.webpagetest.WebPageTest.location_queue", return_value=None
+)
+@mock.patch("mozperftest.test.webpagetest.WPT_KEY_FILE", "tests/data/WPT_fakekey.txt")
+def test_webpagetest_test_wrong_browserlocation(*mocked):
+ mach_cmd, metadata, env = running_env(tests=[str(EXAMPLE_WPT_TEST)])
+ metadata.script["options"]["test_list"] = ["google.ca"]
+ metadata.script["options"]["test_parameters"]["wait_between_requests"] = 1
+ test = webpagetest.WebPageTest(env, mach_cmd)
+ with pytest.raises(WPTBrowserSelectionError):
+ test.run(metadata)
+
+
+@mock.patch(
+ "mozperftest.test.webpagetest.WebPageTest.get_WPT_results",
+ return_value=init_placeholder_wpt_data(invalid_results=True),
+)
+@mock.patch("mozperftest.utils.get_tc_secret", return_value={"wpt_key": "fake_key"})
+@mock.patch(
+ "mozperftest.test.webpagetest.WebPageTest.location_queue", return_value=None
+)
+@mock.patch("requests.get", return_value=init_mocked_request(200))
+@mock.patch("mozperftest.test.webpagetest.WPT_KEY_FILE", "tests/data/WPT_fakekey.txt")
+def test_webpagetest_test_metric_not_found(*mocked):
+ mach_cmd, metadata, env = running_env(tests=[str(EXAMPLE_WPT_TEST)])
+ metadata.script["options"]["test_list"] = ["google.ca"]
+ metadata.script["options"]["test_parameters"]["wait_between_requests"] = 1
+ test = webpagetest.WebPageTest(env, mach_cmd)
+ with pytest.raises(WPTDataProcessingError):
+ test.run(metadata)
+
+
+@mock.patch("mozperftest.utils.get_tc_secret", return_value={"wpt_key": "fake_key"})
+@mock.patch(
+ "mozperftest.test.webpagetest.WebPageTest.location_queue", return_value=None
+)
+@mock.patch(
+ "requests.get",
+ return_value=init_mocked_request(
+ 200, WPT_test_status_code=400, WPT_test_status_text=WPT_API_EXPIRED_MESSAGE
+ ),
+)
+@mock.patch("mozperftest.test.webpagetest.WPT_KEY_FILE", "tests/data/WPT_fakekey.txt")
+def test_webpagetest_test_expired_api_key(*mocked):
+ mach_cmd, metadata, env = running_env(tests=[str(EXAMPLE_WPT_TEST)])
+ metadata.script["options"]["test_list"] = ["google.ca"]
+ metadata.script["options"]["test_parameters"]["wait_between_requests"] = 1
+ test = webpagetest.WebPageTest(env, mach_cmd)
+ with pytest.raises(WPTExpiredAPIKeyError):
+ test.run(metadata)
diff --git a/python/mozperftest/mozperftest/tests/test_xpcshell.py b/python/mozperftest/mozperftest/tests/test_xpcshell.py
new file mode 100644
index 0000000000..cf68d02744
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_xpcshell.py
@@ -0,0 +1,165 @@
+import json
+import shutil
+from unittest import mock
+
+import pytest
+
+from mozperftest import utils
+from mozperftest.environment import METRICS, SYSTEM, TEST
+from mozperftest.test import xpcshell
+from mozperftest.test.xpcshell import NoPerfMetricsError, XPCShellTestError
+from mozperftest.tests.support import (
+ EXAMPLE_XPCSHELL_TEST,
+ MOZINFO,
+ get_running_env,
+ temp_file,
+)
+
+
+class XPCShellTests:
+ def __init__(self, log):
+ self.log = log
+
+ def runTests(self, args):
+ self.log.suite_start("suite start")
+ self.log.test_start("test start")
+ self.log.process_output("1234", "line", "command")
+ self.log.log_raw({"action": "something"})
+ self.log.log_raw({"action": "log", "message": "message"})
+
+ # these are the metrics sent by the scripts
+ self.log.log_raw(
+ {
+ "action": "log",
+ "message": '"perfMetrics"',
+ "extra": {"metrics1": 1, "metrics2": 2},
+ }
+ )
+
+ self.log.log_raw(
+ {"action": "log", "message": '"perfMetrics"', "extra": {"metrics3": 3}}
+ )
+
+ self.log.test_end("test end")
+ self.log.suite_end("suite end")
+ return True
+
+
+class XPCShellTestsFail(XPCShellTests):
+ def runTests(self, args):
+ return False
+
+
+class XPCShellTestsNoPerfMetrics:
+ def __init__(self, log):
+ self.log = log
+
+ def runTests(self, args):
+ self.log.suite_start("suite start")
+ self.log.test_start("test start")
+ self.log.process_output("1234", "line", "command")
+ self.log.log_raw({"action": "something"})
+ self.log.log_raw({"action": "log", "message": "message"})
+
+ self.log.test_end("test end")
+ self.log.suite_end("suite end")
+ return True
+
+
+def running_env(**kw):
+ return get_running_env(flavor="xpcshell", xpcshell_mozinfo=MOZINFO, **kw)
+
+
+@mock.patch("runxpcshelltests.XPCShellTests", new=XPCShellTests)
+def test_xpcshell_metrics(*mocked):
+ mach_cmd, metadata, env = running_env(tests=[str(EXAMPLE_XPCSHELL_TEST)])
+
+ sys = env.layers[SYSTEM]
+ xpcshell = env.layers[TEST]
+
+ try:
+ with sys as s, xpcshell as x:
+ x(s(metadata))
+ finally:
+ shutil.rmtree(mach_cmd._mach_context.state_dir)
+
+ res = metadata.get_results()
+ assert len(res) == 1
+ assert res[0]["name"] == "test_xpcshell.js"
+ results = res[0]["results"]
+
+ assert results[0]["name"] == "metrics1"
+ assert results[0]["values"] == [1]
+
+
+def _test_xpcshell_fail(err, *mocked):
+ mach_cmd, metadata, env = running_env(tests=[str(EXAMPLE_XPCSHELL_TEST)])
+ sys = env.layers[SYSTEM]
+ xpcshell = env.layers[TEST]
+ try:
+ with sys as s, xpcshell as x, pytest.raises(err):
+ x(s(metadata))
+ finally:
+ shutil.rmtree(mach_cmd._mach_context.state_dir)
+
+
+@mock.patch("runxpcshelltests.XPCShellTests", new=XPCShellTestsFail)
+def test_xpcshell_metrics_fail(*mocked):
+ return _test_xpcshell_fail(XPCShellTestError, mocked)
+
+
+@mock.patch("runxpcshelltests.XPCShellTests", new=XPCShellTestsNoPerfMetrics)
+def test_xpcshell_no_perfmetrics(*mocked):
+ return _test_xpcshell_fail(NoPerfMetricsError, *mocked)
+
+
+@mock.patch("runxpcshelltests.XPCShellTests", new=XPCShellTests)
+def test_xpcshell_perfherder(*mocked):
+ return _test_xpcshell_perfherder(*mocked)
+
+
+@mock.patch("runxpcshelltests.XPCShellTests", new=XPCShellTests)
+def test_xpcshell_perfherder_on_try(*mocked):
+ old = utils.ON_TRY
+ utils.ON_TRY = xpcshell.ON_TRY = not utils.ON_TRY
+
+ try:
+ return _test_xpcshell_perfherder(*mocked)
+ finally:
+ utils.ON_TRY = old
+ xpcshell.ON_TRY = old
+
+
+def _test_xpcshell_perfherder(*mocked):
+ mach_cmd, metadata, env = running_env(
+ perfherder=True, xpcshell_cycles=10, tests=[str(EXAMPLE_XPCSHELL_TEST)]
+ )
+
+ sys = env.layers[SYSTEM]
+ xpcshell = env.layers[TEST]
+ metrics = env.layers[METRICS]
+
+ with temp_file() as output:
+ env.set_arg("output", output)
+ try:
+ with sys as s, xpcshell as x, metrics as m:
+ m(x(s(metadata)))
+ finally:
+ shutil.rmtree(mach_cmd._mach_context.state_dir)
+
+ output_file = metadata.get_output()
+ with open(output_file) as f:
+ output = json.loads(f.read())
+
+ # Check some metadata
+ assert output["application"]["name"] == "firefox"
+ assert output["framework"]["name"] == "mozperftest"
+
+ # Check some numbers in our data
+ assert len(output["suites"]) == 1
+ assert len(output["suites"][0]["subtests"]) == 3
+ assert "value" not in output["suites"][0]
+ assert any(r > 0 for r in output["suites"][0]["subtests"][0]["replicates"])
+
+ for subtest in output["suites"][0]["subtests"]:
+ assert subtest["name"].startswith("metrics")
diff --git a/python/mozperftest/mozperftest/tools.py b/python/mozperftest/mozperftest/tools.py
new file mode 100644
index 0000000000..660d43325e
--- /dev/null
+++ b/python/mozperftest/mozperftest/tools.py
@@ -0,0 +1,139 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import json
+import os
+import shutil
+import tempfile
+from pathlib import Path
+
+
+class PerformanceChangeDetected(Exception):
+ """Raised when a performance change is detected.
+
+ This failure happens with regressions, and improvements. There
+ is no unique failure for each of them.
+
+ TODO: We eventually need to be able to distinguish between these.
+ To do so, we would need to incorporate the "lower_is_better" settings
+ into the detection tooling.
+ """
+
+ pass
+
+
+def run_side_by_side(artifacts, kwargs):
+    """Generate side-by-side comparison files and copy them into `artifacts`.
+
+    Delegates to mozperftest_tools' SideBySide tool, running it inside a
+    temporary directory, then copies every produced cold-*/warm-* file
+    into the `artifacts` directory. The temp directory is always removed.
+    """
+    from mozperftest_tools.side_by_side import SideBySide
+
+    # SideBySide manages its own output location; drop any "output" argument
+    # so it isn't forwarded to SideBySide.run().
+    if "output" in kwargs:
+        kwargs.pop("output")
+
+    tempdir = tempfile.mkdtemp()
+    s = SideBySide(str(tempdir))
+    s.run(**kwargs)
+
+    try:
+        for file in os.listdir(tempdir):
+            if file.startswith("cold-") or file.startswith("warm-"):
+                print(f"Copying from {tempdir}/{file} to {artifacts}")
+                shutil.copy(Path(tempdir, file), artifacts)
+    finally:
+        shutil.rmtree(tempdir)
+
+
+def _gather_task_names(kwargs):
+ task_names = kwargs.get("task_names", [])
+ if len(task_names) == 0:
+ if kwargs.get("test_name", None) is None:
+ raise Exception("No test, or task names given!")
+ if kwargs.get("platform", None) is None:
+ raise Exception("No platform, or task names given!")
+ task_names.append(kwargs["platform"] + "-" + kwargs["test_name"])
+ return task_names
+
+
+def _get_task_splitter(task):
+ splitter = "/opt-"
+ if splitter not in task:
+ splitter = "/" + task.split("/")[-1].split("-")[0] + "-"
+ return splitter
+
+
+def _format_changes_to_str(all_results):
+ changes_detected = None
+ for task, results in all_results.items():
+ for pltype, metrics in results["metrics-with-changes"].items():
+ for metric, changes in metrics.items():
+ for revision, diffs in changes.items():
+ if changes_detected is None:
+ changes_detected = "REVISION PL_TYPE METRIC %-DIFFERENCE\n"
+ changes_detected += f"{revision} {pltype} {metric} {str(diffs)}\n"
+ return changes_detected
+
+
+def run_change_detector(artifacts, kwargs):
+ from mozperftest_tools.regression_detector import ChangeDetector
+
+ tempdir = tempfile.mkdtemp()
+ detector = ChangeDetector(tempdir)
+
+ all_results = {}
+ results_path = Path(artifacts, "results.json")
+ try:
+ for task in _gather_task_names(kwargs):
+ splitter = _get_task_splitter(task)
+
+ platform, test_name = task.split(splitter)
+ platform += splitter[:-1]
+
+ new_test_name = test_name
+ new_platform_name = platform
+ if kwargs["new_test_name"] is not None:
+ new_test_name = kwargs["new_test_name"]
+ if kwargs["new_platform"] is not None:
+ new_platform_name = kwargs["new_platform_name"]
+
+ all_changed_revisions, changed_metric_revisions = detector.detect_changes(
+ test_name=test_name,
+ new_test_name=new_test_name,
+ platform=platform,
+ new_platform=new_platform_name,
+ base_revision=kwargs["base_revision"],
+ new_revision=kwargs["new_revision"],
+ base_branch=kwargs["base_branch"],
+ new_branch=kwargs["new_branch"],
+ # Depth of -1 means auto-computed (everything in between the two given revisions),
+ # None is direct comparisons, anything else uses the new_revision as a start
+ # and goes backwards from there.
+ depth=kwargs.get("depth", None),
+ skip_download=False,
+ overwrite=False,
+ )
+
+ # The task names are unique, so we don't need to worry about
+ # them overwriting each other
+ all_results[task] = {}
+ all_results[task]["revisions-with-changes"] = list(all_changed_revisions)
+ all_results[task]["metrics-with-changes"] = changed_metric_revisions
+
+ changes_detected = _format_changes_to_str(all_results)
+ if changes_detected is not None:
+ print(changes_detected)
+ raise PerformanceChangeDetected(
+ "[ERROR] A significant performance change was detected in your patch! "
+ "See the logs above, or the results.json artifact that was produced for "
+ "more information."
+ )
+
+ finally:
+ shutil.rmtree(tempdir)
+
+ print(f"Saving change detection results to {str(results_path)}")
+ with results_path.open("w") as f:
+ json.dump(all_results, f, indent=4)
+
+
+TOOL_RUNNERS = {
+ "side-by-side": run_side_by_side,
+ "change-detector": run_change_detector,
+}
diff --git a/python/mozperftest/mozperftest/utils.py b/python/mozperftest/mozperftest/utils.py
new file mode 100644
index 0000000000..ff2c0b5faa
--- /dev/null
+++ b/python/mozperftest/mozperftest/utils.py
@@ -0,0 +1,478 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+import contextlib
+import functools
+import importlib
+import logging
+import os
+import shlex
+import shutil
+import subprocess
+import sys
+import tempfile
+from collections import defaultdict
+from datetime import date, datetime, timedelta
+from io import StringIO
+from pathlib import Path
+
+import requests
+from redo import retry
+from requests.packages.urllib3.util.retry import Retry
+
+RETRY_SLEEP = 10
+API_ROOT = "https://firefox-ci-tc.services.mozilla.com/api/index/v1"
+MULTI_REVISION_ROOT = f"{API_ROOT}/namespaces"
+MULTI_TASK_ROOT = f"{API_ROOT}/tasks"
+ON_TRY = "MOZ_AUTOMATION" in os.environ
+DOWNLOAD_TIMEOUT = 30
+
+
+@contextlib.contextmanager
+def silence(layer=None):
+    """Silence mach logging and capture stdout/stderr.
+
+    Patches every logging method found on MachLogger (and on `layer`, when
+    given) to write its raw arguments into the captured stdout, and swaps
+    sys.stdout/sys.stderr for StringIO buffers. Yields the two buffers.
+    If an exception escapes the body, the captured output is re-printed on
+    the restored streams so it isn't lost.
+    """
+    if layer is None:
+        to_patch = (MachLogger,)
+    else:
+        to_patch = (MachLogger, layer)
+
+    meths = ("info", "debug", "warning", "error", "log")
+    # Maps each patched object to {method name: original function} so the
+    # methods can be restored on exit.
+    patched = defaultdict(dict)
+
+    oldout, olderr = sys.stdout, sys.stderr
+    sys.stdout, sys.stderr = StringIO(), StringIO()
+
+    def _vacuum(*args, **kw):
+        # Replacement for the patched logging methods: dump the positional
+        # args into the captured stdout instead of calling the real logger.
+        sys.stdout.write(str(args))
+
+    for obj in to_patch:
+        for meth in meths:
+            if not hasattr(obj, meth):
+                continue
+            patched[obj][meth] = getattr(obj, meth)
+            setattr(obj, meth, _vacuum)
+
+    stdout = stderr = None
+    try:
+        # Some consumers expect file-like attributes on the streams; fake
+        # `buffer` and `fileno` so they keep working while captured.
+        sys.stdout.buffer = sys.stdout
+        sys.stderr.buffer = sys.stderr
+        sys.stdout.fileno = sys.stderr.fileno = lambda: -1
+        try:
+            yield sys.stdout, sys.stderr
+        except Exception:
+            # Snapshot the captured output before the streams are restored
+            # in the finally block, so it can be re-printed below.
+            sys.stdout.seek(0)
+            stdout = sys.stdout.read()
+            sys.stderr.seek(0)
+            stderr = sys.stderr.read()
+            raise
+    finally:
+        sys.stdout, sys.stderr = oldout, olderr
+        for obj, meths in patched.items():
+            for name, old_func in meths.items():
+                try:
+                    setattr(obj, name, old_func)
+                except Exception:
+                    pass
+        if stdout is not None:
+            print(stdout)
+        if stderr is not None:
+            print(stderr)
+
+
+def simple_platform():
+ plat = host_platform()
+
+ if plat.startswith("win"):
+ return "win"
+ elif plat.startswith("linux"):
+ return "linux"
+ else:
+ return "mac"
+
+
+def host_platform():
+ is_64bits = sys.maxsize > 2 ** 32
+
+ if sys.platform.startswith("win"):
+ if is_64bits:
+ return "win64"
+ elif sys.platform.startswith("linux"):
+ if is_64bits:
+ return "linux64"
+ elif sys.platform.startswith("darwin"):
+ return "darwin"
+
+ raise ValueError(f"platform not yet supported: {sys.platform}")
+
+
+class MachLogger:
+ """Wrapper around the mach logger to make logging simpler."""
+
+ def __init__(self, mach_cmd):
+ self._logger = mach_cmd.log
+
+ @property
+ def log(self):
+ return self._logger
+
+ def info(self, msg, name="mozperftest", **kwargs):
+ self._logger(logging.INFO, name, kwargs, msg)
+
+ def debug(self, msg, name="mozperftest", **kwargs):
+ self._logger(logging.DEBUG, name, kwargs, msg)
+
+ def warning(self, msg, name="mozperftest", **kwargs):
+ self._logger(logging.WARNING, name, kwargs, msg)
+
+ def error(self, msg, name="mozperftest", **kwargs):
+ self._logger(logging.ERROR, name, kwargs, msg)
+
+
+def install_package(virtualenv_manager, package, ignore_failure=False):
+    """Installs a package using the virtualenv manager.
+
+    Makes sure the package is really installed when the user already has it
+    in their local installation.
+
+    Returns True on success, or re-raise the error. If ignore_failure
+    is set to True, ignore the error and return False
+    """
+    # NOTE(review): relies on a pip-internal API; may break across pip
+    # versions.
+    from pip._internal.req.constructors import install_req_from_line
+
+    # Ensure that we are looking in the right places for packages. This
+    # is required in CI because pip installs in an area that is not in
+    # the search path.
+    venv_site_lib = str(Path(virtualenv_manager.bin_path, "..", "lib").resolve())
+    venv_site_packages = str(
+        Path(
+            venv_site_lib,
+            f"python{sys.version_info.major}.{sys.version_info.minor}",
+            "site-packages",
+        )
+    )
+    if venv_site_packages not in sys.path and ON_TRY:
+        sys.path.insert(0, venv_site_packages)
+
+    req = install_req_from_line(package)
+    req.check_if_exists(use_user_site=False)
+    # already installed, check if it's in our venv
+    if req.satisfied_by is not None:
+        site_packages = os.path.abspath(req.satisfied_by.location)
+        if site_packages.startswith(venv_site_lib):
+            # already installed in this venv, we can skip
+            return True
+    # silence() swallows pip's console output during the install.
+    with silence():
+        try:
+            subprocess.check_call(
+                [virtualenv_manager.python_path, "-m", "pip", "install", package]
+            )
+            return True
+        except Exception:
+            if not ignore_failure:
+                raise
+    return False
+
+
+# on try, we create tests packages where tests, like
+# xpcshell tests, don't have the same path.
+# see - python/mozbuild/mozbuild/action/test_archive.py
+# this mapping will map paths when running there.
+# The key is the source path, and the value the ci path
+_TRY_MAPPING = {Path("netwerk"): Path("xpcshell", "tests", "netwerk")}
+
+
+def build_test_list(tests):
+    """Collects tests given a list of directories, files and URLs.
+
+    Returns a tuple containing the list of tests found and a temp dir for tests
+    that were downloaded from an URL. The temp dir is None when nothing was
+    downloaded; the caller is responsible for removing it.
+    Raises FileNotFoundError when a local path does not exist.
+    """
+    temp_dir = None
+
+    # A single string is accepted and treated as a one-element list.
+    if isinstance(tests, str):
+        tests = [tests]
+    res = []
+    for test in tests:
+        if test.startswith("http"):
+            # URL: download into a (lazily created) shared temp dir.
+            if temp_dir is None:
+                temp_dir = tempfile.mkdtemp()
+            target = Path(temp_dir, test.split("/")[-1])
+            download_file(test, target)
+            res.append(str(target))
+            continue
+
+        p_test = Path(test)
+        # On try, the test archive lays out files differently; remap the
+        # source path to its CI location (see _TRY_MAPPING).
+        if ON_TRY and not p_test.resolve().exists():
+            # until we have pathlib.Path.is_relative_to() (3.9)
+            for src_path, ci_path in _TRY_MAPPING.items():
+                src_path, ci_path = str(src_path), str(ci_path)
+                if test.startswith(src_path):
+                    p_test = Path(test.replace(src_path, ci_path))
+                    break
+
+        test = p_test.resolve()
+
+        if test.is_file():
+            res.append(str(test))
+        elif test.is_dir():
+            # Directories are scanned recursively for perftest_*.js scripts.
+            for file in test.rglob("perftest_*.js"):
+                res.append(str(file))
+        else:
+            raise FileNotFoundError(str(test))
+    res.sort()
+    return res, temp_dir
+
+
+def download_file(url, target, retry_sleep=RETRY_SLEEP, attempts=3):
+    """Downloads a file, given an URL in the target path.
+
+    The function will attempt several times on failures.
+
+    `target` is a pathlib.Path; its parent directory is created if needed.
+    Returns the target path. Retries are handled by redo.retry with
+    `retry_sleep` seconds between `attempts` tries.
+    """
+
+    def _download_file(url, target):
+        # Stream the response so large files are written chunk by chunk
+        # instead of being held in memory.
+        req = requests.get(url, stream=True, timeout=30)
+        target_dir = target.parent.resolve()
+        if str(target_dir) != "":
+            target_dir.mkdir(exist_ok=True)
+
+        with target.open("wb") as f:
+            for chunk in req.iter_content(chunk_size=1024):
+                if not chunk:
+                    continue
+                f.write(chunk)
+                f.flush()
+        return target
+
+    return retry(
+        _download_file,
+        args=(url, target),
+        attempts=attempts,
+        sleeptime=retry_sleep,
+        jitter=0,
+    )
+
+
+@contextlib.contextmanager
+def temporary_env(**env):
+ old = {}
+ for key, value in env.items():
+ old[key] = os.environ.get(key)
+ if value is None and key in os.environ:
+ del os.environ[key]
+ elif value is not None:
+ os.environ[key] = value
+ try:
+ yield
+ finally:
+ for key, value in old.items():
+ if value is None and key in os.environ:
+ del os.environ[key]
+ elif value is not None:
+ os.environ[key] = value
+
+
+def convert_day(day):
+ if day in ("yesterday", "today"):
+ curr = date.today()
+ if day == "yesterday":
+ curr = curr - timedelta(1)
+ day = curr.strftime("%Y.%m.%d")
+ else:
+ # verify that the user provided string is in the expected format
+ # if it can't parse it, it'll raise a value error
+ datetime.strptime(day, "%Y.%m.%d")
+
+ return day
+
+
+def get_revision_namespace_url(route, day="yesterday"):
+    """Builds a URL to obtain all the namespaces of a given build route for a single day.
+
+    `day` accepts "today", "yesterday", or an explicit "%Y.%m.%d" date.
+    """
+    day = convert_day(day)
+    return f"""{MULTI_REVISION_ROOT}/{route}.{day}.revision"""
+
+
+def get_multi_tasks_url(route, revision, day="yesterday"):
+    """Builds a URL to obtain all the tasks of a given build route for a single day.
+
+    `day` accepts "today", "yesterday", or an explicit "%Y.%m.%d" date.
+    (NOTE(review): the old docstring described a `previous` flag that does
+    not exist; the date is controlled by `day` instead.)
+    """
+    day = convert_day(day)
+    return f"""{MULTI_TASK_ROOT}/{route}.{day}.revision.{revision}"""
+
+
def strtobool(val):
    """Convert a truthy/falsy value to 1/0 (bools and ints pass through bool()).

    Strings are matched case-insensitively against the usual truth words;
    any other type, or an unrecognized string, raises ValueError.
    """
    if isinstance(val, (bool, int)):
        return bool(val)
    # Bug fix: the original tested `isinstance(bool, str)` — always False —
    # so every string input (even "yes") raised ValueError.
    if not isinstance(val, str):
        raise ValueError(val)
    val = val.lower()
    if val in ("y", "yes", "t", "true", "on", "1"):
        return 1
    if val in ("n", "no", "f", "false", "off", "0"):
        return 0
    raise ValueError("invalid truth value %r" % (val,))
+
+
@contextlib.contextmanager
def temp_dir():
    """Context manager yielding a fresh temporary directory path.

    The directory and its contents are removed when the context exits,
    whether normally or via an exception.
    """
    path = tempfile.mkdtemp()
    try:
        yield path
    finally:
        shutil.rmtree(path)
+
+
def load_class(path):
    """Loads a class given its path and returns it.

    The path is a string of the form `package.module:class` that points
    to the class to be imported.

    If it can't find it, or if the path is malformed,
    an ImportError is raised.
    """
    if ":" not in path:
        raise ImportError(f"Malformed path '{path}'")
    elmts = path.split(":")
    if len(elmts) != 2:
        raise ImportError(f"Malformed path '{path}'")
    mod_name, klass_name = elmts
    try:
        mod = importlib.import_module(mod_name)
    except ModuleNotFoundError as err:
        # Chain the original error so the underlying import failure
        # stays visible in the traceback.
        raise ImportError(f"Can't find '{mod_name}'") from err
    try:
        klass = getattr(mod, klass_name)
    except AttributeError as err:
        raise ImportError(f"Can't find '{klass_name}' in '{mod_name}'") from err
    return klass
+
+
def run_script(cmd, cmd_args=None, verbose=False, display=False, label=None):
    """Run a command in a subprocess, reporting progress on stdout.

    Returns (True, output_bytes) on success, or (False, exception) when
    the command exits with a non-zero status.
    """
    if isinstance(cmd, str):
        cmd = shlex.split(cmd)

    if label is None:
        try:
            # shlex.join only exists on Python >= 3.8.
            label = shlex.join(cmd)
        except AttributeError:
            label = subprocess.list2cmdline(cmd)
    sys.stdout.write(f"=> {label} ")

    args = cmd if cmd_args is None else cmd + list(cmd_args)
    sys.stdout.flush()

    def _echo(data):
        # Dump raw subprocess output line by line, decoded as UTF-8.
        for raw_line in data.split(b"\n"):
            sys.stdout.write(raw_line.decode("utf8") + "\n")

    try:
        if verbose:
            sys.stdout.write(f"\nRunning {' '.join(args)}\n")
            sys.stdout.flush()
        output = subprocess.check_output(args)
        if display:
            sys.stdout.write("\n")
            _echo(output)
        sys.stdout.write("[OK]\n")
        sys.stdout.flush()
        return True, output
    except subprocess.CalledProcessError as error:
        _echo(error.output)
        sys.stdout.write("[FAILED]\n")
        sys.stdout.flush()
        return False, error
+
+
def run_python_script(
    virtualenv_manager,
    module,
    module_args=None,
    verbose=False,
    display=False,
    label=None,
):
    """Run a Python module in isolation, via the virtualenv's interpreter.

    Thin wrapper around `run_script`; the label defaults to the module name.
    """
    command = [virtualenv_manager.python_path, "-m", module]
    return run_script(
        command,
        module_args,
        verbose=verbose,
        display=display,
        label=module if label is None else label,
    )
+
+
def checkout_script(cmd, cmd_args=None, verbose=False, display=False, label=None):
    """Run `cmd` via `run_script` and return only the success boolean."""
    success, _ = run_script(cmd, cmd_args, verbose, display, label)
    return success
+
+
def checkout_python_script(
    virtualenv_manager,
    module,
    module_args=None,
    verbose=False,
    display=False,
    label=None,
):
    """Run a module via `run_python_script` and return only the success boolean."""
    success, _ = run_python_script(
        virtualenv_manager, module, module_args, verbose, display, label
    )
    return success
+
+
# Taskcluster secret endpoints. `_URL` targets the conditioned-profiles
# secret ({0}=server, {1}=path separator, {2}=SCM level); `_WPT_URL`
# targets the WPT login secret ({0}=server, {1}=SCM level).
_URL = (
    "{0}/secrets/v1/secret/project"
    "{1}releng{1}gecko{1}build{1}level-{2}{1}conditioned-profiles"
)
_WPT_URL = "{0}/secrets/v1/secret/project/perftest/gecko/level-{1}/perftest-login"
# Fallback server when TASKCLUSTER_PROXY_URL is not set in the environment.
_DEFAULT_SERVER = "https://firefox-ci-tc.services.mozilla.com"
+
+
@functools.lru_cache()
def get_tc_secret(wpt=False):
    """Returns the Taskcluster secret.

    When `wpt` is True, fetches the WPT login secret instead of the
    conditioned-profiles one. Raises an OSError when not running on try.
    """
    if not ON_TRY:
        raise OSError("Not running in Taskcluster")
    session = requests.Session()
    # `retry_policy` (renamed from `retry`) avoids shadowing the
    # module-level `retry` helper used elsewhere in this file.
    retry_policy = Retry(
        total=5, backoff_factor=0.1, status_forcelist=[500, 502, 503, 504]
    )
    http_adapter = requests.adapters.HTTPAdapter(max_retries=retry_policy)
    session.mount("https://", http_adapter)
    session.mount("http://", http_adapter)
    server = os.environ.get("TASKCLUSTER_PROXY_URL", _DEFAULT_SERVER)
    level = os.environ.get("MOZ_SCM_LEVEL", "1")
    if wpt:
        secrets_url = _WPT_URL.format(server, level)
    else:
        secrets_url = _URL.format(server, "%2F", level)
    res = session.get(secrets_url, timeout=DOWNLOAD_TIMEOUT)
    res.raise_for_status()
    return res.json()["secret"]
+
+
def get_output_dir(output, folder=None):
    """Resolve (creating if needed) the directory where results are written.

    `folder`, when given, is appended as a subdirectory of `output`.
    Raises an Exception when `output` is None.
    """
    if output is None:
        raise Exception("Output path was not provided.")

    target = Path(output) if folder is None else Path(output, folder)
    # Create the whole tree up front so callers can write immediately.
    target.mkdir(parents=True, exist_ok=True)
    return target.resolve()
+
+
def create_path(path):
    """Create `path` (and any missing parents) and return it.

    Replaces the original hand-rolled recursion with the equivalent
    `Path.mkdir(parents=True, exist_ok=True)`. The exists() fast-path is
    kept so an existing non-directory path is still returned unchanged,
    as before.
    """
    if not path.exists():
        path.mkdir(parents=True, exist_ok=True)
    return path
diff --git a/python/mozperftest/perfdocs/config.yml b/python/mozperftest/perfdocs/config.yml
new file mode 100644
index 0000000000..f9f15e102c
--- /dev/null
+++ b/python/mozperftest/perfdocs/config.yml
@@ -0,0 +1,44 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+---
+name: mozperftest
+manifest: None
+static-only: False
+suites:
+ netwerk/test/perf:
+    description: "Performance tests from the 'netwerk/test/perf' folder."
+ tests:
+ youtube-scroll: ""
+ facebook-scroll: ""
+ cloudflare: ""
+ controlled: ""
+ g-search: ""
+ g-image: ""
+ lq-fetch: ""
+ youtube-noscroll: ""
+ netwerk/test/unit:
+ description: "Performance tests from the 'netwerk/test/unit' folder."
+ tests:
+ http3 raw: ""
+ testing/performance:
+ description: "Performance tests from the 'testing/performance' folder."
+ tests:
+ Politico Link: ""
+ BBC Link: ""
+ JSConf (cold): ""
+ VIEW: ""
+ main: ""
+ Facebook: ""
+ YouTube Link: ""
+ pageload: ""
+ JSConf (warm): ""
+ perfstats: ""
+ webpagetest-firefox: ""
+ webpagetest-chrome: ""
+ android-startup: ""
+
+ browser/base/content/test:
+ description: "Performance tests from the 'browser/base/content/test' folder."
+ tests:
+ Dom-size: ""
diff --git a/python/mozperftest/perfdocs/developing.rst b/python/mozperftest/perfdocs/developing.rst
new file mode 100644
index 0000000000..97125d8729
--- /dev/null
+++ b/python/mozperftest/perfdocs/developing.rst
@@ -0,0 +1,154 @@
+Developing in mozperftest
+=========================
+
+Architecture overview
+---------------------
+
+`mozperftest` implements a mach command that is a thin wrapper on the
+top of `runner.py`, which allows us to run the tool without having to go through
+a mach call. Command arguments are prepared in `argparser.py` and then made
+available for the runner.
+
+The runner creates a `MachEnvironment` instance (see `environment.py`) and a
+`Metadata` instance (see `metadata.py`). These two objects are shared during the
+whole test and used to share data across all parts.
+
+The runner then calls `MachEnvironment.run`, which is in charge of running the test.
+The `MachEnvironment` instance runs a sequence of **layers**.
+
+Layers are classes responsible for a single aspect of a performance test. They
+are organized in three categories:
+
+- **system**: anything that sets up and tears down some resources or services
+ on the system. Existing system layers: **android**, **proxy**
+- **test**: layers that are in charge of running a test to collect metrics.
+ Existing test layers: **browsertime** and **androidlog**
+- **metrics**: all layers that process the metrics to turn them into usable
+  metrics. Existing metrics layers: **perfherder** and **console**
+
+The MachEnvironment instance collects a series of layers for each category and
+runs them sequentially.
+
+The goal of this organization is to allow adding new performance tests runners
+that will be based on a specific combination of layers. To avoid messy code,
+we need to make sure that each layer represents a single aspect of the process
+and that it is completely independent from other layers (besides sharing the data
+through the common environment.)
+
+For instance, we could use `perftest` to run a C++ benchmark by implementing a
+new **test** layer.
+
+
+Layer
+-----
+
+A layer is a class that inherits from `mozperftest.layers.Layer` and implements
+a few methods and class variables.
+
+List of methods and variables:
+
+- `name`: name of the layer (class variable, mandatory)
+- `activated`: boolean to activate by default the layer (class variable, False)
+- `user_exception`: will trigger the `on_exception` hook when an exception occurs
+- `arguments`: dict containing arguments. Each argument is following
+ the `argparser` standard
+- `run(self, metadata)`: called to execute the layer
+- `setup(self)`: called when the layer is about to be executed
+- `teardown(self)`: called when the layer is exiting
+
+Example::
+
+ class EmailSender(Layer):
+ """Sends an email with the results
+ """
+ name = "email"
+ activated = False
+
+ arguments = {
+ "recipient": {
+ "type": str,
+ "default": "tarek@mozilla.com",
+ "help": "Recipient",
+ },
+ }
+
+ def setup(self):
+ self.server = smtplib.SMTP(smtp_server,port)
+
+ def teardown(self):
+ self.server.quit()
+
+ def __call__(self, metadata):
+ self.server.send_email(self.get_arg("recipient"), metadata.results())
+
+
+It can then be added to one of the top functions that are used to create a list
+of layers for each category:
+
+- **mozperftest.metrics.pick_metrics** for the metrics category
+- **mozperftest.system.pick_system** for the system category
+- **mozperftest.test.pick_browser** for the test category
+
+And also added in each `get_layers` function in each of those category.
+The `get_layers` functions are invoked when building the argument parser.
+
+In our example, adding the `EmailSender` layer will add two new options:
+
+- **--email** a flag to activate the layer
+- **--email-recipient**
+
+
+Important layers
+----------------
+
+**mozperftest** can be used to run performance tests against browsers using the
+**browsertime** test layer. It leverages the `browsertime.js
+<https://www.sitespeed.io/documentation/browsertime/>`_ framework and provides
+a full integration into Mozilla's build and CI systems.
+
+Browsertime uses the selenium webdriver client to drive the browser, and
+provides some metrics to measure performance during a user journey.
+
+
+Coding style
+------------
+
+For the coding style, we want to:
+
+- Follow `PEP 257 <https://www.python.org/dev/peps/pep-0257/>`_ for docstrings
+- Avoid complexity as much as possible
+- Use modern Python 3 code (for instance `pathlib` instead of `os.path`)
+- Avoid dependencies on Mozilla build projects and frameworks as much as possible
+ (mozharness, mozbuild, etc), or make sure they are isolated and documented
+
+
+Landing patches
+---------------
+
+.. warning::
+
+ It is mandatory for each patch to have a test. Any change without a test
+ will be rejected.
+
+Before landing a patch for mozperftest, make sure you run `perftest-test`::
+
+ % ./mach perftest-test
+ => black [OK]
+ => flake8 [OK]
+ => remove old coverage data [OK]
+ => running tests [OK]
+ => coverage
+ Name Stmts Miss Cover Missing
+ ------------------------------------------------------------------------------------------
+ mozperftest/metrics/notebook/analyzer.py 29 20 31% 26-36, 39-42, 45-51
+ ...
+ mozperftest/system/proxy.py 37 0 100%
+ ------------------------------------------------------------------------------------------
+ TOTAL 1614 240 85%
+
+ [OK]
+
+The command will run `black`, `flake8` and also make sure that the test coverage has not regressed.
+
+You can use the `-s` option to bypass flake8/black to speed up your workflow, but make
+sure you do a full tests run. You can also pass the name of one single test module.
diff --git a/python/mozperftest/perfdocs/index.rst b/python/mozperftest/perfdocs/index.rst
new file mode 100644
index 0000000000..8c313197b3
--- /dev/null
+++ b/python/mozperftest/perfdocs/index.rst
@@ -0,0 +1,20 @@
+===========
+Mozperftest
+===========
+
+**Mozperftest** can be used to run performance tests.
+
+
+.. toctree::
+
+ running
+ tools
+ writing
+ developing
+ vision
+
+The following documents all testing we have for mozperftest.
+If the owner does not specify the Usage and Description, it's marked N/A.
+
+{documentation}
+If you have any questions, please see this `wiki page <https://wiki.mozilla.org/TestEngineering/Performance#Where_to_find_us>`_.
diff --git a/python/mozperftest/perfdocs/running.rst b/python/mozperftest/perfdocs/running.rst
new file mode 100644
index 0000000000..ed8d9947a9
--- /dev/null
+++ b/python/mozperftest/perfdocs/running.rst
@@ -0,0 +1,51 @@
+Running a performance test
+==========================
+
+You can run `perftest` locally or in Mozilla's CI
+
+Running locally
+---------------
+
+Running a test is as simple as calling it using `mach perftest` in a mozilla-central source
+checkout::
+
+ $ ./mach perftest
+
+The `mach` command will bootstrap the installation of all required tools for the
+framework to run, and display a selection screen to pick a test. Once the
+selection is done, the performance test will run locally.
+
+If you know what test you want to run, you can use its path explicitly::
+
+ $ ./mach perftest perftest_script.js
+
+`mach perftest` comes with numerous options, and the test script should provide
+decent defaults so you don't have to bother with them. If you need to tweak some
+options, you can use `./mach perftest --help` to learn about them.
+
+
+Running in the CI
+-----------------
+
+.. warning::
+
+ If you are looking for how to run performance tests in CI and ended up here, you might want to checkout :ref:`Mach Try Perf`.
+
+.. warning::
+
+ If you plan to run tests often in the CI for android, you should contact the android
+ infra team to make sure there's availability in our pool of devices.
+
+You can run in the CI directly from the `mach perftest` command by adding the `--push-to-try` option
+to your locally working perftest call.
+
+This call will run the fuzzy selector and then send the job into our CI::
+
+ $ ./mach perftest --push-to-try
+
+We have phones on bitbar that can run your android tests. Tests are fairly fast
+to run in the CI because they use sparse profiles. Depending on the
+availability of workers, once the task starts, it takes around 15 minutes to start
+the test.
+
+
diff --git a/python/mozperftest/perfdocs/tools.rst b/python/mozperftest/perfdocs/tools.rst
new file mode 100644
index 0000000000..4bb975e9f9
--- /dev/null
+++ b/python/mozperftest/perfdocs/tools.rst
@@ -0,0 +1,21 @@
+Running a performance tool
+==========================
+
+You can run `perftest-tools` locally.
+
+Running locally
+---------------
+
+You can run `mach perftest-tools` in a mozilla-central source
+checkout::
+
+ $ ./mach perftest-tools side-by-side --help
+
+The `mach` command will bootstrap the installation of all required dependencies for the
+side-by-side tool to run.
+
+The following arguments are required: `-t/--test-name`, `--base-revision`, `--new-revision`,
+`--base-platform`
+
+The `--help` argument will explain more about what arguments you need to
+run in order to use the tool.
diff --git a/python/mozperftest/perfdocs/vision.rst b/python/mozperftest/perfdocs/vision.rst
new file mode 100644
index 0000000000..7f176cf6a7
--- /dev/null
+++ b/python/mozperftest/perfdocs/vision.rst
@@ -0,0 +1,66 @@
+Vision
+======
+
+The `mozperftest` project was created with the intention to replace all
+existing performance testing frameworks that exist in the mozilla central
+source tree with a single one, and make performance tests a standardized, first-class
+citizen, alongside mochitests and xpcshell tests.
+
+We want to give the ability to any developer to write performance tests in
+their component, both locally and in the CI, exactly like how they would do with
+`xpcshell` tests and `mochitests`.
+
+Historically, we have `Talos`, that provided a lot of different tests, from
+micro-benchmarks to page load tests. From there we had `Raptor`, that was a
+fork of Talos, focusing on page loads only. Then `mach browsertime` was added,
+which was a wrapper around the `browsertime` tool.
+
+All those frameworks besides `mach browsertime` were mainly focusing on working
+well in the CI, and were hard to use locally. `mach browsertime` worked locally but
+not on all platforms and was specific to the browsertime framework.
+
+`mozperftest` currently provides the `mach perftest` command, that will scan
+for all tests that are declared in ini files such as
+https://searchfox.org/mozilla-central/source/netwerk/test/perf/perftest.ini and
+registered under **PERFTESTS_MANIFESTS** in `moz.build` files such as
+https://searchfox.org/mozilla-central/source/netwerk/test/moz.build#17
+
+If you launch `./mach perftest` without any parameters, you will get a full list
+of available tests, and you can pick and run one. Adding `--push-to-try` will
+run it on try.
+
+The framework loads perf tests and reads their metadata, which can be declared
+within the test. We have a parser that is currently able to recognize and load
+**xpcshell** tests and **browsertime** tests, and a runner for each one of those.
+
+But the framework can be extended to support more formats. We would like to add
+support for **jsshell** and any other format we have in m-c.
+
+A performance test is a script that perftest runs, and that returns metrics we
+can use. Right now we consume those metrics directly in the console, and
+also in perfherder, but other formats could be added. For instance, there's
+a new **influxdb** output that has been added, to push the data in an **influxdb**
+time series database.
+
+What is important is to make sure performance tests belong to the component it's
+testing in the source tree. We've learned with Talos that grouping all performance
+tests in a single place is problematic because there's no sense of ownership from
+developers once it's added there. It becomes the perf team problem. If the tests
+stay in each component alongside mochitests and xpcshell tests, the component
+maintainers will own and maintain it.
+
+
+Next steps
+----------
+
+We want to rewrite all Talos and Raptor tests into perftest. For Raptor, we need
+to have the ability to use proxy records, which is a work in progress. From there,
+running a **raptor** test will be a simple, one liner browsertime script.
+
+For Talos, we'll need to refactor the existing micro-benchmarks into xpcshell tests,
+and if that does not suffice, create a new runner.
+
+For JS benchmarks, once the **jsshell** runner is added into perftest, it will be
+straightforward.
+
+
diff --git a/python/mozperftest/perfdocs/writing.rst b/python/mozperftest/perfdocs/writing.rst
new file mode 100644
index 0000000000..14764b4a1f
--- /dev/null
+++ b/python/mozperftest/perfdocs/writing.rst
@@ -0,0 +1,176 @@
+Performance scripts
+===================
+
+Performance scripts are programs that drive the browser to run a specific
+benchmark (like a page load or a lower level call) and produce metrics.
+
+We support two flavors right now in `perftest` (but it's easy to add
+new ones):
+
+- **xpcshell** a classical xpcshell test, turned into a performance test
+- **browsertime** a browsertime script, which runs a full browser and controls
+ it via a Selenium client.
+
+In order to qualify as performance tests, both flavors require metadata.
+
+For our supported flavors that are both Javascript modules, those are
+provided in a `perfMetadata` mapping variable in the module, or in
+the `module.exports` variable when using Node.
+
+This is the list of fields:
+
+- **owner**: name of the owner (person or team) [mandatory]
+- **author**: author of the test
+- **name**: name of the test [mandatory]
+- **description**: short description [mandatory]
+- **longDescription**: longer description
+- **options**: options used to run the test
+- **supportedBrowsers**: list of supported browsers (or "Any")
+- **supportedPlatforms**: list of supported platforms (or "Any")
+- **tags** a list of tags that describe the test
+
+Tests are registered using tests manifests and the **PERFTESTS_MANIFESTS**
+variable in `moz.build` files - it's good practice to name this file
+`perftest.ini`.
+
+Example of such a file: https://searchfox.org/mozilla-central/source/testing/performance/perftest.ini
+
+
+xpcshell
+--------
+
+`xpcshell` tests are plain xpcshell tests, with two more things:
+
+- the `perfMetadata` variable, as described in the previous section
+- calls to `info("perfMetrics", ...)` to send metrics to the `perftest` framework.
+
+Here's an example of such a metrics call::
+
+ # compute some speed metrics
+ let speed = 12345;
+ info("perfMetrics", { speed });
+
+
+Browsertime
+-----------
+
+With the browsertime layer, performance scenarios are Node modules that
+implement at least one async function that will be called by the framework once
+the browser has started. The function gets a webdriver session and can interact
+with the browser.
+
+You can write complex, interactive scenarios to simulate a user journey,
+and collect various metrics.
+
+Full documentation is available `here <https://www.sitespeed.io/documentation/sitespeed.io/scripting/>`_
+
+The mozilla-central repository has a few performance test scripts in
+`testing/performance` and more should be added in components in the future.
+
+By convention, a performance test is prefixed with **perftest_** to be
+recognized by the `perftest` command.
+
+A performance test implements at least one async function published in node's
+`module.exports` as `test`. The function receives two objects:
+
+- **context**, which contains:
+
+ - **options** - All the options sent from the CLI to Browsertime
+ - **log** - an instance to the log system so you can log from your navigation script
+ - **index** - the index of the runs, so you can keep track of which run you are currently on
+ - **storageManager** - The Browsertime storage manager that can help you read/store files to disk
+ - **selenium.webdriver** - The Selenium WebDriver public API object
+ - **selenium.driver** - The instantiated version of the WebDriver driving the current version of the browser
+
+- **command** provides API to interact with the browser. It's a wrapper
+ around the selenium client `Full documentation here <https://www.sitespeed.io/documentation/sitespeed.io/scripting/#commands>`_
+
+
+Below is an example of a test that visits the BBC homepage and clicks on a link.
+
+.. sourcecode:: javascript
+
+ "use strict";
+
+ async function setUp(context) {
+ context.log.info("setUp example!");
+ }
+
+ async function test(context, commands) {
+ await commands.navigate("https://www.bbc.com/");
+
+ // Wait for browser to settle
+ await commands.wait.byTime(10000);
+
+ // Start the measurement
+ await commands.measure.start("pageload");
+
+ // Click on the link and wait for page complete check to finish.
+ await commands.click.byClassNameAndWait("block-link__overlay-link");
+
+ // Stop and collect the measurement
+ await commands.measure.stop();
+ }
+
+ async function tearDown(context) {
+ context.log.info("tearDown example!");
+ }
+
+ module.exports = {
+ setUp,
+ test,
+ tearDown,
+ owner: "Performance Team",
+ test_name: "BBC",
+ description: "Measures pageload performance when clicking on a link from the bbc.com",
+ supportedBrowsers: "Any",
+    supportedPlatforms: "Any",
+ };
+
+
+Besides the `test` function, scripts can implement a `setUp` and a `tearDown` function to run
+some code before and after the test. Those functions will be called just once, whereas
+the `test` function might be called several times (through the `iterations` option)
+
+
+Hooks
+-----
+
+A Python module can be used to run functions during a run lifecycle. Available hooks are:
+
+- **before_iterations(args)** runs before everything is started. Gets the args, which
+ can be changed. The **args** argument also contains a **virtualenv** variable that
+ can be used for installing Python packages (e.g. through `install_package <https://searchfox.org/mozilla-central/source/python/mozperftest/mozperftest/utils.py#115-144>`_).
+- **before_runs(env)** runs before the test is launched. Can be used to
+ change the running environment.
+- **after_runs(env)** runs after the test is done.
+- **on_exception(env, layer, exception)** called on any exception. Provides the
+ layer in which the exception occurred, and the exception. If the hook returns `True`
+ the exception is ignored and the test resumes. If the hook returns `False`, the
+ exception is ignored and the test ends immediately. The hook can also re-raise the
+ exception or raise its own exception.
+
+In the example below, the `before_runs` hook is setting the options on the fly,
+so users don't have to provide them in the command line::
+
+ from mozperftest.browser.browsertime import add_options
+
+ url = "'https://www.example.com'"
+
+ common_options = [("processStartTime", "true"),
+ ("firefox.disableBrowsertimeExtension", "true"),
+ ("firefox.android.intentArgument", "'-a'"),
+ ("firefox.android.intentArgument", "'android.intent.action.VIEW'"),
+ ("firefox.android.intentArgument", "'-d'"),
+ ("firefox.android.intentArgument", url)]
+
+
+ def before_runs(env, **kw):
+ add_options(env, common_options)
+
+
+To use this hook module, it can be passed to the `--hooks` option::
+
+ $ ./mach perftest --hooks hooks.py perftest_example.js
+
+
diff --git a/python/mozperftest/setup.cfg b/python/mozperftest/setup.cfg
new file mode 100644
index 0000000000..2a9acf13da
--- /dev/null
+++ b/python/mozperftest/setup.cfg
@@ -0,0 +1,2 @@
+[bdist_wheel]
+universal = 1
diff --git a/python/mozperftest/setup.py b/python/mozperftest/setup.py
new file mode 100644
index 0000000000..75ebab9305
--- /dev/null
+++ b/python/mozperftest/setup.py
@@ -0,0 +1,37 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
from setuptools import setup

PACKAGE_NAME = "mozperftest"
PACKAGE_VERSION = "0.2"

# Runtime dependencies (third-party plus mozbase packages).
# NOTE(review): "attr" looks like it may be intended to be "attrs" — confirm.
deps = [
    "regex",
    "jsonschema",
    "attr",
    "mozlog >= 6.0",
    "mozdevice >= 4.0.0",
    "mozproxy",
    "mozinfo",
    "mozfile",
    "mozperftest-tools",
]

# Standard setuptools packaging for the `mach perftest` command.
setup(
    name=PACKAGE_NAME,
    version=PACKAGE_VERSION,
    description="Mozilla's mach perftest command",
    classifiers=["Programming Language :: Python :: 3.6"],
    # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
    keywords="",
    author="Mozilla Performance Test Engineering Team",
    author_email="tools@lists.mozilla.org",
    url="https://hg.mozilla.org/mozilla-central/file/tip/python/mozperftest",
    license="MPL",
    packages=["mozperftest"],
    include_package_data=True,
    zip_safe=False,
    install_requires=deps,
)
diff --git a/python/mozrelease/.ruff.toml b/python/mozrelease/.ruff.toml
new file mode 100644
index 0000000000..6459b1ce4a
--- /dev/null
+++ b/python/mozrelease/.ruff.toml
@@ -0,0 +1,4 @@
+extend = "../../pyproject.toml"
+
+[isort]
+known-first-party = ["mozrelease"]
diff --git a/python/mozrelease/mozrelease/__init__.py b/python/mozrelease/mozrelease/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/python/mozrelease/mozrelease/__init__.py
diff --git a/python/mozrelease/mozrelease/attribute_builds.py b/python/mozrelease/mozrelease/attribute_builds.py
new file mode 100644
index 0000000000..094c70e1bf
--- /dev/null
+++ b/python/mozrelease/mozrelease/attribute_builds.py
@@ -0,0 +1,214 @@
+#! /usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import argparse
+import json
+import logging
+import mmap
+import os
+import shutil
+import struct
+import sys
+import tempfile
+import urllib.parse
+from pathlib import Path
+
+logging.basicConfig(level=logging.INFO, format="%(levelname)s - %(message)s")
+log = logging.getLogger()
+
+
def write_attribution_data(filepath, data):
    """Insert data into a prepared certificate in a signed PE file.

    Returns False if the file isn't a valid PE file, or if the necessary
    certificate was not found.

    This function assumes that somewhere in the given file's certificate table
    there exists a 1024-byte space which begins with the tag "__MOZCUSTOM__:".
    The given data will be inserted into the file following this tag.

    We don't bother updating the optional header checksum.
    Windows doesn't check it for executables, only drivers and certain DLL's.
    """
    with open(filepath, "r+b") as file:
        mapped = mmap.mmap(file.fileno(), 0, access=mmap.ACCESS_WRITE)
        # try/finally ensures the mapping is always closed — the original
        # leaked it on the success path until garbage collection.
        try:
            # Get the location of the PE header and the optional header
            pe_header_offset = struct.unpack("<I", mapped[0x3C:0x40])[0]
            optional_header_offset = pe_header_offset + 24

            # Look up the magic number in the optional header,
            # so we know if we have a 32 or 64-bit executable.
            # We need to know that so that we can find the data directories.
            pe_magic_number = struct.unpack(
                "<H", mapped[optional_header_offset : optional_header_offset + 2]
            )[0]
            if pe_magic_number == 0x10B:
                # 32-bit
                cert_dir_entry_offset = optional_header_offset + 128
            elif pe_magic_number == 0x20B:
                # 64-bit. Certain header fields are wider.
                cert_dir_entry_offset = optional_header_offset + 144
            else:
                # Not any known PE format
                return False

            # The certificate table offset and length give us the valid range
            # to search through for where we should put our data.
            cert_table_offset = struct.unpack(
                "<I", mapped[cert_dir_entry_offset : cert_dir_entry_offset + 4]
            )[0]
            cert_table_size = struct.unpack(
                "<I", mapped[cert_dir_entry_offset + 4 : cert_dir_entry_offset + 8]
            )[0]

            if cert_table_offset == 0 or cert_table_size == 0:
                # The file isn't signed
                return False

            tag = b"__MOZCUSTOM__:"
            tag_index = mapped.find(
                tag, cert_table_offset, cert_table_offset + cert_table_size
            )
            if tag_index == -1:
                return False

            # convert to quoted-url byte-string for insertion
            quoted = urllib.parse.quote(data).encode("utf-8")
            mapped[tag_index + len(tag) : tag_index + len(tag) + len(quoted)] = quoted
            return True
        finally:
            mapped.close()
+
+
def validate_attribution_code(attribution):
    """Sanity-check an attribution query string before it is written.

    Returns True when the code passes all checks, False otherwise;
    problems are logged rather than raised.
    """
    # Idiom: pass %-args lazily to the logger instead of pre-formatting.
    log.info("Checking attribution %s", attribution)
    return_code = True

    if len(attribution) == 0:
        log.error("Attribution code has 0 length")
        return False

    # Set to match https://searchfox.org/mozilla-central/rev/a92ed79b0bc746159fc31af1586adbfa9e45e264/browser/components/attribution/AttributionCode.jsm#24 # noqa
    MAX_LENGTH = 1010
    if len(attribution) > MAX_LENGTH:
        log.error("Attribution code longer than %s chars", MAX_LENGTH)
        return_code = False

    # this leaves out empty values like 'foo='
    params = urllib.parse.parse_qsl(attribution)
    used_keys = set()
    for key, value in params:
        # check for invalid keys
        if key not in (
            "source",
            "medium",
            "campaign",
            "content",
            "experiment",
            "variation",
            "ua",
            "dlsource",
        ):
            log.error("Invalid key %s", key)
            return_code = False

        # avoid ambiguity from repeated keys
        if key in used_keys:
            log.error("Repeated key %s", key)
            return_code = False
        else:
            used_keys.add(key)

    # TODO the service checks for valid source, should we do that here too ?

    # We have two types of attribution with different requirements:
    # 1) Partner attribution, which requires a few UTM parameters set
    # 2) Attribution of vanilla builds, which only requires `dlsource`
    #
    # Perhaps in an ideal world we would check what type of build we're
    # attributing to make sure that eg: partner builds don't get `dlsource`
    # instead of what they actually want -- but the likelihood of that
    # happening is vanishingly small, so it's probably not worth doing.
    if "dlsource" not in used_keys:
        for key in ("source", "medium", "campaign", "content"):
            if key not in used_keys:
                return_code = False

    if return_code is False:
        log.error(
            "Either 'dlsource' must be provided, or all of: 'source', 'medium', 'campaign', and 'content'. Use '(not set)' if one of the latter is not needed."
        )
    return return_code
+
+
def main():
    """Entry point: attribute one or more Windows installers.

    Work items come from the ATTRIBUTION_CONFIG environment variable
    (a JSON list) or, failing that, from the --input/--output/--attribution
    command-line arguments. Returns 1 on configuration or validation
    failure, None otherwise.
    """
    parser = argparse.ArgumentParser(
        description="Add attribution to Windows installer(s).",
        epilog="""
    By default, configuration from envvar ATTRIBUTION_CONFIG is used, with
    expected format
    [{"input": "in/abc.exe", "output": "out/def.exe", "attribution": "abcdef"},
    {"input": "in/ghi.exe", "output": "out/jkl.exe", "attribution": "ghijkl"}]
    for 1 or more attributions. Or the script arguments may be used for a single attribution.

    The attribution code should be a string which is not url-encoded.

    If command line arguments are used instead, one or more `--input` parameters may be provided.
    Each will be written to the `--output` directory provided to a file of the same name as the
    input filename. All inputs will be attributed with the same `--attribution` code.
    """,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    parser.add_argument(
        "--input",
        default=[],
        action="append",
        help="Source installer to attribute; may be specified multiple times",
    )
    parser.add_argument("--output", help="Location to write the attributed installers")
    parser.add_argument("--attribution", help="Attribution code")
    args = parser.parse_args()

    # The environment variable takes precedence over CLI arguments.
    if os.environ.get("ATTRIBUTION_CONFIG"):
        work = json.loads(os.environ["ATTRIBUTION_CONFIG"])
    elif args.input and args.output and args.attribution:
        # Build one work item per input file; all share the same code.
        work = []
        for i in args.input:
            fn = os.path.basename(i)
            work.append(
                {
                    "input": i,
                    "output": os.path.join(args.output, fn),
                    "attribution": args.attribution,
                }
            )
    else:
        log.error("No configuration found. Set ATTRIBUTION_CONFIG or pass arguments.")
        return 1

    # Validate each distinct attribution code only once; abort on failure.
    cached_code_checks = []
    for job in work:
        if job["attribution"] not in cached_code_checks:
            status = validate_attribution_code(job["attribution"])
            if status:
                cached_code_checks.append(job["attribution"])
            else:
                log.error("Failed attribution code check")
                return 1

        # Work on a temporary copy so the original installer is untouched.
        with tempfile.TemporaryDirectory() as td:
            log.info("Attributing installer %s ..." % job["input"])
            tf = shutil.copy(job["input"], td)
            if write_attribution_data(tf, job["attribution"]):
                Path(job["output"]).parent.mkdir(parents=True, exist_ok=True)
                shutil.move(tf, job["output"])
                log.info("Wrote %s" % job["output"])
+
+
# Allow running as a standalone script; exit status comes from main().
if __name__ == "__main__":
    sys.exit(main())
diff --git a/python/mozrelease/mozrelease/balrog.py b/python/mozrelease/mozrelease/balrog.py
new file mode 100644
index 0000000000..31418d352e
--- /dev/null
+++ b/python/mozrelease/mozrelease/balrog.py
@@ -0,0 +1,72 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+def _generate_show_url(context, entry):
+ url = entry["url"]
+ return {
+ "actions": "showURL",
+ "openURL": url.format(**context),
+ }
+
+
+def _generate_product_details(context, entry):
+ url = entry["url"]
+ return {
+ "detailsURL": url.format(**context),
+ "type": "minor",
+ }
+
+
# Dispatch table: maps a config entry's "type" value to the generator that
# produces the Balrog update fields for that entry.
_FIELD_TYPES = {
    "show-url": _generate_show_url,
    "product-details": _generate_product_details,
}
+
+
+def _generate_conditions(context, entry):
+ if (
+ "release-types" in entry
+ and context["release-type"] not in entry["release-types"]
+ ):
+ return None
+ if "blob-types" in entry and context["blob-type"] not in entry["blob-types"]:
+ return None
+ if "products" in entry and context["product"] not in entry["products"]:
+ return None
+
+ conditions = {}
+ if "locales" in entry:
+ conditions["locales"] = entry["locales"]
+ if "versions" in entry:
+ conditions["versions"] = [
+ version.format(**context) for version in entry["versions"]
+ ]
+ if "update-channel" in entry:
+ conditions["channels"] = [
+ entry["update-channel"] + suffix
+ for suffix in ("", "-localtest", "-cdntest")
+ ]
+ if "build-ids" in entry:
+ conditions["buildIDs"] = [
+ buildid.format(**context) for buildid in entry["build-ids"]
+ ]
+ return conditions
+
+
def generate_update_properties(context, config):
    """Expand *config* entries into Balrog update-property records.

    Each entry contributes a {"fields": ..., "for": ...} record unless its
    conditions rule it out for this *context* entirely.
    """
    records = []
    for entry in config:
        generate_fields = _FIELD_TYPES[entry["type"]]
        fields = generate_fields(context, entry)
        conditions = _generate_conditions(context, entry.get("conditions", {}))
        if conditions is None:
            continue
        records.append({"fields": fields, "for": conditions})
    return records
diff --git a/python/mozrelease/mozrelease/buglist_creator.py b/python/mozrelease/mozrelease/buglist_creator.py
new file mode 100644
index 0000000000..8c7b8d0391
--- /dev/null
+++ b/python/mozrelease/mozrelease/buglist_creator.py
@@ -0,0 +1,261 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import logging
+import os
+import re
+from operator import itemgetter
+
+import requests
+from mozilla_version.gecko import GeckoVersion
+from taskcluster import Notify, optionsFromEnvironment
+
# Markdown fragments assembled into the release-drivers email body.
BUGLIST_TEMPLATE = "* [Bugs since previous changeset]({url})\n"
# Matches the common spellings of "backout" in commit messages.
BACKOUT_REGEX = re.compile(r"back(\s?)out|backed out|backing out", re.IGNORECASE)
BACKOUT_TEMPLATE = "* [Backouts since previous changeset]({url})\n"
BUGZILLA_BUGLIST_TEMPLATE = "https://bugzilla.mozilla.org/buglist.cgi?bug_id={bugs}"
BUG_NUMBER_REGEX = re.compile(r"bug \d+", re.IGNORECASE)
# Hg release tag format, e.g. FIREFOX_115_0_RELEASE.
CHANGELOG_TO_FROM_STRING = "{product}_{version}_RELEASE"
# hg.mozilla.org pushlog URL; logtype is "json-log" or "log".
CHANGESET_URL_TEMPLATE = (
    "{repo}/{logtype}" "?rev={to_version}+%25+{from_version}&revcount=1000"
)
FULL_CHANGESET_TEMPLATE = "* [Full Mercurial changelog]({url})\n"
LIST_DESCRIPTION_TEMPLATE = "Comparing Mercurial tag {from_version} to {to_version}:\n"
MAX_BUGS_IN_BUGLIST = 250
MERCURIAL_TAGS_URL_TEMPLATE = "{repo}/json-tags"
NO_BUGS = ""  # Return this when bug list can't be created
URL_SHORTENER_TEMPLATE = "https://bugzilla.mozilla.org/rest/bitly/shorten?url={url}"

log = logging.getLogger(__name__)
+
+
def create_bugs_url(product, current_version, current_revision, repo=None):
    """
    Creates list of bugs and backout bugs for release-drivers email

    :param product: str -> product name, e.g. "firefox"
    :param current_version: GeckoVersion -> the version being released
    :param current_revision: str -> hg revision of the release
    :param repo: str -> hg repository URL; derived from the version if None
    :return: str -> description of compared releases, with Bugzilla links
             containing all bugs in changeset; NO_BUGS ("") when nothing
             applies or on any error (errors are logged, never raised)
    """
    try:
        # Extract the important data, ignore if beta1 release
        if current_version.beta_number == 1:
            # If the version is beta 1, don't make any links
            return NO_BUGS

        if repo is None:
            repo = get_repo_by_version(current_version)
        # Get the tag version, for display purposes
        current_version_tag = tag_version(product, current_version)

        # Get all Hg tags for this branch, determine the previous version
        tag_url = MERCURIAL_TAGS_URL_TEMPLATE.format(repo=repo)
        mercurial_tags_json = requests.get(tag_url).json()
        previous_version_tag = get_previous_tag_version(
            product, current_version, current_version_tag, mercurial_tags_json
        )

        # Get the changeset between these versions, parse for all unique bugs and backout bugs
        resp = requests.get(
            CHANGESET_URL_TEMPLATE.format(
                repo=repo,
                from_version=previous_version_tag,
                to_version=current_revision,
                logtype="json-log",
            )
        )
        changeset_data = resp.json()
        unique_bugs, unique_backout_bugs = get_bugs_in_changeset(changeset_data)

        # Return a descriptive string with links if any relevant bugs are found
        if unique_bugs or unique_backout_bugs:
            description = LIST_DESCRIPTION_TEMPLATE.format(
                from_version=previous_version_tag, to_version=current_version_tag
            )

            if unique_bugs:
                description += BUGLIST_TEMPLATE.format(
                    url=create_buglist_url(unique_bugs)
                )
            if unique_backout_bugs:
                description += BACKOUT_TEMPLATE.format(
                    url=create_buglist_url(unique_backout_bugs)
                )

            changeset_html = CHANGESET_URL_TEMPLATE.format(
                repo=repo,
                from_version=previous_version_tag,
                to_version=current_revision,
                logtype="log",
            )
            description += FULL_CHANGESET_TEMPLATE.format(url=changeset_html)

            return description
        else:
            return NO_BUGS

    except Exception as err:
        # Deliberate catch-all: buglist generation is best-effort and must
        # never break the release automation calling it.
        log.info(err)
        return NO_BUGS
+
+
def get_bugs_in_changeset(changeset_data):
    """Scan pushlog entries and collect referenced bug numbers.

    Returns (bugs, backout_bugs): bug numbers mentioned by regular commits
    and by backout commits, respectively. Excluded changesets (test-only /
    release automation) are skipped entirely.
    """
    bugs = set()
    backout_bugs = set()
    for entry in changeset_data["entries"]:
        if is_excluded_change(entry):
            continue

        description = entry["desc"]
        match = BUG_NUMBER_REGEX.search(description)
        if not match:
            continue

        # Match looks like "bug 123456"; keep just the number.
        bug_number = match.group().split(" ")[1]
        if is_backout_bug(description):
            backout_bugs.add(bug_number)
        else:
            bugs.add(bug_number)

    return bugs, backout_bugs
+
+
def is_excluded_change(changeset):
    """Return True for changesets that should not appear in the buglist."""
    description = changeset["desc"]
    for keyword in ("a=test-only", "a=release"):
        if keyword in description:
            return True
    return False
+
+
def is_backout_bug(changeset_description):
    """Return True when the commit message describes a backout."""
    match = BACKOUT_REGEX.search(changeset_description)
    return match is not None
+
+
def create_buglist_url(buglist):
    """Return a Bugzilla buglist URL for the given bug numbers."""
    # %2C is a URL-encoded comma separating the bug ids.
    joined_bugs = "%2C".join(buglist)
    return BUGZILLA_BUGLIST_TEMPLATE.format(bugs=joined_bugs)
+
+
def tag_version(product, version):
    """Return the hg release tag for product/version, e.g. FIREFOX_115_0_RELEASE."""
    return CHANGELOG_TO_FROM_STRING.format(
        product=product.upper(),
        version=str(version).replace(".", "_"),
    )
+
+
def parse_tag_version(tag):
    """Parse a PRODUCT_X_Y_Z_RELEASE style tag back into a GeckoVersion."""
    # Drop the product prefix and the trailing RELEASE, re-join with dots.
    version_parts = tag.split("_")[1:-1]
    return GeckoVersion.parse(".".join(version_parts))
+
+
def get_previous_tag_version(
    product,
    current_version,
    current_version_tag,
    mercurial_tags_json,
):
    """
    Gets the previous hg version tag for the product and branch, given the current version tag
    """

    def _invalid_tag_filter(tag):
        """Filters by product and removes incorrect major version + base, end releases"""
        # NOTE(review): despite the name, this returns True for tags we KEEP
        # (valid RELEASE tags of this product/major version).
        prod_major_version_re = r"^{product}_{major_version}".format(
            product=product.upper(), major_version=current_version.major_number
        )

        return (
            "BASE" not in tag
            and "END" not in tag
            and "RELEASE" in tag
            and re.match(prod_major_version_re, tag)
        )

    # Get rid of irrelevant tags, sort by date and extract the tag string
    # Set of (GeckoVersion, tag-string) pairs; the version makes the sort
    # below a proper version ordering.
    tags = {
        (parse_tag_version(item["tag"]), item["tag"])
        for item in mercurial_tags_json["tags"]
        if _invalid_tag_filter(item["tag"])
    }
    # Add the current version to the list
    tags.add((current_version, current_version_tag))
    tags = sorted(tags, key=lambda tag: tag[0])

    # Find where the current version is and go back one to get the previous version
    # NOTE(review): if current_version is the oldest tag this yields -1 and
    # returns the NEWEST tag — presumably never hit in practice; confirm.
    next_version_index = list(map(itemgetter(0), tags)).index(current_version) - 1

    return tags[next_version_index][1]
+
+
def get_repo_by_version(version):
    """
    Get the repo a given version is found on.
    """
    # Guard-clause style: the first matching channel wins.
    if version.is_beta:
        return "https://hg.mozilla.org/releases/mozilla-beta"
    if version.is_release:
        return "https://hg.mozilla.org/releases/mozilla-release"
    if version.is_esr:
        return "https://hg.mozilla.org/releases/mozilla-esr{}".format(
            version.major_number
        )
    raise Exception(
        "Unsupported version type {}: {}".format(version.version_type.name, version)
    )
+
+
def email_release_drivers(
    addresses,
    product,
    version,
    build_number,
    repo,
    revision,
    task_group_id,
):
    """Compose and send the release-drivers notification email via
    Taskcluster Notify.

    Requires TASKCLUSTER_ROOT_URL in the environment; uses the Taskcluster
    proxy when TASKCLUSTER_PROXY_URL is set, otherwise credentials from
    the environment.
    """
    # Send an email to the mailing after the build
    email_buglist_string = create_bugs_url(product, version, revision, repo=repo)

    content = """\
A new build has been started:

Commit: [{revision}]({repo}/rev/{revision})
Task group: [{task_group_id}]({root_url}/tasks/groups/{task_group_id})

{email_buglist_string}
""".format(
        repo=repo,
        revision=revision,
        root_url=os.environ["TASKCLUSTER_ROOT_URL"],
        task_group_id=task_group_id,
        email_buglist_string=email_buglist_string,
    )

    # On r-d, we prefix the subject of the email in order to simplify filtering
    subject_prefix = ""
    if product in {"fennec"}:
        subject_prefix = "[mobile] "
    if product in {"firefox", "devedition"}:
        subject_prefix = "[desktop] "

    subject = "{} Build of {} {} build {}".format(
        subject_prefix, product, version, build_number
    )

    # use proxy if configured, otherwise local credentials from env vars
    if "TASKCLUSTER_PROXY_URL" in os.environ:
        notify_options = {"rootUrl": os.environ["TASKCLUSTER_PROXY_URL"]}
    else:
        notify_options = optionsFromEnvironment()

    # One email per recipient address.
    notify = Notify(notify_options)
    for address in addresses:
        notify.email(
            {
                "address": address,
                "subject": subject,
                "content": content,
            }
        )
diff --git a/python/mozrelease/mozrelease/chunking.py b/python/mozrelease/mozrelease/chunking.py
new file mode 100644
index 0000000000..8c45f74354
--- /dev/null
+++ b/python/mozrelease/mozrelease/chunking.py
@@ -0,0 +1,27 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from copy import copy
+
+
+class ChunkingError(Exception):
+ pass
+
+
+def getChunk(things, chunks, thisChunk):
+ if thisChunk > chunks:
+ raise ChunkingError(
+ "thisChunk (%d) is greater than total chunks (%d)" % (thisChunk, chunks)
+ )
+ possibleThings = copy(things)
+ nThings = len(possibleThings)
+ for c in range(1, chunks + 1):
+ n = nThings // chunks
+ # If our things aren't evenly divisible by the number of chunks
+ # we need to append one more onto some of them
+ if c <= (nThings % chunks):
+ n += 1
+ if c == thisChunk:
+ return possibleThings[0:n]
+ del possibleThings[0:n]
diff --git a/python/mozrelease/mozrelease/l10n.py b/python/mozrelease/mozrelease/l10n.py
new file mode 100644
index 0000000000..1e2a15878d
--- /dev/null
+++ b/python/mozrelease/mozrelease/l10n.py
@@ -0,0 +1,17 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+def getPlatformLocales(shipped_locales, platform):
+ platform_locales = []
+ for line in shipped_locales.splitlines():
+ locale = line.strip().split()[0]
+ # ja-JP-mac locale is a MacOS only locale
+ if locale == "ja-JP-mac" and not platform.startswith("mac"):
+ continue
+ # Skip the "ja" locale on MacOS
+ if locale == "ja" and platform.startswith("mac"):
+ continue
+ platform_locales.append(locale)
+ return platform_locales
diff --git a/python/mozrelease/mozrelease/mach_commands.py b/python/mozrelease/mozrelease/mach_commands.py
new file mode 100644
index 0000000000..e7c8da59fe
--- /dev/null
+++ b/python/mozrelease/mozrelease/mach_commands.py
@@ -0,0 +1,141 @@
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import logging
+import sys
+
+from mach.decorators import Command, CommandArgument, SubCommand
+from mozilla_version.gecko import GeckoVersion
+
+
@Command(
    "release",
    category="release",
    description="Task that are part of the release process.",
)
def release(command_context):
    """
    The release subcommands all relate to the release process.
    """
    # Intentionally empty: this is only the parent command for the
    # subcommands registered below.
+
+
@SubCommand(
    "release",
    "buglist",
    description="Generate list of bugs since the last release.",
)
@CommandArgument(
    "--version",
    required=True,
    type=GeckoVersion.parse,
    help="The version being built.",
)
@CommandArgument("--product", required=True, help="The product being built.")
@CommandArgument("--repo", help="The repo being built.")
@CommandArgument("--revision", required=True, help="The revision being built.")
def buglist(command_context, version, product, revision, repo):
    """Print the since-last-release buglist description to stdout."""
    setup_logging(command_context)
    from mozrelease.buglist_creator import create_bugs_url

    description = create_bugs_url(
        product=product,
        current_version=version,
        current_revision=revision,
        repo=repo,
    )
    print(description)
+
+
@SubCommand(
    "release",
    "send-buglist-email",
    description="Send an email with the bugs since the last release.",
)
@CommandArgument(
    "--address",
    required=True,
    action="append",
    dest="addresses",
    # Fixed: the help text previously had an unbalanced parenthesis
    # ("(may be specified more than once.").
    help="The email address to send the bug list to "
    "(may be specified more than once).",
)
@CommandArgument(
    "--version",
    type=GeckoVersion.parse,
    required=True,
    help="The version being built.",
)
@CommandArgument("--product", required=True, help="The product being built.")
@CommandArgument("--repo", required=True, help="The repo being built.")
@CommandArgument("--revision", required=True, help="The revision being built.")
@CommandArgument("--build-number", required=True, help="The build number")
@CommandArgument("--task-group-id", help="The task group of the build.")
def buglist_email(command_context, **options):
    """Send the since-last-release buglist to the given addresses."""
    setup_logging(command_context)
    from mozrelease.buglist_creator import email_release_drivers

    email_release_drivers(**options)
+
+
@SubCommand(
    "release",
    "push-scriptworker-canary",
    description="Push tasks to try, to test new scriptworker deployments.",
)
@CommandArgument(
    "--address",
    required=True,
    action="append",
    dest="addresses",
    help="The email address to send notifications to "
    "(may be specified more than once).",
)
@CommandArgument(
    "--scriptworker",
    required=True,
    action="append",
    dest="scriptworkers",
    help="Scriptworker to run canary for (may be specified more than once).",
)
@CommandArgument(
    "--ssh-key-secret",
    required=False,
    help="Taskcluster secret with ssh-key to use for hg.mozilla.org",
)
def push_scriptworker_canary(command_context, scriptworkers, addresses, ssh_key_secret):
    """Kick off canary tasks on try for the given scriptworkers."""
    setup_logging(command_context)
    from mozrelease.scriptworker_canary import push_canary

    canary_kwargs = {
        "scriptworkers": scriptworkers,
        "addresses": addresses,
        "ssh_key_secret": ssh_key_secret,
    }
    push_canary(**canary_kwargs)
+
+
def setup_logging(command_context, quiet=False, verbose=True):
    """
    Set up Python logging for all loggers, sending results to stderr (so
    that command output can be redirected easily) and adding the typical
    mach timestamp.

    :param quiet: disable terminal logging entirely.
    :param verbose: log at DEBUG level instead of INFO.
    """
    # remove the old terminal handler
    # NOTE(review): assumes mach already installed a terminal handler;
    # `old` would be None otherwise — confirm with mach's LoggingManager.
    old = command_context.log_manager.replace_terminal_handler(None)

    # re-add it, with level and fh set appropriately
    if not quiet:
        level = logging.DEBUG if verbose else logging.INFO
        command_context.log_manager.add_terminal_logging(
            fh=sys.stderr,
            level=level,
            write_interval=old.formatter.write_interval,
            write_times=old.formatter.write_times,
        )

    # all of the taskgraph logging is unstructured logging
    command_context.log_manager.enable_unstructured()
diff --git a/python/mozrelease/mozrelease/partner_repack.py b/python/mozrelease/mozrelease/partner_repack.py
new file mode 100644
index 0000000000..1d64f43cca
--- /dev/null
+++ b/python/mozrelease/mozrelease/partner_repack.py
@@ -0,0 +1,895 @@
+#!/usr/bin/env python
+# Documentation: https://firefox-source-docs.mozilla.org/taskcluster/partner-repacks.html
+
+import json
+import logging
+import os
+import re
+import stat
+import sys
+import tarfile
+import urllib.parse
+import urllib.request
+import zipfile
+from optparse import OptionParser
+from pathlib import Path
+from shutil import copy, copytree, move, rmtree, which
+from subprocess import Popen
+
+from redo import retry
+
logging.basicConfig(
    stream=sys.stdout,
    level=logging.INFO,
    format="%(asctime)-15s - %(levelname)s - %(message)s",
)
log = logging.getLogger(__name__)


# Set default values.
PARTNERS_DIR = Path("..") / ".." / "workspace" / "partners"
# No platform in this path because script only supports repacking a single platform at once
DEFAULT_OUTPUT_DIR = "%(partner)s/%(partner_distro)s/%(locale)s"
TASKCLUSTER_ARTIFACTS = (
    os.environ.get("TASKCLUSTER_ROOT_URL", "https://firefox-ci-tc.services.mozilla.com")
    + "/api/queue/v1/task/{taskId}/artifacts"
)
# Artifact path templates, formatted with filename= (and locale=) in
# getArtifactNames(). Fixed: the {filename} placeholder had been lost,
# which made .format(filename=...) a silent no-op and broke artifact lookup.
UPSTREAM_ENUS_PATH = "public/build/{filename}"
UPSTREAM_L10N_PATH = "public/build/{locale}/{filename}"

WINDOWS_DEST_DIR = Path("firefox")
MAC_DEST_DIR = Path("Contents/Resources")
LINUX_DEST_DIR = Path("firefox")

BOUNCER_PRODUCT_TEMPLATE = (
    "partner-firefox-{release_type}-{partner}-{partner_distro}-latest"
)
+
+
class StrictFancyURLopener(urllib.request.FancyURLopener):
    """Unlike FancyURLopener this class raises exceptions for generic HTTP
    errors, like 404, 500. It reuses URLopener.http_error_default redefined in
    FancyURLopener"""

    # NOTE(review): FancyURLopener is deprecated in modern Python; kept here
    # because retrieveFile() relies on its retrieve()+retry behavior —
    # TODO migrate to urllib.request.urlopen.
    def http_error_default(self, url, fp, errcode, errmsg, headers):
        # Delegate to the strict base-class handler, which raises.
        urllib.request.URLopener.http_error_default(
            self, url, fp, errcode, errmsg, headers
        )
+
+
def rmdirRecursive(directory: Path):
    """
    Recursively delete *directory*, clearing read-only bits when needed.

    Similar to shutil.rmtree(), but works better on Windows where files
    marked "read-only" would otherwise abort the removal.
    """

    def _force_remove(func, path: str, exc_info):
        """
        Source: https://stackoverflow.com/a/4829285
        Called by rmtree when a removal fails; assume the target is
        read-only, make it writable, and unlink it directly.
        """
        stubborn = Path(path)
        stubborn.chmod(mode=stat.S_IWRITE)
        stubborn.unlink()

    rmtree(str(directory), onerror=_force_remove)
+
+
def printSeparator():
    # Visual separator between per-partner/per-locale sections in the log.
    log.info("##################################################")
+
+
def shellCommand(cmd):
    """Run *cmd* through the shell; exit the whole process on failure.

    Returns True on success. On a non-zero exit status, logs the child's
    exit code and command, then sys.exit()s with that code.
    """
    log.debug("Executing %s" % cmd)
    log.debug(f"in {Path.cwd()}")
    # Shell command output gets dumped immediately to stdout, whereas
    # print statements get buffered unless we flush them explicitly.
    sys.stdout.flush()
    p = Popen(cmd, shell=True)
    # POSIX waitpid status word: the exit code lives in the high byte.
    (_, ret) = os.waitpid(p.pid, 0)
    if ret != 0:
        ret_real = (ret & 0xFF00) >> 8
        log.error("Error: shellCommand had non-zero exit status: %d" % ret_real)
        log.error("Command: %s" % cmd, exc_info=True)
        sys.exit(ret_real)
    return True
+
+
def isLinux(platform: str):
    """True for any Linux platform string (32- or 64-bit)."""
    return "linux" in platform


def isLinux32(platform: str):
    """True for 32-bit Linux platform strings."""
    if platform == "linux":
        return True
    return "linux32" in platform or "linux-i686" in platform


def isLinux64(platform: str):
    """True for 64-bit Linux platform strings."""
    return "linux64" in platform or "linux-x86_64" in platform


def isMac(platform: str):
    """True for macOS platform strings."""
    return "mac" in platform


def isWin(platform: str):
    """True for any Windows platform string."""
    return "win" in platform


def isWin32(platform: str):
    """True for 32-bit Windows platform strings."""
    return "win32" in platform


def isWin64(platform: str):
    """True only for the exact x86-64 Windows platform string."""
    return platform == "win64"


def isWin64Aarch64(platform: str):
    """True only for the exact aarch64 Windows platform string."""
    return platform == "win64-aarch64"


def isValidPlatform(platform: str):
    """True when *platform* is one this script knows how to repack."""
    supported_checks = (
        isLinux64,
        isLinux32,
        isMac,
        isWin64,
        isWin64Aarch64,
        isWin32,
    )
    return any(check(platform) for check in supported_checks)
+
+
def parseRepackConfig(file: Path, platform: str):
    """Parse a partner repack config (flat key="value" lines) for *platform*.

    Returns a dict of settings including a "platforms" list, or None when
    the config does not enable any platform matching *platform*.
    """
    config = {}
    config["platforms"] = []
    for line in file.open():
        line = line.rstrip("\n")
        # Ignore empty lines
        if line.strip() == "":
            continue
        # Ignore comments
        if line.startswith("#"):
            continue
        # Split on the FIRST "=" only (was maxsplit=2, which crashed with a
        # ValueError whenever a value itself contained an "=" — e.g. URLs
        # with query strings).
        [key, value] = line.split("=", 1)
        value = value.strip('"')
        # strings that don't need special handling
        if key in ("dist_id", "replacement_setup_exe"):
            config[key] = value
            continue
        # booleans that don't need special handling
        if key in ("migrationWizardDisabled", "oem", "repack_stub_installer"):
            if value.lower() == "true":
                config[key] = True
            continue
        # special cases
        if key == "locales":
            config["locales"] = value.split(" ")
            continue
        if key.startswith("locale."):
            config[key] = value
            continue
        if key == "deb_section":
            # Escape slashes for later sed-style substitution. Raw string
            # avoids the invalid "\/" escape warning.
            config["deb_section"] = re.sub("/", r"\/", value)
            continue
        if isValidPlatform(key):
            ftp_platform = getFtpPlatform(key)
            if ftp_platform == getFtpPlatform(platform) and value.lower() == "true":
                config["platforms"].append(ftp_platform)
            continue

    # this only works for one locale because setup.exe is localised
    if config.get("replacement_setup_exe") and len(config.get("locales", [])) > 1:
        log.error(
            "Error: replacement_setup_exe is only supported for one locale, got %s"
            % config["locales"]
        )
        sys.exit(1)
    # also only works for one platform because setup.exe is platform-specific

    if config["platforms"]:
        return config
+
+
def getFtpPlatform(platform: str):
    """Returns the platform in the format used in building package names.
    Note: we rely on this code being idempotent
    i.e. getFtpPlatform(getFtpPlatform(foo)) should work
    """
    # Order matters: isLinux() also matches 64-bit strings, so the 64-bit
    # check must come first (same for win64-aarch64 before win64).
    mapping = (
        (isLinux64, "linux-x86_64"),
        (isLinux, "linux-i686"),
        (isMac, "mac"),
        (isWin64Aarch64, "win64-aarch64"),
        (isWin64, "win64"),
        (isWin32, "win32"),
    )
    for predicate, ftp_name in mapping:
        if predicate(platform):
            return ftp_name
+
+
def getFileExtension(platform: str):
    """The extension for the output file, which may be passed to the internal-signing task"""
    if isLinux(platform):
        return "tar.bz2"
    if isMac(platform):
        return "tar.gz"
    if isWin(platform):
        return "zip"
+
+
def getFilename(platform: str):
    """Returns the filename to be repacked for the platform"""
    extension = getFileExtension(platform)
    return "target.%s" % extension
+
+
def getAllFilenames(platform: str, repack_stub_installer):
    """Full tuple of artifact filenames to download for *platform*."""
    file_names = [getFilename(platform)]
    if isWin(platform):
        # carry setup.exe forward from upstream tasks to make it easier to
        # repackage windows installers later
        file_names.append("setup.exe")
        # the stub installer (setup-stub.exe) only exists for win32 builds
        if repack_stub_installer and isWin32(platform):
            file_names.append("setup-stub.exe")
    return tuple(file_names)
+
+
def getTaskArtifacts(taskId):
    """Fetch the artifact listing for *taskId* from the Taskcluster queue.

    Downloads the queue's artifact index to tc_artifacts.json in the cwd
    and returns its "artifacts" list. Raises on malformed/missing data.
    """
    try:
        retrieveFile(
            TASKCLUSTER_ARTIFACTS.format(taskId=taskId), Path("tc_artifacts.json")
        )
        # Use a context manager so the file descriptor is closed promptly
        # (the original json.load(open(...)) leaked the handle).
        with open("tc_artifacts.json") as index_file:
            tc_index = json.load(index_file)
        return tc_index["artifacts"]
    except (ValueError, KeyError):
        log.error("Failed to get task artifacts from TaskCluster")
        raise
+
+
def getUpstreamArtifacts(upstream_tasks, repack_stub_installer):
    """Map each wanted artifact name to the upstream task providing it.

    Exits with an error if two tasks provide the same artifact name.
    NOTE: reads the target platform from the module-global `options`
    (set by the OptionParser in __main__).
    """
    useful_artifacts = getAllFilenames(options.platform, repack_stub_installer)

    artifact_ids = {}
    for taskId in upstream_tasks:
        for artifact in getTaskArtifacts(taskId):
            name = artifact["name"]
            if not name.endswith(useful_artifacts):
                continue
            if name in artifact_ids:
                # Fixed: this previously referenced the undefined name
                # `artifacts[name]`, so the duplicate-artifact path raised
                # NameError instead of logging the conflicting task.
                log.error(
                    "Duplicated artifact %s processing tasks %s & %s",
                    name,
                    taskId,
                    artifact_ids[name],
                )
                sys.exit(1)
            else:
                artifact_ids[name] = taskId
    log.debug(
        "Found artifacts: %s" % json.dumps(artifact_ids, indent=4, sort_keys=True)
    )
    return artifact_ids
+
+
def getArtifactNames(platform: str, locale, repack_stub_installer):
    """Artifact paths to fetch for *platform*/*locale*."""
    file_names = getAllFilenames(platform, repack_stub_installer)
    # en-US artifacts live at the top level; other locales in a subdir.
    if locale == "en-US":
        return [UPSTREAM_ENUS_PATH.format(filename=f) for f in file_names]
    return [
        UPSTREAM_L10N_PATH.format(locale=locale, filename=f) for f in file_names
    ]
+
+
def retrieveFile(url, file_path: Path):
    """Download *url* to *file_path*, with retries.

    Returns True on success. On IOError, logs the failure, removes any
    partial file, and returns False.
    """
    success = True
    # Percent-encode everything except the scheme/path separators.
    url = urllib.parse.quote(url, safe=":/")
    log.info(f"Downloading from {url}")
    log.info(f"To: {file_path}")
    log.info(f"CWD: {Path.cwd()}")
    try:
        # use URLopener, which handles errors properly
        retry(
            StrictFancyURLopener().retrieve,
            kwargs=dict(url=url, filename=str(file_path)),
        )
    except IOError:
        log.error("Error downloading %s" % url, exc_info=True)
        success = False
        # Best-effort cleanup of a partially-downloaded file.
        try:
            file_path.unlink()
        except OSError:
            log.info(f"Cannot remove {file_path}", exc_info=True)

    return success
+
+
def getBouncerProduct(partner, partner_distro):
    """Compute the bouncer product alias for this partner repack.

    Requires RELEASE_TYPE in the environment; exits otherwise.
    """
    if "RELEASE_TYPE" not in os.environ:
        log.fatal("RELEASE_TYPE must be set in the environment")
        sys.exit(1)
    release_type = os.environ["RELEASE_TYPE"]
    # For X.0 releases we get 'release-rc' but the alias should use 'release'
    release_type = "release" if release_type == "release-rc" else release_type
    return BOUNCER_PRODUCT_TEMPLATE.format(
        release_type=release_type,
        partner=partner,
        partner_distro=partner_distro,
    )
+
+
class RepackBase(object):
    """Shared partner-repack workflow: unpack an upstream build into a
    working dir, overlay partner files, repack, and stage the result.

    Subclasses provide the platform-specific unpackBuild/copyFiles/
    repackBuild behavior. NOTE(review): doRepack() calls self.copyFiles()
    with no arguments, so it relies on every subclass overriding
    copyFiles() with a zero-argument signature.
    """

    def __init__(
        self,
        build: str,
        partner_dir: Path,
        build_dir: Path,
        final_dir: Path,
        ftp_platform: str,
        repack_info,
        file_mode=0o644,
        quiet=False,
        source_locale=None,
        locale=None,
    ):
        self.base_dir = Path.cwd()
        self.build = build
        # Absolute path to the pristine upstream package.
        self.full_build_path = build_dir / build
        if not self.full_build_path.is_absolute():
            self.full_build_path = self.base_dir / self.full_build_path
        self.full_partner_path = self.base_dir / partner_dir
        # Scratch area; removed again at the end of doRepack().
        self.working_dir = final_dir / "working"
        self.final_dir = final_dir
        self.final_build = final_dir / Path(build).name
        self.ftp_platform = ftp_platform
        self.repack_info = repack_info
        self.file_mode = file_mode
        self.quiet = quiet
        self.source_locale = source_locale
        self.locale = locale
        self.working_dir.mkdir(mode=0o755, exist_ok=True, parents=True)

    def announceStart(self):
        # Log the start of this platform/locale repack.
        log.info(
            "Repacking %s %s build %s" % (self.ftp_platform, self.locale, self.build)
        )

    def announceSuccess(self):
        log.info(
            "Done repacking %s %s build %s"
            % (self.ftp_platform, self.locale, self.build)
        )

    def unpackBuild(self):
        # Copy the package into the current (working) directory; subclasses
        # extend this to actually decompress it.
        copy(str(self.full_build_path), ".")

    def createOverrideIni(self, partner_path: Path):
        """If this is a partner specific locale (like en-HK), set the
        distribution.ini to use that locale, not the default locale.
        """
        if self.locale != self.source_locale:
            file_path = partner_path / "distribution" / "distribution.ini"
            # Append when the file already exists, otherwise create it.
            with file_path.open(file_path.is_file() and "a" or "w") as open_file:
                open_file.write("[Locale]\n")
                open_file.write("locale=" + self.locale + "\n")

        """ Some partners need to override the migration wizard. This is done
        by adding an override.ini file to the base install dir.
        """
        # modify distribution.ini if 44 or later and we have migrationWizardDisabled
        # NOTE(review): `options` is the module-global OptionParser result
        # set in __main__ — not visible in this class.
        if int(options.version.split(".")[0]) >= 44:
            file_path = partner_path / "distribution" / "distribution.ini"
            with file_path.open() as open_file:
                ini = open_file.read()

            # Already configured; nothing more to do.
            if ini.find("EnableProfileMigrator") >= 0:
                return
        else:
            # Pre-44 builds use browser/override.ini instead.
            browser_dir = partner_path / "browser"
            if not browser_dir.exists():
                browser_dir.mkdir(mode=0o755, exist_ok=True, parents=True)
            file_path = browser_dir / "override.ini"
        if "migrationWizardDisabled" in self.repack_info:
            log.info("Adding EnableProfileMigrator to %r" % (file_path,))
            with file_path.open(file_path.is_file() and "a" or "w") as open_file:
                open_file.write("[XRE]\n")
                open_file.write("EnableProfileMigrator=0\n")

    def copyFiles(self, platform_dir: Path):
        # Overlay the partner's distribution/extensions dirs into the
        # unpacked build, then apply the locale/migration overrides.
        log.info(f"Copying files into {platform_dir}")
        # Check whether we've already copied files over for this partner.
        if not platform_dir.exists():
            platform_dir.mkdir(mode=0o755, exist_ok=True, parents=True)
        for i in ["distribution", "extensions"]:
            full_path = self.full_partner_path / i
            if full_path.exists():
                copytree(str(full_path), str(platform_dir / i))
        self.createOverrideIni(platform_dir)

    def repackBuild(self):
        # No-op in the base class; subclasses re-compress the package.
        pass

    def stage(self):
        # Move the repacked package into the final output dir and fix perms.
        move(self.build, str(self.final_dir))
        self.final_build.chmod(self.file_mode)

    def cleanup(self):
        self.final_build.unlink()

    def doRepack(self):
        # Full pipeline; note it chdirs into the working dir and back.
        self.announceStart()
        os.chdir(self.working_dir)
        self.unpackBuild()
        self.copyFiles()
        self.repackBuild()
        self.stage()
        os.chdir(self.base_dir)
        rmdirRecursive(self.working_dir)
        self.announceSuccess()
+
+
class RepackLinux(RepackBase):
    """Linux repack: bunzip2 the tarball, append the partner files under
    firefox/, and re-bzip2 it."""

    def __init__(
        self,
        build: str,
        partner_dir: Path,
        build_dir: Path,
        final_dir: Path,
        ftp_platform: str,
        repack_info,
        **kwargs,
    ):
        super(RepackLinux, self).__init__(
            build,
            partner_dir,
            build_dir,
            final_dir,
            ftp_platform,
            repack_info,
            **kwargs,
        )
        # e.g. target.tar.bz2 -> target.tar, the file tar appends into.
        self.uncompressed_build = build.replace(".bz2", "")

    def unpackBuild(self):
        super(RepackLinux, self).unpackBuild()
        bunzip2_cmd = "bunzip2 %s" % self.build
        shellCommand(bunzip2_cmd)
        if not Path(self.uncompressed_build).exists():
            log.error(f"Error: Unable to uncompress build {self.build}")
            sys.exit(1)

    def copyFiles(self):
        super(RepackLinux, self).copyFiles(LINUX_DEST_DIR)

    def repackBuild(self):
        # NOTE(review): `options` is the module-global OptionParser result.
        if options.quiet:
            tar_flags = "rf"
        else:
            tar_flags = "rvf"
        # Append ("r") the partner overlay to the tar, then re-compress.
        tar_cmd = "tar %s %s %s" % (tar_flags, self.uncompressed_build, LINUX_DEST_DIR)
        shellCommand(tar_cmd)
        bzip2_command = "bzip2 %s" % self.uncompressed_build
        shellCommand(bzip2_command)
+
+
class RepackMac(RepackBase):
    """macOS repack: gunzip the tarball, append the partner files under
    <App>.app/Contents/Resources, and re-gzip it."""

    def __init__(
        self,
        build: str,
        partner_dir: Path,
        build_dir: Path,
        final_dir: Path,
        ftp_platform: str,
        repack_info,
        **kwargs,
    ):
        super(RepackMac, self).__init__(
            build,
            partner_dir,
            build_dir,
            final_dir,
            ftp_platform,
            repack_info,
            **kwargs,
        )
        # e.g. target.tar.gz -> target.tar, the file tar appends into.
        self.uncompressed_build = build.replace(".gz", "")

    def unpackBuild(self):
        super(RepackMac, self).unpackBuild()
        gunzip_cmd = "gunzip %s" % self.build
        shellCommand(gunzip_cmd)
        if not Path(self.uncompressed_build).exists():
            log.error(f"Error: Unable to uncompress build {self.build}")
            sys.exit(1)
        # Determined from the tarball itself, since the bundle name varies.
        self.appName = self.getAppName()

    def getAppName(self):
        # Cope with Firefox.app vs Firefox Nightly.app by returning the first root object/folder found
        t = tarfile.open(self.build.rsplit(".", 1)[0])
        for name in t.getnames():
            root_object = name.split("/")[0]
            if root_object.endswith(".app"):
                log.info(f"Found app name in tarball: {root_object}")
                return root_object
        log.error(
            f"Error: Unable to determine app name from tarball: {self.build} - Expected .app in root"
        )
        sys.exit(1)

    def copyFiles(self):
        super(RepackMac, self).copyFiles(Path(self.appName) / MAC_DEST_DIR)

    def repackBuild(self):
        # NOTE(review): `options` is the module-global OptionParser result.
        if options.quiet:
            tar_flags = "rf"
        else:
            tar_flags = "rvf"
        # the final arg is quoted because it may contain a space, eg Firefox Nightly.app/....
        tar_cmd = "tar %s %s '%s'" % (
            tar_flags,
            self.uncompressed_build,
            Path(self.appName) / MAC_DEST_DIR,
        )
        shellCommand(tar_cmd)
        gzip_command = "gzip %s" % self.uncompressed_build
        shellCommand(gzip_command)
+
+
class RepackWin(RepackBase):
    """Windows repack: add the partner files to the zip under firefox/, and
    carry setup.exe (and, on win32, the stub installer bits) forward for
    later installer creation."""

    def __init__(
        self,
        build: str,
        partner_dir: Path,
        build_dir: Path,
        final_dir: Path,
        ftp_platform: str,
        repack_info,
        **kwargs,
    ):
        super(RepackWin, self).__init__(
            build,
            partner_dir,
            build_dir,
            final_dir,
            ftp_platform,
            repack_info,
            **kwargs,
        )

    def copyFiles(self):
        super(RepackWin, self).copyFiles(WINDOWS_DEST_DIR)

    def repackBuild(self):
        # NOTE(review): `options` is the module-global OptionParser result.
        if options.quiet:
            zip_flags = "-rq"
        else:
            zip_flags = "-r"
        # Add (not replace) the partner overlay into the existing zip.
        zip_cmd = f"zip {zip_flags} {self.build} {WINDOWS_DEST_DIR}"
        shellCommand(zip_cmd)

        # we generate the stub installer during the win32 build, so repack it on win32 too
        if isWin32(options.platform) and self.repack_info.get("repack_stub_installer"):
            log.info("Creating target-stub.zip to hold custom urls")
            dest = str(self.final_build).replace("target.zip", "target-stub.zip")
            z = zipfile.ZipFile(dest, "w")
            # load the partner.ini template and interpolate %LOCALE% to the actual locale
            with (self.full_partner_path / "stub" / "partner.ini").open() as open_file:
                partner_ini_template = open_file.readlines()
            partner_ini = ""
            for l in partner_ini_template:
                l = l.replace("%LOCALE%", self.locale)
                l = l.replace("%BOUNCER_PRODUCT%", self.repack_info["bouncer_product"])
                partner_ini += l
            z.writestr("partner.ini", partner_ini)
            # we need an empty firefox directory to use the repackage code
            d = zipfile.ZipInfo("firefox/")
            # https://stackoverflow.com/a/6297838, zip's representation of drwxr-xr-x permissions
            # is 040755 << 16L, bitwise OR with 0x10 for the MS-DOS directory flag
            d.external_attr = 1106051088
            z.writestr(d, "")
            z.close()

    def stage(self):
        super(RepackWin, self).stage()
        setup_dest = Path(str(self.final_build).replace("target.zip", "setup.exe"))
        if "replacement_setup_exe" in self.repack_info:
            # Partner ships its own (localised) setup.exe.
            log.info("Overriding setup.exe with custom copy")
            retrieveFile(self.repack_info["replacement_setup_exe"], setup_dest)
        else:
            # otherwise copy forward the vanilla copy
            log.info("Copying vanilla setup.exe forward for installer creation")
            setup = str(self.full_build_path).replace("target.zip", "setup.exe")
            copy(setup, str(setup_dest))
        setup_dest.chmod(self.file_mode)

        # we generate the stub installer in the win32 build, so repack it on win32 too
        if isWin32(options.platform) and self.repack_info.get("repack_stub_installer"):
            log.info(
                "Copying vanilla setup-stub.exe forward for stub installer creation"
            )
            setup_dest = Path(
                str(self.final_build).replace("target.zip", "setup-stub.exe")
            )
            setup_source = str(self.full_build_path).replace(
                "target.zip", "setup-stub.exe"
            )
            copy(setup_source, str(setup_dest))
            setup_dest.chmod(self.file_mode)
+
+
if __name__ == "__main__":
    error = False
    partner_builds = {}
    # Map each ftp platform name to the class that knows how to repack it.
    repack_build = {
        "linux-i686": RepackLinux,
        "linux-x86_64": RepackLinux,
        "mac": RepackMac,
        "win32": RepackWin,
        "win64": RepackWin,
        "win64-aarch64": RepackWin,
    }

    parser = OptionParser(usage="usage: %prog [options]")
    parser.add_option(
        "-d",
        "--partners-dir",
        dest="partners_dir",
        default=str(PARTNERS_DIR),
        help="Specify the directory where the partner config files are found",
    )
    parser.add_option(
        "-p",
        "--partner",
        dest="partner",
        help="Repack for a single partner, specified by name",
    )
    parser.add_option(
        "-v", "--version", dest="version", help="Set the version number for repacking"
    )
    parser.add_option(
        "-n",
        "--build-number",
        dest="build_number",
        default=1,
        help="Set the build number for repacking",
    )
    parser.add_option("--platform", dest="platform", help="Set the platform to repack")
    parser.add_option(
        "--include-oem",
        action="store_true",
        dest="include_oem",
        default=False,
        help="Process partners marked as OEM (these are usually one-offs)",
    )
    parser.add_option(
        "-q",
        "--quiet",
        action="store_true",
        dest="quiet",
        default=False,
        help="Suppress standard output from the packaging tools",
    )
    parser.add_option(
        "--taskid",
        action="append",
        dest="upstream_tasks",
        help="Specify taskIds for upstream artifacts, using 'internal sign' tasks. Multiples "
        "expected, e.g. --taskid foo --taskid bar. Alternatively, use a space-separated list "
        "stored in UPSTREAM_TASKIDS in the environment.",
    )
    parser.add_option(
        "-l",
        "--limit-locale",
        action="append",
        dest="limit_locales",
        default=[],
    )

    (options, args) = parser.parse_args()

    # --quiet also lowers our own logging verbosity, not just the tools'.
    if not options.quiet:
        log.setLevel(logging.DEBUG)
    else:
        log.setLevel(logging.WARNING)

    options.partners_dir = Path(options.partners_dir.rstrip("/"))
    if not options.partners_dir.is_dir():
        log.error(f"Error: partners dir {options.partners_dir} is not a directory.")
        error = True

    if not options.version:
        log.error("Error: you must specify a version number.")
        error = True

    if not options.platform:
        log.error("No platform specified.")
        error = True

    if not isValidPlatform(options.platform):
        log.error("Invalid platform %s." % options.platform)
        error = True

    # --taskid arguments win over the UPSTREAM_TASKIDS environment variable.
    upstream_tasks = options.upstream_tasks or os.getenv("UPSTREAM_TASKIDS")
    if not upstream_tasks:
        log.error(
            "upstream tasks should be defined using --taskid args or "
            "UPSTREAM_TASKIDS in env."
        )
        error = True

    # All external packaging tools must be present before we do any work.
    for tool in ("tar", "bunzip2", "bzip2", "gunzip", "gzip", "zip"):
        if not which(tool):
            log.error(f"Error: couldn't find the {tool} executable in PATH.")
            error = True

    # Report every configuration problem above before bailing out.
    if error:
        sys.exit(1)
+
    base_workdir = Path.cwd()

    # Look up the artifacts available on our upstreams, but only if we need to
    artifact_ids = {}

    # Local directories for builds
    script_directory = Path.cwd()
    original_builds_dir = (
        script_directory
        / "original_builds"
        / options.version
        / f"build{options.build_number}"
    )
    repack_version = f"{options.version}-{options.build_number}"
    if os.getenv("MOZ_AUTOMATION"):
        # running in production
        repacked_builds_dir = Path("/builds/worker/artifacts")
    else:
        # local development
        repacked_builds_dir = script_directory / "artifacts"
    original_builds_dir.mkdir(mode=0o755, exist_ok=True, parents=True)
    repacked_builds_dir.mkdir(mode=0o755, exist_ok=True, parents=True)
    printSeparator()

    # For each partner in the partners dir
    # Read/check the config file
    # Download required builds (if not already on disk)
    # Perform repacks

    # walk the partner dirs, find valid repack.cfg configs, and load them
    partner_dirs = []
    need_stub_installers = False
    for root, _, all_files in os.walk(options.partners_dir):
        # NOTE(review): lstrip("/") removes every leading slash, and the
        # slice below assumes root starts with partners_dir verbatim —
        # looks correct only for a relative --partners-dir; confirm for
        # absolute paths.
        root = root.lstrip("/")
        partner = root[len(str(options.partners_dir)) + 1 :].split("/")[0]
        partner_distro = os.path.split(root)[-1]
        if options.partner:
            # --partner matches either the partner name or a prefix of the
            # distro directory name.
            if (
                options.partner != partner
                and options.partner != partner_distro[: len(options.partner)]
            ):
                continue

        for file in all_files:
            if file == "repack.cfg":
                log.debug(
                    "Found partner config: {} ['{}'] {}".format(
                        root, "', '".join(_), file
                    )
                )
                root = Path(root)
                repack_cfg = root / file
                repack_info = parseRepackConfig(repack_cfg, options.platform)
                if not repack_info:
                    log.debug(
                        "no repack_info for platform %s in %s, skipping"
                        % (options.platform, repack_cfg)
                    )
                    continue
                if repack_info.get("repack_stub_installer"):
                    need_stub_installers = True
                    repack_info["bouncer_product"] = getBouncerProduct(
                        partner, partner_distro
                    )
                partner_dirs.append((partner, partner_distro, root, repack_info))

    log.info("Retrieving artifact lists from upstream tasks")
    artifact_ids = getUpstreamArtifacts(upstream_tasks, need_stub_installers)
    if not artifact_ids:
        log.fatal("No upstream artifacts were found")
        sys.exit(1)
+
    for partner, partner_distro, full_partner_dir, repack_info in partner_dirs:
        log.info(
            "Starting repack process for partner: %s/%s" % (partner, partner_distro)
        )
        if "oem" in repack_info and options.include_oem is False:
            log.info(
                "Skipping partner: %s - marked as OEM and --include-oem was not set"
                % partner
            )
            continue

        repack_stub_installer = repack_info.get("repack_stub_installer")
        # where everything ends up
        partner_repack_dir = repacked_builds_dir / DEFAULT_OUTPUT_DIR

        # Figure out which base builds we need to repack.
        for locale in repack_info["locales"]:
            if options.limit_locales and locale not in options.limit_locales:
                log.info("Skipping %s because it is not in limit_locales list", locale)
                continue
            source_locale = locale
            # Partner has specified a different locale to
            # use as the base for their custom locale.
            if "locale." + locale in repack_info:
                source_locale = repack_info["locale." + locale]
            for platform in repack_info["platforms"]:
                # ja-JP-mac only exists for Mac, so skip non-existent
                # platform/locale combos.
                if (source_locale == "ja" and isMac(platform)) or (
                    source_locale == "ja-JP-mac" and not isMac(platform)
                ):
                    continue
                ftp_platform = getFtpPlatform(platform)

                local_filepath = original_builds_dir / ftp_platform / locale
                local_filepath.mkdir(mode=0o755, exist_ok=True, parents=True)
                # partner_repack_dir is a %(partner)s-style template; fill
                # in the placeholders for this partner/distro/locale.
                final_dir = Path(
                    str(partner_repack_dir)
                    % dict(
                        partner=partner,
                        partner_distro=partner_distro,
                        locale=locale,
                    )
                )
                # Start each repack from a clean output directory.
                if final_dir.exists():
                    rmdirRecursive(final_dir)
                final_dir.mkdir(mode=0o755, exist_ok=True, parents=True)

                # for the main repacking artifact
                file_name = getFilename(ftp_platform)
                local_filename = local_filepath / file_name

                # Check to see if this build is already on disk, i.e.
                # has already been downloaded.
                artifacts = getArtifactNames(platform, locale, repack_stub_installer)
                for artifact in artifacts:
                    local_artifact = local_filepath / Path(artifact).name
                    if local_artifact.exists():
                        log.info(f"Found {local_artifact} on disk, not downloading")
                        continue

                    # Unknown artifacts are fatal: we cannot guess a taskId.
                    if artifact not in artifact_ids:
                        log.fatal(
                            "Can't determine what taskID to retrieve %s from", artifact
                        )
                        sys.exit(1)
                    original_build_url = "%s/%s" % (
                        TASKCLUSTER_ARTIFACTS.format(taskId=artifact_ids[artifact]),
                        artifact,
                    )
                    retrieveFile(original_build_url, local_artifact)

                # Make sure we have the local file now
                if not local_filename.exists():
                    log.info(f"Error: Unable to retrieve {file_name}\n")
                    sys.exit(1)

                # Hand off to the platform-specific repack implementation.
                repackObj = repack_build[ftp_platform](
                    file_name,
                    full_partner_dir,
                    local_filepath,
                    final_dir,
                    ftp_platform,
                    repack_info,
                    locale=locale,
                    source_locale=source_locale,
                )
                repackObj.doRepack()
diff --git a/python/mozrelease/mozrelease/paths.py b/python/mozrelease/mozrelease/paths.py
new file mode 100644
index 0000000000..b3d48c4ac7
--- /dev/null
+++ b/python/mozrelease/mozrelease/paths.py
@@ -0,0 +1,85 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from six.moves.urllib.parse import urlunsplit
+
# Products whose FTP directory name differs from the product name.
product_ftp_map = {
    "fennec": "mobile",
}


def product2ftp(product):
    """Translate a product name into its FTP directory name.

    Products not listed in product_ftp_map map to themselves.
    """
    if product in product_ftp_map:
        return product_ftp_map[product]
    return product
+
+
def getCandidatesDir(product, version, buildNumber, protocol=None, server=None):
    """Return the candidates directory for product/version/buildNumber.

    Returned as a plain path, or as a full URL when protocol (and server)
    are supplied.
    """
    if protocol:
        assert server is not None, "server is required with protocol"

    ftp_product = product2ftp(product)
    directory = "/%s/candidates/%s-candidates/build%s" % (
        ftp_product,
        str(version),
        str(buildNumber),
    )

    if not protocol:
        return directory
    return urlunsplit((protocol, server, directory, None, None))
+
+
def getReleasesDir(product, version=None, protocol=None, server=None):
    """Return the releases directory for a product (optionally a specific
    version), as a path, or as a full URL when protocol/server are given."""
    if protocol:
        assert server is not None, "server is required with protocol"

    directory = "/%s/releases" % product
    if version:
        directory += "/%s" % version

    if not protocol:
        return directory
    return urlunsplit((protocol, server, directory, None, None))
+
+
def getReleaseInstallerPath(productName, brandName, version, platform, locale="en-US"):
    """Return the relative path (platform/locale/filename) of a release's
    installer.

    Args:
        productName: e.g. "firefox" or "fennec" (fennec is special-cased).
        brandName: display brand used in mac/windows filenames, e.g. "Firefox".
        version: version string embedded in the filename.
        platform: ftp platform name ("linux-*", "*mac*", "win*", "android*").
        locale: locale directory component, defaults to en-US.

    Raises:
        ValueError: if the platform is not supported for the product.
        (The original did ``raise "Unsupported platform"``, which raises a
        str — itself a TypeError in Python 3 — so no caller could have been
        catching a meaningful exception here.)
    """
    if productName not in ("fennec",):
        if platform.startswith("linux"):
            filename = "%s-%s.tar.bz2" % (productName, version)
        elif "mac" in platform:
            filename = "%s %s.dmg" % (brandName, version)
        elif platform.startswith("win"):
            filename = "%s Setup %s.exe" % (brandName, version)
        else:
            raise ValueError("Unsupported platform: %s" % platform)
    else:
        if platform.startswith("android"):
            filename = "%s-%s.%s.android-arm.apk" % (productName, version, locale)
        else:
            raise ValueError("Unsupported platform: %s" % platform)
    # Join the components, stripping stray slashes from each part.
    return "/".join(p.strip("/") for p in [platform, locale, filename])
diff --git a/python/mozrelease/mozrelease/platforms.py b/python/mozrelease/mozrelease/platforms.py
new file mode 100644
index 0000000000..2970725a73
--- /dev/null
+++ b/python/mozrelease/mozrelease/platforms.py
@@ -0,0 +1,54 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
# ftp platform name -> list of AUS update platform identifiers.
update_platform_map = {
    "android": ["Android_arm-eabi-gcc3"],
    "android-arm": ["Android_arm-eabi-gcc3"],
    "android-x86": ["Android_x86-gcc3"],
    "android-x86_64": ["Android_x86-64-gcc3"],
    "android-aarch64": ["Android_aarch64-gcc3"],
    "linux-i686": ["Linux_x86-gcc3"],
    "linux-x86_64": ["Linux_x86_64-gcc3"],
    "mac": [
        "Darwin_x86_64-gcc3-u-i386-x86_64",
        "Darwin_x86-gcc3-u-i386-x86_64",
        "Darwin_aarch64-gcc3",
        "Darwin_x86-gcc3",
        "Darwin_x86_64-gcc3",
    ],
    "win32": [
        "WINNT_x86-msvc",
        "WINNT_x86-msvc-x86",
        "WINNT_x86-msvc-x64",
    ],
    "win64": [
        "WINNT_x86_64-msvc",
        "WINNT_x86_64-msvc-x64",
    ],
    "win64-aarch64": ["WINNT_aarch64-msvc-aarch64"],
}

# ftp -> shipped locales map
sl_platform_map = {
    "linux-i686": "linux",
    "linux-x86_64": "linux",
    "mac": "osx",
    "win32": "win32",
    "win64": "win64",
}

# ftp -> info file platform map
info_file_platform_map = {
    "linux-i686": "linux",
    "linux-x86_64": "linux64",
    "mac": "macosx64",
    "win32": "win32",
    "win64": "win64",
    "win64-aarch64": "win64_aarch64",
}


def ftp2updatePlatforms(platform):
    """Return the AUS update platform list for an ftp platform name.

    Raises KeyError for unknown platforms (unlike the two helpers below).
    """
    return update_platform_map[platform]


def ftp2shippedLocales(platform):
    """Return the shipped-locales platform name; unknown names map to themselves."""
    return sl_platform_map.get(platform, platform)


def ftp2infoFile(platform):
    """Return the info-file platform name; unknown names map to themselves."""
    return info_file_platform_map.get(platform, platform)
diff --git a/python/mozrelease/mozrelease/scriptworker_canary.py b/python/mozrelease/mozrelease/scriptworker_canary.py
new file mode 100644
index 0000000000..dabdc6868d
--- /dev/null
+++ b/python/mozrelease/mozrelease/scriptworker_canary.py
@@ -0,0 +1,107 @@
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import logging
+import os
+import shutil
+import subprocess
+import tempfile
+from contextlib import contextmanager
+from pathlib import Path
+
+import taskcluster
+from appdirs import user_config_dir
+from gecko_taskgraph import GECKO
+from mach.base import FailedCommandError
+
+logger = logging.getLogger(__name__)
+
+
# Map each scriptworker kind to the `mach try scriptworker` task sets that
# exercise it (see push_canary below).
TASK_TYPES = {
    "signing": ["linux-signing", "linux-signing-partial"],
    "beetmover": ["beetmover-candidates"],
    "bouncer": ["bouncer-submit"],
    "balrog": ["balrog-submit"],
    "tree": ["tree"],
}
+
+
def get_secret(secret):
    """Fetch a secret's payload from the Taskcluster secrets service.

    Goes through the Taskcluster proxy when TASKCLUSTER_PROXY_URL is set;
    otherwise authenticates with local credentials from the environment.
    """
    # use proxy if configured, otherwise use local credentials from env vars
    if "TASKCLUSTER_PROXY_URL" in os.environ:
        secrets_options = {"rootUrl": os.environ["TASKCLUSTER_PROXY_URL"]}
    else:
        secrets_options = taskcluster.optionsFromEnvironment()
    return taskcluster.Secrets(secrets_options).get(secret)["secret"]
+
+
@contextmanager
def configure_ssh(ssh_key_secret):
    """Context manager that installs a temporary ssh key and hgrc for pushing.

    When ``ssh_key_secret`` is None (local development) this is a no-op.
    Otherwise the key is fetched from the Taskcluster secrets service,
    written to a temp dir, and a user hgrc pointing at it is created; both
    are removed on exit.
    """
    if ssh_key_secret is None:
        yield
        # Bug fix: the original fell through after this yield, executed the
        # automation path, and hit the second yield — making @contextmanager
        # raise "RuntimeError: generator didn't stop" on exit.
        return

    # If we get here, we are running in automation.
    # We use a user hgrc, so that we also get the system-wide hgrc settings.
    hgrc = Path(user_config_dir("hg")) / "hgrc"
    if hgrc.exists():
        raise FailedCommandError(f"Not overwriting `{hgrc}`; cannot configure ssh.")

    # Create the temp dir before entering the try so ssh_key_dir is always
    # bound when the finally block runs (the original could NameError if
    # mkdtemp itself failed).
    ssh_key_dir = Path(tempfile.mkdtemp())
    try:
        ssh_key = get_secret(ssh_key_secret)
        ssh_key_file = ssh_key_dir / "id_rsa"
        ssh_key_file.write_text(ssh_key["ssh_privkey"])
        # Private key must not be group/world readable or ssh refuses it.
        ssh_key_file.chmod(0o600)

        hgrc_content = (
            "[ui]\n"
            "username = trybld\n"
            "ssh = ssh -i {path} -l {user}\n".format(
                path=ssh_key_file, user=ssh_key["user"]
            )
        )
        hgrc.write_text(hgrc_content)

        yield
    finally:
        shutil.rmtree(str(ssh_key_dir))
        hgrc.unlink()
+
+
def push_canary(scriptworkers, addresses, ssh_key_secret):
    """Push `mach try scriptworker` canary task sets for the given workers,
    routing failure/exception notifications to the given email addresses."""
    if ssh_key_secret and os.environ.get("MOZ_AUTOMATION", "0") != "1":
        # We make assumptions about the layout of the docker image
        # for creating the hgrc that we use for the key.
        raise FailedCommandError("Cannot use ssh-key-secret outside of automation.")

    # Collect the set of `mach try scriptworker` task sets to run.
    tasks = []
    for scriptworker in scriptworkers:
        worker_tasks = TASK_TYPES.get(scriptworker)
        if not worker_tasks:
            logger.info("No tasks for {}.".format(scriptworker))
            continue
        logger.info("Running tasks for {}: {}".format(scriptworker, worker_tasks))
        tasks.extend(worker_tasks)

    mach = Path(GECKO) / "mach"
    base_command = [str(mach), "try", "scriptworker"]
    for address in addresses:
        for event in ("on-failed", "on-exception"):
            base_command += ["--route", "notify.email.{}.{}".format(address, event)]

    with configure_ssh(ssh_key_secret):
        env = os.environ.copy()
        for task in tasks:
            subprocess.check_call(base_command + [task], env=env)
diff --git a/python/mozrelease/mozrelease/update_verify.py b/python/mozrelease/mozrelease/update_verify.py
new file mode 100644
index 0000000000..49fe21db15
--- /dev/null
+++ b/python/mozrelease/mozrelease/update_verify.py
@@ -0,0 +1,275 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import re
+
+from six import string_types
+
+from .chunking import getChunk
+
+
class UpdateVerifyError(Exception):
    """Raised for malformed or inconsistent update verify configs."""

    pass
+
+
class UpdateVerifyConfig(object):
    """Reads, writes, and chunks update verify config files.

    A config file is a sequence of lines of key="value" pairs.  Global keys
    (product, channel, aus_server, to*, override_certs) are stored as
    attributes; per-release keys are stored as dicts in ``self.releases``.
    """

    # Lines starting with '#' are comments.
    comment_regex = re.compile("^#")
    # The order keys are emitted in by write().
    key_write_order = (
        "release",
        "product",
        "platform",
        "build_id",
        "locales",
        "channel",
        "patch_types",
        "from",
        "aus_server",
        "ftp_server_from",
        "ftp_server_to",
        "to",
        "mar_channel_IDs",
        "override_certs",
        "to_build_id",
        "to_display_version",
        "to_app_version",
        "updater_package",
    )
    # Keys that apply to the whole config rather than a single release.
    global_keys = (
        "product",
        "channel",
        "aus_server",
        "to",
        "to_build_id",
        "to_display_version",
        "to_app_version",
        "override_certs",
    )
    # Keys that describe a single release entry.
    release_keys = (
        "release",
        "build_id",
        "locales",
        "patch_types",
        "from",
        "ftp_server_from",
        "ftp_server_to",
        "mar_channel_IDs",
        "platform",
        "updater_package",
    )
    # Keys only written for the first line of the file.
    first_only_keys = (
        "from",
        "aus_server",
        "to",
        "to_build_id",
        "to_display_version",
        "to_app_version",
        "override_certs",
    )
    # Attributes compared by __eq__/__ne__.
    compare_attrs = global_keys + ("releases",)

    def __init__(
        self,
        product=None,
        channel=None,
        aus_server=None,
        to=None,
        to_build_id=None,
        to_display_version=None,
        to_app_version=None,
        override_certs=None,
    ):
        self.product = product
        self.channel = channel
        self.aus_server = aus_server
        self.to = to
        self.to_build_id = to_build_id
        self.to_display_version = to_display_version
        self.to_app_version = to_app_version
        self.override_certs = override_certs
        self.releases = []

    def __eq__(self, other):
        self_list = [getattr(self, attr) for attr in self.compare_attrs]
        other_list = [getattr(other, attr) for attr in self.compare_attrs]
        return self_list == other_list

    def __ne__(self, other):
        return not self.__eq__(other)

    def _parseLine(self, line):
        """Parse one key="value" line into a dict.

        Raises UpdateVerifyError for unknown keys, duplicate keys, or lines
        with no parseable data.
        """
        entry = {}
        items = re.findall(r"\w+=[\"'][^\"']*[\"']", line)
        for i in items:
            m = re.search(r"(?P<key>\w+)=[\"'](?P<value>.+)[\"']", i).groupdict()
            if m["key"] not in self.global_keys and m["key"] not in self.release_keys:
                raise UpdateVerifyError(
                    "Unknown key '%s' found on line:\n%s" % (m["key"], line)
                )
            if m["key"] in entry:
                raise UpdateVerifyError(
                    "Multiple values found for key '%s' on line:\n%s" % (m["key"], line)
                )
            entry[m["key"]] = m["value"]
        if not entry:
            raise UpdateVerifyError("No parseable data in line '%s'" % line)
        return entry

    def _addEntry(self, entry, first):
        """Apply one parsed line: set global keys as attributes and record
        the release keys as a new release.

        NOTE(review): ``first`` is currently unused, so global keys on later
        lines silently overwrite earlier values — confirm this is intended.
        """
        releaseKeys = {}
        for k, v in entry.items():
            if k in self.global_keys:
                setattr(self, k, entry[k])
            elif k in self.release_keys:
                # "from" is reserved in Python
                if k == "from":
                    releaseKeys["from_path"] = v
                else:
                    releaseKeys[k] = v
        self.addRelease(**releaseKeys)

    def read(self, config):
        """Load the config file at path ``config`` into this object."""
        # Only the first non-comment line of an update verify config should
        # have a "from" and"ausServer". Ignore any subsequent lines with them.
        first = True
        # Use a context manager so the file handle is always closed (the
        # original opened it and never closed it).
        with open(config) as f:
            for line in f:
                # Skip comment lines
                if self.comment_regex.search(line):
                    continue
                self._addEntry(self._parseLine(line), first)
                first = False

    def write(self, fh):
        """Serialize the config to ``fh``, a seekable binary file object
        (each line's trailing space is removed by rewinding one byte)."""
        first = True
        for releaseInfo in self.releases:
            for key in self.key_write_order:
                if key in self.global_keys and (
                    first or key not in self.first_only_keys
                ):
                    value = getattr(self, key)
                elif key in self.release_keys:
                    value = releaseInfo[key]
                else:
                    value = None
                if value is not None:
                    fh.write(key.encode("utf-8"))
                    fh.write(b"=")
                    if isinstance(value, (list, tuple)):
                        fh.write(('"%s" ' % " ".join(value)).encode("utf-8"))
                    else:
                        fh.write(('"%s" ' % value).encode("utf-8"))
            # Rewind one character to avoid having a trailing space
            fh.seek(-1, os.SEEK_CUR)
            fh.write(b"\n")
            first = False

    def addRelease(
        self,
        release=None,
        build_id=None,
        locales=None,
        patch_types=None,
        from_path=None,
        ftp_server_from=None,
        ftp_server_to=None,
        mar_channel_IDs=None,
        platform=None,
        updater_package=None,
    ):
        """Add a release entry to the config.

        Locales and patch_types can be passed as either a string or a list.
        If a string is passed, they will be converted to a list for internal
        storage.  Omitted, they default to [] and ["complete"] respectively.
        (These used to be mutable default arguments, so the shared default
        list could be mutated by addLocaleToRelease and leak one release's
        locales into every release created with the default.)

        Raises UpdateVerifyError if a release with the same build_id and
        from_path already exists.
        """
        if locales is None:
            locales = []
        if patch_types is None:
            patch_types = ["complete"]
        if self.getRelease(build_id, from_path):
            raise UpdateVerifyError(
                "Couldn't add release identified by build_id '%s' and from_path '%s': "
                "already exists in config" % (build_id, from_path)
            )
        # This tree is Python 3 only, so plain str replaces six.string_types.
        if isinstance(locales, str):
            locales = sorted(list(locales.split()))
        if isinstance(patch_types, str):
            patch_types = list(patch_types.split())
        self.releases.append(
            {
                "release": release,
                "build_id": build_id,
                "locales": locales,
                "patch_types": patch_types,
                "from": from_path,
                "ftp_server_from": ftp_server_from,
                "ftp_server_to": ftp_server_to,
                "mar_channel_IDs": mar_channel_IDs,
                "platform": platform,
                "updater_package": updater_package,
            }
        )

    def addLocaleToRelease(self, build_id, locale, from_path=None):
        """Add ``locale`` to the release matching build_id/from_path,
        keeping the locale list sorted."""
        r = self.getRelease(build_id, from_path)
        if not r:
            raise UpdateVerifyError(
                "Couldn't add '%s' to release identified by build_id '%s' and from_path '%s': "
                "'%s' doesn't exist in this config."
                % (locale, build_id, from_path, build_id)
            )
        r["locales"].append(locale)
        r["locales"] = sorted(r["locales"])

    def getRelease(self, build_id, from_path):
        """Return the release dict matching build_id/from_path, or {}."""
        for r in self.releases:
            if r["build_id"] == build_id and r["from"] == from_path:
                return r
        return {}

    def getFullReleaseTests(self):
        # Releases with a "from" path are full update tests.
        return [r for r in self.releases if r["from"] is not None]

    def getQuickReleaseTests(self):
        # Releases without a "from" path are quick update tests.
        return [r for r in self.releases if r["from"] is None]

    def getChunk(self, chunks, thisChunk):
        """Return a new UpdateVerifyConfig holding this chunk's share of the
        (build_id, locale, from) test triples."""
        fullTests = []
        quickTests = []
        for test in self.getFullReleaseTests():
            for locale in test["locales"]:
                fullTests.append([test["build_id"], locale, test["from"]])
        for test in self.getQuickReleaseTests():
            for locale in test["locales"]:
                quickTests.append([test["build_id"], locale, test["from"]])
        allTests = getChunk(fullTests, chunks, thisChunk)
        allTests.extend(getChunk(quickTests, chunks, thisChunk))

        newConfig = UpdateVerifyConfig(
            self.product,
            self.channel,
            self.aus_server,
            self.to,
            self.to_build_id,
            self.to_display_version,
            self.to_app_version,
            self.override_certs,
        )
        for t in allTests:
            build_id, locale, from_path = t
            if from_path == "None":
                from_path = None
            r = self.getRelease(build_id, from_path)
            try:
                newConfig.addRelease(
                    r["release"],
                    build_id,
                    locales=[],
                    ftp_server_from=r["ftp_server_from"],
                    ftp_server_to=r["ftp_server_to"],
                    patch_types=r["patch_types"],
                    from_path=from_path,
                    mar_channel_IDs=r["mar_channel_IDs"],
                    platform=r["platform"],
                    updater_package=r["updater_package"],
                )
            except UpdateVerifyError:
                # Release already added by an earlier triple for a different
                # locale; just add this locale below.
                pass
            newConfig.addLocaleToRelease(build_id, locale, from_path)
        return newConfig
diff --git a/python/mozrelease/mozrelease/util.py b/python/mozrelease/mozrelease/util.py
new file mode 100644
index 0000000000..3858c40514
--- /dev/null
+++ b/python/mozrelease/mozrelease/util.py
@@ -0,0 +1,26 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from yaml.loader import SafeLoader
+
+
class UnicodeLoader(SafeLoader):
    """SafeLoader variant whose string scalars are constructed as plain text."""

    def construct_yaml_str(self, node):
        return self.construct_scalar(node)


# Route all YAML string scalars through the override above.
UnicodeLoader.add_constructor("tag:yaml.org,2002:str", UnicodeLoader.construct_yaml_str)
+
+
def load(stream):
    """Parse the first YAML document in *stream* and return the
    corresponding Python object."""
    yaml_loader = UnicodeLoader(stream)
    try:
        return yaml_loader.get_single_data()
    finally:
        # Always release parser resources, even when parsing fails.
        yaml_loader.dispose()
diff --git a/python/mozrelease/mozrelease/versions.py b/python/mozrelease/mozrelease/versions.py
new file mode 100644
index 0000000000..e3e47d4e4a
--- /dev/null
+++ b/python/mozrelease/mozrelease/versions.py
@@ -0,0 +1,114 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import re
+from distutils.version import StrictVersion
+
+from looseversion import LooseVersion
+
+
class MozillaVersionCompareMixin:
    # Comparison mixin for the version classes below.  Adds handling for a
    # trailing "esr" suffix: an esr version compares below the otherwise
    # identical non-esr version.
    def __cmp__(self, other):
        # We expect this function to never be called.
        raise AssertionError()

    def _cmp(self, other):
        # Records which side(s) carried an "esr" suffix before coercion.
        has_esr = set()
        if isinstance(other, LooseModernMozillaVersion) and str(other).endswith("esr"):
            # If other version ends with esr, coerce through MozillaVersion ending up with
            # a StrictVersion if possible
            has_esr.add("other")
            other = MozillaVersion(str(other)[:-3])  # strip ESR from end of string
        if isinstance(self, LooseModernMozillaVersion) and str(self).endswith("esr"):
            # If our version ends with esr, coerce through MozillaVersion ending up with
            # a StrictVersion if possible
            has_esr.add("self")
            self = MozillaVersion(str(self)[:-3])  # strip ESR from end of string
        if isinstance(other, LooseModernMozillaVersion) or isinstance(
            self, LooseModernMozillaVersion
        ):
            # If we're still LooseVersion for self or other, run LooseVersion compare
            # Being sure to pass through Loose Version type first
            val = LooseVersion._cmp(
                LooseModernMozillaVersion(str(self)),
                LooseModernMozillaVersion(str(other)),
            )
        else:
            # No versions are loose, therefore we can use StrictVersion
            val = StrictVersion._cmp(self, other)
        if has_esr.isdisjoint(set(["other", "self"])) or has_esr.issuperset(
            set(["other", "self"])
        ):
            # If both had esr string or neither, then _cmp() was accurate
            return val
        elif val != 0:
            # cmp is accurate here even if esr is present in only 1 compare, since
            # versions are not equal
            return val
        elif "other" in has_esr:
            return -1  # esr is not greater than non esr
        return 1  # non esr is greater than esr
+
+
class ModernMozillaVersion(MozillaVersionCompareMixin, StrictVersion):
    """A version class that is slightly less restrictive than StrictVersion.
    Instead of just allowing "a" or "b" as prerelease tags, it allows any
    alpha. This allows us to support the once-shipped "3.6.3plugin1" and
    similar versions."""

    # Accepts "N.N" or "N.N.N", optionally followed by any alpha tag plus a
    # number (e.g. "3.6.3plugin1").  Whitespace is ignored via re.VERBOSE.
    version_re = re.compile(
        r"""^(\d+) \. (\d+) (\. (\d+))?
                            ([a-zA-Z]+(\d+))?$""",
        re.VERBOSE,
    )
+
+
class AncientMozillaVersion(MozillaVersionCompareMixin, StrictVersion):
    """A version class that is slightly less restrictive than StrictVersion.
    Instead of just allowing "a" or "b" as prerelease tags, it allows any
    alpha. This allows us to support the once-shipped "3.6.3plugin1" and
    similar versions.
    It also supports versions w.x.y.z by transmuting to w.x.z, which
    is useful for versions like 1.5.0.x and 2.0.0.y"""

    # Matches w.x.y.z but only captures w, x and z (the single-digit y is
    # matched without a group), implementing the w.x.y.z -> w.x.z
    # transmutation described above.
    version_re = re.compile(
        r"""^(\d+) \. (\d+) \. \d (\. (\d+))
                            ([a-zA-Z]+(\d+))?$""",
        re.VERBOSE,
    )
+
+
class LooseModernMozillaVersion(MozillaVersionCompareMixin, LooseVersion):
    """A version class that is more restrictive than LooseVersion.
    This class reduces the valid strings to "esr", "a", "b" and "rc" in order
    to support esr. StrictVersion requires a trailing number after all strings."""

    # Splits a version string into digits, the allowed tags, or dots.
    component_re = re.compile(r"(\d+ | a | b | rc | esr | \.)", re.VERBOSE)

    def __repr__(self):
        return "LooseModernMozillaVersion ('%s')" % str(self)
+
+
def MozillaVersion(version):
    """Parse a version string into the most specific version class that
    accepts it: Modern first, then Ancient (only for w.x.y.z strings with
    exactly three dots), then Loose as a last resort.

    Raises ValueError when no class accepts the string.
    """
    candidates = [ModernMozillaVersion]
    if version.count(".") == 3:
        candidates.append(AncientMozillaVersion)
    candidates.append(LooseModernMozillaVersion)
    for version_class in candidates:
        try:
            return version_class(version)
        except ValueError:
            continue
    raise ValueError("Version number %s is invalid." % version)
+
+
def getPrettyVersion(version):
    """Expand a trailing alpha/beta/rc marker into human-readable form,
    e.g. "62.0b11" -> "62.0 Beta 11".  Strings without a marker are
    returned unchanged."""
    substitutions = (
        (r"a([0-9]+)$", r" Alpha \1"),
        (r"b([0-9]+)$", r" Beta \1"),
        (r"rc([0-9]+)$", r" RC \1"),
    )
    for pattern, replacement in substitutions:
        version = re.sub(pattern, replacement, version)
    return version
diff --git a/python/mozrelease/setup.py b/python/mozrelease/setup.py
new file mode 100644
index 0000000000..d831cb14c5
--- /dev/null
+++ b/python/mozrelease/setup.py
@@ -0,0 +1,25 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from setuptools import find_packages, setup
+
# Package version; bump when releasing an incompatible change.
VERSION = "0.2"

setup(
    author="Mozilla Foundation",
    author_email="Mozilla Release Engineering",
    name="mozrelease",
    description="Common functionality used by Mozilla Release Automation",
    license="MPL 2.0",
    packages=find_packages(),
    version=VERSION,
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Topic :: Software Development :: Build Tools",
        "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
        # NOTE(review): the tree is Python 3 only; this 2.7 classifier
        # looks stale — confirm before updating metadata.
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: Implementation :: CPython",
    ],
    keywords="mozilla",
)
diff --git a/python/mozrelease/test/data/Firefox-62.0.3.update.json b/python/mozrelease/test/data/Firefox-62.0.3.update.json
new file mode 100644
index 0000000000..c22cf42753
--- /dev/null
+++ b/python/mozrelease/test/data/Firefox-62.0.3.update.json
@@ -0,0 +1,74 @@
+[
+ {
+ "fields": {
+ "detailsURL": "https://www.mozilla.org/%LOCALE%/firefox/62.0.3/releasenotes/",
+ "type": "minor"
+ },
+ "for": {}
+ },
+ {
+ "fields": {
+ "actions": "showURL",
+ "openURL": "https://www.mozilla.org/%LOCALE%/firefox/62.0.3/whatsnew/?oldversion=%OLD_VERSION%"
+ },
+ "for": {
+ "channels": [
+ "release",
+ "release-localtest",
+ "release-cdntest"
+ ],
+ "locales": [
+ "cak",
+ "cy",
+ "da",
+ "de",
+ "dsb",
+ "en-CA",
+ "en-US",
+ "es-AR",
+ "es-CL",
+ "es-ES",
+ "es-MX",
+ "et",
+ "fa",
+ "fi",
+ "fr",
+ "fy-NL",
+ "gn",
+ "gu-IN",
+ "hsb",
+ "hu",
+ "ia",
+ "id",
+ "it",
+ "ja",
+ "ja-JP-mac",
+ "ka",
+ "kab",
+ "ko",
+ "lij",
+ "lt",
+ "ms",
+ "nb-NO",
+ "nl",
+ "nn-NO",
+ "pl",
+ "pt-BR",
+ "pt-PT",
+ "sk",
+ "sl",
+ "sq",
+ "sr",
+ "sv-SE",
+ "tr",
+ "uk",
+ "vi",
+ "zh-CN",
+ "zh-TW"
+ ],
+ "versions": [
+ "<62.0"
+ ]
+ }
+ }
+]
diff --git a/python/mozrelease/test/data/Firefox-62.0b11-update.json b/python/mozrelease/test/data/Firefox-62.0b11-update.json
new file mode 100644
index 0000000000..c22cf42753
--- /dev/null
+++ b/python/mozrelease/test/data/Firefox-62.0b11-update.json
@@ -0,0 +1,74 @@
+[
+ {
+ "fields": {
+ "detailsURL": "https://www.mozilla.org/%LOCALE%/firefox/62.0.3/releasenotes/",
+ "type": "minor"
+ },
+ "for": {}
+ },
+ {
+ "fields": {
+ "actions": "showURL",
+ "openURL": "https://www.mozilla.org/%LOCALE%/firefox/62.0.3/whatsnew/?oldversion=%OLD_VERSION%"
+ },
+ "for": {
+ "channels": [
+ "release",
+ "release-localtest",
+ "release-cdntest"
+ ],
+ "locales": [
+ "cak",
+ "cy",
+ "da",
+ "de",
+ "dsb",
+ "en-CA",
+ "en-US",
+ "es-AR",
+ "es-CL",
+ "es-ES",
+ "es-MX",
+ "et",
+ "fa",
+ "fi",
+ "fr",
+ "fy-NL",
+ "gn",
+ "gu-IN",
+ "hsb",
+ "hu",
+ "ia",
+ "id",
+ "it",
+ "ja",
+ "ja-JP-mac",
+ "ka",
+ "kab",
+ "ko",
+ "lij",
+ "lt",
+ "ms",
+ "nb-NO",
+ "nl",
+ "nn-NO",
+ "pl",
+ "pt-BR",
+ "pt-PT",
+ "sk",
+ "sl",
+ "sq",
+ "sr",
+ "sv-SE",
+ "tr",
+ "uk",
+ "vi",
+ "zh-CN",
+ "zh-TW"
+ ],
+ "versions": [
+ "<62.0"
+ ]
+ }
+ }
+]
diff --git a/python/mozrelease/test/data/Firefox-64.0b13.update.json b/python/mozrelease/test/data/Firefox-64.0b13.update.json
new file mode 100644
index 0000000000..0d9a4405e5
--- /dev/null
+++ b/python/mozrelease/test/data/Firefox-64.0b13.update.json
@@ -0,0 +1,9 @@
+[
+ {
+ "fields": {
+ "detailsURL": "https://www.mozilla.org/%LOCALE%/firefox/64.0/releasenotes/",
+ "type": "minor"
+ },
+ "for": {}
+ }
+]
diff --git a/python/mozrelease/test/data/buglist_changesets.json b/python/mozrelease/test/data/buglist_changesets.json
new file mode 100644
index 0000000000..dedcd8a810
--- /dev/null
+++ b/python/mozrelease/test/data/buglist_changesets.json
@@ -0,0 +1,94 @@
+{
+ "entries": [
+ {
+ "desc": "Bug 1354038 - [push-apk] taskgraph: Use rollout and deactivate dry-run on release p=jlorenzo r=aki a=release DONTBUILD"
+ },
+ {
+ "desc": "Bug 1356563 - Only set global ready state on native widget loading; r=snorp a=sylvestre\n\nOur \"chrome-document-loaded\" observer may detect several different types\nof widgets that can exist in the parent process, including the Android\nnsWindow, PuppetWidget, etc. We should only set the global state to\nready when the first top-level nsWindow has loaded, and not just any\nwindow."
+ },
+ {
+ "desc": "No bug, Automated blocklist update from host bld-linux64-spot-305 - a=blocklist-update"
+ },
+ {
+ "desc": "Automatic version bump. CLOSED TREE NO BUG a=release"
+ },
+ {
+ "desc": "No bug - Tagging d345b657d381ade5195f1521313ac651618f54a2 with FIREFOX_53_0_BUILD6, FIREFOX_53_0_RELEASE a=release CLOSED TREE"
+ },
+ {
+ "desc": "No bug, Automated blocklist update from host bld-linux64-spot-305 - a=blocklist-update"
+ },
+ {
+ "desc": "Bug 1344529 - Remove unused variable in widget/gtk/gtk2drawing.c. r=frg a=release DONOTBUILD in a CLOSED TREE"
+ },
+ {
+ "desc": "Bug 1306543 - Avoid using g_unicode_script_from_iso15924 directly. r=jfkthame a=release in a CLOSED TREE DONTBUILD"
+ },
+ {
+ "desc": "Bug 1320072 - Backout intent change - broke partner Google test. r=snorp, a=lizzard"
+ },
+ {
+ "desc": "Bug 1328762 - Cherry-pick ANGLE a4aaa2de57dc51243da35ea147d289a21a9f0c49. a=lizzard\n\nMozReview-Commit-ID: WVK0smAfAW"
+ },
+ {
+ "desc": "Bug 1341190 - Remove .popup-anchor visibility rule. r=mconley, a=lizzard\n\nMozReview-Commit-ID: DFMIKMMnLx5"
+ },
+ {
+ "desc": "Bug 1348409 - Stop supporting the showDialog argument for window.find. r=mrbkap, a=lizzard\n\nThe dialog functionality of the non-standard window.find API has been broken\nwith e10s since it shipped, and bug 1182569 or bug 1232432 (or both) have\nbroken it for non-e10s.\n\nThis patch remove showDialog support entirely, for both e10s and non-e10s,\nin a more deliberate way. We now ignore the argument.\n\nMozReview-Commit-ID: 1CTzgEkDhHW"
+ },
+ {
+ "desc": "Bug 1358089 - [RTL] Separate xml drawable into v17 folder. r=ahunt, a=lizzard\n\nMozReview-Commit-ID: LaOwxXwhsHA"
+ },
+ {
+ "desc": "Bug 1360626 - Create a blacklist for adaptive playback support. r=jolin, a=lizzard\n\nOn some devices / os combinations, enabling adaptive playback causes decoded frame unusable.\nIt may cause the decode frame to be black and white or return tiled frames.\nSo we should do the blacklist according to the report.\n\nMozReview-Commit-ID: j3PZXTtkXG"
+ },
+ {
+ "desc": "Bug 1354038 - part2: [push-apk] taskgraph: Use rollout and deactivate dry-run on release r=aki a=bustage DONTBUILD\n\nMozReview-Commit-ID: 1f22BcAZkvp"
+ },
+ {
+ "desc": "bug 1354038 - empty commit to force builds. a=release"
+ },
+ {
+ "desc": "Bug 1337861 - [Fennec-Relpro] Enforce the presence of $MOZ_BUILD_DATE r=jlund a=release\n\nMozReview-Commit-ID: DzEeeYQjwLW"
+ },
+ {
+ "desc": "Bug 1332731 - Follow-up to fix accessibility breakage. r=sebastian, a=lizzard\n\nFollow-up to fix breakage in accessibility caused by the bundle\nconversion. In particular, optString(foo) should have been converted to\ngetString(foo, \"\") because optString returns \"\" by default.\n\nAlso fix a small bug in Presentation.jsm where an array or null should\nbe used instead of a string."
+ },
+ {
+ "desc": "Bug 1355870 - Allow a system preference to determine distribution dir. r=nalexander, a=lizzard"
+ },
+ {
+ "desc": "Bug 1354911 - Guard against null menu item names. r=sebastian, a=lizzard\n\nAddons may give us invalid menu item names; bail instead of crashing in\nsuch cases."
+ },
+ {
+ "desc": "Bug 1356563 - Remove chrome-document-loaded observer only after handling it. r=me, a=gchang\n\nOnly remove the \"chrome-document-loaded\" observer after handling it in\nnsAppShell. Otherwise we may never end up handling it."
+ },
+ {
+ "desc": "Bug 1352333 - remove autophone webrtc test manifests, r=dminor, a=test-only."
+ },
+ {
+ "desc": "Bug 1352333 - sync autophone webrtc test manifests with normal webrtc manifests, r=jmaher,dminor, a=test-only."
+ },
+ {
+ "desc": "No bug - Tagging f239279b709072490993b099832fa8c18f07713a with FENNEC_53_0_BUILD1, FENNEC_53_0_RELEASE a=release CLOSED TREE"
+ },
+ {
+ "desc": "Automated checkin: version bump for fennec 53.0.1 release. DONTBUILD CLOSED TREE a=release"
+ },
+ {
+ "desc": "Added FENNEC_53_0_1_RELEASE FENNEC_53_0_1_BUILD1 tag(s) for changeset f029d1a1324b. DONTBUILD CLOSED TREE a=release"
+ },
+ {
+ "desc": "Backout Bug 1337861 (Enforce MOZ_BUILD_DATE) due to Bug 1360550. r=catlee a=catlee\n\nBug 1360550 resulted in the buildid the Linux builds had being different than the directory they were uploaded to. This had fallout affects for QA's firefox-ui tests and presumably anything using mozdownload.\n\nMozReview-Commit-ID: 8lMvLU0vGiS"
+ },
+ {
+ "desc": "No bug, Automated blocklist update from host bld-linux64-spot-303 - a=blocklist-update"
+ },
+ {
+ "desc": "Automatic version bump. CLOSED TREE NO BUG a=release"
+ },
+ {
+ "desc": "No bug - Tagging 5cbf464688a47129c0ea36fe38f42f59926e4b2c with FENNEC_53_0_1_BUILD2, FENNEC_53_0_1_RELEASE a=release CLOSED TREE"
+ }
+ ]
+}
diff --git a/python/mozrelease/test/data/sample-update-verify.cfg b/python/mozrelease/test/data/sample-update-verify.cfg
new file mode 100644
index 0000000000..b8c87457b5
--- /dev/null
+++ b/python/mozrelease/test/data/sample-update-verify.cfg
@@ -0,0 +1,4 @@
+release="4.0" product="Firefox" platform="Linux_x86-gcc3" build_id="888" locales="af de en-US ja zh-TW" channel="betatest" patch_types="partial complete" from="/firefox/4.0rc1.tar.bz2" aus_server="https://aus4.mozilla.org" ftp_server_from="stage.mozilla.org/firefox" ftp_server_to="stage.mozilla.org/firefox" to="/firefox/4.0rc2.tar.bz2" mar_channel_IDs="firefox-mozilla-beta" to_build_id="999" to_display_version="99.0 Zeta 9" to_app_version="99.0"
+release="4.0b12" product="Firefox" platform="Linux_x86-gcc3" build_id="777" locales="af en-US" channel="betatest" patch_types="complete" from="/firefox/4.0b12.tar.bz2" ftp_server_from="stage.mozilla.org/firefox" ftp_server_to="stage.mozilla.org/firefox"
+release="4.0b12" product="Firefox" platform="Linux_x86-gcc3" build_id="777" locales="de ja zh-TW" channel="betatest" patch_types="complete" ftp_server_from="stage.mozilla.org/firefox" ftp_server_to="stage.mozilla.org/firefox"
+release="3.7a1" product="Firefox" platform="Linux_x86-gcc3" build_id="666" locales="en-US" channel="betatest" patch_types="complete" ftp_server_from="stage.mozilla.org/firefox" ftp_server_to="stage.mozilla.org/firefox"
diff --git a/python/mozrelease/test/data/whatsnew-62.0.3.yml b/python/mozrelease/test/data/whatsnew-62.0.3.yml
new file mode 100644
index 0000000000..5a9e4c9a1f
--- /dev/null
+++ b/python/mozrelease/test/data/whatsnew-62.0.3.yml
@@ -0,0 +1,65 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+---
+- type: product-details
+ url: "https://www.mozilla.org/%LOCALE%/{product}/{version}/releasenotes/"
+# %LOCALE% is automatically replaced by Balrog.
+- type: show-url
+ # yamllint disable-line rule:line-length
+ url: "https://www.mozilla.org/%LOCALE%/{product}/{version}/whatsnew/?oldversion=%OLD_VERSION%"
+ conditions:
+ release-types: [release]
+ products: [firefox]
+ update-channel: release
+ # e.g.: ["<61.0"]. {version.major_number} reflects the current version.
+ # This is done by taskgraph.
+ versions: ["<{version.major_number}.0"]
+ locales:
+ - cak
+ - cy
+ - da
+ - de
+ - dsb
+ - en-CA
+ - en-US
+ - es-AR
+ - es-CL
+ - es-ES
+ - es-MX
+ - et
+ - fa
+ - fi
+ - fr
+ - fy-NL
+ - gn
+ - gu-IN
+ - hsb
+ - hu
+ - ia
+ - id
+ - it
+ - ja
+ - ja-JP-mac
+ - ka
+ - kab
+ - ko
+ - lij
+ - lt
+ - ms
+ - nb-NO
+ - nl
+ - nn-NO
+ - pl
+ - pt-BR
+ - pt-PT
+ - sk
+ - sl
+ - sq
+ - sr
+ - sv-SE
+ - tr
+ - uk
+ - vi
+ - zh-CN
+ - zh-TW
diff --git a/python/mozrelease/test/data/whatsnew-release.yml b/python/mozrelease/test/data/whatsnew-release.yml
new file mode 100644
index 0000000000..5a9e4c9a1f
--- /dev/null
+++ b/python/mozrelease/test/data/whatsnew-release.yml
@@ -0,0 +1,65 @@
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+
+---
+- type: product-details
+ url: "https://www.mozilla.org/%LOCALE%/{product}/{version}/releasenotes/"
+# %LOCALE% is automatically replaced by Balrog.
+- type: show-url
+ # yamllint disable-line rule:line-length
+ url: "https://www.mozilla.org/%LOCALE%/{product}/{version}/whatsnew/?oldversion=%OLD_VERSION%"
+ conditions:
+ release-types: [release]
+ products: [firefox]
+ update-channel: release
+ # e.g.: ["<61.0"]. {version.major_number} reflects the current version.
+ # This is done by taskgraph.
+ versions: ["<{version.major_number}.0"]
+ locales:
+ - cak
+ - cy
+ - da
+ - de
+ - dsb
+ - en-CA
+ - en-US
+ - es-AR
+ - es-CL
+ - es-ES
+ - es-MX
+ - et
+ - fa
+ - fi
+ - fr
+ - fy-NL
+ - gn
+ - gu-IN
+ - hsb
+ - hu
+ - ia
+ - id
+ - it
+ - ja
+ - ja-JP-mac
+ - ka
+ - kab
+ - ko
+ - lij
+ - lt
+ - ms
+ - nb-NO
+ - nl
+ - nn-NO
+ - pl
+ - pt-BR
+ - pt-PT
+ - sk
+ - sl
+ - sq
+ - sr
+ - sv-SE
+ - tr
+ - uk
+ - vi
+ - zh-CN
+ - zh-TW
diff --git a/python/mozrelease/test/python.ini b/python/mozrelease/test/python.ini
new file mode 100644
index 0000000000..5854d57850
--- /dev/null
+++ b/python/mozrelease/test/python.ini
@@ -0,0 +1,7 @@
+[DEFAULT]
+subsuite=mozrelease
+
+[test_versions.py]
+[test_update_verify.py]
+[test_balrog.py]
+[test_buglist_creator.py]
diff --git a/python/mozrelease/test/test_balrog.py b/python/mozrelease/test/test_balrog.py
new file mode 100644
index 0000000000..0c3adfacac
--- /dev/null
+++ b/python/mozrelease/test/test_balrog.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+from pathlib import Path
+
+import mozunit
+import pytest
+from mozilla_version.gecko import GeckoVersion
+
+from mozrelease.balrog import generate_update_properties
+from mozrelease.util import load as yaml_load
+
+DATA_PATH = Path(__file__).parent.joinpath("data")
+
+
+@pytest.mark.parametrize(
+ "context,config_file,output_file",
+ [
+ (
+ {
+ "release-type": "release",
+ "product": "firefox",
+ "version": GeckoVersion.parse("62.0.3"),
+ },
+ "whatsnew-62.0.3.yml",
+ "Firefox-62.0.3.update.json",
+ ),
+ (
+ {
+ "release-type": "beta",
+ "product": "firefox",
+ "version": GeckoVersion.parse("64.0"),
+ },
+ "whatsnew-62.0.3.yml",
+ "Firefox-64.0b13.update.json",
+ ),
+ ],
+)
+def test_update_properties(context, config_file, output_file):
+ with DATA_PATH.joinpath(config_file).open("r", encoding="utf-8") as f:
+ config = yaml_load(f)
+
+ update_line = generate_update_properties(context, config)
+
+ assert update_line == json.load(
+ DATA_PATH.joinpath(output_file).open("r", encoding="utf-8")
+ )
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozrelease/test/test_buglist_creator.py b/python/mozrelease/test/test_buglist_creator.py
new file mode 100644
index 0000000000..13a530bb97
--- /dev/null
+++ b/python/mozrelease/test/test_buglist_creator.py
@@ -0,0 +1,178 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+from pathlib import Path
+
+import mozunit
+import pytest
+from mozilla_version.gecko import GeckoVersion
+
+from mozrelease.buglist_creator import (
+ create_bugs_url,
+ get_bugs_in_changeset,
+ get_previous_tag_version,
+ is_backout_bug,
+ is_excluded_change,
+ parse_tag_version,
+ tag_version,
+)
+
+DATA_PATH = Path(__file__).with_name("data")
+
+
+def test_beta_1_release():
+ buglist_str_54_0b1 = create_bugs_url(
+ product="firefox",
+ current_version=GeckoVersion.parse("54.0b1"),
+ current_revision="cf76e00dcd6f",
+ )
+ assert buglist_str_54_0b1 == "", "There should be no bugs to compare for beta 1."
+
+
+@pytest.mark.parametrize(
+ "description,is_excluded",
+ (
+ (
+ "something something something a=test-only something something something",
+ True,
+ ),
+ ("this is a a=release change!", True),
+ ),
+)
+def test_is_excluded_change(description, is_excluded):
+ assert is_excluded_change({"desc": description}) == is_excluded
+
+
+@pytest.mark.parametrize(
+ "description,is_backout",
+ (
+ ("I backed out this bug because", True),
+ ("Backing out this bug due to", True),
+ ("Backout bug xyz", True),
+ ("Back out bug xyz", True),
+ ("this is a regular bug description", False),
+ ),
+)
+def test_is_backout_bug(description, is_backout):
+ assert is_backout_bug(description) == is_backout
+
+
+@pytest.mark.parametrize(
+ "product,version,tag",
+ (
+ ("firefox", GeckoVersion.parse("53.0b10"), "FIREFOX_53_0b10_RELEASE"),
+ ("firefox", GeckoVersion.parse("52.0"), "FIREFOX_52_0_RELEASE"),
+ ("fennec", GeckoVersion.parse("52.0.2"), "FENNEC_52_0_2_RELEASE"),
+ ),
+)
+def test_tag_version(product, version, tag):
+ assert tag_version(product, version) == tag
+
+
+@pytest.mark.parametrize(
+ "tag,version",
+ (
+ ("FIREFOX_53_0b10_RELEASE", GeckoVersion.parse("53.0b10")),
+ ("FIREFOX_52_0_RELEASE", GeckoVersion.parse("52.0")),
+ ("FENNEC_52_0_2_RELEASE", GeckoVersion.parse("52.0.2")),
+ ),
+)
+def test_parse_tag_version(tag, version):
+ assert parse_tag_version(tag) == version
+
+
+@pytest.mark.parametrize(
+ "version,tag,previous_tag",
+ (
+ (
+ GeckoVersion.parse("48.0b4"),
+ "FIREFOX_48_0b4_RELEASE",
+ "FIREFOX_48_0b3_RELEASE",
+ ),
+ (
+ GeckoVersion.parse("48.0b9"),
+ "FIREFOX_48_0b9_RELEASE",
+ "FIREFOX_48_0b7_RELEASE",
+ ),
+ (
+ GeckoVersion.parse("48.0.2"),
+ "FIREFOX_48_0_2_RELEASE",
+ "FIREFOX_48_0_1_RELEASE",
+ ),
+ (
+ GeckoVersion.parse("48.0.1"),
+ "FIREFOX_48_0_1_RELEASE",
+ "FIREFOX_48_0_RELEASE",
+ ),
+ ),
+)
+def test_get_previous_tag_version(version, tag, previous_tag):
+ product = "firefox"
+ ff_48_tags = [
+ u"FIREFOX_BETA_48_END",
+ u"FIREFOX_RELEASE_48_END",
+ u"FIREFOX_48_0_2_RELEASE",
+ u"FIREFOX_48_0_2_BUILD1",
+ u"FIREFOX_48_0_1_RELEASE",
+ u"FIREFOX_48_0_1_BUILD3",
+ u"FIREFOX_48_0_RELEASE",
+ u"FIREFOX_48_0_BUILD2",
+ u"FIREFOX_RELEASE_48_BASE",
+ u"FIREFOX_48_0b10_RELEASE",
+ u"FIREFOX_48_0b10_BUILD1",
+ u"FIREFOX_48_0b9_RELEASE",
+ u"FIREFOX_48_0b9_BUILD1",
+ u"FIREFOX_48_0b7_RELEASE",
+ u"FIREFOX_48_0b7_BUILD1",
+ u"FIREFOX_48_0b6_RELEASE",
+ u"FIREFOX_48_0b6_BUILD1",
+ u"FIREFOX_48_0b5_RELEASE",
+ u"FIREFOX_48_0b5_BUILD1",
+ u"FIREFOX_48_0b4_RELEASE",
+ u"FIREFOX_48_0b4_BUILD1",
+ u"FIREFOX_48_0b3_RELEASE",
+ u"FIREFOX_48_0b3_BUILD1",
+ u"FIREFOX_48_0b2_RELEASE",
+ u"FIREFOX_48_0b2_BUILD2",
+ u"FIREFOX_48_0b1_RELEASE",
+ u"FIREFOX_48_0b1_BUILD2",
+ u"FIREFOX_AURORA_48_END",
+ u"FIREFOX_BETA_48_BASE",
+ u"FIREFOX_AURORA_48_BASE",
+ ]
+
+ mock_hg_json = {"tags": [{"tag": ff_48_tag} for ff_48_tag in ff_48_tags]}
+
+ assert get_previous_tag_version(product, version, tag, mock_hg_json) == previous_tag
+
+
+def test_get_bugs_in_changeset():
+ with DATA_PATH.joinpath("buglist_changesets.json").open("r") as fp:
+ changeset_data = json.load(fp)
+ bugs, backouts = get_bugs_in_changeset(changeset_data)
+
+ assert bugs == {
+ u"1356563",
+ u"1348409",
+ u"1341190",
+ u"1360626",
+ u"1332731",
+ u"1328762",
+ u"1355870",
+ u"1358089",
+ u"1354911",
+ u"1354038",
+ }
+ assert backouts == {u"1337861", u"1320072"}
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozrelease/test/test_update_verify.py b/python/mozrelease/test/test_update_verify.py
new file mode 100644
index 0000000000..6f1cb197f7
--- /dev/null
+++ b/python/mozrelease/test/test_update_verify.py
@@ -0,0 +1,425 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import unittest
+from pathlib import Path
+from tempfile import mkstemp
+
+import mozunit
+
+from mozrelease.update_verify import UpdateVerifyConfig, UpdateVerifyError
+
+DATA_PATH = Path(__file__).with_name("data")
+
+
+class TestUpdateVerifyConfig(unittest.TestCase):
+ config = str(DATA_PATH.joinpath("sample-update-verify.cfg"))
+
+ def setUp(self):
+ self.uvc = UpdateVerifyConfig()
+ fd, self.tmpfilename = mkstemp()
+ self.tmpfile = os.fdopen(fd, "wb")
+
+ def tearDown(self):
+ self.tmpfile.close()
+ os.unlink(self.tmpfilename)
+
+ def testEq(self):
+ self.uvc.product = "foo"
+ self.uvc.channel = "betatest"
+ self.uvc.aus_server = "aus"
+ self.uvc.ftp_server_from = "ftp"
+ self.uvc.ftp_server_to = "ftp"
+ self.uvc.to = "/firefox/4.0rc2.tar.bz2"
+ self.uvc.mar_channel_IDs = "baz"
+ self.uvc.to_build_id = "999"
+ self.uvc.to_display_version = "99.0 Zeta 9"
+ self.uvc.to_app_version = "99.0"
+ uvc2 = UpdateVerifyConfig()
+ uvc2.product = "foo"
+ uvc2.channel = "betatest"
+ uvc2.aus_server = "aus"
+ uvc2.ftp_server_form = "ftp"
+ uvc2.ftp_server_to = "ftp"
+ uvc2.to = "/firefox/4.0rc2.tar.bz2"
+ uvc2.mar_channel_IDs = "baz"
+ uvc2.to_build_id = "999"
+ uvc2.to_display_version = "99.0 Zeta 9"
+ uvc2.to_app_version = "99.0"
+ self.assertEqual(self.uvc, uvc2)
+
+ def testNe(self):
+ self.uvc.product = "foo"
+ uvc2 = UpdateVerifyConfig()
+ # assertNotEqual doesn't test the __ne__ function, so we do this
+ self.assertTrue(self.uvc != uvc2)
+
+ def testAddRelease(self):
+ releases = [
+ {
+ "release": "4.0",
+ "platform": "bar",
+ "build_id": 555,
+ "locales": ["af", "de"],
+ "patch_types": ["partial", "complete"],
+ "from": "/pub/firefox/foo.bz2",
+ "ftp_server_from": "from",
+ "ftp_server_to": "to",
+ "mar_channel_IDs": "firefox-mozilla-booyah",
+ "updater_package": None,
+ }
+ ]
+ self.uvc.addRelease(
+ "4.0",
+ build_id=555,
+ locales=["af", "de"],
+ patch_types=["partial", "complete"],
+ from_path="/pub/firefox/foo.bz2",
+ ftp_server_from="from",
+ ftp_server_to="to",
+ mar_channel_IDs="firefox-mozilla-booyah",
+ platform="bar",
+ )
+ self.assertEqual(self.uvc.releases, releases)
+
+ def testAddReleasesWithDifferentPlatforms(self):
+ releases = [
+ {
+ "release": "4.0",
+ "platform": "WINNT_x86-msvc",
+ "build_id": 555,
+ "locales": ["af", "de"],
+ "patch_types": ["partial", "complete"],
+ "from": "/pub/firefox/foo.bz2",
+ "ftp_server_from": "from",
+ "ftp_server_to": "to",
+ "mar_channel_IDs": "firefox-mozilla-booyah",
+ "updater_package": None,
+ },
+ {
+ "release": "5.0",
+ "platform": "WINNT_x86-msvc-x86",
+ "build_id": 666,
+ "locales": ["af", "de"],
+ "patch_types": ["partial", "complete"],
+ "from": "/pub/firefox/foo2.bz2",
+ "ftp_server_from": "from",
+ "ftp_server_to": "to",
+ "mar_channel_IDs": "firefox-mozilla-booyah",
+ "updater_package": None,
+ },
+ ]
+ self.uvc.addRelease(
+ "4.0",
+ build_id=555,
+ locales=["af", "de"],
+ patch_types=["partial", "complete"],
+ from_path="/pub/firefox/foo.bz2",
+ ftp_server_from="from",
+ ftp_server_to="to",
+ mar_channel_IDs="firefox-mozilla-booyah",
+ platform="WINNT_x86-msvc",
+ )
+ self.uvc.addRelease(
+ "5.0",
+ build_id=666,
+ locales=["af", "de"],
+ patch_types=["partial", "complete"],
+ from_path="/pub/firefox/foo2.bz2",
+ ftp_server_from="from",
+ ftp_server_to="to",
+ mar_channel_IDs="firefox-mozilla-booyah",
+ platform="WINNT_x86-msvc-x86",
+ )
+ self.assertEqual(self.uvc.releases, releases)
+
+ def testRead(self):
+ ftp_server_from = "stage.mozilla.org/firefox"
+ ftp_server_to = "stage.mozilla.org/firefox"
+ uvc2 = UpdateVerifyConfig()
+ uvc2.product = "Firefox"
+ uvc2.channel = "betatest"
+ uvc2.aus_server = "https://aus4.mozilla.org"
+ uvc2.to = "/firefox/4.0rc2.tar.bz2"
+ uvc2.to_build_id = "999"
+ uvc2.to_display_version = "99.0 Zeta 9"
+ uvc2.to_app_version = "99.0"
+ uvc2.addRelease(
+ "4.0",
+ build_id="888",
+ platform="Linux_x86-gcc3",
+ locales=["af", "de", "en-US", "ja", "zh-TW"],
+ patch_types=["partial", "complete"],
+ from_path="/firefox/4.0rc1.tar.bz2",
+ ftp_server_from=ftp_server_from,
+ ftp_server_to=ftp_server_to,
+ mar_channel_IDs="firefox-mozilla-beta",
+ )
+ uvc2.addRelease(
+ "4.0b12",
+ build_id="777",
+ platform="Linux_x86-gcc3",
+ locales=["af", "en-US"],
+ from_path="/firefox/4.0b12.tar.bz2",
+ ftp_server_from=ftp_server_from,
+ ftp_server_to=ftp_server_to,
+ )
+ uvc2.addRelease(
+ "4.0b12",
+ build_id="777",
+ platform="Linux_x86-gcc3",
+ locales=["de", "ja", "zh-TW"],
+ ftp_server_from=ftp_server_from,
+ ftp_server_to=ftp_server_to,
+ )
+ uvc2.addRelease(
+ "3.7a1",
+ build_id="666",
+ locales=["en-US"],
+ ftp_server_from=ftp_server_from,
+ ftp_server_to=ftp_server_to,
+ platform="Linux_x86-gcc3",
+ )
+
+ self.uvc.read(self.config)
+ self.assertEqual(self.uvc, uvc2)
+
+ def testWrite(self):
+ ftp_server_from = "stage.mozilla.org/firefox"
+ ftp_server_to = "stage.mozilla.org/firefox"
+ self.uvc.product = "Firefox"
+ self.uvc.channel = "betatest"
+ self.uvc.aus_server = "https://aus4.mozilla.org"
+ self.uvc.to = "/firefox/4.0rc2.tar.bz2"
+ self.uvc.to_build_id = "999"
+ self.uvc.to_display_version = "99.0 Zeta 9"
+ self.uvc.to_app_version = "99.0"
+ self.uvc.addRelease(
+ "4.0",
+ build_id="888",
+ platform="Linux_x86-gcc3",
+ locales=("af", "de", "en-US", "ja", "zh-TW"),
+ patch_types=("partial", "complete"),
+ from_path="/firefox/4.0rc1.tar.bz2",
+ ftp_server_from=ftp_server_from,
+ ftp_server_to=ftp_server_to,
+ mar_channel_IDs="firefox-mozilla-beta",
+ )
+ self.uvc.addRelease(
+ "4.0b12",
+ build_id="777",
+ platform="Linux_x86-gcc3",
+ locales=["af", "en-US"],
+ from_path="/firefox/4.0b12.tar.bz2",
+ ftp_server_from=ftp_server_from,
+ ftp_server_to=ftp_server_to,
+ )
+ self.uvc.addRelease(
+ "4.0b12",
+ build_id="777",
+ platform="Linux_x86-gcc3",
+ locales=("de", "ja", "zh-TW"),
+ ftp_server_from=ftp_server_from,
+ ftp_server_to=ftp_server_to,
+ )
+ self.uvc.addRelease(
+ "3.7a1",
+ build_id="666",
+ locales=("en-US",),
+ ftp_server_from=ftp_server_from,
+ ftp_server_to=ftp_server_to,
+ platform="Linux_x86-gcc3",
+ )
+
+ self.uvc.write(self.tmpfile)
+ self.tmpfile.close()
+ self.assertEqual(open(self.config).read(), open(self.tmpfilename).read())
+
+ def testReadInvalidKey(self):
+ invalidLine = 'foo="bar"'
+ self.assertRaises(UpdateVerifyError, self.uvc._parseLine, invalidLine)
+
+ def testReadDuplicateKey(self):
+ invalidLine = 'release="bar" release="blah"'
+ self.assertRaises(UpdateVerifyError, self.uvc._parseLine, invalidLine)
+
+ def testParseLineBad(self):
+ invalidLine = "abh nthntuehonhuh nhhueont hntueoh nthouo"
+ self.assertRaises(UpdateVerifyError, self.uvc._parseLine, invalidLine)
+
+ def testGetChunk(self):
+ ftp_server_from = "stage.mozilla.org/firefox"
+ ftp_server_to = "stage.mozilla.org/firefox"
+ self.uvc.read(self.config)
+ uvc2 = UpdateVerifyConfig()
+ uvc2.product = "Firefox"
+ uvc2.channel = "betatest"
+ uvc2.aus_server = "https://aus4.mozilla.org"
+ uvc2.to = "/firefox/4.0rc2.tar.bz2"
+ uvc2.to_build_id = "999"
+ uvc2.to_display_version = "99.0 Zeta 9"
+ uvc2.to_app_version = "99.0"
+ uvc2.addRelease(
+ "4.0",
+ build_id="888",
+ platform="Linux_x86-gcc3",
+ locales=["af", "de", "en-US"],
+ patch_types=["partial", "complete"],
+ from_path="/firefox/4.0rc1.tar.bz2",
+ ftp_server_from=ftp_server_from,
+ ftp_server_to=ftp_server_to,
+ mar_channel_IDs="firefox-mozilla-beta",
+ )
+ uvc2.addRelease(
+ "4.0b12",
+ build_id="777",
+ platform="Linux_x86-gcc3",
+ locales=["de", "ja"],
+ patch_types=["complete"],
+ ftp_server_from=ftp_server_from,
+ ftp_server_to=ftp_server_to,
+ from_path=None,
+ )
+ chunkedConfig = self.uvc.getChunk(chunks=3, thisChunk=1)
+ self.assertEqual(chunkedConfig, uvc2)
+
+ def testGetChunkWithPathWithSpaces(self):
+ self.uvc.product = "Firefox"
+ self.uvc.channel = "betatest"
+ self.uvc.aus_server = "https://aus4.mozilla.org"
+ self.uvc.ftp_server_from = "stage.mozilla.org/firefox"
+ self.uvc.ftp_server_to = "stage.mozilla.org/firefox"
+ self.uvc.to = "/firefox/Firefox 4.0 Beta 2.exe"
+ self.uvc.to_build_id = "999"
+ self.uvc.to_display_version = "99.0 Zeta 9"
+ self.uvc.to_app_version = "99.0"
+ self.uvc.addRelease(
+ "4.0b1",
+ build_id="222",
+ platform="Linux_x86-gcc3",
+ locales=["en-US", "ja", "zh-TW"],
+ patch_types=["complete"],
+ from_path="/firefox/Firefox 4.0 Beta 1.exe",
+ )
+ uvc2 = UpdateVerifyConfig()
+ uvc2.product = "Firefox"
+ uvc2.channel = "betatest"
+ uvc2.aus_server = "https://aus4.mozilla.org"
+ uvc2.ftp_server_from = "stage.mozilla.org/firefox"
+ uvc2.ftp_server_to = "stage.mozilla.org/firefox"
+ uvc2.to = "/firefox/Firefox 4.0 Beta 2.exe"
+ uvc2.to_build_id = "999"
+ uvc2.to_display_version = "99.0 Zeta 9"
+ uvc2.to_app_version = "99.0"
+ uvc2.addRelease(
+ "4.0b1",
+ build_id="222",
+ platform="Linux_x86-gcc3",
+ locales=["en-US", "ja"],
+ patch_types=["complete"],
+ from_path="/firefox/Firefox 4.0 Beta 1.exe",
+ )
+ chunkedConfig = self.uvc.getChunk(chunks=2, thisChunk=1)
+ self.assertEqual(chunkedConfig, uvc2)
+
+ def testAddLocaleToRelease(self):
+ from_path = "/firefox/4.0rc1.tar.bz2"
+ self.uvc.read(self.config)
+ self.uvc.addLocaleToRelease("888", "he", from_path)
+ self.assertEqual(
+ self.uvc.getRelease("888", from_path)["locales"],
+ ["af", "de", "en-US", "he", "ja", "zh-TW"],
+ )
+
+ def testAddLocaleToReleaseMultipleBuildIDs(self):
+ from_path = None
+ self.uvc.read(self.config)
+ self.uvc.addLocaleToRelease("777", "he", from_path)
+ self.assertEqual(
+ self.uvc.getRelease("777", from_path)["locales"],
+ ["de", "he", "ja", "zh-TW"],
+ )
+
+ def testAddLocaleToNonexistentRelease(self):
+ self.uvc.read(self.config)
+ self.assertRaises(UpdateVerifyError, self.uvc.addLocaleToRelease, "123", "he")
+
+ def testGetReleaseNonexistenceRelease(self):
+ self.uvc.read(self.config)
+ self.assertEqual(self.uvc.getRelease("123", None), {})
+
+ def testGetFullReleaseTests(self):
+ ftp_server_from = "stage.mozilla.org/firefox"
+ ftp_server_to = "stage.mozilla.org/firefox"
+ self.uvc.read(self.config)
+ uvc2 = UpdateVerifyConfig()
+ uvc2.product = "Firefox"
+ uvc2.channel = "betatest"
+ uvc2.aus_server = "https://aus4.mozilla.org"
+ uvc2.to = "/firefox/4.0rc2.tar.bz2"
+ uvc2.to_build_id = "999"
+ uvc2.to_display_version = "99.0 Zeta 9"
+ uvc2.to_app_version = "99.0"
+ uvc2.addRelease(
+ "4.0",
+ build_id="888",
+ platform="Linux_x86-gcc3",
+ locales=["af", "de", "en-US", "ja", "zh-TW"],
+ patch_types=["partial", "complete"],
+ from_path="/firefox/4.0rc1.tar.bz2",
+ ftp_server_from=ftp_server_from,
+ ftp_server_to=ftp_server_to,
+ mar_channel_IDs="firefox-mozilla-beta",
+ )
+ uvc2.addRelease(
+ "4.0b12",
+ build_id="777",
+ platform="Linux_x86-gcc3",
+ locales=["af", "en-US"],
+ patch_types=["complete"],
+ from_path="/firefox/4.0b12.tar.bz2",
+ ftp_server_from=ftp_server_from,
+ ftp_server_to=ftp_server_to,
+ )
+ self.assertEqual(self.uvc.getFullReleaseTests(), uvc2.releases)
+
+ def testGetQuickReleaseTests(self):
+ ftp_server_from = "stage.mozilla.org/firefox"
+ ftp_server_to = "stage.mozilla.org/firefox"
+ self.uvc.read(self.config)
+ uvc2 = UpdateVerifyConfig()
+ uvc2.product = "Firefox"
+ uvc2.channel = "betatest"
+ uvc2.aus_server = "https://aus4.mozilla.org"
+ uvc2.to = "/firefox/4.0rc2.tar.bz2"
+ uvc2.to_build_id = "999"
+ uvc2.to_display_version = "99.0 Zeta 9"
+ uvc2.to_app_version = "99.0"
+ uvc2.addRelease(
+ "4.0b12",
+ build_id="777",
+ platform="Linux_x86-gcc3",
+ locales=["de", "ja", "zh-TW"],
+ patch_types=["complete"],
+ from_path=None,
+ ftp_server_from=ftp_server_from,
+ ftp_server_to=ftp_server_to,
+ )
+ uvc2.addRelease(
+ "3.7a1",
+ build_id="666",
+ platform="Linux_x86-gcc3",
+ locales=["en-US"],
+ patch_types=["complete"],
+ from_path=None,
+ ftp_server_from=ftp_server_from,
+ ftp_server_to=ftp_server_to,
+ )
+ self.assertEqual(self.uvc.getQuickReleaseTests(), uvc2.releases)
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozrelease/test/test_versions.py b/python/mozrelease/test/test_versions.py
new file mode 100644
index 0000000000..f3bca91f1f
--- /dev/null
+++ b/python/mozrelease/test/test_versions.py
@@ -0,0 +1,101 @@
+import mozunit
+import pytest
+
+from mozrelease.versions import MozillaVersion
+
+ALL_VERSIONS = [ # Keep this sorted
+ "3.0",
+ "3.0.1",
+ "3.0.2",
+ "3.0.3",
+ "3.0.4",
+ "3.0.5",
+ "3.0.6",
+ "3.0.7",
+ "3.0.8",
+ "3.0.9",
+ "3.0.10",
+ "3.0.11",
+ "3.0.12",
+ "3.0.13",
+ "3.0.14",
+ "3.0.15",
+ "3.0.16",
+ "3.0.17",
+ "3.0.18",
+ "3.0.19",
+ "3.1b1",
+ "3.1b2",
+ "3.1b3",
+ "3.5b4",
+ "3.5b99",
+ "3.5rc1",
+ "3.5rc2",
+ "3.5rc3",
+ "3.5",
+ "3.5.1",
+ "3.5.2",
+ "3.5.3",
+ "3.5.4",
+ "3.5.5",
+ "3.5.6",
+ "3.5.7",
+ "3.5.8",
+ "3.5.9",
+ "3.5.10",
+ # ... Start skipping around...
+ "4.0b9",
+ "10.0.2esr",
+ "10.0.3esr",
+ "32.0",
+ "49.0a1",
+ "49.0a2",
+ "59.0",
+ "60.0",
+ "60.0esr",
+ "60.0.1esr",
+ "60.1",
+ "60.1esr",
+ "61.0",
+]
+
+
+@pytest.fixture(
+ scope="function",
+ params=range(len(ALL_VERSIONS) - 1),
+ ids=lambda x: "{}, {}".format(ALL_VERSIONS[x], ALL_VERSIONS[x + 1]),
+)
+def comparable_versions(request):
+ index = request.param
+ return ALL_VERSIONS[index], ALL_VERSIONS[index + 1]
+
+
+@pytest.mark.parametrize("version", ALL_VERSIONS)
+def test_versions_parseable(version):
+ """Test that we can parse previously shipped versions.
+
+ We only test 3.0 and up, since we never generate updates against
+ versions that old."""
+ assert MozillaVersion(version) is not None
+
+
+def test_versions_compare_less(comparable_versions):
+ """Test that versions properly compare in order."""
+ smaller_version, larger_version = comparable_versions
+ assert MozillaVersion(smaller_version) < MozillaVersion(larger_version)
+
+
+def test_versions_compare_greater(comparable_versions):
+ """Test that versions properly compare in order."""
+ smaller_version, larger_version = comparable_versions
+ assert MozillaVersion(larger_version) > MozillaVersion(smaller_version)
+
+
+@pytest.mark.parametrize("version", ALL_VERSIONS)
+def test_versions_compare_equal(version):
+ """Test that versions properly compare as equal through multiple passes."""
+ assert MozillaVersion(version) == MozillaVersion(version)
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozterm/.ruff.toml b/python/mozterm/.ruff.toml
new file mode 100644
index 0000000000..b3d3eaace9
--- /dev/null
+++ b/python/mozterm/.ruff.toml
@@ -0,0 +1,4 @@
+extend = "../../pyproject.toml"
+
+[isort]
+known-first-party = ["mozterm"]
diff --git a/python/mozterm/mozterm/__init__.py b/python/mozterm/mozterm/__init__.py
new file mode 100644
index 0000000000..ff15e588ff
--- /dev/null
+++ b/python/mozterm/mozterm/__init__.py
@@ -0,0 +1,4 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from .terminal import NullTerminal, Terminal # noqa
diff --git a/python/mozterm/mozterm/terminal.py b/python/mozterm/mozterm/terminal.py
new file mode 100644
index 0000000000..f82daa67fd
--- /dev/null
+++ b/python/mozterm/mozterm/terminal.py
@@ -0,0 +1,50 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+
+import six
+
+
class NullTerminal(object):
    """Replacement for `blessed.Terminal()` that does no formatting.

    Every attribute lookup (e.g. ``term.red``) yields an empty, callable
    string, so styling calls collapse to the plain text they were given.

    On Python 3 ``six.text_type`` is just ``str``, so the ``six``
    indirection the original used is unnecessary here.
    """

    # Mirror the size/color metadata blessed.Terminal exposes.
    number_of_colors = 0
    width = 0
    height = 0

    def __init__(self, stream=None, **kwargs):
        self.stream = stream or sys.__stdout__
        try:
            self.is_a_tty = os.isatty(self.stream.fileno())
        except Exception:
            # Not every stream has a usable file descriptor (e.g. StringIO).
            self.is_a_tty = False

    class NullCallableString(str):
        """A dummy callable string stolen from blessings."""

        def __new__(cls):
            new = str.__new__(cls, "")
            return new

        def __call__(self, *args):
            # A lone non-int argument is text to pass through unchanged;
            # anything else (e.g. term.color(9)) produces no output.
            if len(args) != 1 or isinstance(args[0], int):
                return ""
            return args[0]

    def __getattr__(self, attr):
        return self.NullCallableString()
+
+
def Terminal(raises=False, disable_styling=False, **kwargs):
    """Return a `blessed.Terminal`, or a `NullTerminal` fallback.

    If `blessed` cannot be imported, a NullTerminal is returned instead,
    unless `raises` is True, in which case the import error propagates.
    `disable_styling=True` forces the NullTerminal.
    """
    if disable_styling:
        return NullTerminal(**kwargs)

    blessed = None
    try:
        import blessed
    except Exception:
        if raises:
            raise
    if blessed is None:
        return NullTerminal(**kwargs)
    return blessed.Terminal(**kwargs)
diff --git a/python/mozterm/mozterm/widgets.py b/python/mozterm/mozterm/widgets.py
new file mode 100644
index 0000000000..2cf5bf250c
--- /dev/null
+++ b/python/mozterm/mozterm/widgets.py
@@ -0,0 +1,67 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from .terminal import Terminal
+
+DEFAULT = "\x1b(B\x1b[m"
+
+
class BaseWidget(object):
    """Base class for terminal widgets: holds a terminal and its stream."""

    def __init__(self, terminal=None):
        # Fall back to the module's Terminal() factory (blessed if available).
        self.term = terminal or Terminal()
        self.stream = self.term.stream
+
+
class Footer(BaseWidget):
    """Handles display of a footer in a terminal."""

    def clear(self):
        """Removes the footer from the current terminal."""
        self.stream.write(self.term.move_x(0))
        self.stream.write(self.term.clear_eol())

    def write(self, parts):
        """Write some output in the footer, accounting for terminal width.

        parts is a list of 2-tuples of (encoding_function, input).
        None means no encoding."""

        # We don't want to write more characters than the current width of the
        # terminal otherwise wrapping may result in weird behavior. We can't
        # simply truncate the line at terminal width characters because a)
        # non-viewable escape characters count towards the limit and b) we
        # don't want to truncate in the middle of an escape sequence because
        # subsequent output would inherit the escape sequence.
        max_width = self.term.width
        written = 0
        write_pieces = []
        for part in parts:
            try:
                # Unpack (attribute_name, text); a bare string falls through
                # to the ValueError handler below.
                func, part = part
                attribute = getattr(self.term, func)
                # In Blessed, these attributes aren't always callable
                # NOTE(review): a misspelled attribute name would raise
                # AttributeError here, which is not handled.
                if callable(attribute):
                    encoded = attribute(part)
                else:
                    # If it's not callable, assume it's just the raw
                    # ANSI Escape Sequence and prepend it ourselves.
                    # Append DEFAULT to stop text that comes afterwards
                    # from inheriting the formatting we prepended.
                    encoded = attribute + part + DEFAULT
            except ValueError:
                # `part` was not a 2-tuple; treat it as raw text.
                encoded = part

            # Budget against the *unencoded* text so escape sequences do
            # not count towards the width limit.
            len_part = len(part)
            # Number of separator spaces " ".join() will add if this piece
            # is appended (one per existing piece).
            len_spaces = len(write_pieces)
            if written + len_part + len_spaces > max_width:
                # Truncate using the plain text (styling dropped) so we
                # never cut an escape sequence in half.
                write_pieces.append(part[0 : max_width - written - len_spaces])
                written += len_part
                break

            write_pieces.append(encoded)
            written += len_part

        # Save/restore the cursor around the write so the footer does not
        # disturb the caller's cursor position.
        with self.term.location():
            self.term.move(self.term.height - 1, 0)
            self.stream.write(" ".join(write_pieces))
diff --git a/python/mozterm/setup.cfg b/python/mozterm/setup.cfg
new file mode 100644
index 0000000000..3c6e79cf31
--- /dev/null
+++ b/python/mozterm/setup.cfg
@@ -0,0 +1,2 @@
+[bdist_wheel]
+universal=1
diff --git a/python/mozterm/setup.py b/python/mozterm/setup.py
new file mode 100644
index 0000000000..270e87077c
--- /dev/null
+++ b/python/mozterm/setup.py
@@ -0,0 +1,30 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from setuptools import setup
+
+VERSION = "1.0.0"
+DEPS = ["six >= 1.13.0"]
+
+setup(
+ name="mozterm",
+ description="Terminal abstractions built around the blessed module.",
+ license="MPL 2.0",
+ author="Andrew Halberstadt",
+ author_email="ahalberstadt@mozilla.com",
+ url="",
+ packages=["mozterm"],
+ version=VERSION,
+ classifiers=[
+ "Environment :: Console",
+ "Development Status :: 3 - Alpha",
+ "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
+ "Natural Language :: English",
+ "Programming Language :: Python :: 2",
+ "Programming Language :: Python :: 2.7",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.5",
+ ],
+ install_requires=DEPS,
+)
diff --git a/python/mozterm/test/python.ini b/python/mozterm/test/python.ini
new file mode 100644
index 0000000000..948628929f
--- /dev/null
+++ b/python/mozterm/test/python.ini
@@ -0,0 +1,5 @@
+[DEFAULT]
+subsuite = mozterm
+
+[test_terminal.py]
+[test_widgets.py]
diff --git a/python/mozterm/test/test_terminal.py b/python/mozterm/test/test_terminal.py
new file mode 100644
index 0000000000..a24dd01ba4
--- /dev/null
+++ b/python/mozterm/test/test_terminal.py
@@ -0,0 +1,35 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+
+import mozunit
+import pytest
+
+from mozterm import NullTerminal, Terminal
+
+
def test_terminal():
    blessed = pytest.importorskip("blessed")

    # The default factory should hand back a real blessed terminal...
    assert isinstance(Terminal(), blessed.Terminal)

    # ...while disabling styling swaps in the null implementation.
    assert isinstance(Terminal(disable_styling=True), NullTerminal)
+
+
def test_null_terminal():
    term = NullTerminal()
    # Styling attributes collapse to empty strings that pass text through.
    assert term.red("foo") == "foo"
    assert term.red == ""
    assert term.color(1) == ""
    # Size/color metadata is all zeroed out.
    assert term.number_of_colors == 0
    assert term.width == 0
    assert term.height == 0
    # TTY detection mirrors the real stdout.
    assert term.is_a_tty == os.isatty(sys.stdout.fileno())
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozterm/test/test_widgets.py b/python/mozterm/test/test_widgets.py
new file mode 100644
index 0000000000..d6eb241b94
--- /dev/null
+++ b/python/mozterm/test/test_widgets.py
@@ -0,0 +1,51 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import sys
+from io import StringIO
+
+import mozunit
+import pytest
+
+from mozterm import Terminal
+from mozterm.widgets import Footer
+
+
@pytest.fixture
def terminal():
    """Return a styled blessed Terminal writing into a StringIO buffer.

    Skips the test when blessed is unavailable or the terminfo entry for
    the requested terminal kind cannot be found on this system.
    """
    blessed = pytest.importorskip("blessed")

    kind = "xterm-256color"
    try:
        term = Terminal(stream=StringIO(), force_styling=True, kind=kind)
    except blessed.curses.error:
        pytest.skip("terminal '{}' not found".format(kind))

    return term
+
+
@pytest.mark.skipif(
    not sys.platform.startswith("win"),
    reason="Only do ANSI Escape Sequence comparisons on Windows.",
)
def test_footer(terminal):
    """Check the exact escape sequences Footer.write/clear emit."""
    footer = Footer(terminal=terminal)
    footer.write(
        [
            ("bright_black", "foo"),
            ("green", "bar"),
        ]
    )
    value = terminal.stream.getvalue()
    # Save cursor, grey "foo", reset, space, green "bar", reset, restore.
    expected = "\x1b7\x1b[90mfoo\x1b(B\x1b[m \x1b[32mbar\x1b(B\x1b[m\x1b8"
    assert value == expected

    footer.clear()
    # Only compare the bytes written since the last check.
    value = terminal.stream.getvalue()[len(value) :]
    # Move to column 1, erase to end of line.
    expected = "\x1b[1G\x1b[K"
    assert value == expected
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozversioncontrol/.ruff.toml b/python/mozversioncontrol/.ruff.toml
new file mode 100644
index 0000000000..41f57bb0e6
--- /dev/null
+++ b/python/mozversioncontrol/.ruff.toml
@@ -0,0 +1,4 @@
+extend = "../../pyproject.toml"
+
+[isort]
+known-first-party = ["mozversioncontrol"]
diff --git a/python/mozversioncontrol/mozversioncontrol/__init__.py b/python/mozversioncontrol/mozversioncontrol/__init__.py
new file mode 100644
index 0000000000..36d10c05b5
--- /dev/null
+++ b/python/mozversioncontrol/mozversioncontrol/__init__.py
@@ -0,0 +1,946 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this,
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import abc
+import errno
+import os
+import re
+import shutil
+import subprocess
+from pathlib import Path
+from typing import Optional, Union
+
+from mach.util import to_optional_path
+from mozfile import which
+from mozpack.files import FileListFinder
+
+
+class MissingVCSTool(Exception):
+ """Represents a failure to find a version control tool binary."""
+
+
+class MissingVCSInfo(Exception):
+ """Represents a general failure to resolve a VCS interface."""
+
+
+class MissingConfigureInfo(MissingVCSInfo):
+ """Represents error finding VCS info from configure data."""
+
+
class MissingVCSExtension(MissingVCSInfo):
    """Represents error finding a required VCS extension."""

    def __init__(self, ext):
        self.ext = ext
        message = "Could not detect required extension '{}'".format(ext)
        super(MissingVCSExtension, self).__init__(message)
+
+
+class InvalidRepoPath(Exception):
+ """Represents a failure to find a VCS repo at a specified path."""
+
+
+class MissingUpstreamRepo(Exception):
+ """Represents a failure to automatically detect an upstream repo."""
+
+
+class CannotDeleteFromRootOfRepositoryException(Exception):
+ """Represents that the code attempted to delete all files from the root of
+ the repository, which is not permitted."""
+
+
def get_tool_path(tool: Optional[Union[str, Path]] = None):
    """Obtain the path of `tool`.

    An absolute path to an existing file is returned as-is; otherwise the
    tool name is looked up on PATH. Raises MissingVCSTool if the binary
    cannot be located.

    In the original, this docstring was placed *after* the first statement,
    making it a no-op string expression and leaving the function without a
    ``__doc__``.
    """
    # NOTE(review): despite the Optional annotation, passing tool=None makes
    # Path(None) raise TypeError. Confirm all callers pass a tool name.
    tool = Path(tool)
    if tool.is_absolute() and tool.exists():
        return str(tool)

    path = to_optional_path(which(str(tool)))
    if not path:
        raise MissingVCSTool(
            f"Unable to obtain {tool} path. Try running "
            "|mach bootstrap| to ensure your environment is up to "
            "date."
        )
    return str(path)
+
+
class Repository(object):
    """A class wrapping utility methods around version control repositories.

    This class is abstract and never instantiated. Obtain an instance by
    calling a ``get_repository_*()`` helper function.

    Clients are recommended to use the object as a context manager. But not
    all methods require this.
    """

    # NOTE(review): this Python-2-style declaration has no effect under
    # Python 3, so abstractness is not actually enforced. Kept as-is since
    # making the metaclass effective would change instantiation behavior.
    __metaclass__ = abc.ABCMeta

    def __init__(self, path: Path, tool: Optional[str] = None):
        self.path = str(path.resolve())
        self._tool = Path(get_tool_path(tool)) if tool else None
        self._version = None
        self._valid_diff_filter = ("m", "a", "d")
        self._env = os.environ.copy()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        pass

    def _run(self, *args, **runargs):
        """Run the VCS tool with `args`; return its stdout as a string.

        ``return_codes`` in `runargs` may list non-zero exit codes that
        should yield an empty string instead of raising.
        """
        # Check if we have a tool, either hg or git. If this is a
        # source release we return src, then we don't have a tool to use.
        # This caused jstests to fail before fixing, because it uses a
        # packaged mozjs release source
        if not self._tool:
            return "src"

        return_codes = runargs.get("return_codes", [])
        cmd = (str(self._tool),) + args
        try:
            return subprocess.check_output(
                cmd, cwd=self.path, env=self._env, universal_newlines=True
            )
        except subprocess.CalledProcessError as e:
            if e.returncode in return_codes:
                return ""
            raise

    @property
    def tool_version(self):
        """Return the version of the VCS tool in use as a string."""
        if self._version:
            return self._version
        info = self._run("--version").strip()
        # Raw string; the escaped "+" and ")" stop the capture before
        # build metadata or a closing parenthesis.
        match = re.search(r"version ([^\+\)]+)", info)
        if not match:
            raise Exception("Unable to identify tool version.")

        # Cache in the attribute checked above. The original stored to
        # `self.version`, which defeated the cache entirely.
        self._version = match.group(1)
        return self._version

    @property
    def has_git_cinnabar(self):
        """True if the repository is using git cinnabar."""
        return False

    @abc.abstractproperty
    def name(self):
        """Name of the tool."""

    @abc.abstractproperty
    def head_ref(self):
        """Hash of HEAD revision."""

    @abc.abstractproperty
    def base_ref(self):
        """Hash of revision the current topic branch is based on."""

    @abc.abstractmethod
    def base_ref_as_hg(self):
        """Mercurial hash of revision the current topic branch is based on.

        Return None if the hg hash of the base ref could not be calculated.
        """

    @abc.abstractproperty
    def branch(self):
        """Current branch or bookmark the checkout has active."""

    @abc.abstractmethod
    def get_commit_time(self):
        """Return the Unix time of the HEAD revision."""

    @abc.abstractmethod
    def sparse_checkout_present(self):
        """Whether the working directory is using a sparse checkout.

        A sparse checkout is defined as a working directory that only
        materializes a subset of files in a given revision.

        Returns a bool.
        """

    @abc.abstractmethod
    def get_user_email(self):
        """Return the user's email address.

        If no email is configured, then None is returned.
        """

    @abc.abstractmethod
    def get_changed_files(self, diff_filter, mode="unstaged", rev=None):
        """Return a list of files that are changed in this repository's
        working copy.

        ``diff_filter`` controls which kinds of modifications are returned.
        It is a string which may only contain the following characters:

            A - Include files that were added
            D - Include files that were deleted
            M - Include files that were modified

        By default, all three will be included.

        ``mode`` can be one of 'unstaged', 'staged' or 'all'. Only has an
        effect on git. Defaults to 'unstaged'.

        ``rev`` is a specifier for which changesets to consider for
        changes. The exact meaning depends on the vcs system being used.
        """

    @abc.abstractmethod
    def get_outgoing_files(self, diff_filter, upstream):
        """Return a list of changed files compared to upstream.

        ``diff_filter`` works the same as `get_changed_files`.
        ``upstream`` is a remote ref to compare against. If unspecified,
        this will be determined automatically. If there is no remote ref,
        a MissingUpstreamRepo exception will be raised.
        """

    @abc.abstractmethod
    def add_remove_files(self, *paths: Union[str, Path]):
        """Add and remove files under `paths` in this repository's working copy."""

    @abc.abstractmethod
    def forget_add_remove_files(self, *paths: Union[str, Path]):
        """Undo the effects of a previous add_remove_files call for `paths`."""

    @abc.abstractmethod
    def get_tracked_files_finder(self, path=None):
        """Obtain a mozpack.files.BaseFinder of managed files in the working
        directory.

        The Finder will have its list of all files in the repo cached for its
        entire lifetime, so operations on the Finder will not track with, for
        example, commits to the repo during the Finder's lifetime.
        """

    @abc.abstractmethod
    def get_ignored_files_finder(self):
        """Obtain a mozpack.files.BaseFinder of ignored files in the working
        directory.

        The Finder will have its list of all files in the repo cached for its
        entire lifetime, so operations on the Finder will not track with, for
        example, changes to the repo during the Finder's lifetime.
        """

    @abc.abstractmethod
    def working_directory_clean(self, untracked=False, ignored=False):
        """Determine if the working directory is free of modifications.

        Returns True if the working directory does not have any file
        modifications. False otherwise.

        By default, untracked and ignored files are not considered. If
        ``untracked`` or ``ignored`` are set, they influence the clean check
        to factor these file classes into consideration.
        """

    @abc.abstractmethod
    def clean_directory(self, path: Union[str, Path]):
        """Undo all changes (including removing new untracked files) in the
        given `path`.
        """

    @abc.abstractmethod
    def push_to_try(self, message, allow_log_capture=False):
        """Create a temporary commit, push it to try and clean it up
        afterwards.

        With mercurial, MissingVCSExtension will be raised if the `push-to-try`
        extension is not installed. On git, MissingVCSExtension will be raised
        if git cinnabar is not present.

        If `allow_log_capture` is set to `True`, then the push-to-try will be run using
        Popen instead of check_call so that the logs can be captured elsewhere.
        """

    @abc.abstractmethod
    def update(self, ref):
        """Update the working directory to the specified reference."""

    def commit(self, message, author=None, date=None, paths=None):
        """Create a commit using the provided commit message. The author, date,
        and files/paths to be included may also be optionally provided. The
        message, author and date arguments must be strings, and are passed as-is
        to the commit command. Multiline commit messages are supported. The
        paths argument must be None or an array of strings that represents the
        set of files and folders to include in the commit.
        """
        args = ["commit", "-m", message]
        if author is not None:
            # The author flag is spelled differently per tool.
            if isinstance(self, HgRepository):
                args = args + ["--user", author]
            elif isinstance(self, GitRepository):
                args = args + ["--author", author]
            else:
                raise MissingVCSInfo("Unknown repo type")
        if date is not None:
            args = args + ["--date", date]
        if paths is not None:
            args = args + paths
        self._run(*args)

    def _push_to_try_with_log_capture(self, cmd, subprocess_opts):
        """Push to try but with the ability for the user to capture logs.

        We need to use Popen for this because neither the run method nor
        check_call will allow us to reasonably catch the logs. With check_call,
        hg hangs, and with the run method, the logs are output too slowly
        so you're left wondering if it's working (prime candidate for
        corrupting local repos).
        """
        process = subprocess.Popen(cmd, **subprocess_opts)

        # Print out the lines as they appear so they can be
        # parsed for information
        for line in process.stdout or []:
            print(line)
        if process.stdout:
            process.stdout.close()
        process.wait()

        if process.returncode != 0:
            for line in process.stderr or []:
                print(line)
            # CalledProcessError requires the exit status and the command.
            # The original passed a lone message string, which made the
            # `raise` itself fail with TypeError instead of reporting the
            # push failure.
            raise subprocess.CalledProcessError(process.returncode, cmd)
+
+
class HgRepository(Repository):
    """An implementation of `Repository` for Mercurial repositories."""

    def __init__(self, path: Path, hg="hg"):
        import hglib.client

        super(HgRepository, self).__init__(path, tool=hg)
        # HGPLAIN stabilizes hg's output for machine parsing.
        self._env["HGPLAIN"] = "1"

        # Setting this modifies a global variable and makes all future hglib
        # instances use this binary. Since the tool path was validated, this
        # should be OK. But ideally hglib would offer an API that defines
        # per-instance binaries.
        hglib.HGPATH = str(self._tool)

        # Without connect=False this spawns a persistent process. We want
        # the process lifetime tied to a context manager.
        self._client = hglib.client.hgclient(
            self.path, encoding="UTF-8", configs=None, connect=False
        )

    @property
    def name(self):
        return "hg"

    @property
    def head_ref(self):
        return self._run("log", "-r", ".", "-T", "{node}")

    @property
    def base_ref(self):
        # Newest public ancestor of the working directory parent.
        return self._run("log", "-r", "last(ancestors(.) and public())", "-T", "{node}")

    def base_ref_as_hg(self):
        # Already a Mercurial hash; no translation needed.
        return self.base_ref

    @property
    def branch(self):
        # Only bookmarks are reported; an active bookmark name is stored in
        # .hg/bookmarks.current.
        bookmarks_fn = Path(self.path) / ".hg" / "bookmarks.current"
        if bookmarks_fn.exists():
            with open(bookmarks_fn) as f:
                bookmark = f.read()
                return bookmark or None

        return None

    def __enter__(self):
        if self._client.server is None:
            # The cwd of the spawned process should be the repo root to ensure
            # relative paths are normalized to it.
            old_cwd = Path.cwd()
            try:
                os.chdir(self.path)
                self._client.open()
            finally:
                os.chdir(old_cwd)

        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self._client.close()

    def _run(self, *args, **runargs):
        """Run a command, preferring the persistent hglib command server."""
        if not self._client.server:
            return super(HgRepository, self)._run(*args, **runargs)

        # hglib requires bytes on python 3
        args = [a.encode("utf-8") if not isinstance(a, bytes) else a for a in args]
        return self._client.rawcommand(args).decode("utf-8")

    def get_commit_time(self):
        # Unix time of the newest non-draft ancestor of the working dir.
        newest_public_revision_time = self._run(
            "log",
            "--rev",
            "heads(ancestors(.) and not draft())",
            "--template",
            "{word(0, date|hgdate)}",
            "--limit",
            "1",
        ).strip()

        if not newest_public_revision_time:
            raise RuntimeError(
                "Unable to find a non-draft commit in this hg "
                "repository. If you created this repository from a "
                'bundle, have you done a "hg pull" from hg.mozilla.org '
                "since?"
            )

        return int(newest_public_revision_time)

    def sparse_checkout_present(self):
        # We assume a sparse checkout is enabled if the .hg/sparse file
        # has data. Strictly speaking, we should look for a requirement in
        # .hg/requires. But since the requirement is still experimental
        # as of Mercurial 4.3, it's probably more trouble than its worth
        # to verify it.
        sparse = Path(self.path) / ".hg" / "sparse"

        try:
            st = sparse.stat()
            return st.st_size > 0
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise

        return False

    def get_user_email(self):
        # Output is in the form "First Last <flast@mozilla.com>"
        username = self._run("config", "ui.username", return_codes=[0, 1])
        if not username:
            # No username is set
            return None
        match = re.search(r"<(.*)>", username)
        if not match:
            # "ui.username" doesn't follow the "Full Name <email@domain>" convention
            return None
        return match.group(1)

    def _format_diff_filter(self, diff_filter, for_status=False):
        """Normalize a diff filter string for log templates or `hg status`."""
        df = diff_filter.lower()
        assert all(f in self._valid_diff_filter for f in df)

        # When looking at the changes in the working directory, the hg status
        # command uses 'd' for files that have been deleted with a non-hg
        # command, and 'r' for files that have been `hg rm`ed. Use both.
        return df.replace("d", "dr") if for_status else df

    def _files_template(self, diff_filter):
        """Build an hg log template listing files matching `diff_filter`."""
        template = ""
        df = self._format_diff_filter(diff_filter)
        if "a" in df:
            template += "{file_adds % '{file}\\n'}"
        if "d" in df:
            template += "{file_dels % '{file}\\n'}"
        if "m" in df:
            template += "{file_mods % '{file}\\n'}"
        return template

    def get_changed_files(self, diff_filter="ADM", mode="unstaged", rev=None):
        # `mode` is intentionally unused: hg has no staging area.
        if rev is None:
            # Use --no-status to print just the filename.
            df = self._format_diff_filter(diff_filter, for_status=True)
            return self._run("status", "--no-status", "-{}".format(df)).splitlines()
        else:
            template = self._files_template(diff_filter)
            return self._run("log", "-r", rev, "-T", template).splitlines()

    def get_outgoing_files(self, diff_filter="ADM", upstream=None):
        template = self._files_template(diff_filter)

        if not upstream:
            # Without an upstream, consider every draft ancestor outgoing.
            return self._run(
                "log", "-r", "draft() and ancestors(.)", "--template", template
            ).split()

        # `hg outgoing` exits 1 when there are no outgoing changesets;
        # treat that as an empty result rather than an error.
        return self._run(
            "outgoing",
            "-r",
            ".",
            "--quiet",
            "--template",
            template,
            upstream,
            return_codes=(1,),
        ).split()

    def add_remove_files(self, *paths: Union[str, Path]):
        if not paths:
            return

        paths = [str(path) for path in paths]

        args = ["addremove"] + paths
        # The automv extension (move detection) exists in hg >= 3.9.
        m = re.search(r"\d+\.\d+", self.tool_version)
        simplified_version = float(m.group(0)) if m else 0
        if simplified_version >= 3.9:
            args = ["--config", "extensions.automv="] + args
        self._run(*args)

    def forget_add_remove_files(self, *paths: Union[str, Path]):
        if not paths:
            return

        paths = [str(path) for path in paths]

        self._run("forget", *paths)

    def get_tracked_files_finder(self, path=None):
        # Can return backslashes on Windows. Normalize to forward slashes.
        files = list(
            p.replace("\\", "/") for p in self._run("files", "-0").split("\0") if p
        )
        return FileListFinder(files)

    def get_ignored_files_finder(self):
        # Can return backslashes on Windows. Normalize to forward slashes.
        files = list(
            p.replace("\\", "/").split(" ")[-1]
            for p in self._run("status", "-i").split("\n")
            if p
        )
        return FileListFinder(files)

    def working_directory_clean(self, untracked=False, ignored=False):
        args = ["status", "--modified", "--added", "--removed", "--deleted"]
        if untracked:
            args.append("--unknown")
        if ignored:
            args.append("--ignored")

        # If output is empty, there are no entries of requested status, which
        # means we are clean.
        return not len(self._run(*args).strip())

    def clean_directory(self, path: Union[str, Path]):
        """Revert tracked changes and delete untracked entries under `path`."""
        if Path(self.path).samefile(path):
            raise CannotDeleteFromRootOfRepositoryException()
        self._run("revert", str(path))
        # `st -un` lists untracked (unknown) files, which revert won't touch.
        for single_path in self._run("st", "-un", str(path)).splitlines():
            single_path = Path(single_path)
            if single_path.is_file():
                single_path.unlink()
            else:
                shutil.rmtree(str(single_path))

    def update(self, ref):
        return self._run("update", "--check", ref)

    def push_to_try(self, message, allow_log_capture=False):
        """Push to try via the `push-to-try` extension; see base class."""
        try:
            cmd = (str(self._tool), "push-to-try", "-m", message)
            if allow_log_capture:
                self._push_to_try_with_log_capture(
                    cmd,
                    {
                        "stdout": subprocess.PIPE,
                        "stderr": subprocess.PIPE,
                        "cwd": self.path,
                        "env": self._env,
                        "universal_newlines": True,
                        "bufsize": 1,
                    },
                )
            else:
                subprocess.check_call(
                    cmd,
                    cwd=self.path,
                    env=self._env,
                )
        except subprocess.CalledProcessError:
            # Distinguish "extension missing" from a genuine push failure.
            try:
                self._run("showconfig", "extensions.push-to-try")
            except subprocess.CalledProcessError:
                raise MissingVCSExtension("push-to-try")
            raise
        finally:
            # Drop the temporary commit's working-directory changes.
            self._run("revert", "-a")
+
+
class GitRepository(Repository):
    """An implementation of `Repository` for Git repositories."""

    def __init__(self, path: Path, git="git"):
        super(GitRepository, self).__init__(path, tool=git)

    @property
    def name(self):
        return "git"

    @property
    def head_ref(self):
        return self._run("rev-parse", "HEAD").strip()

    @property
    def base_ref(self):
        # Oldest local-only commit's parent, i.e. where local work forked
        # from the remotes.
        refs = self._run(
            "rev-list", "HEAD", "--topo-order", "--boundary", "--not", "--remotes"
        ).splitlines()
        if refs:
            return refs[-1][1:]  # boundary starts with a prefix `-`
        return self.head_ref

    def base_ref_as_hg(self):
        """Translate base_ref to an hg hash via git-cinnabar; None on failure."""
        base_ref = self.base_ref
        try:
            return self._run("cinnabar", "git2hg", base_ref)
        except subprocess.CalledProcessError:
            return

    @property
    def branch(self):
        return self._run("branch", "--show-current").strip() or None

    @property
    def has_git_cinnabar(self):
        # Probe by running the subcommand; a non-zero exit means absent.
        try:
            self._run("cinnabar", "--version")
        except subprocess.CalledProcessError:
            return False
        return True

    def get_commit_time(self):
        return int(self._run("log", "-1", "--format=%ct").strip())

    def sparse_checkout_present(self):
        # Not yet implemented.
        return False

    def get_user_email(self):
        email = self._run("config", "user.email", return_codes=[0, 1])
        if not email:
            return None
        return email.strip()

    def get_changed_files(self, diff_filter="ADM", mode="unstaged", rev=None):
        assert all(f.lower() in self._valid_diff_filter for f in diff_filter)

        if rev is None:
            cmd = ["diff"]
            if mode == "staged":
                cmd.append("--cached")
            elif mode == "all":
                cmd.append("HEAD")
        else:
            cmd = ["diff-tree", "-r", "--no-commit-id", rev]

        cmd.append("--name-only")
        cmd.append("--diff-filter=" + diff_filter.upper())

        return self._run(*cmd).splitlines()

    def get_outgoing_files(self, diff_filter="ADM", upstream=None):
        assert all(f.lower() in self._valid_diff_filter for f in diff_filter)

        # Compare against the given upstream, or all remotes by default.
        not_condition = upstream if upstream else "--remotes"

        files = self._run(
            "log",
            "--name-only",
            "--diff-filter={}".format(diff_filter.upper()),
            "--oneline",
            "--pretty=format:",
            "HEAD",
            "--not",
            not_condition,
        ).splitlines()
        # The empty pretty format leaves blank lines between commits.
        return [f for f in files if f]

    def add_remove_files(self, *paths: Union[str, Path]):
        if not paths:
            return

        paths = [str(path) for path in paths]

        self._run("add", *paths)

    def forget_add_remove_files(self, *paths: Union[str, Path]):
        if not paths:
            return

        paths = [str(path) for path in paths]

        self._run("reset", *paths)

    def get_tracked_files_finder(self, path=None):
        files = [p for p in self._run("ls-files", "-z").split("\0") if p]
        return FileListFinder(files)

    def get_ignored_files_finder(self):
        files = [
            p
            for p in self._run(
                "ls-files", "-i", "-o", "-z", "--exclude-standard"
            ).split("\0")
            if p
        ]
        return FileListFinder(files)

    def working_directory_clean(self, untracked=False, ignored=False):
        args = ["status", "--porcelain"]

        # Even in --porcelain mode, behavior is affected by the
        # ``status.showUntrackedFiles`` option, which means we need to be
        # explicit about how to treat untracked files.
        if untracked:
            args.append("--untracked-files=all")
        else:
            args.append("--untracked-files=no")

        if ignored:
            args.append("--ignored")

        return not len(self._run(*args).strip())

    def clean_directory(self, path: Union[str, Path]):
        """Discard tracked changes and remove untracked files under `path`."""
        if Path(self.path).samefile(path):
            raise CannotDeleteFromRootOfRepositoryException()
        self._run("checkout", "--", str(path))
        self._run("clean", "-df", str(path))

    def update(self, ref):
        self._run("checkout", ref)

    def push_to_try(self, message, allow_log_capture=False):
        """Push to try via git-cinnabar; see base class for the contract."""
        if not self.has_git_cinnabar:
            raise MissingVCSExtension("cinnabar")

        # Temporary empty commit carrying the try syntax in its message.
        self._run(
            "-c", "commit.gpgSign=false", "commit", "--allow-empty", "-m", message
        )
        try:
            cmd = (
                str(self._tool),
                "push",
                "hg::ssh://hg.mozilla.org/try",
                "+HEAD:refs/heads/branches/default/tip",
            )
            if allow_log_capture:
                self._push_to_try_with_log_capture(
                    cmd,
                    {
                        "stdout": subprocess.PIPE,
                        "stderr": subprocess.STDOUT,
                        "cwd": self.path,
                        "universal_newlines": True,
                        "bufsize": 1,
                    },
                )
            else:
                subprocess.check_call(cmd, cwd=self.path)
        finally:
            # Drop the temporary commit again.
            self._run("reset", "HEAD~")

    def set_config(self, name, value):
        self._run("config", name, value)
+
+
class SrcRepository(Repository):
    """An implementation of `Repository` for plain source checkouts.

    Used when the checkout has no VCS metadata at all (e.g. an unpacked
    source release such as a packaged mozjs tarball). There is no tool to
    drive, so most operations are no-ops returning None. The original
    docstring wrongly said "for Git repositories" (copy-paste).
    """

    def __init__(self, path: Path, src="src"):
        # No VCS tool exists for a source release; Repository._run will
        # return "src" in this configuration.
        super(SrcRepository, self).__init__(path, tool=None)

    @property
    def name(self):
        return "src"

    @property
    def head_ref(self):
        pass

    @property
    def base_ref(self):
        pass

    def base_ref_as_hg(self):
        pass

    @property
    def branch(self):
        pass

    @property
    def has_git_cinnabar(self):
        pass

    def get_commit_time(self):
        pass

    def sparse_checkout_present(self):
        pass

    def get_user_email(self):
        pass

    def get_upstream(self):
        pass

    def get_changed_files(self, diff_filter="ADM", mode="unstaged", rev=None):
        pass

    def get_outgoing_files(self, diff_filter="ADM", upstream=None):
        pass

    def add_remove_files(self, *paths: Union[str, Path]):
        pass

    def forget_add_remove_files(self, *paths: Union[str, Path]):
        pass

    def git_ignore(self, path):
        """Read `path`/.gitignore and return the list of patterns to ignore.

        Comment lines and blank lines are skipped; a leading "/" is stripped
        from each pattern so it can be matched with fnmatch.
        """
        ignore = []
        # Context manager guarantees the handle is closed even if reading
        # fails (the original leaked it on exception).
        with open(path + "/.gitignore", "r") as f:
            for line in f:
                stripped = line.strip()
                if stripped and not line.startswith("#"):
                    ignore.append(stripped.lstrip("/"))
        return ignore

    def get_files(self, path):
        """Return a list of all file paths under the current directory.

        NOTE(review): this walks os.getcwd() rather than `path`; the
        original comment suggests that was deliberate ("move away the .git
        or .hg folder from path to more easily test in a hg/git repo") --
        confirm before changing.
        """
        res = []
        for root, dirs, files in os.walk("."):
            for name in files:
                res.append(os.path.join(root, name))
        return res

    def get_tracked_files_finder(self, path):
        """Get files, similar to 'hg files -0' or 'git ls-files -z'; that is
        why we read the .gitignore file for patterns to ignore.
        Speed could probably be improved.

        Returns None when no files survive the ignore filtering.
        """
        import fnmatch

        files = list(
            p.replace("\\", "/").replace("./", "") for p in self.get_files(path) if p
        )
        files.sort()
        ignored = []
        for pattern in self.git_ignore(path):
            ignored.extend(fnmatch.filter(files, pattern))
        match = sorted(set(files) - set(ignored))
        if len(match) == 0:
            return None
        return FileListFinder(match)

    def working_directory_clean(self, untracked=False, ignored=False):
        pass

    def clean_directory(self, path: Union[str, Path]):
        pass

    def update(self, ref):
        pass

    def push_to_try(self, message, allow_log_capture=False):
        pass

    def set_config(self, name, value):
        pass
+
+
def get_repository_object(
    path: Optional[Union[str, Path]], hg="hg", git="git", src="src"
):
    """Get a repository object for the repository at `path`.
    If `path` is not a known VCS repository, raise an exception.
    """
    # If we provide a path to hg that does not match the on-disk casing (e.g.,
    # because `path` was normcased), then the hg fsmonitor extension will call
    # watchman with that path and watchman will spew errors.
    path = Path(path).resolve()

    if (path / ".hg").is_dir():
        return HgRepository(path, hg=hg)

    if (path / ".git").exists():
        return GitRepository(path, git=git)

    # A source release is identified by its milestone file.
    if (path / "config" / "milestone.txt").exists():
        return SrcRepository(path, src=src)

    raise InvalidRepoPath(f"Unknown VCS, or not a source checkout: {path}")
+
+
def get_repository_from_build_config(config):
    """Obtain a repository from the build configuration.

    Accepts an object that has a ``topsrcdir`` and ``subst`` attribute.
    """
    flavor = config.substs.get("VCS_CHECKOUT_TYPE")

    # If in build mode, only use what configure found. That way we ensure
    # that everything in the build system can be controlled via configure.
    if not flavor:
        raise MissingConfigureInfo(
            "could not find VCS_CHECKOUT_TYPE "
            "in build config; check configure "
            "output and verify it could find a "
            "VCS binary"
        )

    topsrcdir = Path(config.topsrcdir)
    if flavor == "hg":
        return HgRepository(topsrcdir, hg=config.substs["HG"])
    if flavor == "git":
        return GitRepository(topsrcdir, git=config.substs["GIT"])
    if flavor == "src":
        return SrcRepository(topsrcdir, src=config.substs["SRC"])
    raise MissingVCSInfo("unknown VCS_CHECKOUT_TYPE value: %s" % flavor)
+
+
def get_repository_from_env():
    """Obtain a repository object by looking at the environment.

    If inside a build environment (denoted by presence of a ``buildconfig``
    module), VCS info is obtained from it, as found via configure. This allows
    us to respect what was passed into configure. Otherwise, we fall back to
    scanning the filesystem.
    """
    try:
        import buildconfig

        return get_repository_from_build_config(buildconfig)
    except (ImportError, MissingVCSTool):
        pass

    # Walk from the cwd up to the filesystem root looking for a checkout.
    candidates = [Path.cwd()]
    candidates.extend(Path.cwd().parents)

    for candidate in candidates:
        try:
            return get_repository_object(candidate)
        except InvalidRepoPath:
            continue

    raise MissingVCSInfo(f"Could not find Mercurial or Git checkout for {Path.cwd()}")
diff --git a/python/mozversioncontrol/mozversioncontrol/repoupdate.py b/python/mozversioncontrol/mozversioncontrol/repoupdate.py
new file mode 100644
index 0000000000..5336263794
--- /dev/null
+++ b/python/mozversioncontrol/mozversioncontrol/repoupdate.py
@@ -0,0 +1,37 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import subprocess
+from pathlib import Path
+from typing import Union
+
+# The logic here is far from robust. Improvements are welcome.
+
+
+def update_mercurial_repo(
+ hg: str,
+ repo,
+ path: Union[str, Path],
+ revision="default",
+ hostfingerprints=None,
+ global_args=None,
+):
+ """Ensure a HG repository exists at a path and is up to date."""
+ hostfingerprints = hostfingerprints or {}
+
+ path = Path(path)
+
+ args = [hg]
+ if global_args:
+ args.extend(global_args)
+
+ for host, fingerprint in sorted(hostfingerprints.items()):
+ args.extend(["--config", "hostfingerprints.%s=%s" % (host, fingerprint)])
+
+ if path.exists():
+ subprocess.check_call(args + ["pull", repo], cwd=str(path))
+ else:
+ subprocess.check_call(args + ["clone", repo, str(path)])
+
+ subprocess.check_call([hg, "update", "-r", revision], cwd=str(path))
diff --git a/python/mozversioncontrol/setup.py b/python/mozversioncontrol/setup.py
new file mode 100644
index 0000000000..c0e1aa643f
--- /dev/null
+++ b/python/mozversioncontrol/setup.py
@@ -0,0 +1,28 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from setuptools import find_packages, setup
+
+VERSION = "0.1"
+
+setup(
+ author="Mozilla Foundation",
+ author_email="Mozilla Release Engineering",
+ name="mozversioncontrol",
+ description="Mozilla version control functionality",
+ license="MPL 2.0",
+ packages=find_packages(),
+ version=VERSION,
+ classifiers=[
+ "Development Status :: 3 - Alpha",
+ "Topic :: Software Development :: Build Tools",
+ "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
+ "Programming Language :: Python :: 2",
+ "Programming Language :: Python :: 2.7",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.5",
+ "Programming Language :: Python :: Implementation :: CPython",
+ ],
+ keywords="mozilla",
+)
diff --git a/python/mozversioncontrol/test/conftest.py b/python/mozversioncontrol/test/conftest.py
new file mode 100644
index 0000000000..78e5ad7ca8
--- /dev/null
+++ b/python/mozversioncontrol/test/conftest.py
@@ -0,0 +1,84 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import shutil
+import subprocess
+from pathlib import Path
+
+import pytest
+
+SETUP = {
+ "hg": [
+ """
+ echo "foo" > foo
+ echo "bar" > bar
+ hg init
+ hg add *
+ hg commit -m "Initial commit"
+ hg phase --public .
+ """,
+ """
+ echo [paths] > .hg/hgrc
+ echo "default = ../remoterepo" >> .hg/hgrc
+ """,
+ ],
+ "git": [
+ """
+ echo "foo" > foo
+ echo "bar" > bar
+ git init
+ git config user.name "Testing McTesterson"
+ git config user.email "<test@example.org>"
+ git add *
+ git commit -am "Initial commit"
+ """,
+ """
+ git remote add upstream ../remoterepo
+ git fetch upstream
+ git branch -u upstream/master
+ """,
+ ],
+}
+
+
+class RepoTestFixture:
+    def __init__(self, repo_dir: Path, vcs: str, steps: list[str]):
+ self.dir = repo_dir
+ self.vcs = vcs
+
+ # This creates a step iterator. Each time execute_next_step()
+ # is called the next set of instructions will be executed.
+ self.steps = (shell(cmd, self.dir) for cmd in steps)
+
+ def execute_next_step(self):
+ next(self.steps)
+
+
+def shell(cmd, working_dir):
+ for step in cmd.split(os.linesep):
+ subprocess.check_call(step, shell=True, cwd=working_dir)
+
+
+@pytest.fixture(params=["git", "hg"])
+def repo(tmpdir, request):
+ tmpdir = Path(tmpdir)
+ vcs = request.param
+ steps = SETUP[vcs]
+
+ if hasattr(request.module, "STEPS"):
+ steps.extend(request.module.STEPS[vcs])
+
+ repo_dir = (tmpdir / "repo").resolve()
+ (tmpdir / "repo").mkdir()
+
+ repo_test_fixture = RepoTestFixture(repo_dir, vcs, steps)
+
+ repo_test_fixture.execute_next_step()
+
+ shutil.copytree(str(repo_dir), str(tmpdir / "remoterepo"))
+
+ repo_test_fixture.execute_next_step()
+
+ yield repo_test_fixture
diff --git a/python/mozversioncontrol/test/python.ini b/python/mozversioncontrol/test/python.ini
new file mode 100644
index 0000000000..79e52bf937
--- /dev/null
+++ b/python/mozversioncontrol/test/python.ini
@@ -0,0 +1,10 @@
+[DEFAULT]
+subsuite=mozversioncontrol
+
+[test_branch.py]
+[test_commit.py]
+[test_context_manager.py]
+[test_push_to_try.py]
+[test_update.py]
+[test_workdir_outgoing.py]
+[test_working_directory.py]
diff --git a/python/mozversioncontrol/test/test_branch.py b/python/mozversioncontrol/test/test_branch.py
new file mode 100644
index 0000000000..7d211f18e8
--- /dev/null
+++ b/python/mozversioncontrol/test/test_branch.py
@@ -0,0 +1,57 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mozunit
+import pytest
+from looseversion import LooseVersion
+
+from mozversioncontrol import get_repository_object
+
+STEPS = {
+ "hg": [
+ """
+ hg bookmark test
+ """,
+ """
+ echo "bar" > foo
+ hg commit -m "second commit"
+ """,
+ ],
+ "git": [
+ """
+ git checkout -b test
+ """,
+ """
+ echo "bar" > foo
+ git commit -a -m "second commit"
+ """,
+ ],
+}
+
+
+def test_branch(repo):
+ vcs = get_repository_object(repo.dir)
+ if vcs.name == "git" and LooseVersion(vcs.tool_version) < LooseVersion("2.22.0"):
+ pytest.xfail("`git branch --show-current` not implemented yet")
+
+ if vcs.name == "git":
+ assert vcs.branch == "master"
+ else:
+ assert vcs.branch is None
+
+ repo.execute_next_step()
+ assert vcs.branch == "test"
+
+ repo.execute_next_step()
+ assert vcs.branch == "test"
+
+ vcs.update(vcs.head_ref)
+ assert vcs.branch is None
+
+ vcs.update("test")
+ assert vcs.branch == "test"
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozversioncontrol/test/test_commit.py b/python/mozversioncontrol/test/test_commit.py
new file mode 100644
index 0000000000..b795c0ea6e
--- /dev/null
+++ b/python/mozversioncontrol/test/test_commit.py
@@ -0,0 +1,72 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mozunit
+
+from mozversioncontrol import get_repository_object
+
+STEPS = {
+ "hg": [
+ """
+ echo "bar" >> bar
+ echo "baz" > foo
+ """,
+ ],
+ "git": [
+ """
+ echo "bar" >> bar
+ echo "baz" > foo
+ """,
+ ],
+}
+
+
+def test_commit(repo):
+ vcs = get_repository_object(repo.dir)
+ assert vcs.working_directory_clean()
+
+ # Modify both foo and bar
+ repo.execute_next_step()
+ assert not vcs.working_directory_clean()
+
+ # Commit just bar
+ vcs.commit(
+ message="Modify bar\n\nbut not baz",
+ author="Testing McTesterson <test@example.org>",
+ date="2017-07-14 02:40:00 UTC",
+ paths=["bar"],
+ )
+
+ # We only committed bar, so foo is still keeping the working dir dirty
+ assert not vcs.working_directory_clean()
+
+ if repo.vcs == "git":
+ log_cmd = ["log", "-1", "--format=%an,%ae,%aD,%B"]
+ patch_cmd = ["log", "-1", "-p"]
+ else:
+ log_cmd = [
+ "log",
+ "-l",
+ "1",
+ "-T",
+ "{person(author)},{email(author)},{date|rfc822date},{desc}",
+ ]
+ patch_cmd = ["log", "-l", "1", "-p"]
+
+ # Verify commit metadata (we rstrip to normalize trivial git/hg differences)
+ log = vcs._run(*log_cmd).rstrip()
+ assert log == (
+ "Testing McTesterson,test@example.org,Fri, 14 "
+ "Jul 2017 02:40:00 +0000,Modify bar\n\nbut not baz"
+ )
+
+ # Verify only the intended file was added to the commit
+ patch = vcs._run(*patch_cmd)
+ diffs = [line for line in patch.splitlines() if "diff --git" in line]
+ assert len(diffs) == 1
+ assert diffs[0] == "diff --git a/bar b/bar"
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozversioncontrol/test/test_context_manager.py b/python/mozversioncontrol/test/test_context_manager.py
new file mode 100644
index 0000000000..3186a144d9
--- /dev/null
+++ b/python/mozversioncontrol/test/test_context_manager.py
@@ -0,0 +1,28 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mozunit
+
+from mozversioncontrol import get_repository_object
+
+
+def test_context_manager(repo):
+ is_git = repo.vcs == "git"
+ cmd = ["show", "--no-patch"] if is_git else ["tip"]
+
+ vcs = get_repository_object(repo.dir)
+ output_subprocess = vcs._run(*cmd)
+ assert is_git or vcs._client.server is None
+ assert "Initial commit" in output_subprocess
+
+ with vcs:
+ assert is_git or vcs._client.server is not None
+ output_client = vcs._run(*cmd)
+
+ assert is_git or vcs._client.server is None
+ assert output_subprocess == output_client
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozversioncontrol/test/test_push_to_try.py b/python/mozversioncontrol/test/test_push_to_try.py
new file mode 100644
index 0000000000..d0a0b2d993
--- /dev/null
+++ b/python/mozversioncontrol/test/test_push_to_try.py
@@ -0,0 +1,81 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import subprocess
+
+import mozunit
+import pytest
+
+from mozversioncontrol import MissingVCSExtension, get_repository_object
+
+
+def test_push_to_try(repo, monkeypatch):
+ commit_message = "commit message"
+ vcs = get_repository_object(repo.dir)
+
+ captured_commands = []
+
+ def fake_run(*args, **kwargs):
+ captured_commands.append(args[0])
+
+ monkeypatch.setattr(subprocess, "check_output", fake_run)
+ monkeypatch.setattr(subprocess, "check_call", fake_run)
+
+ vcs.push_to_try(commit_message)
+ tool = vcs._tool
+
+ if repo.vcs == "hg":
+ expected = [
+ (str(tool), "push-to-try", "-m", commit_message),
+ (str(tool), "revert", "-a"),
+ ]
+ else:
+ expected = [
+ (str(tool), "cinnabar", "--version"),
+ (
+ str(tool),
+ "-c",
+ "commit.gpgSign=false",
+ "commit",
+ "--allow-empty",
+ "-m",
+ commit_message,
+ ),
+ (
+ str(tool),
+ "push",
+ "hg::ssh://hg.mozilla.org/try",
+ "+HEAD:refs/heads/branches/default/tip",
+ ),
+ (str(tool), "reset", "HEAD~"),
+ ]
+
+ for i, value in enumerate(captured_commands):
+ assert value == expected[i]
+
+ assert len(captured_commands) == len(expected)
+
+
+def test_push_to_try_missing_extensions(repo, monkeypatch):
+ if repo.vcs != "git":
+ return
+
+ vcs = get_repository_object(repo.dir)
+
+ orig = vcs._run
+
+ def cinnabar_raises(*args, **kwargs):
+ # Simulate not having git cinnabar
+ if args[0] == "cinnabar":
+ raise subprocess.CalledProcessError(1, args)
+ return orig(*args, **kwargs)
+
+ monkeypatch.setattr(vcs, "_run", cinnabar_raises)
+
+ with pytest.raises(MissingVCSExtension):
+ vcs.push_to_try("commit message")
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozversioncontrol/test/test_update.py b/python/mozversioncontrol/test/test_update.py
new file mode 100644
index 0000000000..91c7469ee5
--- /dev/null
+++ b/python/mozversioncontrol/test/test_update.py
@@ -0,0 +1,63 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from subprocess import CalledProcessError
+
+import mozunit
+import pytest
+
+from mozversioncontrol import get_repository_object
+
+STEPS = {
+ "hg": [
+ """
+ echo "bar" >> bar
+ echo "baz" > foo
+ hg commit -m "second commit"
+ """,
+ """
+ echo "foobar" > foo
+ """,
+ ],
+ "git": [
+ """
+ echo "bar" >> bar
+ echo "baz" > foo
+ git add *
+ git commit -m "second commit"
+ """,
+ """
+ echo "foobar" > foo
+ """,
+ ],
+}
+
+
+def test_update(repo):
+ vcs = get_repository_object(repo.dir)
+ rev0 = vcs.head_ref
+
+ repo.execute_next_step()
+ rev1 = vcs.head_ref
+ assert rev0 != rev1
+
+ if repo.vcs == "hg":
+ vcs.update(".~1")
+ else:
+ vcs.update("HEAD~1")
+ assert vcs.head_ref == rev0
+
+ vcs.update(rev1)
+ assert vcs.head_ref == rev1
+
+ # Update should fail with dirty working directory.
+ repo.execute_next_step()
+ with pytest.raises(CalledProcessError):
+ vcs.update(rev0)
+
+ assert vcs.head_ref == rev1
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozversioncontrol/test/test_workdir_outgoing.py b/python/mozversioncontrol/test/test_workdir_outgoing.py
new file mode 100644
index 0000000000..7bf2e6ec57
--- /dev/null
+++ b/python/mozversioncontrol/test/test_workdir_outgoing.py
@@ -0,0 +1,108 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+
+import mozunit
+
+from mozversioncontrol import get_repository_object
+
+STEPS = {
+ "hg": [
+ """
+ echo "bar" >> bar
+ echo "baz" > baz
+ hg add baz
+ hg rm foo
+ """,
+ """
+ hg commit -m "Remove foo; modify bar; add baz"
+ """,
+ """
+ echo ooka >> baz
+ echo newborn > baby
+ hg add baby
+ """,
+ """
+ hg commit -m "Modify baz; add baby"
+ """,
+ ],
+ "git": [
+ """
+ echo "bar" >> bar
+ echo "baz" > baz
+ git add baz
+ git rm foo
+ """,
+ """
+ git commit -am "Remove foo; modify bar; add baz"
+ """,
+ """
+ echo ooka >> baz
+ echo newborn > baby
+ git add baz baby
+ """,
+ """
+ git commit -m "Modify baz; add baby"
+ """,
+ ],
+}
+
+
+def assert_files(actual, expected):
+ assert set(map(os.path.basename, actual)) == set(expected)
+
+
+def test_workdir_outgoing(repo):
+ vcs = get_repository_object(repo.dir)
+ assert vcs.path == str(repo.dir)
+
+ remote_path = "../remoterepo" if repo.vcs == "hg" else "upstream/master"
+
+ # Mutate files.
+ repo.execute_next_step()
+
+ assert_files(vcs.get_changed_files("A", "all"), ["baz"])
+ assert_files(vcs.get_changed_files("AM", "all"), ["bar", "baz"])
+ assert_files(vcs.get_changed_files("D", "all"), ["foo"])
+ if repo.vcs == "git":
+ assert_files(vcs.get_changed_files("AM", mode="staged"), ["baz"])
+ elif repo.vcs == "hg":
+ # Mercurial does not use a staging area (and ignores the mode parameter.)
+ assert_files(vcs.get_changed_files("AM", "unstaged"), ["bar", "baz"])
+ assert_files(vcs.get_outgoing_files("AMD"), [])
+ assert_files(vcs.get_outgoing_files("AMD", remote_path), [])
+
+ # Create a commit.
+ repo.execute_next_step()
+
+ assert_files(vcs.get_changed_files("AMD", "all"), [])
+ assert_files(vcs.get_changed_files("AMD", "staged"), [])
+ assert_files(vcs.get_outgoing_files("AMD"), ["bar", "baz", "foo"])
+ assert_files(vcs.get_outgoing_files("AMD", remote_path), ["bar", "baz", "foo"])
+
+ # Mutate again.
+ repo.execute_next_step()
+
+ assert_files(vcs.get_changed_files("A", "all"), ["baby"])
+ assert_files(vcs.get_changed_files("AM", "all"), ["baby", "baz"])
+ assert_files(vcs.get_changed_files("D", "all"), [])
+
+ # Create a second commit.
+ repo.execute_next_step()
+
+ assert_files(vcs.get_outgoing_files("AM"), ["bar", "baz", "baby"])
+ assert_files(vcs.get_outgoing_files("AM", remote_path), ["bar", "baz", "baby"])
+ if repo.vcs == "git":
+ assert_files(vcs.get_changed_files("AM", rev="HEAD~1"), ["bar", "baz"])
+ assert_files(vcs.get_changed_files("AM", rev="HEAD"), ["baby", "baz"])
+ else:
+ assert_files(vcs.get_changed_files("AM", rev=".^"), ["bar", "baz"])
+ assert_files(vcs.get_changed_files("AM", rev="."), ["baby", "baz"])
+ assert_files(vcs.get_changed_files("AM", rev=".^::"), ["bar", "baz", "baby"])
+ assert_files(vcs.get_changed_files("AM", rev="modifies(baz)"), ["baz", "baby"])
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/mozversioncontrol/test/test_working_directory.py b/python/mozversioncontrol/test/test_working_directory.py
new file mode 100644
index 0000000000..00094a0cc4
--- /dev/null
+++ b/python/mozversioncontrol/test/test_working_directory.py
@@ -0,0 +1,46 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mozunit
+
+from mozversioncontrol import get_repository_object
+
+STEPS = {
+ "hg": [
+ """
+ echo "bar" >> bar
+ echo "baz" > baz
+ hg rm foo
+ """,
+ """
+ hg commit -m "Remove foo; modify bar; touch baz (but don't add it)"
+ """,
+ ],
+ "git": [
+ """
+ echo "bar" >> bar
+ echo "baz" > baz
+ git rm foo
+ """,
+ """
+ git commit -am "Remove foo; modify bar; touch baz (but don't add it)"
+ """,
+ ],
+}
+
+
+def test_working_directory_clean_untracked_files(repo):
+ vcs = get_repository_object(repo.dir)
+ assert vcs.working_directory_clean()
+
+ repo.execute_next_step()
+ assert not vcs.working_directory_clean()
+
+ repo.execute_next_step()
+ assert vcs.working_directory_clean()
+ assert not vcs.working_directory_clean(untracked=True)
+
+
+if __name__ == "__main__":
+ mozunit.main()
diff --git a/python/sites/build.txt b/python/sites/build.txt
new file mode 100644
index 0000000000..4e6f6c51d9
--- /dev/null
+++ b/python/sites/build.txt
@@ -0,0 +1 @@
+pth:third_party/python/vsdownload
diff --git a/python/sites/common.txt b/python/sites/common.txt
new file mode 100644
index 0000000000..8b13789179
--- /dev/null
+++ b/python/sites/common.txt
@@ -0,0 +1 @@
+
diff --git a/python/sites/docs.txt b/python/sites/docs.txt
new file mode 100644
index 0000000000..b6b9cd63e8
--- /dev/null
+++ b/python/sites/docs.txt
@@ -0,0 +1,68 @@
+pth:tools/lint/eslint/
+pypi:alabaster==0.7.13
+pypi:Babel==2.12.1
+pypi:backports-abc==0.5
+pypi:boto3==1.26.80
+pypi:botocore==1.29.80
+pypi:colorama==0.4.5
+pypi:commonmark==0.9.1
+pypi:docutils==0.17.1
+pypi:fluent.pygments==1.0
+pypi:fluent.syntax==0.19.0
+pypi:idna==2.10
+pypi:imagesize==1.4.1
+pypi:importlib-resources==5.12.0
+pypi:jmespath==1.0.1
+pypi:jsonschema==4.17.3
+pypi:livereload==2.6.3
+pypi:Markdown==3.4.3
+pypi:MarkupSafe==2.0.1
+pypi:mock==5.0.1
+pypi:mots==0.8.0
+pypi:myst-parser==1.0
+pypi:mdit-py-plugins==0.3.5
+pypi:parsimonious==0.10.0
+pypi:pyasn1==0.4.8
+pypi:pyasn1-modules==0.2.8
+pypi:Pygments==2.14.0
+pypi:pyparsing==2.4.7
+# for autodoc
+pypi:pytest==7.2.1
+pypi:python-dateutil==2.8.2
+pypi:python-frontmatter==1.0.0
+pypi:pytz==2022.7.1
+# for autodoc
+pypi:pywatchman==1.4.1
+pypi:redo==2.0.3
+pypi:s3transfer==0.6.0
+pypi:scandir==1.10.0
+pypi:singledispatch==4.0.0
+pypi:snowballstemmer==2.2.0
+pypi:Sphinx==5.1.1
+pypi:sphinx-copybutton==0.5.1
+pypi:sphinx-design==0.3.0
+pypi:sphinx-js==3.2.1
+pypi:sphinx-markdown-tables==0.0.17
+pypi:sphinx-rtd-theme==1.2.0
+pypi:sphinxcontrib-applehelp==1.0.2
+pypi:sphinxcontrib-devhelp==1.0.2
+pypi:sphinxcontrib-htmlhelp==2.0.0
+pypi:sphinxcontrib-jsmath==1.0.1
+pypi:sphinxcontrib-mermaid==0.8.1
+pypi:sphinxcontrib-qthelp==1.0.3
+pypi:sphinxcontrib-serializinghtml==1.1.5
+pypi:tornado==6.0.4
+vendored:third_party/python/Jinja2
+vendored:third_party/python/certifi
+vendored:third_party/python/chardet
+vendored:third_party/python/importlib_metadata
+vendored:third_party/python/importlib_resources
+vendored:third_party/python/jsonschema
+vendored:third_party/python/packaging
+vendored:third_party/python/pkgutil_resolve_name
+vendored:third_party/python/requests
+vendored:third_party/python/rsa
+vendored:third_party/python/sentry_sdk
+vendored:third_party/python/six
+vendored:third_party/python/urllib3
+vendored:third_party/python/zipp
diff --git a/python/sites/ipython.txt b/python/sites/ipython.txt
new file mode 100644
index 0000000000..a569a360b3
--- /dev/null
+++ b/python/sites/ipython.txt
@@ -0,0 +1 @@
+pypi:ipython==7.16.1
diff --git a/python/sites/lint.txt b/python/sites/lint.txt
new file mode 100644
index 0000000000..139597f9cb
--- /dev/null
+++ b/python/sites/lint.txt
@@ -0,0 +1,2 @@
+
+
diff --git a/python/sites/mach.txt b/python/sites/mach.txt
new file mode 100644
index 0000000000..55cc6fb2ed
--- /dev/null
+++ b/python/sites/mach.txt
@@ -0,0 +1,145 @@
+pth:build
+pth:config
+pth:config/mozunit
+pth:dom/bindings
+pth:dom/bindings/parser
+pth:layout/tools/reftest
+pth:python/l10n
+pth:python/mach
+pth:python/mozboot
+pth:python/mozbuild
+pth:python/mozlint
+pth:python/mozperftest
+pth:python/mozrelease
+pth:python/mozterm
+pth:python/mozversioncontrol
+pth:security/manager/tools
+pth:taskcluster
+pth:testing
+pth:testing/awsy
+pth:testing/condprofile
+pth:testing/firefox-ui/harness
+pth:testing/marionette/client
+pth:testing/marionette/harness
+pth:testing/mozbase/manifestparser
+pth:testing/mozbase/mozcrash
+pth:testing/mozbase/mozdebug
+pth:testing/mozbase/mozdevice
+pth:testing/mozbase/mozfile
+pth:testing/mozbase/mozhttpd
+pth:testing/mozbase/mozgeckoprofiler
+pth:testing/mozbase/mozinfo
+pth:testing/mozbase/mozinstall
+pth:testing/mozbase/mozleak
+pth:testing/mozbase/mozlog
+pth:testing/mozbase/moznetwork
+pth:testing/mozbase/mozpower
+pth:testing/mozbase/mozprocess
+pth:testing/mozbase/mozprofile
+pth:testing/mozbase/mozproxy
+pth:testing/mozbase/mozrunner
+pth:testing/mozbase/mozserve
+pth:testing/mozbase/mozsystemmonitor
+pth:testing/mozbase/mozscreenshot
+pth:testing/mozbase/moztest
+pth:testing/mozbase/mozversion
+pth:testing/mozharness
+pth:testing/raptor
+pth:testing/talos
+pth:testing/web-platform
+vendored:testing/web-platform/tests/tools/third_party/h2
+vendored:testing/web-platform/tests/tools/third_party/hpack
+vendored:testing/web-platform/tests/tools/third_party/html5lib
+vendored:testing/web-platform/tests/tools/third_party/hyperframe
+vendored:testing/web-platform/tests/tools/third_party/pywebsocket3
+vendored:testing/web-platform/tests/tools/third_party/webencodings
+vendored:testing/web-platform/tests/tools/wptserve
+vendored:testing/web-platform/tests/tools/wptrunner
+pth:testing/xpcshell
+vendored:third_party/python/aiohttp
+vendored:third_party/python/ansicon
+vendored:third_party/python/appdirs
+vendored:third_party/python/async_timeout
+vendored:third_party/python/attrs
+vendored:third_party/python/blessed
+vendored:third_party/python/cbor2
+vendored:third_party/python/certifi
+vendored:third_party/python/chardet
+vendored:third_party/python/click
+vendored:third_party/python/colorama
+vendored:third_party/python/compare_locales
+vendored:third_party/python/cookies
+vendored:third_party/python/cram
+vendored:third_party/python/diskcache
+vendored:third_party/python/distro
+vendored:third_party/python/dlmanager
+vendored:third_party/python/ecdsa
+vendored:third_party/python/esprima
+vendored:third_party/python/fluent.migrate
+vendored:third_party/python/fluent.syntax
+vendored:third_party/python/giturlparse
+vendored:third_party/python/glean_parser
+vendored:third_party/python/gyp/pylib
+vendored:third_party/python/idna
+vendored:third_party/python/importlib_metadata
+vendored:third_party/python/importlib_resources
+vendored:third_party/python/Jinja2
+vendored:third_party/python/jinxed
+vendored:third_party/python/jsmin
+vendored:third_party/python/json-e
+vendored:third_party/python/jsonschema
+vendored:third_party/python/looseversion
+vendored:third_party/python/MarkupSafe/src
+vendored:third_party/python/mohawk
+vendored:third_party/python/mozilla_repo_urls
+vendored:third_party/python/mozilla_version
+vendored:third_party/python/multidict
+vendored:third_party/python/packaging
+vendored:third_party/python/pathspec
+vendored:third_party/python/pip
+vendored:third_party/python/pip_tools
+vendored:third_party/python/pkgutil_resolve_name
+vendored:third_party/python/ply
+vendored:third_party/python/pyasn1
+vendored:third_party/python/pyasn1_modules
+vendored:third_party/python/pylru
+vendored:third_party/python/pyparsing
+vendored:third_party/python/pyrsistent
+vendored:third_party/python/python-hglib
+vendored:third_party/python/PyYAML/lib3/
+vendored:third_party/python/redo
+vendored:third_party/python/requests
+vendored:third_party/python/requests_unixsocket
+vendored:third_party/python/responses
+vendored:third_party/python/rsa
+vendored:third_party/python/setuptools
+vendored:third_party/python/sentry_sdk
+vendored:third_party/python/six
+vendored:third_party/python/slugid
+vendored:third_party/python/taskcluster
+vendored:third_party/python/taskcluster_taskgraph
+vendored:third_party/python/taskcluster_urls
+vendored:third_party/python/toml
+vendored:third_party/python/tqdm
+vendored:third_party/python/typing_extensions
+vendored:third_party/python/urllib3
+vendored:third_party/python/voluptuous
+vendored:third_party/python/wcwidth
+vendored:third_party/python/wheel
+vendored:third_party/python/yamllint
+vendored:third_party/python/yarl
+vendored:third_party/python/zipp
+pth:toolkit/components/telemetry/tests/marionette/harness
+pth:tools
+pth:tools/moztreedocs
+pth:xpcom/ds/tools
+pth:xpcom/geckoprocesstypes_generator
+pth:xpcom/idl-parser
+# glean-sdk may not be installable if a wheel isn't available
+# and it has to be built from source.
+pypi-optional:glean-sdk==52.7.0:telemetry will not be collected
+# Mach gracefully handles the case where `psutil` is unavailable.
+# We aren't (yet) able to pin packages in automation, so we have to
+# support down to the oldest locally-installed version (5.4.2).
+pypi-optional:psutil>=5.4.2,<=5.9.4:telemetry will be missing some data
+pypi-optional:zstandard>=0.11.1,<=0.22.0:zstd archives will not be possible to extract
diff --git a/python/sites/perftest-side-by-side.txt b/python/sites/perftest-side-by-side.txt
new file mode 100644
index 0000000000..f457c8c667
--- /dev/null
+++ b/python/sites/perftest-side-by-side.txt
@@ -0,0 +1,2 @@
+pypi:pytest==7.0.1
+pypi:coverage==5.1
diff --git a/python/sites/perftest-test.txt b/python/sites/perftest-test.txt
new file mode 100644
index 0000000000..f457c8c667
--- /dev/null
+++ b/python/sites/perftest-test.txt
@@ -0,0 +1,2 @@
+pypi:pytest==7.0.1
+pypi:coverage==5.1
diff --git a/python/sites/python-test.txt b/python/sites/python-test.txt
new file mode 100644
index 0000000000..74889365a2
--- /dev/null
+++ b/python/sites/python-test.txt
@@ -0,0 +1 @@
+pypi:pytest==7.0.1
diff --git a/python/sites/repackage-deb.txt b/python/sites/repackage-deb.txt
new file mode 100644
index 0000000000..2c0367e10f
--- /dev/null
+++ b/python/sites/repackage-deb.txt
@@ -0,0 +1 @@
+pypi:fluent.runtime==0.4.0
diff --git a/python/sites/upload-generated-sources.txt b/python/sites/upload-generated-sources.txt
new file mode 100644
index 0000000000..2d0115476e
--- /dev/null
+++ b/python/sites/upload-generated-sources.txt
@@ -0,0 +1 @@
+pypi:boto3==1.4.4
diff --git a/python/sites/vendor.txt b/python/sites/vendor.txt
new file mode 100644
index 0000000000..cb5b087afc
--- /dev/null
+++ b/python/sites/vendor.txt
@@ -0,0 +1,4 @@
+pypi:poetry==1.4
+# Pin poetry-core so that the same one is used between Python versions.
+# Otherwise, different versions of poetry-core output different "requirements.txt" contents
+pypi:poetry-core==1.5.1 \ No newline at end of file
diff --git a/python/sites/watch.txt b/python/sites/watch.txt
new file mode 100644
index 0000000000..b4f2d7a058
--- /dev/null
+++ b/python/sites/watch.txt
@@ -0,0 +1 @@
+pypi:pywatchman==1.4.1
diff --git a/python/sites/webcompat.txt b/python/sites/webcompat.txt
new file mode 100644
index 0000000000..9aea33dbdc
--- /dev/null
+++ b/python/sites/webcompat.txt
@@ -0,0 +1,5 @@
+pth:testing/webcompat
+pypi:pytest==4.6.6
+pypi:selenium==3.141.0
+vendored:testing/web-platform/tests/tools/webdriver
+vendored:testing/web-platform/tests/tools/third_party/websockets/src
diff --git a/python/sites/wpt.txt b/python/sites/wpt.txt
new file mode 100644
index 0000000000..23784a524f
--- /dev/null
+++ b/python/sites/wpt.txt
@@ -0,0 +1,6 @@
+pypi:tox==3.12.1
+pypi:ujson==4.0.2
+pypi:aioquic==0.9.19
+# Bug 1823701 - tomli is a pytest dependency that isn't pulled into WPT's
+# vendor dir for some reason
+pypi:tomli==2.0.1