From 26a029d407be480d791972afb5975cf62c9360a6 Mon Sep 17 00:00:00 2001
From: Daniel Baumann
Date: Fri, 19 Apr 2024 02:47:55 +0200
Subject: Adding upstream version 124.0.1.

Signed-off-by: Daniel Baumann
---
 testing/mozbase/README.md | 20 + testing/mozbase/docs/Makefile | 153 + testing/mozbase/docs/_static/structured_example.py | 111 + testing/mozbase/docs/conf.py | 280 ++ testing/mozbase/docs/devicemanagement.rst | 11 + testing/mozbase/docs/gettinginfo.rst | 13 + testing/mozbase/docs/index.rst | 44 + testing/mozbase/docs/loggingreporting.rst | 11 + testing/mozbase/docs/make.bat | 190 + testing/mozbase/docs/manifestparser.rst | 648 +++ testing/mozbase/docs/mozcrash.rst | 8 + testing/mozbase/docs/mozdebug.rst | 5 + testing/mozbase/docs/mozdevice.rst | 8 + testing/mozbase/docs/mozfile.rst | 9 + testing/mozbase/docs/mozgeckoprofiler.rst | 21 + testing/mozbase/docs/mozhttpd.rst | 22 + testing/mozbase/docs/mozinfo.rst | 70 + testing/mozbase/docs/mozinstall.rst | 29 + testing/mozbase/docs/mozlog.rst | 520 +++ testing/mozbase/docs/moznetwork.rst | 8 + testing/mozbase/docs/mozpower.rst | 112 + testing/mozbase/docs/mozprocess.rst | 324 ++ testing/mozbase/docs/mozprofile.rst | 94 + testing/mozbase/docs/mozproxy.rst | 46 + testing/mozbase/docs/mozrunner.rst | 183 + testing/mozbase/docs/mozversion.rst | 70 + testing/mozbase/docs/requirements.txt | 1 + testing/mozbase/docs/servingcontent.rst | 11 + testing/mozbase/docs/setuprunning.rst | 20 + .../manifestparser/manifestparser/__init__.py | 8 + .../mozbase/manifestparser/manifestparser/cli.py | 286 ++ .../manifestparser/manifestparser/expression.py | 324 ++ .../manifestparser/manifestparser/filters.py | 557 +++ .../mozbase/manifestparser/manifestparser/ini.py | 208 + .../manifestparser/manifestparser/logger.py | 76 + .../manifestparser/manifestparser.py | 938 +++++ .../mozbase/manifestparser/manifestparser/toml.py | 321 ++ .../mozbase/manifestparser/manifestparser/util.py | 51 + testing/mozbase/manifestparser/setup.py | 38 + .../manifestparser/tests/broken-skip-if.toml | 4 + .../manifestparser/tests/comment-example.toml | 11 + .../manifestparser/tests/default-skipif.toml | 32 + .../manifestparser/tests/default-subsuite.toml | 5 + .../manifestparser/tests/default-suppfiles.toml | 8 + .../manifestparser/tests/edit-manifest-after.toml | 37 + .../manifestparser/tests/edit-manifest-before.toml | 23 + .../manifestparser/tests/filter-example.toml | 20 + testing/mozbase/manifestparser/tests/fleem | 1 + .../manifestparser/tests/include-example.toml | 11 + .../manifestparser/tests/include-invalid.toml | 1 + .../mozbase/manifestparser/tests/include/bar.ini | 4 + .../mozbase/manifestparser/tests/include/bar.toml | 4 + .../manifestparser/tests/include/crash-handling | 1 + .../mozbase/manifestparser/tests/include/flowers | 1 + .../mozbase/manifestparser/tests/include/foo.ini | 5 + .../mozbase/manifestparser/tests/include/foo.toml | 5 + .../manifestparser/tests/just-defaults.toml | 2 + testing/mozbase/manifestparser/tests/manifest.toml | 23 + .../mozbase/manifestparser/tests/missing-path.toml | 2 + .../manifestparser/tests/mozmill-example.toml | 80 + .../tests/mozmill-restart-example.toml | 26 + testing/mozbase/manifestparser/tests/no-tests.toml | 2 + .../tests/parent/include/first/manifest.ini | 3 + .../tests/parent/include/first/manifest.toml | 3 + .../tests/parent/include/manifest.ini | 8 + .../tests/parent/include/manifest.toml | 8 + .../tests/parent/include/second/manifest.ini | 3 + .../tests/parent/include/second/manifest.toml | 3 + 
.../tests/parent/level_1/level_1.ini | 5 + .../tests/parent/level_1/level_1.toml | 5 + .../tests/parent/level_1/level_2/level_2.ini | 3 + .../tests/parent/level_1/level_2/level_2.toml | 3 + .../parent/level_1/level_2/level_3/level_3.ini | 3 + .../parent/level_1/level_2/level_3/level_3.toml | 3 + .../level_1/level_2/level_3/level_3_default.ini | 6 + .../level_1/level_2/level_3/level_3_default.toml | 6 + .../tests/parent/level_1/level_2/level_3/test_3 | 1 + .../tests/parent/level_1/level_2/test_2 | 1 + .../manifestparser/tests/parent/level_1/test_1 | 1 + .../mozbase/manifestparser/tests/parent/root/dummy | 0 .../mozbase/manifestparser/tests/parse-error.toml | 1 + .../mozbase/manifestparser/tests/path-example.toml | 2 + .../manifestparser/tests/relative-path.toml | 5 + testing/mozbase/manifestparser/tests/subsuite.toml | 13 + .../mozbase/manifestparser/tests/test_chunking.py | 308 ++ .../manifestparser/tests/test_convert_directory.py | 277 ++ .../manifestparser/tests/test_convert_symlinks.py | 137 + .../manifestparser/tests/test_default_overrides.py | 138 + .../manifestparser/tests/test_expressionparser.py | 154 + .../mozbase/manifestparser/tests/test_filters.py | 333 ++ .../manifestparser/tests/test_manifestparser.py | 627 +++ .../mozbase/manifestparser/tests/test_read_ini.py | 134 + .../manifestparser/tests/test_testmanifest.py | 125 + testing/mozbase/manifestparser/tests/test_util.py | 104 + .../tests/verifyDirectory/subdir/manifest.ini | 1 + .../tests/verifyDirectory/subdir/manifest.toml | 1 + .../tests/verifyDirectory/subdir/test_sub.js | 1 + .../manifestparser/tests/verifyDirectory/test_1.js | 1 + .../manifestparser/tests/verifyDirectory/test_2.js | 1 + .../manifestparser/tests/verifyDirectory/test_3.js | 1 + .../tests/verifyDirectory/verifyDirectory.ini | 4 + .../tests/verifyDirectory/verifyDirectory.toml | 4 + .../verifyDirectory/verifyDirectory_incomplete.ini | 3 + .../verifyDirectory_incomplete.toml | 3 + .../verifyDirectory_toocomplete.ini | 5 + .../verifyDirectory_toocomplete.toml | 5 + testing/mozbase/moz.build | 70 + testing/mozbase/mozcrash/mozcrash/__init__.py | 9 + testing/mozbase/mozcrash/mozcrash/mozcrash.py | 865 ++++ testing/mozbase/mozcrash/setup.cfg | 2 + testing/mozbase/mozcrash/setup.py | 33 + testing/mozbase/mozcrash/tests/conftest.py | 127 + testing/mozbase/mozcrash/tests/manifest.toml | 12 + testing/mozbase/mozcrash/tests/test_basic.py | 43 + .../mozbase/mozcrash/tests/test_java_exception.py | 51 + testing/mozbase/mozcrash/tests/test_save_path.py | 68 + testing/mozbase/mozcrash/tests/test_stackwalk.py | 42 + .../mozbase/mozcrash/tests/test_symbols_path.py | 97 + testing/mozbase/mozdebug/mozdebug/__init__.py | 30 + testing/mozbase/mozdebug/mozdebug/mozdebug.py | 315 ++ testing/mozbase/mozdebug/setup.cfg | 2 + testing/mozbase/mozdebug/setup.py | 31 + .../mozdebug/tests/fake_debuggers/cgdb/cgdb | 0 .../tests/fake_debuggers/devenv/devenv.exe | 0 .../mozbase/mozdebug/tests/fake_debuggers/gdb/gdb | 0 .../mozdebug/tests/fake_debuggers/lldb/lldb | 0 .../tests/fake_debuggers/wdexpress/wdexpress.exe | 0 testing/mozbase/mozdebug/tests/manifest.toml | 4 + testing/mozbase/mozdebug/tests/test.py | 65 + testing/mozbase/mozdevice/mozdevice/__init__.py | 181 + testing/mozbase/mozdevice/mozdevice/adb.py | 4438 ++++++++++++++++++++ testing/mozbase/mozdevice/mozdevice/adb_android.py | 13 + .../mozdevice/mozdevice/remote_process_monitor.py | 285 ++ .../mozbase/mozdevice/mozdevice/version_codes.py | 70 + testing/mozbase/mozdevice/setup.cfg | 2 + testing/mozbase/mozdevice/setup.py | 34 + 
testing/mozbase/mozdevice/tests/conftest.py | 236 ++ testing/mozbase/mozdevice/tests/manifest.toml | 10 + testing/mozbase/mozdevice/tests/test_chown.py | 67 + .../mozdevice/tests/test_escape_command_line.py | 21 + .../mozdevice/tests/test_is_app_installed.py | 38 + .../mozdevice/tests/test_socket_connection.py | 124 + testing/mozbase/mozfile/mozfile/__init__.py | 6 + testing/mozbase/mozfile/mozfile/mozfile.py | 691 +++ testing/mozbase/mozfile/setup.cfg | 2 + testing/mozbase/mozfile/setup.py | 34 + .../tests/files/missing_file_attributes.zip | Bin 0 -> 442 bytes testing/mozbase/mozfile/tests/files/which/baz | 0 testing/mozbase/mozfile/tests/files/which/baz.exe | 0 .../mozfile/tests/files/which/registered/quux.exe | 0 .../mozbase/mozfile/tests/files/which/unix/baz.exe | 0 .../mozbase/mozfile/tests/files/which/unix/file | 0 testing/mozbase/mozfile/tests/files/which/unix/foo | 0 testing/mozbase/mozfile/tests/files/which/win/bar | 0 .../mozbase/mozfile/tests/files/which/win/baz.exe | 0 testing/mozbase/mozfile/tests/files/which/win/foo | 0 .../mozbase/mozfile/tests/files/which/win/foo.exe | 0 testing/mozbase/mozfile/tests/manifest.toml | 18 + testing/mozbase/mozfile/tests/stubs.py | 56 + testing/mozbase/mozfile/tests/test_copycontents.py | 126 + testing/mozbase/mozfile/tests/test_extract.py | 152 + testing/mozbase/mozfile/tests/test_load.py | 63 + testing/mozbase/mozfile/tests/test_move_remove.py | 253 ++ testing/mozbase/mozfile/tests/test_tempdir.py | 44 + testing/mozbase/mozfile/tests/test_tempfile.py | 105 + testing/mozbase/mozfile/tests/test_tree.py | 30 + testing/mozbase/mozfile/tests/test_url.py | 23 + testing/mozbase/mozfile/tests/test_which.py | 63 + .../mozgeckoprofiler/mozgeckoprofiler/__init__.py | 17 + .../mozgeckoprofiler/dump_syms_mac | Bin 0 -> 424940 bytes .../mozgeckoprofiler/mozgeckoprofiler/profiling.py | 85 + .../mozgeckoprofiler/symFileManager.py | 353 ++ .../mozgeckoprofiler/symbolication.py | 360 ++ .../mozgeckoprofiler/symbolicationRequest.py | 331 ++ .../mozgeckoprofiler/viewgeckoprofile.py | 136 + testing/mozbase/mozgeckoprofiler/setup.py | 32 + .../mozbase/mozgeckoprofiler/tests/manifest.toml | 4 + .../tests/test_view_gecko_profiler.py | 105 + testing/mozbase/mozhttpd/mozhttpd/__init__.py | 47 + testing/mozbase/mozhttpd/mozhttpd/handlers.py | 20 + testing/mozbase/mozhttpd/mozhttpd/mozhttpd.py | 350 ++ testing/mozbase/mozhttpd/setup.py | 34 + testing/mozbase/mozhttpd/tests/api.py | 381 ++ testing/mozbase/mozhttpd/tests/baseurl.py | 33 + testing/mozbase/mozhttpd/tests/basic.py | 50 + testing/mozbase/mozhttpd/tests/filelisting.py | 68 + testing/mozbase/mozhttpd/tests/manifest.toml | 16 + testing/mozbase/mozhttpd/tests/paths.py | 121 + testing/mozbase/mozhttpd/tests/requestlog.py | 62 + testing/mozbase/mozinfo/mozinfo/__init__.py | 58 + testing/mozbase/mozinfo/mozinfo/mozinfo.py | 363 ++ testing/mozbase/mozinfo/mozinfo/string_version.py | 73 + testing/mozbase/mozinfo/setup.cfg | 2 + testing/mozbase/mozinfo/setup.py | 41 + testing/mozbase/mozinfo/tests/manifest.toml | 4 + testing/mozbase/mozinfo/tests/test.py | 176 + testing/mozbase/mozinstall/mozinstall/__init__.py | 6 + .../mozbase/mozinstall/mozinstall/mozinstall.py | 443 ++ testing/mozbase/mozinstall/setup.cfg | 2 + testing/mozbase/mozinstall/setup.py | 59 + testing/mozbase/mozinstall/tests/conftest.py | 14 + .../mozinstall/tests/installer_stubs/firefox.dmg | Bin 0 -> 27309 bytes .../tests/installer_stubs/firefox.tar.bz2 | Bin 0 -> 2882 bytes .../mozinstall/tests/installer_stubs/firefox.zip | Bin 0 -> 8707 bytes 
testing/mozbase/mozinstall/tests/manifest.toml | 12 + testing/mozbase/mozinstall/tests/test_binary.py | 50 + testing/mozbase/mozinstall/tests/test_install.py | 90 + .../mozbase/mozinstall/tests/test_is_installer.py | 40 + testing/mozbase/mozinstall/tests/test_uninstall.py | 39 + testing/mozbase/mozleak/mozleak/__init__.py | 12 + testing/mozbase/mozleak/mozleak/leaklog.py | 255 ++ testing/mozbase/mozleak/mozleak/lsan.py | 220 + testing/mozbase/mozleak/setup.cfg | 2 + testing/mozbase/mozleak/setup.py | 29 + testing/mozbase/mozleak/tests/manifest.toml | 4 + testing/mozbase/mozleak/tests/test_lsan.py | 30 + testing/mozbase/mozlog/mozlog/__init__.py | 34 + testing/mozbase/mozlog/mozlog/capture.py | 96 + testing/mozbase/mozlog/mozlog/commandline.py | 344 ++ .../mozbase/mozlog/mozlog/formatters/__init__.py | 32 + testing/mozbase/mozlog/mozlog/formatters/base.py | 25 + .../mozlog/mozlog/formatters/errorsummary.py | 208 + .../mozbase/mozlog/mozlog/formatters/grouping.py | 391 ++ .../mozlog/mozlog/formatters/html/__init__.py | 7 + .../mozbase/mozlog/mozlog/formatters/html/html.py | 343 ++ .../mozbase/mozlog/mozlog/formatters/html/main.js | 166 + .../mozlog/mozlog/formatters/html/style.css | 155 + .../mozlog/mozlog/formatters/html/xmlgen.py | 310 ++ .../mozlog/mozlog/formatters/machformatter.py | 657 +++ .../mozbase/mozlog/mozlog/formatters/process.py | 59 + .../mozlog/mozlog/formatters/tbplformatter.py | 473 +++ .../mozbase/mozlog/mozlog/formatters/unittest.py | 83 + testing/mozbase/mozlog/mozlog/formatters/xunit.py | 115 + testing/mozbase/mozlog/mozlog/handlers/__init__.py | 19 + testing/mozbase/mozlog/mozlog/handlers/base.py | 124 + .../mozlog/mozlog/handlers/bufferhandler.py | 86 + .../mozlog/mozlog/handlers/messagehandler.py | 39 + .../mozlog/mozlog/handlers/statushandler.py | 87 + .../mozlog/mozlog/handlers/summaryhandler.py | 193 + .../mozlog/mozlog/handlers/valgrindhandler.py | 138 + testing/mozbase/mozlog/mozlog/logtypes.py | 302 ++ testing/mozbase/mozlog/mozlog/proxy.py | 81 + .../mozlog/mozlog/pytest_mozlog/__init__.py | 0 .../mozbase/mozlog/mozlog/pytest_mozlog/plugin.py | 127 + testing/mozbase/mozlog/mozlog/reader.py | 78 + testing/mozbase/mozlog/mozlog/scripts/__init__.py | 41 + testing/mozbase/mozlog/mozlog/scripts/format.py | 55 + testing/mozbase/mozlog/mozlog/scripts/logmerge.py | 90 + testing/mozbase/mozlog/mozlog/scripts/unstable.py | 148 + testing/mozbase/mozlog/mozlog/stdadapter.py | 50 + testing/mozbase/mozlog/mozlog/structuredlog.py | 820 ++++ .../mozbase/mozlog/mozlog/unstructured/__init__.py | 8 + .../mozbase/mozlog/mozlog/unstructured/logger.py | 191 + .../mozlog/mozlog/unstructured/loggingmixin.py | 42 + .../mozlog/mozlog/unstructured/loglistener.py | 50 + testing/mozbase/mozlog/setup.cfg | 2 + testing/mozbase/mozlog/setup.py | 43 + testing/mozbase/mozlog/tests/conftest.py | 24 + testing/mozbase/mozlog/tests/manifest.toml | 16 + testing/mozbase/mozlog/tests/test_capture.py | 37 + testing/mozbase/mozlog/tests/test_errorsummary.py | 262 ++ testing/mozbase/mozlog/tests/test_formatters.py | 767 ++++ testing/mozbase/mozlog/tests/test_logger.py | 303 ++ testing/mozbase/mozlog/tests/test_logtypes.py | 106 + testing/mozbase/mozlog/tests/test_structured.py | 1162 +++++ .../mozbase/mozlog/tests/test_terminal_colors.py | 62 + testing/mozbase/moznetwork/moznetwork/__init__.py | 26 + .../mozbase/moznetwork/moznetwork/moznetwork.py | 215 + testing/mozbase/moznetwork/setup.py | 36 + testing/mozbase/moznetwork/tests/manifest.toml | 4 + .../mozbase/moznetwork/tests/test_moznetwork.py | 73 + 
testing/mozbase/mozpower/mozpower/__init__.py | 24 + .../mozpower/mozpower/intel_power_gadget.py | 910 ++++ testing/mozbase/mozpower/mozpower/macintelpower.py | 92 + testing/mozbase/mozpower/mozpower/mozpower.py | 376 ++ testing/mozbase/mozpower/mozpower/mozpowerutils.py | 58 + testing/mozbase/mozpower/mozpower/powerbase.py | 122 + testing/mozbase/mozpower/setup.cfg | 2 + testing/mozbase/mozpower/setup.py | 34 + testing/mozbase/mozpower/tests/conftest.py | 103 + testing/mozbase/mozpower/tests/files/emptyfile.txt | 0 .../raptor-tp6-amazon-firefox_powerlog_1_.txt | 30 + .../raptor-tp6-amazon-firefox_powerlog_2_.txt | 30 + .../raptor-tp6-amazon-firefox_powerlog_3_.txt | 30 + .../mozpower/tests/files/valueerrorfile.txt | 30 + testing/mozbase/mozpower/tests/manifest.toml | 10 + .../mozpower/tests/test_intelpowergadget.py | 348 ++ .../mozbase/mozpower/tests/test_macintelpower.py | 76 + testing/mozbase/mozpower/tests/test_mozpower.py | 253 ++ testing/mozbase/mozpower/tests/test_powerbase.py | 91 + testing/mozbase/mozprocess/mozprocess/__init__.py | 16 + .../mozbase/mozprocess/mozprocess/mozprocess.py | 119 + .../mozprocess/mozprocess/processhandler.py | 1275 ++++++ testing/mozbase/mozprocess/mozprocess/qijo.py | 175 + .../mozbase/mozprocess/mozprocess/winprocess.py | 565 +++ testing/mozbase/mozprocess/setup.cfg | 2 + testing/mozbase/mozprocess/setup.py | 36 + testing/mozbase/mozprocess/tests/manifest.toml | 23 + .../mozprocess/tests/process_normal_broad.ini | 30 + .../mozprocess/tests/process_normal_deep.ini | 65 + .../mozprocess/tests/process_normal_finish.ini | 17 + .../process_normal_finish_no_process_group.ini | 2 + .../mozprocess/tests/process_waittimeout.ini | 16 + .../mozprocess/tests/process_waittimeout_10s.ini | 16 + testing/mozbase/mozprocess/tests/proclaunch.py | 209 + testing/mozbase/mozprocess/tests/proctest.py | 62 + .../mozprocess/tests/scripts/ignore_sigterm.py | 13 + .../mozprocess/tests/scripts/infinite_loop.py | 18 + .../mozprocess/tests/scripts/proccountfive.py | 2 + .../mozprocess/tests/scripts/procnonewline.py | 4 + testing/mozbase/mozprocess/tests/test_detached.py | 62 + testing/mozbase/mozprocess/tests/test_kill.py | 144 + testing/mozbase/mozprocess/tests/test_misc.py | 63 + testing/mozbase/mozprocess/tests/test_output.py | 76 + testing/mozbase/mozprocess/tests/test_params.py | 94 + testing/mozbase/mozprocess/tests/test_pid.py | 46 + testing/mozbase/mozprocess/tests/test_poll.py | 150 + .../mozprocess/tests/test_process_reader.py | 114 + .../mozbase/mozprocess/tests/test_run_and_wait.py | 126 + testing/mozbase/mozprocess/tests/test_wait.py | 144 + testing/mozbase/mozprofile/mozprofile/__init__.py | 20 + testing/mozbase/mozprofile/mozprofile/addons.py | 354 ++ testing/mozbase/mozprofile/mozprofile/cli.py | 203 + testing/mozbase/mozprofile/mozprofile/diff.py | 84 + .../mozbase/mozprofile/mozprofile/permissions.py | 335 ++ testing/mozbase/mozprofile/mozprofile/prefs.py | 271 ++ testing/mozbase/mozprofile/mozprofile/profile.py | 594 +++ testing/mozbase/mozprofile/mozprofile/view.py | 46 + testing/mozbase/mozprofile/setup.cfg | 2 + testing/mozbase/mozprofile/setup.py | 49 + testing/mozbase/mozprofile/tests/addon_stubs.py | 66 + .../apply-css-id-via-browser-specific-settings.xpi | Bin 0 -> 6444 bytes .../mozprofile/tests/addons/apply-css-sans-id.xpi | Bin 0 -> 3371 bytes .../mozbase/mozprofile/tests/addons/apply-css.xpi | Bin 0 -> 3412 bytes testing/mozbase/mozprofile/tests/addons/empty.xpi | Bin 0 -> 530 bytes .../mozprofile/tests/addons/empty/install.rdf | 20 + 
.../mozbase/mozprofile/tests/addons/invalid.xpi | Bin 0 -> 564 bytes .../tests/files/dummy-profile/.eslintrc.js | 7 + .../tests/files/dummy-profile/Preferences | 1 + .../tests/files/dummy-profile/extensions/empty.xpi | Bin 0 -> 530 bytes .../mozprofile/tests/files/dummy-profile/prefs.js | 1 + .../mozprofile/tests/files/dummy-profile/user.js | 1 + .../mozprofile/tests/files/not_an_addon.txt | 0 .../mozprofile/tests/files/prefs_with_comments.js | 6 + .../tests/files/prefs_with_interpolation.js | 5 + .../mozprofile/tests/files/prefs_with_multiline.js | 5 + .../tests/install_manifests/test_addon_1.rdf | 21 + .../tests/install_manifests/test_addon_2.rdf | 21 + .../tests/install_manifests/test_addon_3.rdf | 22 + .../tests/install_manifests/test_addon_4.rdf | 22 + .../install_manifests/test_addon_invalid_no_id.rdf | 22 + .../test_addon_invalid_not_wellformed.rdf | 23 + .../test_addon_invalid_version.rdf | 23 + .../tests/install_manifests/test_addon_unpack.rdf | 22 + testing/mozbase/mozprofile/tests/manifest.toml | 24 + testing/mozbase/mozprofile/tests/test_addonid.py | 157 + testing/mozbase/mozprofile/tests/test_addons.py | 373 ++ testing/mozbase/mozprofile/tests/test_bug758250.py | 45 + .../mozprofile/tests/test_chrome_profile.py | 72 + .../mozbase/mozprofile/tests/test_clone_cleanup.py | 78 + testing/mozbase/mozprofile/tests/test_nonce.py | 41 + .../mozbase/mozprofile/tests/test_permissions.py | 64 + .../mozbase/mozprofile/tests/test_preferences.py | 428 ++ testing/mozbase/mozprofile/tests/test_profile.py | 111 + .../mozbase/mozprofile/tests/test_profile_view.py | 76 + .../mozprofile/tests/test_server_locations.py | 117 + testing/mozbase/mozproxy/MANIFEST.in | 1 + testing/mozbase/mozproxy/mozproxy/__init__.py | 44 + testing/mozbase/mozproxy/mozproxy/__main__.py | 10 + .../mozbase/mozproxy/mozproxy/backends/__init__.py | 3 + testing/mozbase/mozproxy/mozproxy/backends/base.py | 32 + .../mozproxy/mozproxy/backends/mitm/__init__.py | 6 + .../mozproxy/mozproxy/backends/mitm/android.py | 245 ++ .../mozproxy/mozproxy/backends/mitm/desktop.py | 160 + .../mitmproxy-rel-bin-4.0.4-linux64.manifest | 10 + .../manifests/mitmproxy-rel-bin-4.0.4-osx.manifest | 10 + .../manifests/mitmproxy-rel-bin-4.0.4-win.manifest | 10 + .../mitmproxy-rel-bin-5.0.1-linux64.manifest | 10 + .../manifests/mitmproxy-rel-bin-5.0.1-osx.manifest | 10 + .../manifests/mitmproxy-rel-bin-5.0.1-win.manifest | 10 + .../mitmproxy-rel-bin-5.1.1-linux64.manifest | 10 + .../manifests/mitmproxy-rel-bin-5.1.1-osx.manifest | 10 + .../manifests/mitmproxy-rel-bin-5.1.1-win.manifest | 10 + .../mitmproxy-rel-bin-6.0.2-linux64.manifest | 10 + .../manifests/mitmproxy-rel-bin-6.0.2-osx.manifest | 10 + .../manifests/mitmproxy-rel-bin-6.0.2-win.manifest | 10 + .../mitmproxy-rel-bin-7.0.4-linux64.manifest | 10 + .../manifests/mitmproxy-rel-bin-7.0.4-osx.manifest | 10 + .../manifests/mitmproxy-rel-bin-7.0.4-win.manifest | 10 + .../mitmproxy-rel-bin-8.1.1-linux64.manifest | 10 + .../manifests/mitmproxy-rel-bin-8.1.1-osx.manifest | 10 + .../manifests/mitmproxy-rel-bin-8.1.1-win.manifest | 10 + .../mozproxy/mozproxy/backends/mitm/mitm.py | 454 ++ .../backends/mitm/mitmproxy_requirements.txt | 35 + .../mozproxy/backends/mitm/scripts/__init__.py | 4 + .../backends/mitm/scripts/alt-serverplayback.py | 264 ++ .../mitm/scripts/alternate-server-replay.py | 316 ++ .../backends/mitm/scripts/catapult/LICENSE | 27 + .../mitm/scripts/catapult/deterministic.js | 71 + .../mitm/scripts/http_protocol_extractor.py | 83 + .../backends/mitm/scripts/inject-deterministic.py | 
206 + testing/mozbase/mozproxy/mozproxy/driver.py | 171 + testing/mozbase/mozproxy/mozproxy/recordings.py | 163 + testing/mozbase/mozproxy/mozproxy/server.py | 16 + testing/mozbase/mozproxy/mozproxy/utils.py | 249 ++ testing/mozbase/mozproxy/setup.py | 37 + testing/mozbase/mozproxy/tests/__init__.py | 1 + testing/mozbase/mozproxy/tests/archive.tar.gz | Bin 0 -> 184 bytes testing/mozbase/mozproxy/tests/example.dump | Bin 0 -> 494196 bytes .../files/mitm5-linux-firefox-amazon.manifest | 10 + .../tests/files/mitm5-linux-firefox-amazon.zip | Bin 0 -> 6588776 bytes testing/mozbase/mozproxy/tests/files/recording.zip | Bin 0 -> 384 bytes testing/mozbase/mozproxy/tests/firefox | 1 + testing/mozbase/mozproxy/tests/manifest.toml | 16 + testing/mozbase/mozproxy/tests/paypal.mp | 1 + testing/mozbase/mozproxy/tests/support.py | 14 + .../mozbase/mozproxy/tests/test_command_line.py | 269 ++ testing/mozbase/mozproxy/tests/test_mitm_addons.py | 89 + testing/mozbase/mozproxy/tests/test_proxy.py | 212 + testing/mozbase/mozproxy/tests/test_recording.py | 80 + testing/mozbase/mozproxy/tests/test_recordings.py | 37 + testing/mozbase/mozproxy/tests/test_utils.py | 32 + testing/mozbase/mozrunner/mozrunner/__init__.py | 12 + testing/mozbase/mozrunner/mozrunner/application.py | 156 + .../mozbase/mozrunner/mozrunner/base/__init__.py | 8 + .../mozbase/mozrunner/mozrunner/base/browser.py | 122 + testing/mozbase/mozrunner/mozrunner/base/device.py | 199 + testing/mozbase/mozrunner/mozrunner/base/runner.py | 278 ++ testing/mozbase/mozrunner/mozrunner/cli.py | 181 + .../mozrunner/mozrunner/devices/__init__.py | 17 + .../mozrunner/mozrunner/devices/android_device.py | 1062 +++++ .../mozbase/mozrunner/mozrunner/devices/base.py | 259 ++ .../mozrunner/mozrunner/devices/emulator.py | 224 + .../mozrunner/devices/emulator_battery.py | 53 + .../mozrunner/mozrunner/devices/emulator_geo.py | 16 + .../mozrunner/mozrunner/devices/emulator_screen.py | 91 + testing/mozbase/mozrunner/mozrunner/errors.py | 16 + testing/mozbase/mozrunner/mozrunner/runners.py | 144 + testing/mozbase/mozrunner/mozrunner/utils.py | 299 ++ testing/mozbase/mozrunner/setup.cfg | 2 + testing/mozbase/mozrunner/setup.py | 53 + testing/mozbase/mozrunner/tests/conftest.py | 81 + testing/mozbase/mozrunner/tests/manifest.toml | 19 + testing/mozbase/mozrunner/tests/test_crash.py | 36 + .../mozbase/mozrunner/tests/test_interactive.py | 40 + testing/mozbase/mozrunner/tests/test_start.py | 61 + testing/mozbase/mozrunner/tests/test_states.py | 22 + testing/mozbase/mozrunner/tests/test_stop.py | 41 + testing/mozbase/mozrunner/tests/test_threads.py | 57 + testing/mozbase/mozrunner/tests/test_wait.py | 32 + .../mozscreenshot/mozscreenshot/__init__.py | 110 + testing/mozbase/mozscreenshot/setup.cfg | 2 + testing/mozbase/mozscreenshot/setup.py | 29 + testing/mozbase/mozserve/mozserve/__init__.py | 12 + testing/mozbase/mozserve/mozserve/servers.py | 289 ++ testing/mozbase/mozserve/setup.py | 17 + testing/mozbase/mozsystemmonitor/README.rst | 12 + .../mozsystemmonitor/mozsystemmonitor/__init__.py | 0 .../mozsystemmonitor/resourcemonitor.py | 1220 ++++++ testing/mozbase/mozsystemmonitor/setup.cfg | 2 + testing/mozbase/mozsystemmonitor/setup.py | 33 + .../mozbase/mozsystemmonitor/tests/manifest.toml | 4 + .../tests/test_resource_monitor.py | 183 + testing/mozbase/moztest/moztest/__init__.py | 7 + .../mozbase/moztest/moztest/adapters/__init__.py | 7 + testing/mozbase/moztest/moztest/adapters/unit.py | 216 + testing/mozbase/moztest/moztest/resolve.py | 1042 +++++ 
testing/mozbase/moztest/moztest/results.py | 366 ++ .../mozbase/moztest/moztest/selftest/__init__.py | 0 .../mozbase/moztest/moztest/selftest/fixtures.py | 116 + testing/mozbase/moztest/moztest/selftest/output.py | 52 + testing/mozbase/moztest/setup.py | 33 + .../moztest/tests/data/srcdir/apple/a11y.toml | 3 + .../moztest/tests/data/srcdir/apple/moz.build | 1 + .../moztest/tests/data/srcdir/banana/moz.build | 1 + .../moztest/tests/data/srcdir/banana/xpcshell.toml | 5 + .../moztest/tests/data/srcdir/carrot/moz.build | 1 + .../tests/data/srcdir/carrot/xpcshell-one.toml | 5 + .../tests/data/srcdir/carrot/xpcshell-shared.toml | 3 + .../tests/data/srcdir/carrot/xpcshell-two.toml | 5 + .../dragonfruit/elderberry/xpcshell_updater.toml | 8 + .../tests/data/srcdir/dragonfruit/moz.build | 1 + .../tests/data/srcdir/dragonfruit/xpcshell.toml | 6 + .../data/srcdir/fig/grape/instrumentation.toml | 4 + .../srcdir/fig/huckleberry/instrumentation.toml | 4 + .../moztest/tests/data/srcdir/fig/moz.build | 4 + .../moztest/tests/data/srcdir/juniper/browser.toml | 3 + .../moztest/tests/data/srcdir/kiwi/browser.toml | 5 + .../mozbase/moztest/tests/data/srcdir/moz.build | 4 + .../tests/data/srcdir/wpt_manifest_data.json | 8 + testing/mozbase/moztest/tests/manifest.toml | 6 + testing/mozbase/moztest/tests/test.py | 54 + testing/mozbase/moztest/tests/test_resolve.py | 577 +++ testing/mozbase/mozversion/mozversion/__init__.py | 9 + testing/mozbase/mozversion/mozversion/errors.py | 29 + .../mozbase/mozversion/mozversion/mozversion.py | 153 + testing/mozbase/mozversion/setup.cfg | 2 + testing/mozbase/mozversion/setup.py | 33 + testing/mozbase/mozversion/tests/manifest.toml | 6 + testing/mozbase/mozversion/tests/test_apk.py | 45 + testing/mozbase/mozversion/tests/test_binary.py | 157 + testing/mozbase/rust/mozdevice/Cargo.toml | 25 + testing/mozbase/rust/mozdevice/src/adb.rs | 38 + testing/mozbase/rust/mozdevice/src/lib.rs | 1065 +++++ testing/mozbase/rust/mozdevice/src/shell.rs | 66 + testing/mozbase/rust/mozdevice/src/test.rs | 760 ++++ testing/mozbase/rust/mozprofile/Cargo.toml | 16 + testing/mozbase/rust/mozprofile/fuzz/Cargo.toml | 25 + .../mozprofile/fuzz/fuzz_targets/prefreader.rs | 16 + testing/mozbase/rust/mozprofile/src/lib.rs | 241 ++ testing/mozbase/rust/mozprofile/src/preferences.rs | 138 + testing/mozbase/rust/mozprofile/src/prefreader.rs | 1046 +++++ testing/mozbase/rust/mozprofile/src/profile.rs | 135 + testing/mozbase/rust/mozrunner/Cargo.toml | 28 + .../rust/mozrunner/src/bin/firefox-default-path.rs | 21 + testing/mozbase/rust/mozrunner/src/firefox_args.rs | 384 ++ testing/mozbase/rust/mozrunner/src/lib.rs | 20 + testing/mozbase/rust/mozrunner/src/path.rs | 61 + testing/mozbase/rust/mozrunner/src/runner.rs | 528 +++ testing/mozbase/rust/mozversion/Cargo.toml | 18 + testing/mozbase/rust/mozversion/src/lib.rs | 410 ++ testing/mozbase/setup_development.py | 290 ++ testing/mozbase/versioninfo.py | 153 + 523 files changed, 59328 insertions(+) create mode 100644 testing/mozbase/README.md create mode 100644 testing/mozbase/docs/Makefile create mode 100644 testing/mozbase/docs/_static/structured_example.py create mode 100644 testing/mozbase/docs/conf.py create mode 100644 testing/mozbase/docs/devicemanagement.rst create mode 100644 testing/mozbase/docs/gettinginfo.rst create mode 100644 testing/mozbase/docs/index.rst create mode 100644 testing/mozbase/docs/loggingreporting.rst create mode 100644 testing/mozbase/docs/make.bat create mode 100644 testing/mozbase/docs/manifestparser.rst create mode 100644 
testing/mozbase/docs/mozcrash.rst create mode 100644 testing/mozbase/docs/mozdebug.rst create mode 100644 testing/mozbase/docs/mozdevice.rst create mode 100644 testing/mozbase/docs/mozfile.rst create mode 100644 testing/mozbase/docs/mozgeckoprofiler.rst create mode 100644 testing/mozbase/docs/mozhttpd.rst create mode 100644 testing/mozbase/docs/mozinfo.rst create mode 100644 testing/mozbase/docs/mozinstall.rst create mode 100644 testing/mozbase/docs/mozlog.rst create mode 100644 testing/mozbase/docs/moznetwork.rst create mode 100644 testing/mozbase/docs/mozpower.rst create mode 100644 testing/mozbase/docs/mozprocess.rst create mode 100644 testing/mozbase/docs/mozprofile.rst create mode 100644 testing/mozbase/docs/mozproxy.rst create mode 100644 testing/mozbase/docs/mozrunner.rst create mode 100644 testing/mozbase/docs/mozversion.rst create mode 100644 testing/mozbase/docs/requirements.txt create mode 100644 testing/mozbase/docs/servingcontent.rst create mode 100644 testing/mozbase/docs/setuprunning.rst create mode 100644 testing/mozbase/manifestparser/manifestparser/__init__.py create mode 100644 testing/mozbase/manifestparser/manifestparser/cli.py create mode 100644 testing/mozbase/manifestparser/manifestparser/expression.py create mode 100644 testing/mozbase/manifestparser/manifestparser/filters.py create mode 100644 testing/mozbase/manifestparser/manifestparser/ini.py create mode 100644 testing/mozbase/manifestparser/manifestparser/logger.py create mode 100644 testing/mozbase/manifestparser/manifestparser/manifestparser.py create mode 100644 testing/mozbase/manifestparser/manifestparser/toml.py create mode 100644 testing/mozbase/manifestparser/manifestparser/util.py create mode 100644 testing/mozbase/manifestparser/setup.py create mode 100644 testing/mozbase/manifestparser/tests/broken-skip-if.toml create mode 100644 testing/mozbase/manifestparser/tests/comment-example.toml create mode 100644 testing/mozbase/manifestparser/tests/default-skipif.toml create mode 100644 testing/mozbase/manifestparser/tests/default-subsuite.toml create mode 100644 testing/mozbase/manifestparser/tests/default-suppfiles.toml create mode 100644 testing/mozbase/manifestparser/tests/edit-manifest-after.toml create mode 100644 testing/mozbase/manifestparser/tests/edit-manifest-before.toml create mode 100644 testing/mozbase/manifestparser/tests/filter-example.toml create mode 100644 testing/mozbase/manifestparser/tests/fleem create mode 100644 testing/mozbase/manifestparser/tests/include-example.toml create mode 100644 testing/mozbase/manifestparser/tests/include-invalid.toml create mode 100644 testing/mozbase/manifestparser/tests/include/bar.ini create mode 100644 testing/mozbase/manifestparser/tests/include/bar.toml create mode 100644 testing/mozbase/manifestparser/tests/include/crash-handling create mode 100644 testing/mozbase/manifestparser/tests/include/flowers create mode 100644 testing/mozbase/manifestparser/tests/include/foo.ini create mode 100644 testing/mozbase/manifestparser/tests/include/foo.toml create mode 100644 testing/mozbase/manifestparser/tests/just-defaults.toml create mode 100644 testing/mozbase/manifestparser/tests/manifest.toml create mode 100644 testing/mozbase/manifestparser/tests/missing-path.toml create mode 100644 testing/mozbase/manifestparser/tests/mozmill-example.toml create mode 100644 testing/mozbase/manifestparser/tests/mozmill-restart-example.toml create mode 100644 testing/mozbase/manifestparser/tests/no-tests.toml create mode 100644 
testing/mozbase/manifestparser/tests/parent/include/first/manifest.ini create mode 100644 testing/mozbase/manifestparser/tests/parent/include/first/manifest.toml create mode 100644 testing/mozbase/manifestparser/tests/parent/include/manifest.ini create mode 100644 testing/mozbase/manifestparser/tests/parent/include/manifest.toml create mode 100644 testing/mozbase/manifestparser/tests/parent/include/second/manifest.ini create mode 100644 testing/mozbase/manifestparser/tests/parent/include/second/manifest.toml create mode 100644 testing/mozbase/manifestparser/tests/parent/level_1/level_1.ini create mode 100644 testing/mozbase/manifestparser/tests/parent/level_1/level_1.toml create mode 100644 testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_2.ini create mode 100644 testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_2.toml create mode 100644 testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/level_3.ini create mode 100644 testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/level_3.toml create mode 100644 testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/level_3_default.ini create mode 100644 testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/level_3_default.toml create mode 100644 testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/test_3 create mode 100644 testing/mozbase/manifestparser/tests/parent/level_1/level_2/test_2 create mode 100644 testing/mozbase/manifestparser/tests/parent/level_1/test_1 create mode 100644 testing/mozbase/manifestparser/tests/parent/root/dummy create mode 100644 testing/mozbase/manifestparser/tests/parse-error.toml create mode 100644 testing/mozbase/manifestparser/tests/path-example.toml create mode 100644 testing/mozbase/manifestparser/tests/relative-path.toml create mode 100644 testing/mozbase/manifestparser/tests/subsuite.toml create mode 100644 testing/mozbase/manifestparser/tests/test_chunking.py create mode 100755 testing/mozbase/manifestparser/tests/test_convert_directory.py create mode 100755 testing/mozbase/manifestparser/tests/test_convert_symlinks.py create mode 100755 testing/mozbase/manifestparser/tests/test_default_overrides.py create mode 100755 testing/mozbase/manifestparser/tests/test_expressionparser.py create mode 100644 testing/mozbase/manifestparser/tests/test_filters.py create mode 100755 testing/mozbase/manifestparser/tests/test_manifestparser.py create mode 100755 testing/mozbase/manifestparser/tests/test_read_ini.py create mode 100644 testing/mozbase/manifestparser/tests/test_testmanifest.py create mode 100644 testing/mozbase/manifestparser/tests/test_util.py create mode 100644 testing/mozbase/manifestparser/tests/verifyDirectory/subdir/manifest.ini create mode 100644 testing/mozbase/manifestparser/tests/verifyDirectory/subdir/manifest.toml create mode 100644 testing/mozbase/manifestparser/tests/verifyDirectory/subdir/test_sub.js create mode 100644 testing/mozbase/manifestparser/tests/verifyDirectory/test_1.js create mode 100644 testing/mozbase/manifestparser/tests/verifyDirectory/test_2.js create mode 100644 testing/mozbase/manifestparser/tests/verifyDirectory/test_3.js create mode 100644 testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory.ini create mode 100644 testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory.toml create mode 100644 testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory_incomplete.ini create mode 100644 
testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory_incomplete.toml create mode 100644 testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory_toocomplete.ini create mode 100644 testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory_toocomplete.toml create mode 100644 testing/mozbase/moz.build create mode 100644 testing/mozbase/mozcrash/mozcrash/__init__.py create mode 100644 testing/mozbase/mozcrash/mozcrash/mozcrash.py create mode 100644 testing/mozbase/mozcrash/setup.cfg create mode 100644 testing/mozbase/mozcrash/setup.py create mode 100644 testing/mozbase/mozcrash/tests/conftest.py create mode 100644 testing/mozbase/mozcrash/tests/manifest.toml create mode 100644 testing/mozbase/mozcrash/tests/test_basic.py create mode 100644 testing/mozbase/mozcrash/tests/test_java_exception.py create mode 100644 testing/mozbase/mozcrash/tests/test_save_path.py create mode 100644 testing/mozbase/mozcrash/tests/test_stackwalk.py create mode 100644 testing/mozbase/mozcrash/tests/test_symbols_path.py create mode 100644 testing/mozbase/mozdebug/mozdebug/__init__.py create mode 100755 testing/mozbase/mozdebug/mozdebug/mozdebug.py create mode 100644 testing/mozbase/mozdebug/setup.cfg create mode 100644 testing/mozbase/mozdebug/setup.py create mode 100755 testing/mozbase/mozdebug/tests/fake_debuggers/cgdb/cgdb create mode 100755 testing/mozbase/mozdebug/tests/fake_debuggers/devenv/devenv.exe create mode 100755 testing/mozbase/mozdebug/tests/fake_debuggers/gdb/gdb create mode 100755 testing/mozbase/mozdebug/tests/fake_debuggers/lldb/lldb create mode 100755 testing/mozbase/mozdebug/tests/fake_debuggers/wdexpress/wdexpress.exe create mode 100644 testing/mozbase/mozdebug/tests/manifest.toml create mode 100644 testing/mozbase/mozdebug/tests/test.py create mode 100644 testing/mozbase/mozdevice/mozdevice/__init__.py create mode 100644 testing/mozbase/mozdevice/mozdevice/adb.py create mode 100644 testing/mozbase/mozdevice/mozdevice/adb_android.py create mode 100644 testing/mozbase/mozdevice/mozdevice/remote_process_monitor.py create mode 100644 testing/mozbase/mozdevice/mozdevice/version_codes.py create mode 100644 testing/mozbase/mozdevice/setup.cfg create mode 100644 testing/mozbase/mozdevice/setup.py create mode 100644 testing/mozbase/mozdevice/tests/conftest.py create mode 100644 testing/mozbase/mozdevice/tests/manifest.toml create mode 100644 testing/mozbase/mozdevice/tests/test_chown.py create mode 100644 testing/mozbase/mozdevice/tests/test_escape_command_line.py create mode 100644 testing/mozbase/mozdevice/tests/test_is_app_installed.py create mode 100644 testing/mozbase/mozdevice/tests/test_socket_connection.py create mode 100644 testing/mozbase/mozfile/mozfile/__init__.py create mode 100644 testing/mozbase/mozfile/mozfile/mozfile.py create mode 100644 testing/mozbase/mozfile/setup.cfg create mode 100644 testing/mozbase/mozfile/setup.py create mode 100644 testing/mozbase/mozfile/tests/files/missing_file_attributes.zip create mode 100755 testing/mozbase/mozfile/tests/files/which/baz create mode 100755 testing/mozbase/mozfile/tests/files/which/baz.exe create mode 100755 testing/mozbase/mozfile/tests/files/which/registered/quux.exe create mode 100755 testing/mozbase/mozfile/tests/files/which/unix/baz.exe create mode 100644 testing/mozbase/mozfile/tests/files/which/unix/file create mode 100755 testing/mozbase/mozfile/tests/files/which/unix/foo create mode 100755 testing/mozbase/mozfile/tests/files/which/win/bar create mode 100755 
testing/mozbase/mozfile/tests/files/which/win/baz.exe create mode 100755 testing/mozbase/mozfile/tests/files/which/win/foo create mode 100755 testing/mozbase/mozfile/tests/files/which/win/foo.exe create mode 100644 testing/mozbase/mozfile/tests/manifest.toml create mode 100644 testing/mozbase/mozfile/tests/stubs.py create mode 100644 testing/mozbase/mozfile/tests/test_copycontents.py create mode 100644 testing/mozbase/mozfile/tests/test_extract.py create mode 100755 testing/mozbase/mozfile/tests/test_load.py create mode 100644 testing/mozbase/mozfile/tests/test_move_remove.py create mode 100644 testing/mozbase/mozfile/tests/test_tempdir.py create mode 100644 testing/mozbase/mozfile/tests/test_tempfile.py create mode 100644 testing/mozbase/mozfile/tests/test_tree.py create mode 100755 testing/mozbase/mozfile/tests/test_url.py create mode 100644 testing/mozbase/mozfile/tests/test_which.py create mode 100644 testing/mozbase/mozgeckoprofiler/mozgeckoprofiler/__init__.py create mode 100755 testing/mozbase/mozgeckoprofiler/mozgeckoprofiler/dump_syms_mac create mode 100644 testing/mozbase/mozgeckoprofiler/mozgeckoprofiler/profiling.py create mode 100644 testing/mozbase/mozgeckoprofiler/mozgeckoprofiler/symFileManager.py create mode 100644 testing/mozbase/mozgeckoprofiler/mozgeckoprofiler/symbolication.py create mode 100644 testing/mozbase/mozgeckoprofiler/mozgeckoprofiler/symbolicationRequest.py create mode 100644 testing/mozbase/mozgeckoprofiler/mozgeckoprofiler/viewgeckoprofile.py create mode 100644 testing/mozbase/mozgeckoprofiler/setup.py create mode 100644 testing/mozbase/mozgeckoprofiler/tests/manifest.toml create mode 100644 testing/mozbase/mozgeckoprofiler/tests/test_view_gecko_profiler.py create mode 100644 testing/mozbase/mozhttpd/mozhttpd/__init__.py create mode 100644 testing/mozbase/mozhttpd/mozhttpd/handlers.py create mode 100755 testing/mozbase/mozhttpd/mozhttpd/mozhttpd.py create mode 100644 testing/mozbase/mozhttpd/setup.py create mode 100644 testing/mozbase/mozhttpd/tests/api.py create mode 100644 testing/mozbase/mozhttpd/tests/baseurl.py create mode 100644 testing/mozbase/mozhttpd/tests/basic.py create mode 100644 testing/mozbase/mozhttpd/tests/filelisting.py create mode 100644 testing/mozbase/mozhttpd/tests/manifest.toml create mode 100644 testing/mozbase/mozhttpd/tests/paths.py create mode 100644 testing/mozbase/mozhttpd/tests/requestlog.py create mode 100644 testing/mozbase/mozinfo/mozinfo/__init__.py create mode 100755 testing/mozbase/mozinfo/mozinfo/mozinfo.py create mode 100644 testing/mozbase/mozinfo/mozinfo/string_version.py create mode 100644 testing/mozbase/mozinfo/setup.cfg create mode 100644 testing/mozbase/mozinfo/setup.py create mode 100644 testing/mozbase/mozinfo/tests/manifest.toml create mode 100644 testing/mozbase/mozinfo/tests/test.py create mode 100644 testing/mozbase/mozinstall/mozinstall/__init__.py create mode 100644 testing/mozbase/mozinstall/mozinstall/mozinstall.py create mode 100644 testing/mozbase/mozinstall/setup.cfg create mode 100644 testing/mozbase/mozinstall/setup.py create mode 100644 testing/mozbase/mozinstall/tests/conftest.py create mode 100644 testing/mozbase/mozinstall/tests/installer_stubs/firefox.dmg create mode 100644 testing/mozbase/mozinstall/tests/installer_stubs/firefox.tar.bz2 create mode 100644 testing/mozbase/mozinstall/tests/installer_stubs/firefox.zip create mode 100644 testing/mozbase/mozinstall/tests/manifest.toml create mode 100644 testing/mozbase/mozinstall/tests/test_binary.py create mode 100644 
testing/mozbase/mozinstall/tests/test_install.py create mode 100644 testing/mozbase/mozinstall/tests/test_is_installer.py create mode 100644 testing/mozbase/mozinstall/tests/test_uninstall.py create mode 100644 testing/mozbase/mozleak/mozleak/__init__.py create mode 100644 testing/mozbase/mozleak/mozleak/leaklog.py create mode 100644 testing/mozbase/mozleak/mozleak/lsan.py create mode 100644 testing/mozbase/mozleak/setup.cfg create mode 100644 testing/mozbase/mozleak/setup.py create mode 100644 testing/mozbase/mozleak/tests/manifest.toml create mode 100644 testing/mozbase/mozleak/tests/test_lsan.py create mode 100644 testing/mozbase/mozlog/mozlog/__init__.py create mode 100644 testing/mozbase/mozlog/mozlog/capture.py create mode 100644 testing/mozbase/mozlog/mozlog/commandline.py create mode 100644 testing/mozbase/mozlog/mozlog/formatters/__init__.py create mode 100644 testing/mozbase/mozlog/mozlog/formatters/base.py create mode 100644 testing/mozbase/mozlog/mozlog/formatters/errorsummary.py create mode 100644 testing/mozbase/mozlog/mozlog/formatters/grouping.py create mode 100644 testing/mozbase/mozlog/mozlog/formatters/html/__init__.py create mode 100755 testing/mozbase/mozlog/mozlog/formatters/html/html.py create mode 100644 testing/mozbase/mozlog/mozlog/formatters/html/main.js create mode 100644 testing/mozbase/mozlog/mozlog/formatters/html/style.css create mode 100644 testing/mozbase/mozlog/mozlog/formatters/html/xmlgen.py create mode 100644 testing/mozbase/mozlog/mozlog/formatters/machformatter.py create mode 100644 testing/mozbase/mozlog/mozlog/formatters/process.py create mode 100644 testing/mozbase/mozlog/mozlog/formatters/tbplformatter.py create mode 100755 testing/mozbase/mozlog/mozlog/formatters/unittest.py create mode 100644 testing/mozbase/mozlog/mozlog/formatters/xunit.py create mode 100644 testing/mozbase/mozlog/mozlog/handlers/__init__.py create mode 100644 testing/mozbase/mozlog/mozlog/handlers/base.py create mode 100644 testing/mozbase/mozlog/mozlog/handlers/bufferhandler.py create mode 100644 testing/mozbase/mozlog/mozlog/handlers/messagehandler.py create mode 100644 testing/mozbase/mozlog/mozlog/handlers/statushandler.py create mode 100644 testing/mozbase/mozlog/mozlog/handlers/summaryhandler.py create mode 100644 testing/mozbase/mozlog/mozlog/handlers/valgrindhandler.py create mode 100644 testing/mozbase/mozlog/mozlog/logtypes.py create mode 100644 testing/mozbase/mozlog/mozlog/proxy.py create mode 100644 testing/mozbase/mozlog/mozlog/pytest_mozlog/__init__.py create mode 100644 testing/mozbase/mozlog/mozlog/pytest_mozlog/plugin.py create mode 100644 testing/mozbase/mozlog/mozlog/reader.py create mode 100644 testing/mozbase/mozlog/mozlog/scripts/__init__.py create mode 100644 testing/mozbase/mozlog/mozlog/scripts/format.py create mode 100644 testing/mozbase/mozlog/mozlog/scripts/logmerge.py create mode 100644 testing/mozbase/mozlog/mozlog/scripts/unstable.py create mode 100644 testing/mozbase/mozlog/mozlog/stdadapter.py create mode 100644 testing/mozbase/mozlog/mozlog/structuredlog.py create mode 100644 testing/mozbase/mozlog/mozlog/unstructured/__init__.py create mode 100644 testing/mozbase/mozlog/mozlog/unstructured/logger.py create mode 100644 testing/mozbase/mozlog/mozlog/unstructured/loggingmixin.py create mode 100644 testing/mozbase/mozlog/mozlog/unstructured/loglistener.py create mode 100644 testing/mozbase/mozlog/setup.cfg create mode 100644 testing/mozbase/mozlog/setup.py create mode 100644 testing/mozbase/mozlog/tests/conftest.py create mode 100644 
testing/mozbase/mozlog/tests/manifest.toml create mode 100644 testing/mozbase/mozlog/tests/test_capture.py create mode 100644 testing/mozbase/mozlog/tests/test_errorsummary.py create mode 100644 testing/mozbase/mozlog/tests/test_formatters.py create mode 100644 testing/mozbase/mozlog/tests/test_logger.py create mode 100644 testing/mozbase/mozlog/tests/test_logtypes.py create mode 100644 testing/mozbase/mozlog/tests/test_structured.py create mode 100644 testing/mozbase/mozlog/tests/test_terminal_colors.py create mode 100644 testing/mozbase/moznetwork/moznetwork/__init__.py create mode 100644 testing/mozbase/moznetwork/moznetwork/moznetwork.py create mode 100644 testing/mozbase/moznetwork/setup.py create mode 100644 testing/mozbase/moznetwork/tests/manifest.toml create mode 100644 testing/mozbase/moznetwork/tests/test_moznetwork.py create mode 100644 testing/mozbase/mozpower/mozpower/__init__.py create mode 100644 testing/mozbase/mozpower/mozpower/intel_power_gadget.py create mode 100644 testing/mozbase/mozpower/mozpower/macintelpower.py create mode 100644 testing/mozbase/mozpower/mozpower/mozpower.py create mode 100644 testing/mozbase/mozpower/mozpower/mozpowerutils.py create mode 100644 testing/mozbase/mozpower/mozpower/powerbase.py create mode 100644 testing/mozbase/mozpower/setup.cfg create mode 100644 testing/mozbase/mozpower/setup.py create mode 100644 testing/mozbase/mozpower/tests/conftest.py create mode 100644 testing/mozbase/mozpower/tests/files/emptyfile.txt create mode 100644 testing/mozbase/mozpower/tests/files/raptor-tp6-amazon-firefox_powerlog_1_.txt create mode 100644 testing/mozbase/mozpower/tests/files/raptor-tp6-amazon-firefox_powerlog_2_.txt create mode 100644 testing/mozbase/mozpower/tests/files/raptor-tp6-amazon-firefox_powerlog_3_.txt create mode 100644 testing/mozbase/mozpower/tests/files/valueerrorfile.txt create mode 100644 testing/mozbase/mozpower/tests/manifest.toml create mode 100644 testing/mozbase/mozpower/tests/test_intelpowergadget.py create mode 100644 testing/mozbase/mozpower/tests/test_macintelpower.py create mode 100644 testing/mozbase/mozpower/tests/test_mozpower.py create mode 100644 testing/mozbase/mozpower/tests/test_powerbase.py create mode 100644 testing/mozbase/mozprocess/mozprocess/__init__.py create mode 100644 testing/mozbase/mozprocess/mozprocess/mozprocess.py create mode 100644 testing/mozbase/mozprocess/mozprocess/processhandler.py create mode 100644 testing/mozbase/mozprocess/mozprocess/qijo.py create mode 100644 testing/mozbase/mozprocess/mozprocess/winprocess.py create mode 100644 testing/mozbase/mozprocess/setup.cfg create mode 100644 testing/mozbase/mozprocess/setup.py create mode 100644 testing/mozbase/mozprocess/tests/manifest.toml create mode 100644 testing/mozbase/mozprocess/tests/process_normal_broad.ini create mode 100644 testing/mozbase/mozprocess/tests/process_normal_deep.ini create mode 100644 testing/mozbase/mozprocess/tests/process_normal_finish.ini create mode 100644 testing/mozbase/mozprocess/tests/process_normal_finish_no_process_group.ini create mode 100644 testing/mozbase/mozprocess/tests/process_waittimeout.ini create mode 100644 testing/mozbase/mozprocess/tests/process_waittimeout_10s.ini create mode 100644 testing/mozbase/mozprocess/tests/proclaunch.py create mode 100644 testing/mozbase/mozprocess/tests/proctest.py create mode 100644 testing/mozbase/mozprocess/tests/scripts/ignore_sigterm.py create mode 100644 testing/mozbase/mozprocess/tests/scripts/infinite_loop.py create mode 100644 
testing/mozbase/mozprocess/tests/scripts/proccountfive.py create mode 100644 testing/mozbase/mozprocess/tests/scripts/procnonewline.py create mode 100644 testing/mozbase/mozprocess/tests/test_detached.py create mode 100644 testing/mozbase/mozprocess/tests/test_kill.py create mode 100644 testing/mozbase/mozprocess/tests/test_misc.py create mode 100644 testing/mozbase/mozprocess/tests/test_output.py create mode 100644 testing/mozbase/mozprocess/tests/test_params.py create mode 100644 testing/mozbase/mozprocess/tests/test_pid.py create mode 100644 testing/mozbase/mozprocess/tests/test_poll.py create mode 100644 testing/mozbase/mozprocess/tests/test_process_reader.py create mode 100644 testing/mozbase/mozprocess/tests/test_run_and_wait.py create mode 100644 testing/mozbase/mozprocess/tests/test_wait.py create mode 100644 testing/mozbase/mozprofile/mozprofile/__init__.py create mode 100644 testing/mozbase/mozprofile/mozprofile/addons.py create mode 100755 testing/mozbase/mozprofile/mozprofile/cli.py create mode 100644 testing/mozbase/mozprofile/mozprofile/diff.py create mode 100644 testing/mozbase/mozprofile/mozprofile/permissions.py create mode 100644 testing/mozbase/mozprofile/mozprofile/prefs.py create mode 100644 testing/mozbase/mozprofile/mozprofile/profile.py create mode 100644 testing/mozbase/mozprofile/mozprofile/view.py create mode 100644 testing/mozbase/mozprofile/setup.cfg create mode 100644 testing/mozbase/mozprofile/setup.py create mode 100644 testing/mozbase/mozprofile/tests/addon_stubs.py create mode 100644 testing/mozbase/mozprofile/tests/addons/apply-css-id-via-browser-specific-settings.xpi create mode 100644 testing/mozbase/mozprofile/tests/addons/apply-css-sans-id.xpi create mode 100644 testing/mozbase/mozprofile/tests/addons/apply-css.xpi create mode 100644 testing/mozbase/mozprofile/tests/addons/empty.xpi create mode 100644 testing/mozbase/mozprofile/tests/addons/empty/install.rdf create mode 100644 testing/mozbase/mozprofile/tests/addons/invalid.xpi create mode 100644 testing/mozbase/mozprofile/tests/files/dummy-profile/.eslintrc.js create mode 100644 testing/mozbase/mozprofile/tests/files/dummy-profile/Preferences create mode 100644 testing/mozbase/mozprofile/tests/files/dummy-profile/extensions/empty.xpi create mode 100644 testing/mozbase/mozprofile/tests/files/dummy-profile/prefs.js create mode 100644 testing/mozbase/mozprofile/tests/files/dummy-profile/user.js create mode 100644 testing/mozbase/mozprofile/tests/files/not_an_addon.txt create mode 100644 testing/mozbase/mozprofile/tests/files/prefs_with_comments.js create mode 100644 testing/mozbase/mozprofile/tests/files/prefs_with_interpolation.js create mode 100644 testing/mozbase/mozprofile/tests/files/prefs_with_multiline.js create mode 100644 testing/mozbase/mozprofile/tests/install_manifests/test_addon_1.rdf create mode 100644 testing/mozbase/mozprofile/tests/install_manifests/test_addon_2.rdf create mode 100644 testing/mozbase/mozprofile/tests/install_manifests/test_addon_3.rdf create mode 100644 testing/mozbase/mozprofile/tests/install_manifests/test_addon_4.rdf create mode 100644 testing/mozbase/mozprofile/tests/install_manifests/test_addon_invalid_no_id.rdf create mode 100644 testing/mozbase/mozprofile/tests/install_manifests/test_addon_invalid_not_wellformed.rdf create mode 100644 testing/mozbase/mozprofile/tests/install_manifests/test_addon_invalid_version.rdf create mode 100644 testing/mozbase/mozprofile/tests/install_manifests/test_addon_unpack.rdf create mode 100644 
testing/mozbase/mozprofile/tests/manifest.toml create mode 100755 testing/mozbase/mozprofile/tests/test_addonid.py create mode 100644 testing/mozbase/mozprofile/tests/test_addons.py create mode 100755 testing/mozbase/mozprofile/tests/test_bug758250.py create mode 100644 testing/mozbase/mozprofile/tests/test_chrome_profile.py create mode 100644 testing/mozbase/mozprofile/tests/test_clone_cleanup.py create mode 100755 testing/mozbase/mozprofile/tests/test_nonce.py create mode 100755 testing/mozbase/mozprofile/tests/test_permissions.py create mode 100755 testing/mozbase/mozprofile/tests/test_preferences.py create mode 100644 testing/mozbase/mozprofile/tests/test_profile.py create mode 100644 testing/mozbase/mozprofile/tests/test_profile_view.py create mode 100644 testing/mozbase/mozprofile/tests/test_server_locations.py create mode 100644 testing/mozbase/mozproxy/MANIFEST.in create mode 100644 testing/mozbase/mozproxy/mozproxy/__init__.py create mode 100644 testing/mozbase/mozproxy/mozproxy/__main__.py create mode 100644 testing/mozbase/mozproxy/mozproxy/backends/__init__.py create mode 100644 testing/mozbase/mozproxy/mozproxy/backends/base.py create mode 100644 testing/mozbase/mozproxy/mozproxy/backends/mitm/__init__.py create mode 100644 testing/mozbase/mozproxy/mozproxy/backends/mitm/android.py create mode 100644 testing/mozbase/mozproxy/mozproxy/backends/mitm/desktop.py create mode 100644 testing/mozbase/mozproxy/mozproxy/backends/mitm/manifests/mitmproxy-rel-bin-4.0.4-linux64.manifest create mode 100644 testing/mozbase/mozproxy/mozproxy/backends/mitm/manifests/mitmproxy-rel-bin-4.0.4-osx.manifest create mode 100644 testing/mozbase/mozproxy/mozproxy/backends/mitm/manifests/mitmproxy-rel-bin-4.0.4-win.manifest create mode 100644 testing/mozbase/mozproxy/mozproxy/backends/mitm/manifests/mitmproxy-rel-bin-5.0.1-linux64.manifest create mode 100644 testing/mozbase/mozproxy/mozproxy/backends/mitm/manifests/mitmproxy-rel-bin-5.0.1-osx.manifest create mode 100644 testing/mozbase/mozproxy/mozproxy/backends/mitm/manifests/mitmproxy-rel-bin-5.0.1-win.manifest create mode 100644 testing/mozbase/mozproxy/mozproxy/backends/mitm/manifests/mitmproxy-rel-bin-5.1.1-linux64.manifest create mode 100644 testing/mozbase/mozproxy/mozproxy/backends/mitm/manifests/mitmproxy-rel-bin-5.1.1-osx.manifest create mode 100644 testing/mozbase/mozproxy/mozproxy/backends/mitm/manifests/mitmproxy-rel-bin-5.1.1-win.manifest create mode 100644 testing/mozbase/mozproxy/mozproxy/backends/mitm/manifests/mitmproxy-rel-bin-6.0.2-linux64.manifest create mode 100644 testing/mozbase/mozproxy/mozproxy/backends/mitm/manifests/mitmproxy-rel-bin-6.0.2-osx.manifest create mode 100644 testing/mozbase/mozproxy/mozproxy/backends/mitm/manifests/mitmproxy-rel-bin-6.0.2-win.manifest create mode 100644 testing/mozbase/mozproxy/mozproxy/backends/mitm/manifests/mitmproxy-rel-bin-7.0.4-linux64.manifest create mode 100644 testing/mozbase/mozproxy/mozproxy/backends/mitm/manifests/mitmproxy-rel-bin-7.0.4-osx.manifest create mode 100644 testing/mozbase/mozproxy/mozproxy/backends/mitm/manifests/mitmproxy-rel-bin-7.0.4-win.manifest create mode 100644 testing/mozbase/mozproxy/mozproxy/backends/mitm/manifests/mitmproxy-rel-bin-8.1.1-linux64.manifest create mode 100644 testing/mozbase/mozproxy/mozproxy/backends/mitm/manifests/mitmproxy-rel-bin-8.1.1-osx.manifest create mode 100644 testing/mozbase/mozproxy/mozproxy/backends/mitm/manifests/mitmproxy-rel-bin-8.1.1-win.manifest create mode 100644 testing/mozbase/mozproxy/mozproxy/backends/mitm/mitm.py create 
mode 100644 testing/mozbase/mozproxy/mozproxy/backends/mitm/mitmproxy_requirements.txt create mode 100644 testing/mozbase/mozproxy/mozproxy/backends/mitm/scripts/__init__.py create mode 100644 testing/mozbase/mozproxy/mozproxy/backends/mitm/scripts/alt-serverplayback.py create mode 100644 testing/mozbase/mozproxy/mozproxy/backends/mitm/scripts/alternate-server-replay.py create mode 100644 testing/mozbase/mozproxy/mozproxy/backends/mitm/scripts/catapult/LICENSE create mode 100644 testing/mozbase/mozproxy/mozproxy/backends/mitm/scripts/catapult/deterministic.js create mode 100644 testing/mozbase/mozproxy/mozproxy/backends/mitm/scripts/http_protocol_extractor.py create mode 100644 testing/mozbase/mozproxy/mozproxy/backends/mitm/scripts/inject-deterministic.py create mode 100644 testing/mozbase/mozproxy/mozproxy/driver.py create mode 100644 testing/mozbase/mozproxy/mozproxy/recordings.py create mode 100644 testing/mozbase/mozproxy/mozproxy/server.py create mode 100644 testing/mozbase/mozproxy/mozproxy/utils.py create mode 100644 testing/mozbase/mozproxy/setup.py create mode 100644 testing/mozbase/mozproxy/tests/__init__.py create mode 100644 testing/mozbase/mozproxy/tests/archive.tar.gz create mode 100644 testing/mozbase/mozproxy/tests/example.dump create mode 100644 testing/mozbase/mozproxy/tests/files/mitm5-linux-firefox-amazon.manifest create mode 100644 testing/mozbase/mozproxy/tests/files/mitm5-linux-firefox-amazon.zip create mode 100644 testing/mozbase/mozproxy/tests/files/recording.zip create mode 100644 testing/mozbase/mozproxy/tests/firefox create mode 100644 testing/mozbase/mozproxy/tests/manifest.toml create mode 100644 testing/mozbase/mozproxy/tests/paypal.mp create mode 100644 testing/mozbase/mozproxy/tests/support.py create mode 100644 testing/mozbase/mozproxy/tests/test_command_line.py create mode 100644 testing/mozbase/mozproxy/tests/test_mitm_addons.py create mode 100644 testing/mozbase/mozproxy/tests/test_proxy.py create mode 100644 testing/mozbase/mozproxy/tests/test_recording.py create mode 100644 testing/mozbase/mozproxy/tests/test_recordings.py create mode 100644 testing/mozbase/mozproxy/tests/test_utils.py create mode 100644 testing/mozbase/mozrunner/mozrunner/__init__.py create mode 100644 testing/mozbase/mozrunner/mozrunner/application.py create mode 100644 testing/mozbase/mozrunner/mozrunner/base/__init__.py create mode 100644 testing/mozbase/mozrunner/mozrunner/base/browser.py create mode 100644 testing/mozbase/mozrunner/mozrunner/base/device.py create mode 100644 testing/mozbase/mozrunner/mozrunner/base/runner.py create mode 100644 testing/mozbase/mozrunner/mozrunner/cli.py create mode 100644 testing/mozbase/mozrunner/mozrunner/devices/__init__.py create mode 100644 testing/mozbase/mozrunner/mozrunner/devices/android_device.py create mode 100644 testing/mozbase/mozrunner/mozrunner/devices/base.py create mode 100644 testing/mozbase/mozrunner/mozrunner/devices/emulator.py create mode 100644 testing/mozbase/mozrunner/mozrunner/devices/emulator_battery.py create mode 100644 testing/mozbase/mozrunner/mozrunner/devices/emulator_geo.py create mode 100644 testing/mozbase/mozrunner/mozrunner/devices/emulator_screen.py create mode 100644 testing/mozbase/mozrunner/mozrunner/errors.py create mode 100644 testing/mozbase/mozrunner/mozrunner/runners.py create mode 100755 testing/mozbase/mozrunner/mozrunner/utils.py create mode 100644 testing/mozbase/mozrunner/setup.cfg create mode 100644 testing/mozbase/mozrunner/setup.py create mode 100644 
testing/mozbase/mozrunner/tests/conftest.py create mode 100644 testing/mozbase/mozrunner/tests/manifest.toml create mode 100644 testing/mozbase/mozrunner/tests/test_crash.py create mode 100644 testing/mozbase/mozrunner/tests/test_interactive.py create mode 100644 testing/mozbase/mozrunner/tests/test_start.py create mode 100644 testing/mozbase/mozrunner/tests/test_states.py create mode 100644 testing/mozbase/mozrunner/tests/test_stop.py create mode 100644 testing/mozbase/mozrunner/tests/test_threads.py create mode 100644 testing/mozbase/mozrunner/tests/test_wait.py create mode 100644 testing/mozbase/mozscreenshot/mozscreenshot/__init__.py create mode 100644 testing/mozbase/mozscreenshot/setup.cfg create mode 100644 testing/mozbase/mozscreenshot/setup.py create mode 100644 testing/mozbase/mozserve/mozserve/__init__.py create mode 100644 testing/mozbase/mozserve/mozserve/servers.py create mode 100644 testing/mozbase/mozserve/setup.py create mode 100644 testing/mozbase/mozsystemmonitor/README.rst create mode 100644 testing/mozbase/mozsystemmonitor/mozsystemmonitor/__init__.py create mode 100644 testing/mozbase/mozsystemmonitor/mozsystemmonitor/resourcemonitor.py create mode 100644 testing/mozbase/mozsystemmonitor/setup.cfg create mode 100644 testing/mozbase/mozsystemmonitor/setup.py create mode 100644 testing/mozbase/mozsystemmonitor/tests/manifest.toml create mode 100644 testing/mozbase/mozsystemmonitor/tests/test_resource_monitor.py create mode 100644 testing/mozbase/moztest/moztest/__init__.py create mode 100644 testing/mozbase/moztest/moztest/adapters/__init__.py create mode 100644 testing/mozbase/moztest/moztest/adapters/unit.py create mode 100644 testing/mozbase/moztest/moztest/resolve.py create mode 100644 testing/mozbase/moztest/moztest/results.py create mode 100644 testing/mozbase/moztest/moztest/selftest/__init__.py create mode 100644 testing/mozbase/moztest/moztest/selftest/fixtures.py create mode 100644 testing/mozbase/moztest/moztest/selftest/output.py create mode 100644 testing/mozbase/moztest/setup.py create mode 100644 testing/mozbase/moztest/tests/data/srcdir/apple/a11y.toml create mode 100644 testing/mozbase/moztest/tests/data/srcdir/apple/moz.build create mode 100644 testing/mozbase/moztest/tests/data/srcdir/banana/moz.build create mode 100644 testing/mozbase/moztest/tests/data/srcdir/banana/xpcshell.toml create mode 100644 testing/mozbase/moztest/tests/data/srcdir/carrot/moz.build create mode 100644 testing/mozbase/moztest/tests/data/srcdir/carrot/xpcshell-one.toml create mode 100644 testing/mozbase/moztest/tests/data/srcdir/carrot/xpcshell-shared.toml create mode 100644 testing/mozbase/moztest/tests/data/srcdir/carrot/xpcshell-two.toml create mode 100644 testing/mozbase/moztest/tests/data/srcdir/dragonfruit/elderberry/xpcshell_updater.toml create mode 100644 testing/mozbase/moztest/tests/data/srcdir/dragonfruit/moz.build create mode 100644 testing/mozbase/moztest/tests/data/srcdir/dragonfruit/xpcshell.toml create mode 100644 testing/mozbase/moztest/tests/data/srcdir/fig/grape/instrumentation.toml create mode 100644 testing/mozbase/moztest/tests/data/srcdir/fig/huckleberry/instrumentation.toml create mode 100644 testing/mozbase/moztest/tests/data/srcdir/fig/moz.build create mode 100644 testing/mozbase/moztest/tests/data/srcdir/juniper/browser.toml create mode 100644 testing/mozbase/moztest/tests/data/srcdir/kiwi/browser.toml create mode 100644 testing/mozbase/moztest/tests/data/srcdir/moz.build create mode 100644 
testing/mozbase/moztest/tests/data/srcdir/wpt_manifest_data.json create mode 100644 testing/mozbase/moztest/tests/manifest.toml create mode 100644 testing/mozbase/moztest/tests/test.py create mode 100644 testing/mozbase/moztest/tests/test_resolve.py create mode 100644 testing/mozbase/mozversion/mozversion/__init__.py create mode 100644 testing/mozbase/mozversion/mozversion/errors.py create mode 100644 testing/mozbase/mozversion/mozversion/mozversion.py create mode 100644 testing/mozbase/mozversion/setup.cfg create mode 100644 testing/mozbase/mozversion/setup.py create mode 100644 testing/mozbase/mozversion/tests/manifest.toml create mode 100644 testing/mozbase/mozversion/tests/test_apk.py create mode 100644 testing/mozbase/mozversion/tests/test_binary.py create mode 100644 testing/mozbase/rust/mozdevice/Cargo.toml create mode 100644 testing/mozbase/rust/mozdevice/src/adb.rs create mode 100644 testing/mozbase/rust/mozdevice/src/lib.rs create mode 100644 testing/mozbase/rust/mozdevice/src/shell.rs create mode 100644 testing/mozbase/rust/mozdevice/src/test.rs create mode 100644 testing/mozbase/rust/mozprofile/Cargo.toml create mode 100644 testing/mozbase/rust/mozprofile/fuzz/Cargo.toml create mode 100644 testing/mozbase/rust/mozprofile/fuzz/fuzz_targets/prefreader.rs create mode 100644 testing/mozbase/rust/mozprofile/src/lib.rs create mode 100644 testing/mozbase/rust/mozprofile/src/preferences.rs create mode 100644 testing/mozbase/rust/mozprofile/src/prefreader.rs create mode 100644 testing/mozbase/rust/mozprofile/src/profile.rs create mode 100644 testing/mozbase/rust/mozrunner/Cargo.toml create mode 100644 testing/mozbase/rust/mozrunner/src/bin/firefox-default-path.rs create mode 100644 testing/mozbase/rust/mozrunner/src/firefox_args.rs create mode 100644 testing/mozbase/rust/mozrunner/src/lib.rs create mode 100644 testing/mozbase/rust/mozrunner/src/path.rs create mode 100644 testing/mozbase/rust/mozrunner/src/runner.rs create mode 100644 testing/mozbase/rust/mozversion/Cargo.toml create mode 100644 testing/mozbase/rust/mozversion/src/lib.rs create mode 100755 testing/mozbase/setup_development.py create mode 100755 testing/mozbase/versioninfo.py (limited to 'testing/mozbase') diff --git a/testing/mozbase/README.md b/testing/mozbase/README.md new file mode 100644 index 0000000000..dab25961bf --- /dev/null +++ b/testing/mozbase/README.md @@ -0,0 +1,20 @@ +# Mozbase + +Mozbase is a set of easy-to-use Python packages forming a supplemental standard +library for Mozilla. It provides consistency and reduces redundancy in +automation and other system-level software. All of Mozilla's test harnesses use +mozbase to some degree, including Talos, mochitest, and reftest. + +Learn more about mozbase at the [project page][]. + +Read [detailed docs][] online, or build them locally by running "make html" in +the docs directory. + +Consult [open][] [bugs][] and feel free to file [new bugs][]. 
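+
+For example, to build the HTML documentation locally (assuming Sphinx is
+installed):
+
+```
+cd testing/mozbase/docs
+make html
+```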
+ + +[project page]: https://wiki.mozilla.org/Auto-tools/Projects/Mozbase +[detailed docs]: https://firefox-source-docs.mozilla.org/mozbase/index.html +[open]: https://bugzilla.mozilla.org/buglist.cgi?resolution=---&component=Mozbase&product=Testing +[bugs]: https://bugzilla.mozilla.org/buglist.cgi?resolution=---&status_whiteboard_type=allwordssubstr&query_format=advanced&status_whiteboard=mozbase +[new bugs]: https://bugzilla.mozilla.org/enter_bug.cgi?product=Testing&component=Mozbase diff --git a/testing/mozbase/docs/Makefile b/testing/mozbase/docs/Makefile new file mode 100644 index 0000000000..386a52db13 --- /dev/null +++ b/testing/mozbase/docs/Makefile @@ -0,0 +1,153 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = _build + +# Internal variables. +PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . +# the i18n builder cannot share the environment and doctrees with the others +I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . + +.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext + +help: + @echo "Please use \`make ' where is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " text to make text files" + @echo " man to make manual pages" + @echo " texinfo to make Texinfo files" + @echo " info to make Texinfo files and run them through makeinfo" + @echo " gettext to make PO message catalogs" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + +clean: + -rm -rf $(BUILDDIR)/* + +html: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +singlehtml: + $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." + +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." 
+ +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/MozBase.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/MozBase.qhc" + +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." + @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/MozBase" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/MozBase" + @echo "# devhelp" + +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. The epub file is in $(BUILDDIR)/epub." + +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." + +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." + +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." diff --git a/testing/mozbase/docs/_static/structured_example.py b/testing/mozbase/docs/_static/structured_example.py new file mode 100644 index 0000000000..3ec1aa8dcc --- /dev/null +++ b/testing/mozbase/docs/_static/structured_example.py @@ -0,0 +1,111 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
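+
+# This example demonstrates mozlog's structured logging API from a minimal
+# test runner: it emits suite_start, test_start, test_end and suite_end
+# events for each test_* function defined below. setup_logging() in main()
+# defaults to writing the raw JSON log stream to stdout, and
+# add_logging_group() exposes the standard mozlog --log-* command line
+# options on the argument parser.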
+ +import argparse +import sys +import traceback +import types + +import six +from mozlog import commandline, get_default_logger + + +class TestAssertion(Exception): + pass + + +def assert_equals(a, b): + if a != b: + raise TestAssertion("%r not equal to %r" % (a, b)) + + +def expected(status): + def inner(f): + def test_func(): + f() + + test_func.__name__ = f.__name__ + test_func._expected = status + return test_func + + return inner + + +def test_that_passes(): + assert_equals(1, int("1")) + + +def test_that_fails(): + assert_equals(1, int("2")) + + +def test_that_has_an_error(): + assert_equals(2, 1 + "1") + + +@expected("FAIL") +def test_expected_fail(): + assert_equals(2 + 2, 5) + + +class TestRunner(object): + def __init__(self): + self.logger = get_default_logger(component="TestRunner") + + def gather_tests(self): + for item in six.itervalues(globals()): + if isinstance(item, types.FunctionType) and item.__name__.startswith( + "test_" + ): + yield item.__name__, item + + def run(self): + tests = list(self.gather_tests()) + + self.logger.suite_start(tests=[name for name, func in tests]) + self.logger.info("Running tests") + for name, func in tests: + self.run_test(name, func) + self.logger.suite_end() + + def run_test(self, name, func): + self.logger.test_start(name) + status = None + message = None + expected = func._expected if hasattr(func, "_expected") else "PASS" + try: + func() + except TestAssertion as e: + status = "FAIL" + message = str(e) + except Exception: + status = "ERROR" + message = traceback.format_exc() + else: + status = "PASS" + self.logger.test_end(name, status=status, expected=expected, message=message) + + +def get_parser(): + parser = argparse.ArgumentParser() + return parser + + +def main(): + parser = get_parser() + commandline.add_logging_group(parser) + + args = parser.parse_args() + + logger = commandline.setup_logging("structured-example", args, {"raw": sys.stdout}) + + runner = TestRunner() + try: + runner.run() + except Exception: + logger.critical("Error during test run:\n%s" % traceback.format_exc()) + + +if __name__ == "__main__": + main() diff --git a/testing/mozbase/docs/conf.py b/testing/mozbase/docs/conf.py new file mode 100644 index 0000000000..7855e250b8 --- /dev/null +++ b/testing/mozbase/docs/conf.py @@ -0,0 +1,280 @@ +# -*- coding: utf-8 -*- +# +# MozBase documentation build configuration file, created by +# sphinx-quickstart on Mon Oct 22 14:02:17 2012. +# +# This file is execfile()d with the current directory set to its containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import os +import sys + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +here = os.path.dirname(os.path.abspath(__file__)) +parent = os.path.dirname(here) +for item in os.listdir(parent): + path = os.path.join(parent, item) + if (not os.path.isdir(path)) or ( + not os.path.exists(os.path.join(path, "setup.py")) + ): + continue + sys.path.insert(0, path) + +# -- General configuration ----------------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. 
They can be extensions +# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.doctest", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix of source filenames. +source_suffix = ".rst" + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = "index" + +# General information about the project. +project = "MozBase" +copyright = "2012, Mozilla Automation and Tools team" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = "1" +# The full version, including alpha/beta/rc tags. +release = "1" + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + + +# -- Options for HTML output --------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "default" +on_rtd = os.environ.get("READTHEDOCS", None) == "True" + +if not on_rtd: + try: + import sphinx_rtd_theme + + html_theme = "sphinx_rtd_theme" + html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] + except ImportError: + pass + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +html_title = "mozbase documentation" + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. 
They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +html_show_copyright = False + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Output file base name for HTML help builder. +htmlhelp_basename = "MozBasedoc" + + +# -- Options for LaTeX output -------------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, author, documentclass [howto/manual]). +latex_documents = [ + ( + "index", + "MozBase.tex", + "MozBase Documentation", + "Mozilla Automation and Tools team", + "manual", + ), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output -------------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + "index", + "mozbase", + "MozBase Documentation", + ["Mozilla Automation and Tools team"], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------------ + +# Grouping the document tree into Texinfo files. 
List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + "index", + "MozBase", + "MozBase Documentation", + "Mozilla Automation and Tools team", + "MozBase", + "One line description of project.", + "Miscellaneous", + ), +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' diff --git a/testing/mozbase/docs/devicemanagement.rst b/testing/mozbase/docs/devicemanagement.rst new file mode 100644 index 0000000000..e2c229b3b4 --- /dev/null +++ b/testing/mozbase/docs/devicemanagement.rst @@ -0,0 +1,11 @@ +Device management +----------------- + +Mozbase provides a module called `mozdevice` for the purposes of +running automated tests or scripts on an Android phone, tablet, or +emulator connected to a workstation. + +.. toctree:: + :maxdepth: 3 + + mozdevice diff --git a/testing/mozbase/docs/gettinginfo.rst b/testing/mozbase/docs/gettinginfo.rst new file mode 100644 index 0000000000..35c4c45081 --- /dev/null +++ b/testing/mozbase/docs/gettinginfo.rst @@ -0,0 +1,13 @@ +Getting information on the system under test +============================================ + +It's often necessary to get some information about the system we're +testing, for example to turn on or off some platform specific +behaviour. + +.. toctree:: + :maxdepth: 2 + + mozinfo + moznetwork + mozversion diff --git a/testing/mozbase/docs/index.rst b/testing/mozbase/docs/index.rst new file mode 100644 index 0000000000..f63f0aa68d --- /dev/null +++ b/testing/mozbase/docs/index.rst @@ -0,0 +1,44 @@ +.. MozBase documentation master file, created by + sphinx-quickstart on Mon Oct 22 14:02:17 2012. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +mozbase +======= + +Mozbase is a set of easy-to-use Python packages forming a supplemental standard +library for Mozilla. It provides consistency and reduces redundancy in +automation and other system-level software. All of Mozilla's test harnesses use +mozbase to some degree, including Talos_, mochitest_, and reftest_. + +.. _Talos: https://wiki.mozilla.org/Talos + +.. _mochitest: https://developer.mozilla.org/en-US/docs/Mochitest + +.. _reftest: https://developer.mozilla.org/en-US/docs/Creating_reftest-based_unit_tests + +In the course of writing automated tests at Mozilla, we found that +the same tasks came up over and over, regardless of the specific nature of +what we were testing. We figured that consolidating this code into a set of +libraries would save us a good deal of time, and so we spent some effort +factoring out the best-of-breed automation code into something we named +"mozbase" (usually written all in lower case except at the beginning of a +sentence). + +This is the main documentation for users of mozbase. There is also a +project_ wiki page with notes on development practices and administration. + +.. _project: https://wiki.mozilla.org/Auto-tools/Projects/Mozbase + +The documentation is organized by category, then by module. Figure out what you +want to do then dive in! + +.. 
toctree:: + :maxdepth: 2 + + manifestparser + gettinginfo + setuprunning + servingcontent + loggingreporting + devicemanagement diff --git a/testing/mozbase/docs/loggingreporting.rst b/testing/mozbase/docs/loggingreporting.rst new file mode 100644 index 0000000000..a8561a49b2 --- /dev/null +++ b/testing/mozbase/docs/loggingreporting.rst @@ -0,0 +1,11 @@ +Logging and reporting +===================== + +Ideally output between different types of testing system should be as +uniform as possible, as well as making it easy to make things more or +less verbose. We created some libraries to make doing this easy. + +.. toctree:: + :maxdepth: 2 + + mozlog diff --git a/testing/mozbase/docs/make.bat b/testing/mozbase/docs/make.bat new file mode 100644 index 0000000000..d67c86ae98 --- /dev/null +++ b/testing/mozbase/docs/make.bat @@ -0,0 +1,190 @@ +@ECHO OFF + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set BUILDDIR=_build +set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . +set I18NSPHINXOPTS=%SPHINXOPTS% . +if NOT "%PAPER%" == "" ( + set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% + set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% +) + +if "%1" == "" goto help + +if "%1" == "help" ( + :help + echo.Please use `make ^` where ^ is one of + echo. html to make standalone HTML files + echo. dirhtml to make HTML files named index.html in directories + echo. singlehtml to make a single large HTML file + echo. pickle to make pickle files + echo. json to make JSON files + echo. htmlhelp to make HTML files and a HTML help project + echo. qthelp to make HTML files and a qthelp project + echo. devhelp to make HTML files and a Devhelp project + echo. epub to make an epub + echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter + echo. text to make text files + echo. man to make manual pages + echo. texinfo to make Texinfo files + echo. gettext to make PO message catalogs + echo. changes to make an overview over all changed/added/deprecated items + echo. linkcheck to check all external links for integrity + echo. doctest to run all doctests embedded in the documentation if enabled + goto end +) + +if "%1" == "clean" ( + for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i + del /q /s %BUILDDIR%\* + goto end +) + +if "%1" == "html" ( + %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/html. + goto end +) + +if "%1" == "dirhtml" ( + %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. + goto end +) + +if "%1" == "singlehtml" ( + %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. + goto end +) + +if "%1" == "pickle" ( + %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the pickle files. + goto end +) + +if "%1" == "json" ( + %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the JSON files. + goto end +) + +if "%1" == "htmlhelp" ( + %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp + if errorlevel 1 exit /b 1 + echo. 
+ echo.Build finished; now you can run HTML Help Workshop with the ^ +.hhp project file in %BUILDDIR%/htmlhelp. + goto end +) + +if "%1" == "qthelp" ( + %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run "qcollectiongenerator" with the ^ +.qhcp project file in %BUILDDIR%/qthelp, like this: + echo.^> qcollectiongenerator %BUILDDIR%\qthelp\MozBase.qhcp + echo.To view the help file: + echo.^> assistant -collectionFile %BUILDDIR%\qthelp\MozBase.ghc + goto end +) + +if "%1" == "devhelp" ( + %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. + goto end +) + +if "%1" == "epub" ( + %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The epub file is in %BUILDDIR%/epub. + goto end +) + +if "%1" == "latex" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "text" ( + %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The text files are in %BUILDDIR%/text. + goto end +) + +if "%1" == "man" ( + %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The manual pages are in %BUILDDIR%/man. + goto end +) + +if "%1" == "texinfo" ( + %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. + goto end +) + +if "%1" == "gettext" ( + %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The message catalogs are in %BUILDDIR%/locale. + goto end +) + +if "%1" == "changes" ( + %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes + if errorlevel 1 exit /b 1 + echo. + echo.The overview file is in %BUILDDIR%/changes. + goto end +) + +if "%1" == "linkcheck" ( + %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck + if errorlevel 1 exit /b 1 + echo. + echo.Link check complete; look for any errors in the above output ^ +or in %BUILDDIR%/linkcheck/output.txt. + goto end +) + +if "%1" == "doctest" ( + %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest + if errorlevel 1 exit /b 1 + echo. + echo.Testing of doctests in the sources finished, look at the ^ +results in %BUILDDIR%/doctest/output.txt. + goto end +) + +:end diff --git a/testing/mozbase/docs/manifestparser.rst b/testing/mozbase/docs/manifestparser.rst new file mode 100644 index 0000000000..3ab2f20098 --- /dev/null +++ b/testing/mozbase/docs/manifestparser.rst @@ -0,0 +1,648 @@ +Managing lists of tests +======================= + +.. py:currentmodule:: manifestparser + +We don't always want to run all tests, all the time. Sometimes a test +may be broken, in other cases we only want to run a test on a specific +platform or build of Mozilla. To handle these cases (and more), we +created a python library to create and use test "manifests", which +codify this information. + +Update for August 2023: Transition to TOML for manifestparser +````````````````````````````````````````````````````````````` + +As of August 2023, manifestparser will be transitioning from INI format +configuration files to TOML. 
The new TOML format will better support +future continuous integration automation and has a much more +precise syntax (FFI see `Bug 1821199 `_). +During the migration period both ``*.ini`` files and +``*.toml`` files will be supported. If an INI config file is specified +(e.g. in ``moz.build``) and a TOML file is present, the TOML file will be +used. + +:mod:`manifestparser` --- Create and manage test manifests +----------------------------------------------------------- + +manifestparser lets you easily create and use test manifests, to +control which tests are run under what circumstances. + +What manifestparser gives you: + +* manifests are ordered lists of tests +* tests may have an arbitrary number of key, value pairs +* the parser returns an ordered list of test data structures, which + are just dicts with some keys. For example, a test with no + user-specified metadata looks like this: + +.. code-block:: text + + [{'expected': 'pass', + 'path': '/home/mozilla/mozmill/src/manifestparser/manifestparser/tests/testToolbar/testBackForwardButtons.js', + 'relpath': 'testToolbar/testBackForwardButtons.js', + 'name': 'testBackForwardButtons.js', + 'here': '/home/mozilla/mozmill/src/manifestparser/manifestparser/tests', + 'manifest': '/home/mozilla/mozmill/src/manifestparser/manifestparser/tests/manifest.toml',}] + +The keys displayed here (path, relpath, name, here, and manifest) are +reserved keys for manifestparser and any consuming APIs. You can add +additional key, value metadata to each test. + +Why have test manifests? +```````````````````````` + +It is desirable to have a unified format for test manifests for testing +`mozilla-central `_, etc. + +* It is desirable to be able to selectively enable or disable tests based on platform or other conditions. This should be easy to do. Currently, since many of the harnesses just crawl directories, there is no effective way of disabling a test except for removal from mozilla-central +* It is desriable to do this in a universal way so that enabling and disabling tests as well as other tasks are easily accessible to a wider audience than just those intimately familiar with the specific test framework. +* It is desirable to have other metadata on top of the test. For instance, let's say a test is marked as skipped. It would be nice to give the reason why. + + +Most Mozilla test harnesses work by crawling a directory structure. +While this is straight-forward, manifests offer several practical +advantages: + +* ability to turn a test off easily: if a test is broken on m-c + currently, the only way to turn it off, generally speaking, is just + removing the test. Often this is undesirable, as if the test should + be dismissed because other people want to land and it can't be + investigated in real time (is it a failure? is the test bad? is no + one around that knows the test?), then backing out a test is at best + problematic. With a manifest, a test may be disabled without + removing it from the tree and a bug filed with the appropriate + reason: + +.. code-block:: text + + ["test_broken.js"] + disabled = "https://bugzilla.mozilla.org/show_bug.cgi?id=123456" + +* ability to run different (subsets of) tests on different + platforms. Traditionally, we've done a bit of magic or had the test + know what platform it would or would not run on. With manifests, you + can mark what platforms a test will or will not run on and change + these without changing the test. + +.. 
code-block:: text + + ["test_works_on_windows_only.js"] + skip-if = ["os != 'win'"] + +* ability to markup tests with metadata. We have a large, complicated, + and always changing infrastructure. key, value metadata may be used + as an annotation to a test and appropriately curated and mined. For + instance, we could mark certain tests as randomorange with a bug + number, if it were desirable. + +* ability to have sane and well-defined test-runs. You can keep + different manifests for different test runs and ``["include:FILENAME.toml"]`` + (sub)manifests as appropriate to your needs. + +Manifest Format +``````````````` + +Manifests are ``*.toml`` (formerly ``*.ini``) files with the section names denoting the path +relative to the manifest: + +.. code-block:: text + + ["foo.js"] + ["bar.js"] + ["fleem.js"] + +The sections are read in order. In addition, tests may include +arbitrary key, value metadata to be used by the harness. You may also +have a `[DEFAULT]` section that will give key, value pairs that will +be inherited by each test unless overridden: + +.. code-block:: text + + [DEFAULT] + type = "restart" + + ["lilies.js"] + color = "white" + + ["daffodils.js"] + color = "yellow" + type = "other" + # override type from DEFAULT + + ["roses.js"] + color = "red" + +You can also include other manifests: + +.. code-block:: text + + ["include:subdir/anothermanifest.toml"] + +And reference parent manifests to inherit keys and values from the DEFAULT +section, without adding possible included tests. + +.. code-block:: text + + ["parent:../manifest.toml"] + +Manifests are included relative to the directory of the manifest with +the `[include:]` directive unless they are absolute paths. + +By default you can use '#' as a comment character. Comments can start a +new line, or be inline. + +.. code-block:: text + + ["roses.js"] + # a valid comment + color = "red" # another valid comment + +Because in TOML all values must be quoted there is no risk of an anchor in +an URL being interpreted as a comment. + +.. code-block:: text + + ["test1.js"] + url = "https://foo.com/bar#baz" # Bug 1234 + + +Manifest Conditional Expressions +```````````````````````````````` +The conditional expressions used in manifests are parsed using the *ExpressionParser* class. + +.. autoclass:: manifestparser.ExpressionParser + +Consumers of this module are expected to pass in a value dictionary +for evaluating conditional expressions. A common pattern is to pass +the dictionary from the :mod:`mozinfo` module. + +Data +```` + +Manifest Destiny gives tests as a list of dictionaries (in python +terms). + +* path: full path to the test +* relpath: relative path starting from the root directory. The root directory + is typically the location of the root manifest, or the source + repository. It can be specified at runtime by passing in `rootdir` + to `TestManifest`. Defaults to the directory containing the test's + ancestor manifest. +* name: file name of the test +* here: the parent directory of the manifest +* manifest: the path to the manifest containing the test + +This data corresponds to a one-line manifest: + +.. code-block:: text + + ["testToolbar/testBackForwardButtons.js"] + +If additional key, values were specified, they would be in this dict +as well. + +Outside of the reserved keys, the remaining key, values +are up to convention to use. There is a (currently very minimal) +generic integration layer in manifestparser for use of all harnesses, +`manifestparser.TestManifest`. 
+For instance, if the 'disabled' key is present, you can get the set of +tests without disabled (various other queries are doable as well). + +Since the system is convention-based, the harnesses may do whatever +they want with the data. They may ignore it completely, they may use +the provided integration layer, or they may provide their own +integration layer. This should allow whatever sort of logic is +desired. For instance, if in yourtestharness you wanted to run only on +mondays for a certain class of tests: + +.. code-block:: text + + tests = [] + for test in manifests.tests: + if 'runOnDay' in test: + if calendar.day_name[calendar.weekday(*datetime.datetime.now().timetuple()[:3])].lower() == test['runOnDay'].lower(): + tests.append(test) + else: + tests.append(test) + +To recap: + +* the manifests allow you to specify test data +* the parser gives you this data +* you can use it however you want or process it further as you need + +Tests are denoted by sections in an ``*.toml`` file (see +https://searchfox.org/mozilla-central/source/testing/mozbase/manifestparser/tests/manifest.toml +). + +Additional manifest files may be included with an `[include:]` directive: + +.. code-block:: text + + ["include:path-to-additional-file-manifest.toml"] + +The path to included files is relative to the current manifest. + +The `[DEFAULT]` section contains variables that all tests inherit from. + +Included files will inherit the top-level variables but may override +in their own `[DEFAULT]` section. + +manifestparser Architecture +```````````````````````````` + +There is a two- or three-layered approach to the manifestparser +architecture, depending on your needs: + +1. ManifestParser: this is a generic parser for ``*.toml`` manifests that +facilitates the `[include:]` logic and the inheritance of +metadata. Despite the internal variable being called `self.tests` +(an oversight), this layer has nothing in particular to do with tests. + +2. TestManifest: this is a harness-agnostic integration layer that is +test-specific. TestManifest facilitates `skip-if` logic. + +3. Optionally, a harness will have an integration layer than inherits +from TestManifest if more harness-specific customization is desired at +the manifest level. + +See the source code at +https://searchfox.org/mozilla-central/source/testing/mozbase/manifestparser +. + +Filtering Manifests +``````````````````` + +After creating a `TestManifest` object, all manifest files are read and a list +of test objects can be accessed via `TestManifest.tests`. However this list contains +all test objects, whether they should be run or not. Normally they need to be +filtered down only to the set of tests that should be run by the test harness. + +To do this, a test harness can call `TestManifest.active_tests`: + +.. code-block:: python + + tests = manifest.active_tests(exists=True, disabled=True, **tags) + +By default, `active_tests` runs the filters found in +:attr:`~.DEFAULT_FILTERS`. It also accepts two convenience arguments: + +1. `exists`: if True (default), filter out tests that do not exist on the local file system. +2. `disabled`: if True (default), do not filter out tests containing the 'disabled' key + (which can be set by `skip-if` manually). + +This works for simple cases, but there are other built-in filters, or even custom filters +that can be applied to the `TestManifest`. To do so, add the filter to `TestManifest.filters`: + +.. 
code-block:: python + + from manifestparser.filters import subsuite + import mozinfo + + filters = [subsuite('devtools')] + tests = manifest.active_tests(filters=filters, **mozinfo.info) + +.. automodule:: manifestparser.filters + :members: + :exclude-members: filterlist,InstanceFilter,DEFAULT_FILTERS + +.. autodata:: manifestparser.filters.DEFAULT_FILTERS + :annotation: + +For example, suppose we want to introduce a new key called `timeout-if` that adds a +'timeout' property to a test if a certain condition is True. The syntax in the manifest +files will look like this: + +.. code-block:: text + + ["test_foo.py"] + timeout-if = ["300, os == 'win'"] + +The value is , where condition is the same format as the one in +`skip-if`. In the above case, if os == 'win', a timeout of 300 seconds will be +applied. Otherwise, no timeout will be applied. All we need to do is define the filter +and add it: + +.. code-block:: python + + from manifestparser.expression import parse + import mozinfo + + def timeout_if(tests, values): + for test in tests: + if 'timeout-if' in test: + timeout, condition = test['timeout-if'].split(',', 1) + if parse(condition, **values): + test['timeout'] = timeout + yield test + + tests = manifest.active_tests(filters=[timeout_if], **mozinfo.info) + + +CLI +``` + +**NOTE:** *The manifestparser CLI is currently being updated to support TOML.* + +Run `manifestparser help` for usage information. + +To create a manifest from a set of directories: + +.. code-block:: text + + manifestparser [options] create directory <...> [create-options] + +To output a manifest of tests: + +.. code-block:: text + + manifestparser [options] write manifest <...> -tag1 -tag2 --key1=value1 --key2=value2 ... + +To copy tests and manifests from a source: + +.. code-block:: text + + manifestparser [options] copy from_manifest to_manifest -tag1 -tag2 `key1=value1 key2=value2 ... + +To update the tests associated with with a manifest from a source +directory: + +.. code-block:: text + + manifestparser [options] update manifest from_directory -tag1 -tag2 --key1=value1 --key2=value2 ... + +Creating Manifests +`````````````````` + +manifestparser comes with a console script, `manifestparser create`, that +may be used to create a seed manifest structure from a directory of +files. Run `manifestparser help create` for usage information. + +Copying Manifests +````````````````` + +To copy tests and manifests from a source: + +.. code-block:: text + + manifestparser [options] copy from_manifest to_directory -tag1 -tag2 `key1=value1 key2=value2 ... + +Updating Tests +`````````````` + +To update the tests associated with with a manifest from a source +directory: + +.. code-block:: text + + manifestparser [options] update manifest from_directory -tag1 -tag2 `key1=value1 `key2=value2 ... + +Usage example +````````````` + +Here is an example of how to create manifests for a directory tree and +update the tests listed in the manifests from an external source. + +Creating Manifests +`````````````````` + +Let's say you want to make a series of manifests for a given directory structure containing `.js` test files: + +.. 
code-block:: text + + testing/mozmill/tests/firefox/ + testing/mozmill/tests/firefox/testAwesomeBar/ + testing/mozmill/tests/firefox/testPreferences/ + testing/mozmill/tests/firefox/testPrivateBrowsing/ + testing/mozmill/tests/firefox/testSessionStore/ + testing/mozmill/tests/firefox/testTechnicalTools/ + testing/mozmill/tests/firefox/testToolbar/ + testing/mozmill/tests/firefox/restartTests + +You can use `manifestparser create` to do this: + +.. code-block:: text + + $ manifestparser help create + Usage: manifestparser.py [options] create directory <...> + + create a manifest from a list of directories + + Options: + -p PATTERN, `pattern=PATTERN + glob pattern for files + -i IGNORE, `ignore=IGNORE + directories to ignore + -w IN_PLACE, --in-place=IN_PLACE + Write .ini files in place; filename to write to + +We only want `.js` files and we want to skip the `restartTests` directory. +We also want to write a manifest per directory, so I use the `--in-place` +option to write the manifests: + +.. code-block:: text + + manifestparser create . -i restartTests -p '*.js' -w manifest.ini + +This creates a manifest.ini per directory that we care about with the JS test files: + +.. code-block:: text + + testing/mozmill/tests/firefox/manifest.ini + testing/mozmill/tests/firefox/testAwesomeBar/manifest.ini + testing/mozmill/tests/firefox/testPreferences/manifest.ini + testing/mozmill/tests/firefox/testPrivateBrowsing/manifest.ini + testing/mozmill/tests/firefox/testSessionStore/manifest.ini + testing/mozmill/tests/firefox/testTechnicalTools/manifest.ini + testing/mozmill/tests/firefox/testToolbar/manifest.ini + +The top-level `manifest.ini` merely has `[include:]` references to the sub manifests: + +.. code-block:: text + + [include:testAwesomeBar/manifest.ini] + [include:testPreferences/manifest.ini] + [include:testPrivateBrowsing/manifest.ini] + [include:testSessionStore/manifest.ini] + [include:testTechnicalTools/manifest.ini] + [include:testToolbar/manifest.ini] + +Each sub-level manifest contains the (`.js`) test files relative to it. + +Updating the tests from manifests +````````````````````````````````` + +You may need to update tests as given in manifests from a different source directory. +`manifestparser update` was made for just this purpose: + +.. code-block:: text + + Usage: manifestparser [options] update manifest directory -tag1 -tag2 `key1=value1 --key2=value2 ... + + update the tests as listed in a manifest from a directory + +To update from a directory of tests in `~/mozmill/src/mozmill-tests/firefox/` run: + +.. code-block:: text + + manifestparser update manifest.ini ~/mozmill/src/mozmill-tests/firefox/ + +Tests +````` + +manifestparser includes a suite of tests. + +`test_manifest.txt` is a doctest that may be helpful in figuring out +how to use the API. Tests are run via `mach python-test testing/mozbase/manifestparser`. + +Using mach manifest skip-fails +`````````````````````````````` + +The first of the ``mach manifest`` subcommands is ``skip-fails``. This command +can be used to *automatically* edit manifests to skip tests that are failing +as well as file the corresponding bugs for the failures. This is particularly +useful when "greening up" a new platform. + +You may verify the proposed changes from ``skip-fails`` output and examine +any local manifest changes with ``hg status``. + +Here is the usage: + +.. 
code-block:: text + + $ ./mach manifest skip-fails --help + usage: mach [global arguments] manifest skip-fails [command arguments] + + Sub Command Arguments: + try_url Treeherder URL for try (please use quotes) + -b BUGZILLA, --bugzilla BUGZILLA + Bugzilla instance + -m META_BUG_ID, --meta-bug-id META_BUG_ID + Meta Bug id + -s, --turbo Skip all secondary failures + -t SAVE_TASKS, --save-tasks SAVE_TASKS + Save tasks to file + -T USE_TASKS, --use-tasks USE_TASKS + Use tasks from file + -f SAVE_FAILURES, --save-failures SAVE_FAILURES + Save failures to file + -F USE_FAILURES, --use-failures USE_FAILURES + Use failures from file + -M MAX_FAILURES, --max-failures MAX_FAILURES + Maximum number of failures to skip (-1 == no limit) + -v, --verbose Verbose mode + -d, --dry-run Determine manifest changes, but do not write them + $ + +``try_url`` --- Treeherder URL +------------------------------ +This is the url (usually in single quotes) from running tests in try, for example: +'https://treeherder.mozilla.org/jobs?repo=try&revision=babc28f495ee8af2e4f059e9cbd23e84efab7d0d' + +``--bugzilla BUGZILLA`` --- Bugzilla instance +--------------------------------------------- + +By default the Bugzilla instance is ``bugzilla.allizom.org``, but you may set it on the command +line to another value such as ``bugzilla.mozilla.org`` (or by setting the environment variable +``BUGZILLA``). + +``--meta-bug-id META_BUG_ID`` --- Meta Bug id +--------------------------------------------- + +Any new bugs that are filed will block (be dependents of) this "meta" bug (optional). + +``--turbo`` --- Skip all secondary failures +------------------------------------------- + +The default ``skip-fails`` behavior is to skip only the first failure (for a given label) for each test. +In `turbo` mode, all failures for this manifest + label will skipped. + +``--save-tasks SAVE_TASKS`` --- Save tasks to file +-------------------------------------------------- + +This feature is primarily for ``skip-fails`` development and debugging. +It will save the tasks (downloaded via mozci) to the specified JSON file +(which may be used in a future ``--use-tasks`` option) + +``--use-tasks USE_TASKS`` --- Use tasks from file +------------------------------------------------- +This feature is primarily for ``skip-fails`` development and debugging. +It will uses the tasks from the specified JSON file (instead of downloading them via mozci). +See also ``--save-tasks``. + +``--save-failures SAVE_FAILURES`` --- Save failures to file +----------------------------------------------------------- + +This feature is primarily for ``skip-fails`` development and debugging. +It will save the failures (calculated from the tasks) to the specified JSON file +(which may be used in a future ``--use-failures`` option) + +``--use-failures USE_FAILURES`` --- Use failures from file +---------------------------------------------------------- +This feature is primarily for ``skip-fails`` development and debugging. +It will uses the failures from the specified JSON file (instead of downloading them via mozci). +See also ``--save-failures``. + +``--max-failures MAX_FAILURES`` --- Maximum number of failures to skip +---------------------------------------------------------------------- +This feature is primarily for ``skip-fails`` development and debugging. +It will limit the number of failures that are skipped (default is -1 == no limit). + +``--verbose`` --- Verbose mode +------------------------------ +Increase verbosity of output. 
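+
+For example, a hypothetical verbose dry run (the try revision below is a
+placeholder for your own try push):
+
+.. code-block:: text
+
+    $ ./mach manifest skip-fails -v --dry-run 'https://treeherder.mozilla.org/jobs?repo=try&revision=<your-try-revision>'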
+ +``--dry-run`` --- Dry run +------------------------- +In dry run mode, the manifest changes (and bugs top be filed) are determined, but not written. + + +Bugs +```` + +Please file any bugs or feature requests at + +https://bugzilla.mozilla.org/enter_bug.cgi?product=Testing&component=ManifestParser + +Or contact in #cia on irc.mozilla.org + +Design Considerations +````````````````````` + +Contrary to some opinion, manifestparser.py and the associated ``*.toml`` +format were not magically plucked from the sky but were descended upon +through several design considerations. + +* test manifests should be ordered. The current ``*.toml`` format supports + this (as did the ``*.ini`` format) + +* the manifest format should be easily human readable/writable And + programmatically editable. While the ``*.ini`` format worked for a long + time the underspecified syntax made it difficult to reliably parse. + The new ``*.toml`` format is widely accepted, as a formal syntax as well + as libraries to read and edit it (e.g. ``tomlkit``). + +* there should be a single file that may easily be + transported. Traditionally, test harnesses have lived in + mozilla-central. This is less true these days and it is increasingly + likely that more tests will not live in mozilla-central going + forward. So `manifestparser.py` should be highly consumable. To + this end, it is a single file, as appropriate to mozilla-central, + which is also a working python package deployed to PyPI for easy + installation. + +Historical Reference +```````````````````` + +Date-ordered list of links about how manifests came to be where they are today:: + +* https://wiki.mozilla.org/Auto-tools/Projects/UniversalManifest +* http://alice.nodelman.net/blog/post/2010/05/ +* http://alice.nodelman.net/blog/post/universal-manifest-for-unit-tests-a-proposal/ +* https://elvis314.wordpress.com/2010/07/05/improving-personal-hygiene-by-adjusting-mochitests/ +* https://elvis314.wordpress.com/2010/07/27/types-of-data-we-care-about-in-a-manifest/ +* https://bugzilla.mozilla.org/show_bug.cgi?id=585106 +* http://elvis314.wordpress.com/2011/05/20/converting-xpcshell-from-listing-directories-to-a-manifest/ +* https://bugzilla.mozilla.org/show_bug.cgi?id=616999 +* https://developer.mozilla.org/en/Writing_xpcshell-based_unit_tests#Adding_your_tests_to_the_xpcshell_manifest +* https://bugzilla.mozilla.org/show_bug.cgi?id=1821199 diff --git a/testing/mozbase/docs/mozcrash.rst b/testing/mozbase/docs/mozcrash.rst new file mode 100644 index 0000000000..750c46dd8f --- /dev/null +++ b/testing/mozbase/docs/mozcrash.rst @@ -0,0 +1,8 @@ +:mod:`mozcrash` --- Print stack traces from minidumps left behind by crashed processes +====================================================================================== + +Gets stack traces out of processes that have crashed and left behind +a minidump file using the Google Breakpad library. + +.. automodule:: mozcrash + :members: check_for_crashes diff --git a/testing/mozbase/docs/mozdebug.rst b/testing/mozbase/docs/mozdebug.rst new file mode 100644 index 0000000000..6a4be63f45 --- /dev/null +++ b/testing/mozbase/docs/mozdebug.rst @@ -0,0 +1,5 @@ +:mod:`mozdebug` --- Configure and launch compatible debuggers. +====================================================================================== + +.. 
automodule:: mozdebug + :members: get_debugger_info, get_default_debugger_name, DebuggerSearch diff --git a/testing/mozbase/docs/mozdevice.rst b/testing/mozbase/docs/mozdevice.rst new file mode 100644 index 0000000000..ea95a97d9f --- /dev/null +++ b/testing/mozbase/docs/mozdevice.rst @@ -0,0 +1,8 @@ +:mod:`mozdevice` --- Interact with Android devices +================================================== + +.. automodule:: mozdevice + :members: + :undoc-members: + :inherited-members: + :show-inheritance: diff --git a/testing/mozbase/docs/mozfile.rst b/testing/mozbase/docs/mozfile.rst new file mode 100644 index 0000000000..3ab5492e90 --- /dev/null +++ b/testing/mozbase/docs/mozfile.rst @@ -0,0 +1,9 @@ +:mod:`mozfile` --- File utilities for use in Mozilla testing +============================================================ + +mozfile is a convenience library for taking care of some common file-related +tasks in automated testing, such as extracting files or recursively removing +directories. + +.. automodule:: mozfile + :members: extract, extract_tarball, extract_zip, move, remove diff --git a/testing/mozbase/docs/mozgeckoprofiler.rst b/testing/mozbase/docs/mozgeckoprofiler.rst new file mode 100644 index 0000000000..8e1ae6090d --- /dev/null +++ b/testing/mozbase/docs/mozgeckoprofiler.rst @@ -0,0 +1,21 @@ +:mod:`mozgeckoprofiler.rst` --- Gecko Profiler utilities +======================================================== + +This module contains various utilities to work with the Firefox Profiler, Gecko's +built-in performance profiler. Gecko itself records the profiles, and can dump them +out to file once the browser shuts down. This package takes those files, symbolicates +them (turns raw memory addresses into function or symbol names), and provides utilities +like opening up a locally stored profile in the Firefox Profiler interface. This +is done by serving the profiles locally, and opening a custom url in profiler.firefox.com. + +:mod:`mozgeckoprofiler.rst` --- File origins in mozgeckoprofiler +---------------------------------------------------------------- +The symbolication files were originally imported from the following repos, +with permission from their respective authors. However, since then the code has +been updated for usage within mozbase. + +https://github.com/vdjeric/Snappy-Symbolication-Server/ +https://github.com/mstange/analyze-tryserver-profiles/ + +The dump_syms_mac binary was copied from the objdir of a Firefox build on Mac. It's a +byproduct of the regular Firefox build process and gets generated in objdir/dist/host/bin/. diff --git a/testing/mozbase/docs/mozhttpd.rst b/testing/mozbase/docs/mozhttpd.rst new file mode 100644 index 0000000000..172744e603 --- /dev/null +++ b/testing/mozbase/docs/mozhttpd.rst @@ -0,0 +1,22 @@ + +:mod:`mozhttpd` --- Serving up content to be consumed by the browser +==================================================================== + + +.. warning:: The mozhttpd module is considered obsolete. For new code, + please use wptserve_ which can do everything mozhttpd does + and more. + +.. _wptserve: https://pypi.python.org/pypi/wptserve + +:mod:`mozhttpd` --- Simple webserver +------------------------------------ + +.. automodule:: mozhttpd + :members: + +Interface +````````` + +.. 
autoclass:: MozHttpd + :members: diff --git a/testing/mozbase/docs/mozinfo.rst b/testing/mozbase/docs/mozinfo.rst new file mode 100644 index 0000000000..c31ff9f702 --- /dev/null +++ b/testing/mozbase/docs/mozinfo.rst @@ -0,0 +1,70 @@ +:mod:`mozinfo` --- Get system information +========================================= + +Throughout Mozilla python code, checking the underlying +platform is done in many different ways. The various checks needed +lead to a lot of copy+pasting, leaving the reader to wonder....is this +specific check necessary for (e.g.) an operating system? Because +information is not consolidated, checks are not done consistently, nor +is it defined what we are checking for. + +`mozinfo `_ +proposes to solve this problem. mozinfo is a bridge interface, +making the underlying (complex) plethora of OS and architecture +combinations conform to a subset of values of relevance to +Mozilla software. The current implementation exposes relevant keys and +values such as: ``os``, ``version``, ``bits``, and ``processor``. Additionally, the +service pack in use is available on the windows platform. + + +API Usage +--------- + +mozinfo is a python package. Downloading the software and running +``python setup.py develop`` will allow you to do ``import mozinfo`` +from python. +`mozinfo.py `_ +is the only file contained in this package, +so if you need a single-file solution, you can just download or call +this file through the web. + +The top level attributes (``os``, ``version``, ``bits``, ``processor``) are +available as module globals:: + + if mozinfo.os == 'win': ... + +In addition, mozinfo exports a dictionary, ``mozinfo.info``, that +contain these values. mozinfo also exports: + +- ``choices``: a dictionary of possible values for os, bits, and + processor +- ``main``: the console_script entry point for mozinfo +- ``unknown``: a singleton denoting a value that cannot be determined + +``unknown`` has the string representation ``"UNKNOWN"``. +``unknown`` will evaluate as ``False`` in python:: + + if not mozinfo.os: ... # unknown! + + +Command Line Usage +------------------ + +mozinfo comes with a command line program, ``mozinfo`` which may be used to +diagnose one's current system. + +Example output:: + + os: linux + version: Ubuntu 10.10 + bits: 32 + processor: x86 + +Three of these fields, os, bits, and processor, have a finite set of +choices. You may display the value of these choices using +``mozinfo --os``, ``mozinfo --bits``, and ``mozinfo --processor``. +``mozinfo --help`` documents command-line usage. + + +.. automodule:: mozinfo + :members: diff --git a/testing/mozbase/docs/mozinstall.rst b/testing/mozbase/docs/mozinstall.rst new file mode 100644 index 0000000000..7db40d73de --- /dev/null +++ b/testing/mozbase/docs/mozinstall.rst @@ -0,0 +1,29 @@ +:mod:`mozinstall` --- Install and uninstall Gecko-based applications +==================================================================== + +mozinstall is a small python module with several convenience methods +useful for installing and uninstalling a gecko-based application +(e.g. Firefox) on the desktop. + +Simple example +-------------- + +:: + + import mozinstall + import tempfile + + tempdir = tempfile.mkdtemp() + firefox_dmg = 'firefox-38.0a1.en-US.mac.dmg' + install_folder = mozinstall.install(src=firefox_dmg, dest=tempdir) + binary = mozinstall.get_binary(install_folder, 'Firefox') + # from here you can execute the binary directly + # ... + mozinstall.uninstall(install_folder) + +API Documentation +----------------- + +.. 
automodule:: mozinstall + :members: is_installer, install, get_binary, uninstall, + InstallError, InvalidBinary, InvalidSource diff --git a/testing/mozbase/docs/mozlog.rst b/testing/mozbase/docs/mozlog.rst new file mode 100644 index 0000000000..63e3614243 --- /dev/null +++ b/testing/mozbase/docs/mozlog.rst @@ -0,0 +1,520 @@ +:mod:`mozlog` --- Structured logging for test output +=============================================================== + +:py:mod:`mozlog` is a library designed for logging the +execution and results of test harnesses. The internal data model is a +stream of JSON-compatible objects, with one object per log entry. The +default output format is line-based, with one JSON object serialized +per line. + +:py:mod:`mozlog` is *not* based on the stdlib logging +module, although it shares several concepts with it. + +One notable difference between this module and the standard logging +module is the way that loggers are created. The structured logging +module does not require that loggers with a specific name are +singleton objects accessed through a factory function. Instead the +``StructuredLogger`` constructor may be used directly. However all +loggers with the same name share the same internal state (the "Borg" +pattern). In particular the list of handler functions is the same for +all loggers with the same name. + +Typically, you would only instantiate one logger object per +program. Two convenience methods are provided to set and get the +default logger in the program. + +Logging is threadsafe, with access to handlers protected by a +``threading.Lock``. However it is `not` process-safe. This means that +applications using multiple processes, e.g. via the +``multiprocessing`` module, should arrange for all logging to happen in +a single process. + +Data Format +----------- + +Structured loggers produce messages in a simple format designed to be +compatible with the JSON data model. Each message is a single object, +with the type of message indicated by the ``action`` key. It is +intended that the set of ``action`` values be closed; where there are +use cases for additional values they should be integrated into this +module rather than extended in an ad-hoc way. The set of keys present +on on all messages is: + +``action`` + The type of the message (string). + +``time`` + The timestamp of the message in ms since the epoch (int). + +``thread`` + The name of the thread emitting the message (string). + +``pid`` + The pid of the process creating the message (int). + +``source`` + Name of the logger creating the message (string). + +For each ``action`` there are is a further set of specific fields +describing the details of the event that caused the message to be +emitted: + +``suite_start`` + Emitted when the testsuite starts running. + + ``tests`` + A dict of test ids keyed by group. Groups are any logical grouping + of tests, for example a manifest, directory or tag. For convenience, + a list of test ids can be used instead. In this case all tests will + automatically be placed in the 'default' group name. Test ids can + either be strings or lists of strings (an example of the latter is + reftests where the id has the form [test_url, ref_type, ref_url]). + Test ids are assumed to be unique within a given testsuite. In cases + where the test list is not known upfront an empty dict or list may + be passed (dict). + + ``name`` + An optional string to identify the suite by. + + ``run_info`` + An optional dictionary describing the properties of the + build and test environment. 
This contains the information provided + by :doc:`mozinfo `, plus a boolean ``debug`` field indicating + whether the build under test is a debug build. + +``suite_end`` + Emitted when the testsuite is finished and no more results will be produced. + +``test_start`` + Emitted when a test is being started. + + ``test`` + A unique id for the test (string or list of strings). + + ``path`` + Optional path to the test relative to some base (typically the root of the + source tree). Mainly used when ``test`` id is not a path (string). + +``test_status`` + Emitted for a test which has subtests to record the result of a + single subtest. + + ``test`` + The same unique id for the test as in the ``test_start`` message. + + ``subtest`` + Name of the subtest (string). + + ``status`` + Result of the test (string enum; ``PASS``, ``FAIL``, + ``PRECONDITION_FAILED``, ``TIMEOUT``, ``NOTRUN``) + + ``expected`` + Expected result of the test. Omitted if the expected result is the + same as the actual result (string enum, same as ``status``). + + ``known_intermittent`` + A list of known intermittent statuses for that test. Omitted if there are + no intermittent statuses expected. (items in the list are string enum, same as ``status``) + +``test_end`` + Emitted to give the result of a test with no subtests, or the status + of the overall file when there are subtests. + + ``test`` + The same unique id for the test as in the ``test_start`` message. + + ``status`` + Either result of the test (if there are no subtests) in which case + (string enum ``PASS``, ``FAIL``, ``PRECONDITION_FAILED``, + ``TIMEOUT``, ``CRASH``, ``ASSERT``, , ``SKIP``) or the status of + the overall file where there are subtests (string enum ``OK``, + ``PRECONDITION_FAILED``, ``ERROR``, ``TIMEOUT``, ``CRASH``, + ``ASSERT``, ``SKIP``). + + ``expected`` + The expected status, or omitted if the expected status matches the + actual status (string enum, same as ``status``). + + ``known_intermittent`` + A list of known intermittent statuses for that test. Omitted if there are + no intermittent statuses expected. (items in the list are string enum, same as ``status``) + +``process_output`` + Output from a managed subprocess. + + ``process`` + pid of the subprocess. + + ``command`` + Command used to launch the subprocess. + + ``data`` + Data output by the subprocess. + +``log`` + General human-readable logging message, used to debug the harnesses + themselves rather than to provide input to other tools. + + ``level`` + Level of the log message (string enum ``CRITICAL``, ``ERROR``, + ``WARNING``, ``INFO``, ``DEBUG``). + + ``message`` + Text of the log message. + +``shutdown`` + This is a special action that can only be logged once per logger state. + It is sent when calling :meth:`StructuredLogger.shutdown` or implicitly + when exiting the context manager. + +Testsuite Protocol +------------------ + +When used for testsuites, the following structured logging messages must be emitted: + + * One ``suite_start`` message before any ``test_*`` messages + + * One ``test_start`` message per test that is run + + * One ``test_status`` message per subtest that is run. This might be + zero if the test type doesn't have the notion of subtests. + + * One ``test_end`` message per test that is run, after the + ``test_start`` and any ``test_status`` messages for that same test. + + * One ``suite_end`` message after all ``test_*`` messages have been + emitted. + +The above mandatory events may be interspersed with ``process_output`` +and ``log`` events, as required. 
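+
+As a minimal sketch of this protocol (using the same ``StructuredLogger`` API
+shown in the Simple Examples section below; the suite and test ids here are
+made up for illustration), a harness reporting a single test with one subtest
+would emit:
+
+.. code-block:: python
+
+    import sys
+
+    from mozlog import formatters, handlers, structuredlog
+
+    logger = structuredlog.StructuredLogger("example-suite")
+    logger.add_handler(handlers.StreamHandler(sys.stdout,
+                                              formatters.JSONFormatter()))
+
+    logger.suite_start(["example-test"])  # one suite_start before any test_* messages
+    logger.test_start("example-test")     # one test_start per test
+    logger.test_status("example-test", "subtest-1", "PASS")  # one test_status per subtest
+    logger.test_end("example-test", "OK")  # overall file status, since the test has a subtest
+    logger.suite_end()                    # one suite_end after all test_* messages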
+
+Subtests
+~~~~~~~~
+
+The purpose of subtests is to deal with situations where a single test
+produces more than one result, and the exact details of the number of
+results are not known ahead of time. For example, consider a test
+harness that loads JavaScript-based tests in a browser. Each URL
+loaded would be a single test, with corresponding ``test_start`` and
+``test_end`` messages. If there can be more than one JS-defined test
+on a page, however, it is useful to track the results of those tests
+separately. Therefore each of those tests is a subtest, and one
+``test_status`` message must be generated for each subtest result.
+
+Subtests must have a name that is unique within their parent test.
+
+Whether or not a test has subtests changes the meaning of the
+``status`` property on the test itself. When the test does not have
+any subtests, this property is the actual test result such as ``PASS``
+or ``FAIL``. When a test does have subtests, the test itself does not
+have a result as such; it isn't meaningful to describe it as having a
+``PASS`` result, especially if the subtests did not all pass. Instead
+this property is used to hold information about whether the test ran
+without error. If no errors were detected the test must be given the
+status ``OK``. Otherwise the test may get the status ``ERROR`` (for
+e.g. uncaught JS exceptions), ``TIMEOUT`` (if no results were reported
+in the allowed time) or ``CRASH`` (if the test caused the process
+under test to crash).
+
+StructuredLogger Objects
+------------------------
+
+.. automodule:: mozlog.structuredlog
+  :members: set_default_logger, get_default_logger, LoggerShutdownError
+
+.. autoclass:: StructuredLogger
+   :members: add_handler, remove_handler, handlers, suite_start,
+             suite_end, test_start, test_status, test_end,
+             process_output, critical, error, warning, info, debug,
+             shutdown
+
+.. autoclass:: StructuredLogFileLike
+  :members:
+
+ProxyLogger Objects
+-------------------
+
+Since :func:`mozlog.structuredlog.get_default_logger` returns None when
+the default logger is not initialized, it is not possible to directly
+use it at the module level.
+
+With ProxyLogger, it is possible to write the following code::
+
+    from mozlog import get_proxy_logger
+
+    LOG = get_proxy_logger('component_name')
+
+
+    def my_function():
+        LOG.info('logging with a module level object')
+
+
+.. note::
+
+   mozlog still needs to be initialized before the first call to a
+   ProxyLogger instance occurs, for example with
+   :func:`mozlog.commandline.setup_logging`.
+
+.. automodule:: mozlog.proxy
+  :members: get_proxy_logger, ProxyLogger
+
+Handlers
+--------
+
+A handler is a callable that is called for each log message produced
+and is responsible for handling the processing of that
+message. The typical example of this is a ``StreamHandler`` which takes
+a log message, invokes a formatter which converts the log to a string,
+and writes it to a file.
+
+.. automodule:: mozlog.handlers
+
+.. autoclass:: BaseHandler
+  :members:
+
+.. autoclass:: StreamHandler
+  :members:
+
+.. autoclass:: LogLevelFilter
+  :members:
+
+.. autoclass:: BufferHandler
+  :members:
+
+Formatters
+----------
+
+Formatters are callables that take a log message, and return either a
+string representation of that message, or ``None`` if that message
+should not appear in the output. 
This allows formatters to both +exclude certain items and create internal buffers of the output so +that, for example, a single string might be returned for a +``test_end`` message indicating the overall result of the test, +including data provided in the ``test_status`` messages. + +Formatter modules are written so that they can take raw input on stdin +and write formatted output on stdout. This allows the formatters to be +invoked as part of a command line for post-processing raw log files. + +.. automodule:: mozlog.formatters.base + +.. autoclass:: BaseFormatter + :members: + +.. automodule:: mozlog.formatters.unittest + +.. autoclass:: UnittestFormatter + :members: + +.. automodule:: mozlog.formatters.xunit + +.. autoclass:: XUnitFormatter + :members: + +.. automodule:: mozlog.formatters.html + +.. autoclass:: HTMLFormatter + :members: + +.. automodule:: mozlog.formatters.machformatter + +.. autoclass:: MachFormatter + :members: + +.. automodule:: mozlog.formatters.tbplformatter + +.. autoclass:: TbplFormatter + :members: + +Processing Log Files +-------------------- + +The ``mozlog.reader`` module provides utilities for working +with structured log files. + +.. automodule:: mozlog.reader + :members: + +Integration with argparse +------------------------- + +The `mozlog.commandline` module provides integration with the `argparse` +module to provide uniform logging-related command line arguments to programs +using `mozlog`. Each known formatter gets a command line argument of the form +``--log-{name}``, which takes the name of a file to log to with that format, +or ``-`` to indicate stdout. + +.. automodule:: mozlog.commandline + :members: + +Simple Examples +--------------- + +Log to stdout:: + + from mozlog import structuredlog + from mozlog import handlers, formatters + logger = structuredlog.StructuredLogger("my-test-suite") + logger.add_handler(handlers.StreamHandler(sys.stdout, + formatters.JSONFormatter())) + logger.suite_start(["test-id-1"]) + logger.test_start("test-id-1") + logger.info("This is a message with action='LOG' and level='INFO'") + logger.test_status("test-id-1", "subtest-1", "PASS") + logger.test_end("test-id-1", "OK") + logger.suite_end() + +Log with a context manager:: + + from mozlog.structuredlog import StructuredLogger + from mozlog.handlers import StreamHandler + from mozlog.formatters import JSONFormatter + + with StructuredLogger("my-test-suite") as logger: + logger.add_handler(StreamHandler(sys.stdout, + JSONFormatter())) + logger.info("This is an info message") + +Populate an ``argparse.ArgumentParser`` with logging options, and +create a logger based on the value of those options, defaulting to +JSON output on stdout if nothing else is supplied:: + + import argparse + from mozlog import commandline + + parser = argparse.ArgumentParser() + # Here one would populate the parser with other options + commandline.add_logging_group(parser) + + args = parser.parse_args() + logger = commandline.setup_logging("testsuite-name", args, {"raw": sys.stdout}) + +Count the number of tests that timed out in a testsuite:: + + from mozlog import reader + + count = 0 + + def handle_test_end(data): + global count + if data["status"] == "TIMEOUT": + count += 1 + + reader.each_log(reader.read("my_test_run.log"), + {"test_end": handle_test_end}) + + print count + +More Complete Example +--------------------- + +This example shows a complete toy testharness set up to used +structured logging. It is available as `structured_example.py <_static/structured_example.py>`_: + +.. 
literalinclude:: _static/structured_example.py + +Each global function with a name starting +``test_`` represents a test. A passing test returns without +throwing. A failing test throws a :py:class:`TestAssertion` exception +via the :py:func:`assert_equals` function. Throwing anything else is +considered an error in the test. There is also a :py:func:`expected` +decorator that is used to annotate tests that are expected to do +something other than pass. + +The main entry point to the test runner is via that :py:func:`main` +function. This is responsible for parsing command line +arguments, and initiating the test run. Although the test harness +itself does not provide any command line arguments, the +:py:class:`ArgumentParser` object is populated by +:py:meth:`commandline.add_logging_group`, which provides a generic +set of structured logging arguments appropriate to all tools producing +structured logging. + +The values of these command line arguments are used to create a +:py:class:`mozlog.StructuredLogger` object populated with the +specified handlers and formatters in +:py:func:`commandline.setup_logging`. The third argument to this +function is the default arguments to use. In this case the default +is to output raw (i.e. JSON-formatted) logs to stdout. + +The main test harness is provided by the :py:class:`TestRunner` +class. This class is responsible for scheduling all the tests and +logging all the results. It is passed the :py:obj:`logger` object +created from the command line arguments. The :py:meth:`run` method +starts the test run. Before the run is started it logs a +``suite_start`` message containing the id of each test that will run, +and after the testrun is done it logs a ``suite_end`` message. + +Individual tests are run in the :py:meth:`run_test` method. For each +test this logs a ``test_start`` message. It then runs the test and +logs a ``test_end`` message containing the test name, status, expected +status, and any informational message about the reason for the +result. In this test harness there are no subtests, so the +``test_end`` message has the status of the test and there are no +``test_status`` messages. 
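+
+Condensed down to its logging calls, :py:meth:`run_test` behaves roughly like
+the sketch below (``run_and_catch`` and ``expected_statuses`` are made-up
+stand-ins for the real error handling in ``structured_example.py``):
+
+.. code-block:: python
+
+    def run_test(self, test):
+        # one test_start message per test
+        self.logger.test_start(test.__name__)
+        # run the test, mapping any exception to a status and message
+        status, message = self.run_and_catch(test)
+        expected = self.expected_statuses.get(test.__name__, "PASS")
+        # one test_end message per test; this harness has no subtests,
+        # so no test_status messages are emitted
+        self.logger.test_end(test.__name__, status=status,
+                             expected=expected, message=message)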
+ +Example Output +~~~~~~~~~~~~~~ + +When run without providing any command line options, the raw +structured log messages are sent to stdout:: + + $ python structured_example.py + + {"source": "structured-example", "tests": ["test_that_has_an_error", "test_that_fails", "test_expected_fail", "test_that_passes"], "thread": "MainThread", "time": 1401446682787, "action": "suite_start", "pid": 18456} + {"source": "structured-example", "thread": "MainThread", "time": 1401446682787, "action": "log", "message": "Running tests", "level": "INFO", "pid": 18456} + {"source": "structured-example", "test": "test_that_has_an_error", "thread": "MainThread", "time": 1401446682787, "action": "test_start", "pid": 18456} + {"status": "ERROR", "thread": "MainThread", "pid": 18456, "source": "structured-example", "test": "test_that_has_an_error", "time": 1401446682788, "action": "test_end", "message": "Traceback (most recent call last):\n File \"structured_example.py\", line 61, in run_test\n func()\n File \"structured_example.py\", line 31, in test_that_has_an_error\n assert_equals(2, 1 + \"1\")\nTypeError: unsupported operand type(s) for +: 'int' and 'str'\n", "expected": "PASS"} + {"source": "structured-example", "test": "test_that_fails", "thread": "MainThread", "time": 1401446682788, "action": "test_start", "pid": 18456} + {"status": "FAIL", "thread": "MainThread", "pid": 18456, "source": "structured-example", "test": "test_that_fails", "time": 1401446682788, "action": "test_end", "message": "1 not equal to 2", "expected": "PASS"} + {"source": "structured-example", "test": "test_expected_fail", "thread": "MainThread", "time": 1401446682788, "action": "test_start", "pid": 18456} + {"status": "FAIL", "thread": "MainThread", "pid": 18456, "source": "structured-example", "test": "test_expected_fail", "time": 1401446682788, "action": "test_end", "message": "4 not equal to 5"} + {"source": "structured-example", "test": "test_that_passes", "thread": "MainThread", "time": 1401446682788, "action": "test_start", "pid": 18456} + {"status": "PASS", "source": "structured-example", "test": "test_that_passes", "thread": "MainThread", "time": 1401446682789, "action": "test_end", "pid": 18456} + {"source": "structured-example", "test": "test_with_known_intermittent", "thread": "MainThread", "time": 1401446682789, "action": "test_start", "pid": 18456} + {"status": "FAIL", thread": "MainThread", "pid": 18456, "source": "structured-example", "test": "test_with_known_intermittent", "time": 1401446682790, "action": "test_end", "expected": "PASS", "known_intermittent": ["FAIL", "TIMEOUT"]} + {"action": "suite_end", "source": "structured-example", "pid": 18456, "thread": "MainThread", "time": 1401446682790} + +The structured logging module provides a number of command line +options:: + + $ python structured_example.py --help + + usage: structured_example.py [-h] [--log-unittest LOG_UNITTEST] + [--log-raw LOG_RAW] [--log-html LOG_HTML] + [--log-xunit LOG_XUNIT] + [--log-mach LOG_MACH] + + optional arguments: + -h, --help show this help message and exit + + Output Logging: + Options for logging output. Each option represents a possible logging + format and takes a filename to write that format to, or '-' to write to + stdout. 
+ + --log-unittest LOG_UNITTEST + Unittest style output + --log-raw LOG_RAW Raw structured log messages + --log-html LOG_HTML HTML report + --log-xunit LOG_XUNIT + xUnit compatible XML + --log-mach LOG_MACH Human-readable output + +In order to get human-readable output on stdout and the structured log +data to go to the file ``structured.log``, we would run:: + + $ python structured_example.py --log-mach=- --log-raw=structured.log + + 0:00.00 SUITE_START: MainThread 4 + 0:01.00 LOG: MainThread INFO Running tests + 0:01.00 TEST_START: MainThread test_that_has_an_error + 0:01.00 TEST_END: MainThread Harness status ERROR, expected PASS. Subtests passed 0/0. Unexpected 1 + 0:01.00 TEST_START: MainThread test_that_fails + 0:01.00 TEST_END: MainThread Harness status FAIL, expected PASS. Subtests passed 0/0. Unexpected 1 + 0:01.00 TEST_START: MainThread test_expected_fail + 0:02.00 TEST_END: MainThread Harness status FAIL. Subtests passed 0/0. Unexpected 0 + 0:02.00 TEST_START: MainThread test_that_passes + 0:02.00 TEST_END: MainThread Harness status PASS. Subtests passed 0/0. Unexpected 0 + 0:02.00 SUITE_END: MainThread diff --git a/testing/mozbase/docs/moznetwork.rst b/testing/mozbase/docs/moznetwork.rst new file mode 100644 index 0000000000..905433e8a7 --- /dev/null +++ b/testing/mozbase/docs/moznetwork.rst @@ -0,0 +1,8 @@ +:mod:`moznetwork` --- Get network information +============================================= + +.. automodule:: moznetwork + + .. automethod:: moznetwork.get_ip + + .. autoclass:: moznetwork.NetworkError diff --git a/testing/mozbase/docs/mozpower.rst b/testing/mozbase/docs/mozpower.rst new file mode 100644 index 0000000000..76be41d987 --- /dev/null +++ b/testing/mozbase/docs/mozpower.rst @@ -0,0 +1,112 @@ +:mod:`mozpower` --- Power-usage testing +======================================= + +Mozpower provides an interface through which power usage measurements +can be done on any OS and CPU combination (auto-detected) that has +been implemented within the module. It provides 2 methods to start +and stop the measurement gathering as well as methods to get the +result that can also be formatted into a perfherder data blob. + +Basic Usage +----------- + +Although multiple classes exist within the mozpower module, +the only one that should be used is MozPower which is accessible +from the top-level of the module. It handles which subclasses +should be used depending on the detected OS and CPU combination. + +.. code-block:: python + + from mozpower import MozPower + + mp = MozPower( + ipg_measure_duration=600, + sampling_rate=1000, + output_file_path='tempdir/dataprefix' + ) + mp.initialize_power_measurements() + + # Run test TEST_NAME + + mp.finalize_power_measurements( + test_name=TEST_NAME, + output_dir_path=env['MOZ_UPLOAD_DIR'] + ) + + # Get complete PERFHERDER_DATA + perfherder_data = mp.get_full_perfherder_data('raptor') + +All the possible known errors that can occur are also provided +at the top-level of the module. + +.. code-block:: python + + from mozpower import MozPower, IPGExecutableMissingError, OsCpuComboMissingError + + try: + mp = MozPower(ipg_measure_duration=600, sampling_rate=1000) + except IPGExecutableMissingError as e: + pass + except OsCpuComboMissingError as e: + pass + + +.. automodule:: mozpower + +.. _MozPower: + +MozPower Interface +------------------ + +The following class provides a basic interface to interact with the +power measurement tools that have been implemented. The tool used +to measure power depends on the OS and CPU combination, i.e. 
Intel-based +MacOS machines would use Intel Power Gadget, while ARM64-based Windows +machines would use the native Windows tool powercfg. + +MozPower +```````` + +.. autoclass:: mozpower.MozPower + +Measurement methods ++++++++++++++++++++ +.. automethod:: MozPower.initialize_power_measurements(self, **kwargs) +.. automethod:: MozPower.finalize_power_measurements(self, **kwargs) + +Informational methods ++++++++++++++++++++++ +.. automethod:: MozPower.get_perfherder_data(self) +.. automethod:: MozPower.get_full_perfherder_data(self, framework, lowerisbetter=True, alertthreshold=2.0) + +IPGEmptyFileError +````````````````` +.. autoexception:: mozpower.IPGEmptyFileError + +IPGExecutableMissingError +````````````````````````` +.. autoexception:: mozpower.IPGExecutableMissingError + +IPGMissingOutputFileError +````````````````````````` +.. autoexception:: mozpower.IPGMissingOutputFileError + +IPGTimeoutError +``````````````` +.. autoexception:: mozpower.IPGTimeoutError + +IPGUnknownValueTypeError +```````````````````````` +.. autoexception:: mozpower.IPGUnknownValueTypeError + +MissingProcessorInfoError +````````````````````````` +.. autoexception:: mozpower.MissingProcessorInfoError + +OsCpuComboMissingError +`````````````````````` +.. autoexception:: mozpower.OsCpuComboMissingError + +PlatformUnsupportedError +```````````````````````` +.. autoexception:: mozpower.PlatformUnsupportedError diff --git a/testing/mozbase/docs/mozprocess.rst b/testing/mozbase/docs/mozprocess.rst new file mode 100644 index 0000000000..ef90e5aa0c --- /dev/null +++ b/testing/mozbase/docs/mozprocess.rst @@ -0,0 +1,324 @@ +:mod:`mozprocess` --- Launch and manage processes +================================================= + +Mozprocess is a process-handling module that provides some additional +features beyond those available with python's subprocess: + +* better handling of child processes, especially on Windows +* the ability to timeout the process after some absolute period, or some + period without any data written to stdout/stderr +* the ability to specify output handlers that will be called + for each line of output produced by the process +* the ability to specify handlers that will be called on process timeout + and normal process termination + +Running a process +----------------- + +mozprocess consists of two classes: ProcessHandler inherits from ProcessHandlerMixin. + +Let's see how to run a process. +First, the class should be instantiated with at least one argument which is a command (or a list formed by the command followed by its arguments). +Then the process can be launched using the *run()* method. +Finally the *wait()* method will wait until end of execution. + +.. code-block:: python + + from mozprocess import processhandler + + # under Windows replace by command = ['dir', '/a'] + command = ['ls', '-l'] + p = processhandler.ProcessHandler(command) + print("execute command: %s" % p.commandline) + p.run() + p.wait() + +Note that using *ProcessHandler* instead of *ProcessHandlerMixin* will print the output of executed command. The attribute *commandline* provides the launched command. + +Collecting process output +------------------------- + +Let's now consider a basic shell script that will print numbers from 1 to 5 waiting 1 second between each. +This script will be used as a command to launch in further examples. + +**proc_sleep_echo.sh**: + +.. 
code-block:: sh + + #!/bin/sh + + for i in 1 2 3 4 5 + do + echo $i + sleep 1 + done + +If you are running under Windows, you won't be able to use the previous script (unless using Cygwin). +So you'll use the following script: + +**proc_sleep_echo.bat**: + +.. code-block:: bat + + @echo off + FOR %%A IN (1 2 3 4 5) DO ( + ECHO %%A + REM if you have TIMEOUT then use it instead of PING + REM TIMEOUT /T 1 /NOBREAK + PING -n 2 127.0.0.1 > NUL + ) + +Mozprocess allows the specification of custom output handlers to gather process output while running. +ProcessHandler will by default write all outputs on stdout. You can also provide (to ProcessHandler or ProcessHandlerMixin) a function or a list of functions that will be used as callbacks on each output line generated by the process. + +In the following example the command's output will be stored in a file *output.log* and printed in stdout: + +.. code-block:: python + + import sys + from mozprocess import processhandler + + fd = open('output.log', 'w') + + def tostdout(line): + sys.stdout.write("<%s>\n" % line) + + def tofile(line): + fd.write("<%s>\n" % line) + + # under Windows you'll replace by 'proc_sleep_echo.bat' + command = './proc_sleep_echo.sh' + outputs = [tostdout, tofile] + + p = processhandler.ProcessHandlerMixin(command, processOutputLine=outputs) + p.run() + p.wait() + + fd.close() + +The process output can be saved (*obj = ProcessHandler(..., storeOutput=True)*) so as it is possible to request it (*obj.output*) at any time. Note that the default value for *stroreOutput* is *True*, so it is not necessary to provide it in the parameters. + +.. code-block:: python + + import time + import sys + from mozprocess import processhandler + + command = './proc_sleep_echo.sh' # Windows: 'proc_sleep_echo.bat' + + p = processhandler.ProcessHandler(command, storeOutput=True) + p.run() + for i in xrange(10): + print(p.output) + time.sleep(0.5) + p.wait() + +In previous example, you will see the *p.output* list growing. + +Execution +--------- + +Status +`````` + +It is possible to query the status of the process via *poll()* that will return None if the process is still running, 0 if it ended without failures and a negative value if it was killed by a signal (Unix-only). + +.. code-block:: python + + import time + import signal + from mozprocess import processhandler + + command = './proc_sleep_echo.sh' + p = processhandler.ProcessHandler(command) + p.run() + time.sleep(2) + print("poll status: %s" % p.poll()) + time.sleep(1) + p.kill(signal.SIGKILL) + print("poll status: %s" % p.poll()) + +Timeout +``````` + +A timeout can be provided to the *run()* method. If the process last more than timeout seconds, it will be stopped. + +After execution, the property *timedOut* will be set to True if a timeout was reached. + +It is also possible to provide functions (*obj = ProcessHandler[Mixin](..., onTimeout=functions)*) that will be called if the timeout was reached. + +.. code-block:: python + + from mozprocess import processhandler + + def ontimeout(): + print("REACHED TIMEOUT") + + command = './proc_sleep_echo.sh' # Windows: 'proc_sleep_echo.bat' + functions = [ontimeout] + p = processhandler.ProcessHandler(command, onTimeout=functions) + p.run(timeout=2) + p.wait() + print("timedOut = %s" % p.timedOut) + +By default the process will be killed on timeout but it is possible to prevent this by setting *kill_on_timeout* to *False*. + +.. 
code-block:: python + + p = processhandler.ProcessHandler(command, onTimeout=functions, kill_on_timeout=False) + p.run(timeout=2) + p.wait() + print("timedOut = %s" % p.timedOut) + +In this case, no output will be available after the timeout, but the process will still be running. + +Waiting +``````` + +It is possible to wait until the process exits as already seen with the method *wait()*, or until the end of a timeout if given. Note that in last case the process is still alive after the timeout. + +.. code-block:: python + + command = './proc_sleep_echo.sh' # Windows: 'proc_sleep_echo.bat' + p = processhandler.ProcessHandler(command) + p.run() + p.wait(timeout=2) + print("timedOut = %s" % p.timedOut) + p.wait() + +Killing +``````` + +You can request to kill the process with the method *kill*. f the parameter "ignore_children" is set to False when the process handler class is initialized, all the process's children will be killed as well. + +Except on Windows, you can specify the signal with which to kill method the process (e.g.: *kill(signal.SIGKILL)*). + +.. code-block:: python + + import time + from mozprocess import processhandler + + command = './proc_sleep_echo.sh' # Windows: 'proc_sleep_echo.bat' + p = processhandler.ProcessHandler(command) + p.run() + time.sleep(2) + p.kill() + +End of execution +```````````````` + +You can provide a function or a list of functions to call at the end of the process using the initialization parameter *onFinish*. + +.. code-block:: python + + from mozprocess import processhandler + + def finish(): + print("Finished!!") + + command = './proc_sleep_echo.sh' # Windows: 'proc_sleep_echo.bat' + + p = processhandler.ProcessHandler(command, onFinish=finish) + p.run() + p.wait() + +Child management +---------------- + +Consider the following scripts: + +**proc_child.sh**: + +.. code-block:: sh + + #!/bin/sh + for i in a b c d e + do + echo $i + sleep 1 + done + +**proc_parent.sh**: + +.. code-block:: sh + + #!/bin/sh + ./proc_child.sh + for i in 1 2 3 4 5 + do + echo $i + sleep 1 + done + +For windows users consider: + +**proc_child.bat**: + +.. code-block:: bat + + @echo off + FOR %%A IN (a b c d e) DO ( + ECHO %%A + REM TIMEOUT /T 1 /NOBREAK + PING -n 2 127.0.0.1 > NUL + ) + +**proc_parent.bat**: + +.. code-block:: bat + + @echo off + call proc_child.bat + FOR %%A IN (1 2 3 4 5) DO ( + ECHO %%A + REM TIMEOUT /T 1 /NOBREAK + PING -n 2 127.0.0.1 > NUL + ) + +For processes that launch other processes, mozprocess allows you to get child running status, wait for child termination, and kill children. + +Ignoring children +````````````````` + +By default the *ignore_children* option is False. In that case, killing the main process will kill all its children at the same time. + +.. code-block:: python + + import time + from mozprocess import processhandler + + def finish(): + print("Finished") + + command = './proc_parent.sh' + p = processhandler.ProcessHandler(command, ignore_children=False, onFinish=finish) + p.run() + time.sleep(2) + print("kill") + p.kill() + +If *ignore_children* is set to *True*, killing will apply only to the main process that will wait children end of execution before stopping (join). + +.. code-block:: python + + import time + from mozprocess import processhandler + + def finish(): + print("Finished") + + command = './proc_parent.sh' + p = processhandler.ProcessHandler(command, ignore_children=True, onFinish=finish) + p.run() + time.sleep(2) + print("kill") + p.kill() + +API Documentation +----------------- + +.. module:: mozprocess +.. 
autoclass:: ProcessHandlerMixin + :members: __init__, timedOut, commandline, run, kill, processOutputLine, onTimeout, onFinish, wait +.. autoclass:: ProcessHandler + :members: diff --git a/testing/mozbase/docs/mozprofile.rst b/testing/mozbase/docs/mozprofile.rst new file mode 100644 index 0000000000..d5b6e351b9 --- /dev/null +++ b/testing/mozbase/docs/mozprofile.rst @@ -0,0 +1,94 @@ +:mod:`mozprofile` --- Create and modify Mozilla application profiles +==================================================================== + +Mozprofile_ is a python tool for creating and managing profiles for Mozilla's +applications (Firefox, Thunderbird, etc.). In addition to creating profiles, +mozprofile can install addons_ and set preferences. Mozprofile can be utilized +from the command line or as an API. + +The preferred way of setting up profile data (addons, permissions, preferences +etc) is by passing them to the profile_ constructor. + +Addons +------ + +.. automodule:: mozprofile.addons + :members: + +Addons may be installed individually or from a manifest. + +Example:: + + from mozprofile import FirefoxProfile + + # create new profile to pass to mozmill/mozrunner + profile = FirefoxProfile(addons=["adblock.xpi"]) + +Command Line Interface +---------------------- + +.. automodule:: mozprofile.cli + :members: + +The profile to be operated on may be specified with the ``--profile`` +switch. If a profile is not specified, one will be created in a +temporary directory which will be echoed to the terminal:: + + (mozmill)> mozprofile + /tmp/tmp4q1iEU.mozrunner + (mozmill)> ls /tmp/tmp4q1iEU.mozrunner + user.js + +To run mozprofile from the command line enter: +``mozprofile --help`` for a list of options. + +Permissions +----------- + +.. automodule:: mozprofile.permissions + :members: + +You can set permissions by creating a ``ServerLocations`` object that you pass +to the ``Profile`` constructor. Hosts can be added to it with +``add_host(host, port)``. ``port`` can be 0. + +Preferences +----------- + +.. automodule:: mozprofile.prefs + :members: + +Preferences can be set in several ways: + +- using the API: You can make a dictionary with the preferences and pass it to + the ``Profile`` constructor. You can also add more preferences with the + ``Profile.set_preferences`` method. +- using a JSON blob file: ``mozprofile --preferences myprefs.json`` +- using a ``.ini`` file: ``mozprofile --preferences myprefs.ini`` +- via the command line: ``mozprofile --pref key:value --pref key:value [...]`` + +When setting preferences from an ``.ini`` file or the ``--pref`` switch, +the value will be interpolated as an integer or a boolean +(``true``/``false``) if possible. + +Profile +-------------------- + +.. automodule:: mozprofile.profile + :members: + +Resources +----------- +Other Mozilla programs offer additional and overlapping functionality +for profiles. There is also substantive documentation on profiles and +their management. + +- profile documentation_ + + +.. _Mozprofile: https://hg.mozilla.org/mozilla-central/file/tip/testing/mozbase/mozprofile +.. _addons: https://developer.mozilla.org/en/addons +.. _mozprofile.profile: https://hg.mozilla.org/mozilla-central/file/tip/testing/mozbase/mozprofile/mozprofile/profile.py +.. _AddonManager: https://hg.mozilla.org/mozilla-central/file/tip/testing/mozbase/mozprofile/mozprofile/addons.py +.. _here: https://hg.mozilla.org/mozilla-central/file/tip/testing/mozbase/mozprofile/mozprofile/permissions.py +.. 
_documentation: http://support.mozilla.com/en-US/kb/Profiles
diff --git a/testing/mozbase/docs/mozproxy.rst b/testing/mozbase/docs/mozproxy.rst
new file mode 100644
index 0000000000..f6863d7e22
--- /dev/null
+++ b/testing/mozbase/docs/mozproxy.rst
@@ -0,0 +1,46 @@
+:mod:`mozproxy` --- Provides an HTTP proxy
+==========================================
+
+Mozproxy lets you launch an HTTP proxy when you need to run tests against
+third-party websites in a reliable and reproducible way.
+
+Mozproxy provides an interface to proxy software, and the currently
+supported backend is **mitmproxy** for Desktop and Android.
+
+Mozproxy is used by Raptor to run performance tests without having to interact
+with the real web sites.
+
+Mozproxy provides a function that returns a playback class. The usage pattern is::
+
+    from mozproxy import get_playback
+
+    config = {'playback_tool': 'mitmproxy'}
+    pb = get_playback(config)
+    pb.start()
+    try:
+        # do your test
+    finally:
+        pb.stop()
+
+**config** is a dict with the following options:
+
+- **playback_tool**: name of the backend. Can be "mitmproxy", "mitmproxy-android"
+- **playback_version**: playback tool version
+- **playback_files**: playback recording path/manifest/URL
+- **binary**: path of the browser binary
+- **obj_path**: build dir
+- **platform**: platform name (provided by mozinfo.os)
+- **run_local**: if True, the test is running locally.
+- **app**: tested app. Can be "firefox", "geckoview", "refbrow" or "fenix"
+- **host**: hostname for the policies.json file
+- **local_profile_dir**: profile dir
+
+
+Supported environment variables:
+
+- **MOZPROXY_DIR**: directory used by mozproxy for all data files, set by mozproxy
+- **MOZ_UPLOAD_DIR**: upload directory path
+- **GECKO_HEAD_REPOSITORY**: used to find the certutils binary path from the CI
+- **GECKO_HEAD_REV**: used to find the certutils binary path from the CI
+- **HOSTUTILS_MANIFEST_PATH**: used to find the certutils binary path from the CI
diff --git a/testing/mozbase/docs/mozrunner.rst b/testing/mozbase/docs/mozrunner.rst
new file mode 100644
index 0000000000..5020e76cbb
--- /dev/null
+++ b/testing/mozbase/docs/mozrunner.rst
@@ -0,0 +1,183 @@
+:mod:`mozrunner` --- Manage remote and local gecko processes
+============================================================
+
+Mozrunner provides an API to manage a gecko-based application with an
+arbitrary configuration profile. It currently supports local desktop
+binaries such as Firefox and Thunderbird, as well as Firefox OS on
+mobile devices and emulators.
+
+
+Basic usage
+-----------
+
+The simplest way to use mozrunner is to instantiate a runner, start it,
+and then wait for it to finish:
+
+.. code-block:: python
+
+    from mozrunner import FirefoxRunner
+    binary = 'path/to/firefox/binary'
+    runner = FirefoxRunner(binary=binary)
+    runner.start()
+    runner.wait()
+
+This automatically creates and uses a default mozprofile object. If you
+wish to use a specialized or pre-existing profile, you can create a
+:doc:`mozprofile ` object and pass it in:
+
+.. 
code-block:: python + + from mozprofile import FirefoxProfile + from mozrunner import FirefoxRunner + import os + + binary = 'path/to/firefox/binary' + profile_path = 'path/to/profile' + if os.path.exists(profile_path): + profile = FirefoxProfile.clone(path_from=profile_path) + else: + profile = FirefoxProfile(profile=profile_path) + runner = FirefoxRunner(binary=binary, profile=profile) + runner.start() + runner.wait() + + +Handling output +--------------- + +By default, mozrunner dumps the output of the gecko process to standard output. +It is possible to add arbitrary output handlers by passing them in via the +`process_args` argument. Be careful, passing in a handler overrides the default +behaviour. So if you want to use a handler in addition to dumping to stdout, you +need to specify that explicitly. For example: + +.. code-block:: python + + from mozrunner import FirefoxRunner + + def handle_output_line(line): + do_something(line) + + binary = 'path/to/firefox/binary' + process_args = { 'stream': sys.stdout, + 'processOutputLine': [handle_output_line] } + runner = FirefoxRunner(binary=binary, process_args=process_args) + +Mozrunner uses :doc:`mozprocess ` to manage the underlying gecko +process and handle output. See the :doc:`mozprocess documentation ` +for all available arguments accepted by `process_args`. + + +Handling timeouts +----------------- + +Sometimes gecko can hang, or maybe it is just taking too long. To handle this case you +may want to set a timeout. Mozrunner has two kinds of timeouts, the +traditional `timeout`, and the `outputTimeout`. These get passed into the +`runner.start()` method. Setting `timeout` will cause gecko to be killed after +the specified number of seconds, no matter what. Setting `outputTimeout` will cause +gecko to be killed after the specified number of seconds with no output. In both +cases the process handler's `onTimeout` callbacks will be triggered. + +.. code-block:: python + + from mozrunner import FirefoxRunner + + def on_timeout(): + print('timed out after 10 seconds with no output!') + + binary = 'path/to/firefox/binary' + process_args = { 'onTimeout': on_timeout } + runner = FirefoxRunner(binary=binary, process_args=process_args) + runner.start(outputTimeout=10) + runner.wait() + +The `runner.wait()` method also accepts a timeout argument. But unlike the arguments +to `runner.start()`, this one simply returns from the wait call and does not kill the +gecko process. + +.. code-block:: python + + runner.start(timeout=100) + + waiting = 0 + while runner.wait(timeout=1) is None: + waiting += 1 + print("Been waiting for %d seconds so far.." % waiting) + assert waiting <= 100 + + +Using a device runner +--------------------- + +The previous examples used a GeckoRuntimeRunner. If you want to control a +gecko process on a remote device, you need to use a DeviceRunner. The api is +nearly identical except you don't pass in a binary, instead you create a device +object. For example to run Firefox for Android on the emulator, you might do: + +.. code-block:: python + + from mozrunner import FennecEmulatorRunner + + avd_home = 'path/to/avd' + runner = FennecEmulatorRunner(app='org.mozilla.fennec', avd_home=avd_home) + runner.start() + runner.wait() + +Device runners have a `device` object. Remember that the gecko process runs on +the device. In the case of the emulator, it is possible to start the +device independently of the gecko process. + +.. 
code-block:: python + + runner.device.start() # launches the emulator + runner.start() # stops the gecko process (if started), installs the profile, (re)starts the gecko process + + +Runner API Documentation +------------------------ + +Application Runners +~~~~~~~~~~~~~~~~~~~ +.. automodule:: mozrunner.runners + :members: + +BaseRunner +~~~~~~~~~~ +.. autoclass:: mozrunner.base.BaseRunner + :members: + +GeckoRuntimeRunner +~~~~~~~~~~~~~~~~~~ +.. autoclass:: mozrunner.base.GeckoRuntimeRunner + :show-inheritance: + :members: + +BlinkRuntimeRunner +~~~~~~~~~~~~~~~~~~ +.. autoclass:: mozrunner.base.BlinkRuntimeRunner + :show-inheritance: + :members: + +DeviceRunner +~~~~~~~~~~~~ +.. autoclass:: mozrunner.base.DeviceRunner + :show-inheritance: + :members: + +Device API Documentation +------------------------ + +Generally using the device classes directly shouldn't be required, but in some +cases it may be desirable. + +Device +~~~~~~ +.. autoclass:: mozrunner.devices.Device + :members: + +EmulatorAVD +~~~~~~~~~~~ +.. autoclass:: mozrunner.devices.EmulatorAVD + :show-inheritance: + :members: diff --git a/testing/mozbase/docs/mozversion.rst b/testing/mozbase/docs/mozversion.rst new file mode 100644 index 0000000000..ca2be48c1f --- /dev/null +++ b/testing/mozbase/docs/mozversion.rst @@ -0,0 +1,70 @@ +:mod:`mozversion` --- Get application information +================================================= + +`mozversion `_ +provides version information such as the application name and the changesets +that it has been built from. This is commonly used in reporting or for +conditional logic based on the application under test. + +API Usage +--------- + +.. automodule:: mozversion + :members: get_version + +Examples +```````` + +Firefox:: + + import mozversion + + version = mozversion.get_version(binary='/path/to/firefox') + for (key, value) in sorted(version.items()): + if value: + print '%s: %s' % (key, value) + +Firefox for Android:: + + version = mozversion.get_version(binary='path/to/firefox.apk') + print version['application_changeset'] # gets hg revision of build + +Command Line Usage +------------------ + +mozversion comes with a command line program, ``mozversion`` which may be used to +get version information from an application. + +Usage:: + + mozversion [options] + +Options +``````` + +---binary +''''''''' + +This is the path to the target application binary or .apk. If this is omitted +then the current directory is checked for the existence of an +application.ini file. If not found, then it is assumed the target +application is a remote Firefox OS instance. 
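+
+For instance, when run from a directory that contains an ``application.ini``
+file (the path below is only a placeholder), the binary does not need to be
+passed explicitly::
+
+    $ cd /path/to/firefox
+    $ mozversion
+
+The output has the same shape as the examples below.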
+ +Examples +```````` + +Firefox:: + + $ mozversion --binary=/path/to/firefox-bin + application_buildid: 20131205075310 + application_changeset: 39faf812aaec + application_name: Firefox + application_repository: http://hg.mozilla.org/releases/mozilla-release + application_version: 26.0 + platform_buildid: 20131205075310 + platform_changeset: 39faf812aaec + platform_repository: http://hg.mozilla.org/releases/mozilla-release + +Firefox for Android:: + + $ mozversion --binary=/path/to/firefox.apk diff --git a/testing/mozbase/docs/requirements.txt b/testing/mozbase/docs/requirements.txt new file mode 100644 index 0000000000..53dd4ca675 --- /dev/null +++ b/testing/mozbase/docs/requirements.txt @@ -0,0 +1 @@ +marionette_client diff --git a/testing/mozbase/docs/servingcontent.rst b/testing/mozbase/docs/servingcontent.rst new file mode 100644 index 0000000000..b1960d9447 --- /dev/null +++ b/testing/mozbase/docs/servingcontent.rst @@ -0,0 +1,11 @@ +Handling content for the browser +================================ + +It's often necessary to handle data for the browser. This can be accomplished +by using a local webserver or by setting up a proxy. + +.. toctree:: + :maxdepth: 2 + + mozhttpd + mozproxy diff --git a/testing/mozbase/docs/setuprunning.rst b/testing/mozbase/docs/setuprunning.rst new file mode 100644 index 0000000000..30845a5c7a --- /dev/null +++ b/testing/mozbase/docs/setuprunning.rst @@ -0,0 +1,20 @@ +Set up and running +------------------ + +Activities under this domain include installing the software, creating +a profile (a set of configuration settings), running a program in a +controlled environment such that it can be shut down safely, and +correctly handling the case where the system crashes. + +.. toctree:: + :maxdepth: 2 + + mozfile + mozgeckoprofiler + mozinstall + mozpower + mozprofile + mozprocess + mozrunner + mozcrash + mozdebug diff --git a/testing/mozbase/manifestparser/manifestparser/__init__.py b/testing/mozbase/manifestparser/manifestparser/__init__.py new file mode 100644 index 0000000000..c8d19d9712 --- /dev/null +++ b/testing/mozbase/manifestparser/manifestparser/__init__.py @@ -0,0 +1,8 @@ +# flake8: noqa +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +from .expression import * +from .ini import * +from .manifestparser import * diff --git a/testing/mozbase/manifestparser/manifestparser/cli.py b/testing/mozbase/manifestparser/manifestparser/cli.py new file mode 100644 index 0000000000..18fdaa88e8 --- /dev/null +++ b/testing/mozbase/manifestparser/manifestparser/cli.py @@ -0,0 +1,286 @@ +#!/usr/bin/env python +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +""" +Mozilla universal manifest parser +""" +import os +import sys +from optparse import OptionParser + +from .logger import Logger +from .manifestparser import ManifestParser, convert + + +class ParserError(Exception): + """error for exceptions while parsing the command line""" + + +def parse_args(_args): + """ + parse and return: + --keys=value (or --key value) + -tags + args + """ + + # return values + _dict = {} + tags = [] + args = [] + + # parse the arguments + key = None + for arg in _args: + if arg.startswith("---"): + raise ParserError("arguments should start with '-' or '--' only") + elif arg.startswith("--"): + if key: + raise ParserError("Key %s still open" % key) + key = arg[2:] + if "=" in key: + key, value = key.split("=", 1) + _dict[key] = value + key = None + continue + elif arg.startswith("-"): + if key: + raise ParserError("Key %s still open" % key) + tags.append(arg[1:]) + continue + else: + if key: + _dict[key] = arg + continue + args.append(arg) + + # return values + return (_dict, tags, args) + + +class CLICommand(object): + usage = "%prog [options] command" + + def __init__(self, parser): + self._parser = parser # master parser + self.logger = Logger() + + def parser(self): + return OptionParser( + usage=self.usage, description=self.__doc__, add_help_option=False + ) + + +class CopyCLI(CLICommand): + """ + To copy tests and manifests from a source + """ + + usage = "%prog [options] copy manifest directory -tag1 -tag2 --key1=value1 --key2=value2 ..." + + def __call__(self, global_options, args): + # parse the arguments + try: + kwargs, tags, args = parse_args(args) + except ParserError as e: + self._parser.error(str(e)) + + # make sure we have some manifests, otherwise it will + # be quite boring + if not len(args) == 2: + self.logger.error("missing arguments: manifest directory") + HelpCLI(self._parser)(global_options, ["copy"]) + return 1 + + # read the manifests + # TODO: should probably ensure these exist here + manifests = ManifestParser() + manifests.read(args[0]) + + # print the resultant query + manifests.copy(args[1], None, *tags, **kwargs) + return 0 + + +class CreateCLI(CLICommand): + """ + create a manifest from a list of directories + """ + + usage = "%prog [options] create directory <...>" + + def parser(self): + parser = CLICommand.parser(self) + parser.add_option( + "-p", "--pattern", dest="pattern", help="glob pattern for files" + ) + parser.add_option( + "-i", + "--ignore", + dest="ignore", + default=[], + action="append", + help="directories to ignore", + ) + parser.add_option( + "-w", + "--in-place", + dest="in_place", + help="Write .ini files in place; filename to write to", + ) + return parser + + def __call__(self, global_options, args): + parser = self.parser() + options, args = parser.parse_args(args) + + # need some directories + if not len(args): + self.logger.error("missing arguments: directory ...") + parser.print_usage() + return 1 + + # add the directories to the manifest + for arg in args: + assert os.path.exists(arg) + assert os.path.isdir(arg) + manifest = convert( + args, + pattern=options.pattern, + ignore=options.ignore, + write=options.in_place, + ) + if manifest: + print(manifest) + return 0 + + +class HelpCLI(CLICommand): + """ + get help on a command + """ + + usage = "%prog [options] help [command]" + + def __call__(self, global_options, args): + if len(args) == 1 and args[0] in commands: + commands[args[0]](self._parser).parser().print_help() + else: + self._parser.print_help() + print("\nCommands:") + for command in 
sorted(commands): + print(" %s : %s" % (command, commands[command].__doc__.strip())) + + +class UpdateCLI(CLICommand): + """ + update the tests as listed in a manifest from a directory + """ + + usage = "%prog [options] update manifest directory -tag1 -tag2 --key1=value1 --key2=value2 ..." + + def __call__(self, options, args): + # parse the arguments + try: + kwargs, tags, args = parse_args(args) + except ParserError as e: + self._parser.error(str(e)) + + # make sure we have some manifests, otherwise it will + # be quite boring + if not len(args) == 2: + self.logger.error("missing arguments: manifest directory") + HelpCLI(self._parser)(options, ["update"]) + return 1 + + # read the manifests + # TODO: should probably ensure these exist here + manifests = ManifestParser() + manifests.read(args[0]) + + # print the resultant query + manifests.update(args[1], None, *tags, **kwargs) + return 0 + + +class WriteCLI(CLICommand): + """ + write a manifest based on a query + """ + + usage = "%prog [options] write manifest -tag1 -tag2 --key1=value1 --key2=value2 ..." + + def __call__(self, options, args): + # parse the arguments + try: + kwargs, tags, args = parse_args(args) + except ParserError as e: + self._parser.error(str(e)) + + # make sure we have some manifests, otherwise it will + # be quite boring + if not args: + self.logger.error("missing arguments: manifest ...") + HelpCLI(self._parser)(options, ["write"]) + return 1 + + # read the manifests + # TODO: should probably ensure these exist here + manifests = ManifestParser() + manifests.read(*args) + + # print the resultant query + manifests.write(global_tags=tags, global_kwargs=kwargs) + return 0 + + +# command -> class mapping +commands = { + "copy": CopyCLI, + "create": CreateCLI, + "help": HelpCLI, + "update": UpdateCLI, + "write": WriteCLI, +} + + +def main(args=sys.argv[1:]): + """console_script entry point""" + + # set up an option parser + usage = "%prog [options] [command] ..." + description = "%s. Use `help` to display commands" % __doc__.strip() + parser = OptionParser(usage=usage, description=description) + parser.add_option( + "-s", + "--strict", + dest="strict", + action="store_true", + default=False, + help="adhere strictly to errors", + ) + parser.disable_interspersed_args() + + global_options, args = parser.parse_args(args) + + if not args: + HelpCLI(parser)(global_options, args) + parser.exit() + + # get the command + command = args[0] + if command not in commands: + parser.error( + "Command must be one of %s (you gave '%s')" + % (", ".join(sorted(commands.keys())), command) + ) + return 1 + + handler = commands[command](parser) + return handler(global_options, args[1:]) + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/testing/mozbase/manifestparser/manifestparser/expression.py b/testing/mozbase/manifestparser/manifestparser/expression.py new file mode 100644 index 0000000000..bea29b1f0c --- /dev/null +++ b/testing/mozbase/manifestparser/manifestparser/expression.py @@ -0,0 +1,324 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +import re +import sys +import traceback + +__all__ = ["parse", "ParseError", "ExpressionParser"] + +# expr.py +# from: +# http://k0s.org/mozilla/hg/expressionparser +# http://hg.mozilla.org/users/tmielczarek_mozilla.com/expressionparser + +# Implements a top-down parser/evaluator for simple boolean expressions. 
+# ideas taken from http://effbot.org/zone/simple-top-down-parsing.htm +# +# Rough grammar: +# expr := literal +# | '(' expr ')' +# | expr '&&' expr +# | expr '||' expr +# | expr '==' expr +# | expr '!=' expr +# | expr '<' expr +# | expr '>' expr +# | expr '<=' expr +# | expr '>=' expr +# literal := BOOL +# | INT +# | STRING +# | IDENT +# BOOL := true|false +# INT := [0-9]+ +# STRING := "[^"]*" +# IDENT := [A-Za-z_]\w* + +# Identifiers take their values from a mapping dictionary passed as the second +# argument. + +# Glossary (see above URL for details): +# - nud: null denotation +# - led: left detonation +# - lbp: left binding power +# - rbp: right binding power + + +class ident_token(object): + def __init__(self, scanner, value): + self.value = value + + def nud(self, parser): + # identifiers take their value from the value mappings passed + # to the parser + return parser.value(self.value) + + +class literal_token(object): + def __init__(self, scanner, value): + self.value = value + + def nud(self, parser): + return self.value + + +class eq_op_token(object): + "==" + + def led(self, parser, left): + return left == parser.expression(self.lbp) + + +class neq_op_token(object): + "!=" + + def led(self, parser, left): + return left != parser.expression(self.lbp) + + +class lt_op_token(object): + "<" + + def led(self, parser, left): + return left < parser.expression(self.lbp) + + +class gt_op_token(object): + ">" + + def led(self, parser, left): + return left > parser.expression(self.lbp) + + +class le_op_token(object): + "<=" + + def led(self, parser, left): + return left <= parser.expression(self.lbp) + + +class ge_op_token(object): + ">=" + + def led(self, parser, left): + return left >= parser.expression(self.lbp) + + +class not_op_token(object): + "!" + + def nud(self, parser): + return not parser.expression(100) + + +class and_op_token(object): + "&&" + + def led(self, parser, left): + right = parser.expression(self.lbp) + return left and right + + +class or_op_token(object): + "||" + + def led(self, parser, left): + right = parser.expression(self.lbp) + return left or right + + +class lparen_token(object): + "(" + + def nud(self, parser): + expr = parser.expression() + parser.advance(rparen_token) + return expr + + +class rparen_token(object): + ")" + + +class end_token(object): + """always ends parsing""" + + +# derived literal tokens + + +class bool_token(literal_token): + def __init__(self, scanner, value): + value = {"true": True, "false": False}[value] + literal_token.__init__(self, scanner, value) + + +class int_token(literal_token): + def __init__(self, scanner, value): + literal_token.__init__(self, scanner, int(value)) + + +class string_token(literal_token): + def __init__(self, scanner, value): + literal_token.__init__(self, scanner, value[1:-1]) + + +precedence = [ + (end_token, rparen_token), + (or_op_token,), + (and_op_token,), + (lt_op_token, gt_op_token, le_op_token, ge_op_token, eq_op_token, neq_op_token), + (lparen_token,), +] +for index, rank in enumerate(precedence): + for token in rank: + token.lbp = index # lbp = lowest left binding power + + +class ParseError(Exception): + """error parsing conditional expression""" + + +class ExpressionParser(object): + r""" + A parser for a simple expression language. + + The expression language can be described as follows:: + + EXPRESSION ::= LITERAL | '(' EXPRESSION ')' | '!' 
EXPRESSION | EXPRESSION OP EXPRESSION + OP ::= '==' | '!=' | '<' | '>' | '<=' | '>=' | '&&' | '||' + LITERAL ::= BOOL | INT | IDENT | STRING + BOOL ::= 'true' | 'false' + INT ::= [0-9]+ + IDENT ::= [a-zA-Z_]\w* + STRING ::= '"' [^\"] '"' | ''' [^\'] ''' + + At its core, expressions consist of booleans, integers, identifiers and. + strings. Booleans are one of *true* or *false*. Integers are a series + of digits. Identifiers are a series of English letters and underscores. + Strings are a pair of matching quote characters (single or double) with + zero or more characters inside. + + Expressions can be combined with operators: the equals (==) and not + equals (!=) operators compare two expressions and produce a boolean. The + and (&&) and or (||) operators take two expressions and produce the logical + AND or OR value of them, respectively. An expression can also be prefixed + with the not (!) operator, which produces its logical negation. + + Finally, any expression may be contained within parentheses for grouping. + + Identifiers take their values from the mapping provided. + """ + + scanner = None + + def __init__(self, text, valuemapping, strict=False): + """ + Initialize the parser + :param text: The expression to parse as a string. + :param valuemapping: A dict mapping identifier names to values. + :param strict: If true, referencing an identifier that was not + provided in :valuemapping: will raise an error. + """ + self.text = text + self.valuemapping = valuemapping + self.strict = strict + + def _tokenize(self): + """ + Lex the input text into tokens and yield them in sequence. + """ + if not ExpressionParser.scanner: + ExpressionParser.scanner = re.Scanner( + [ + # Note: keep these in sync with the class docstring above. + (r"true|false", bool_token), + (r"[a-zA-Z_]\w*", ident_token), + (r"[0-9]+", int_token), + (r'("[^"]*")|(\'[^\']*\')', string_token), + (r"==", eq_op_token()), + (r"!=", neq_op_token()), + (r"<=", le_op_token()), + (r">=", ge_op_token()), + (r"<", lt_op_token()), + (r">", gt_op_token()), + (r"\|\|", or_op_token()), + (r"!", not_op_token()), + (r"&&", and_op_token()), + (r"\(", lparen_token()), + (r"\)", rparen_token()), + (r"\s+", None), # skip whitespace + ] + ) + tokens, remainder = ExpressionParser.scanner.scan(self.text) + for t in tokens: + yield t + yield end_token() + + def value(self, ident): + """ + Look up the value of |ident| in the value mapping passed in the + constructor. + """ + if self.strict: + return self.valuemapping[ident] + else: + return self.valuemapping.get(ident, "") + + def advance(self, expected): + """ + Assert that the next token is an instance of |expected|, and advance + to the next token. + """ + if not isinstance(self.token, expected): + raise Exception("Unexpected token!") + self.token = next(self.iter) + + def expression(self, rbp=0): + """ + Parse and return the value of an expression until a token with + right binding power greater than rbp is encountered. + """ + t = self.token + self.token = next(self.iter) + left = t.nud(self) + while rbp < self.token.lbp: + t = self.token + self.token = next(self.iter) + left = t.led(self, left) + return left + + def parse(self): + """ + Parse and return the value of the expression in the text + passed to the constructor. Raises a ParseError if the expression + could not be parsed. 
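+
+        A minimal usage sketch (the expression and value mapping are
+        illustrative)::
+
+            ExpressionParser("os == 'linux' && debug",
+                             {"os": "linux", "debug": True}).parse()  # -> True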
+ """ + try: + self.iter = self._tokenize() + self.token = next(self.iter) + return self.expression() + except Exception: + extype, ex, tb = sys.exc_info() + formatted = "".join(traceback.format_exception_only(extype, ex)) + pe = ParseError( + "could not parse: %s\nexception: %svariables: %s" + % (self.text, formatted, self.valuemapping) + ) + raise pe.with_traceback(tb) + + __call__ = parse + + +def parse(text, **values): + """ + Parse and evaluate a boolean expression. + :param text: The expression to parse, as a string. + :param values: A dict containing a name to value mapping for identifiers + referenced in *text*. + :rtype: the final value of the expression. + :raises: :py:exc::ParseError: will be raised if parsing fails. + """ + return ExpressionParser(text, values).parse() diff --git a/testing/mozbase/manifestparser/manifestparser/filters.py b/testing/mozbase/manifestparser/manifestparser/filters.py new file mode 100644 index 0000000000..3191f00cc9 --- /dev/null +++ b/testing/mozbase/manifestparser/manifestparser/filters.py @@ -0,0 +1,557 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +""" +A filter is a callable that accepts an iterable of test objects and a +dictionary of values, and returns a new iterable of test objects. It is +possible to define custom filters if the built-in ones are not enough. +""" + +import itertools +import os +from collections import defaultdict +from collections.abc import MutableSequence + +from .expression import ParseError, parse +from .logger import Logger +from .util import normsep + +# built-in filters + + +def _match(exprs, **values): + if any(parse(e, **values) for e in exprs.splitlines() if e): + return True + return False + + +def skip_if(tests, values): + """ + Sets disabled on all tests containing the `skip-if` tag and whose condition + is True. This filter is added by default. + """ + tag = "skip-if" + for test in tests: + if tag in test and _match(test[tag], **values): + test.setdefault("disabled", "{}: {}".format(tag, test[tag])) + yield test + + +def run_if(tests, values): + """ + Sets disabled on all tests containing the `run-if` tag and whose condition + is False. This filter is added by default. + """ + tag = "run-if" + for test in tests: + if tag in test and not _match(test[tag], **values): + test.setdefault("disabled", "{}: {}".format(tag, test[tag])) + yield test + + +def fail_if(tests, values): + """ + Sets expected to 'fail' on all tests containing the `fail-if` tag and whose + condition is True. This filter is added by default. + """ + tag = "fail-if" + for test in tests: + if tag in test and _match(test[tag], **values): + test["expected"] = "fail" + yield test + + +def enabled(tests, values): + """ + Removes all tests containing the `disabled` key. This filter can be + added by passing `disabled=False` into `active_tests`. + """ + for test in tests: + if "disabled" not in test: + yield test + + +def exists(tests, values): + """ + Removes all tests that do not exist on the file system. This filter is + added by default, but can be removed by passing `exists=False` into + `active_tests`. + """ + for test in tests: + if os.path.exists(test["path"]): + yield test + + +# built-in instance filters + + +class InstanceFilter(object): + """ + Generally only one instance of a class filter should be applied at a time. 
+ Two instances of `InstanceFilter` are considered equal if they have the + same class name. This ensures only a single instance is ever added to + `filterlist`. This class also formats filters' __str__ method for easier + debugging. + """ + + unique = True + + __hash__ = super.__hash__ + + def __init__(self, *args, **kwargs): + self.fmt_args = ", ".join( + itertools.chain( + [str(a) for a in args], + ["{}={}".format(k, v) for k, v in kwargs.items()], + ) + ) + + def __eq__(self, other): + if self.unique: + return self.__class__ == other.__class__ + return self.__hash__() == other.__hash__() + + def __str__(self): + return "{}({})".format(self.__class__.__name__, self.fmt_args) + + +class subsuite(InstanceFilter): + """ + If `name` is None, removes all tests that have a `subsuite` key. + Otherwise removes all tests that do not have a subsuite matching `name`. + + It is possible to specify conditional subsuite keys using: + subsuite = foo,condition + + where 'foo' is the subsuite name, and 'condition' is the same type of + condition used for skip-if. If the condition doesn't evaluate to true, + the subsuite designation will be removed from the test. + + :param name: The name of the subsuite to run (default None) + """ + + def __init__(self, name=None): + InstanceFilter.__init__(self, name=name) + self.name = name + + def __call__(self, tests, values): + # Look for conditional subsuites, and replace them with the subsuite + # itself (if the condition is true), or nothing. + for test in tests: + subsuite = test.get("subsuite", "") + if "," in subsuite: + try: + subsuite, cond = subsuite.split(",") + except ValueError: + raise ParseError("subsuite condition can't contain commas") + matched = parse(cond, **values) + if matched: + test["subsuite"] = subsuite + else: + test["subsuite"] = "" + + # Filter on current subsuite + if self.name is None: + if not test.get("subsuite"): + yield test + elif test.get("subsuite", "") == self.name: + yield test + + +class chunk_by_slice(InstanceFilter): + """ + Basic chunking algorithm that splits tests evenly across total chunks. + + :param this_chunk: the current chunk, 1 <= this_chunk <= total_chunks + :param total_chunks: the total number of chunks + :param disabled: Whether to include disabled tests in the chunking + algorithm. If False, each chunk contains an equal number + of non-disabled tests. If True, each chunk contains an + equal number of tests (default False) + """ + + def __init__(self, this_chunk, total_chunks, disabled=False): + assert 1 <= this_chunk <= total_chunks + InstanceFilter.__init__(self, this_chunk, total_chunks, disabled=disabled) + self.this_chunk = this_chunk + self.total_chunks = total_chunks + self.disabled = disabled + + def __call__(self, tests, values): + tests = list(tests) + if self.disabled: + chunk_tests = tests[:] + else: + chunk_tests = [t for t in tests if "disabled" not in t] + + tests_per_chunk = float(len(chunk_tests)) / self.total_chunks + # pylint: disable=W1633 + start = int(round((self.this_chunk - 1) * tests_per_chunk)) + end = int(round(self.this_chunk * tests_per_chunk)) + + if not self.disabled: + # map start and end back onto original list of tests. Disabled + # tests will still be included in the returned list, but each + # chunk will contain an equal number of enabled tests. 
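+            # For example (hypothetical numbers): 10 enabled tests split into
+            # 2 chunks give enabled slices [0, 5) and [5, 10); those bounds are
+            # then translated into indices of the full `tests` list so any
+            # interleaved disabled tests are carried along with their chunk.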
+ if self.this_chunk == 1: + start = 0 + elif start < len(chunk_tests): + start = tests.index(chunk_tests[start]) + + if self.this_chunk == self.total_chunks: + end = len(tests) + elif end < len(chunk_tests): + end = tests.index(chunk_tests[end]) + return (t for t in tests[start:end]) + + +class chunk_by_dir(InstanceFilter): + """ + Basic chunking algorithm that splits directories of tests evenly at a + given depth. + + For example, a depth of 2 means all test directories two path nodes away + from the base are gathered, then split evenly across the total number of + chunks. The number of tests in each of the directories is not taken into + account (so chunks will not contain an even number of tests). All test + paths must be relative to the same root (typically the root of the source + repository). + + :param this_chunk: the current chunk, 1 <= this_chunk <= total_chunks + :param total_chunks: the total number of chunks + :param depth: the minimum depth of a subdirectory before it will be + considered unique + """ + + def __init__(self, this_chunk, total_chunks, depth): + InstanceFilter.__init__(self, this_chunk, total_chunks, depth) + self.this_chunk = this_chunk + self.total_chunks = total_chunks + self.depth = depth + + def __call__(self, tests, values): + tests_by_dir = defaultdict(list) + ordered_dirs = [] + for test in tests: + path = test["relpath"] + + if path.startswith(os.sep): + path = path[1:] + + dirs = path.split(os.sep) + dirs = dirs[: min(self.depth, len(dirs) - 1)] + path = os.sep.join(dirs) + + # don't count directories that only have disabled tests in them, + # but still yield disabled tests that are alongside enabled tests + if path not in ordered_dirs and "disabled" not in test: + ordered_dirs.append(path) + tests_by_dir[path].append(test) + + # pylint: disable=W1633 + tests_per_chunk = float(len(ordered_dirs)) / self.total_chunks + start = int(round((self.this_chunk - 1) * tests_per_chunk)) + end = int(round(self.this_chunk * tests_per_chunk)) + + for i in range(start, end): + for test in tests_by_dir.pop(ordered_dirs[i]): + yield test + + # find directories that only contain disabled tests. They still need to + # be yielded for reporting purposes. Put them all in chunk 1 for + # simplicity. + if self.this_chunk == 1: + disabled_dirs = [ + v for k, v in tests_by_dir.items() if k not in ordered_dirs + ] + for disabled_test in itertools.chain(*disabled_dirs): + yield disabled_test + + +class chunk_by_manifest(InstanceFilter): + """ + Chunking algorithm that tries to evenly distribute tests while ensuring + tests in the same manifest stay together. + + :param this_chunk: the current chunk, 1 <= this_chunk <= total_chunks + :param total_chunks: the total number of chunks + """ + + def __init__(self, this_chunk, total_chunks, *args, **kwargs): + InstanceFilter.__init__(self, this_chunk, total_chunks, *args, **kwargs) + self.this_chunk = this_chunk + self.total_chunks = total_chunks + + def __call__(self, tests, values): + tests = list(tests) + manifests = set(t["manifest"] for t in tests) + + tests_by_manifest = [] + for manifest in manifests: + mtests = [t for t in tests if t["manifest"] == manifest] + tests_by_manifest.append(mtests) + # Sort tests_by_manifest from largest manifest to shortest; include + # manifest name as secondary key to ensure consistent order across + # multiple runs. 
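+        # A worked example with hypothetical sizes: manifests of 5, 3, 3 and 1
+        # tests split across 2 chunks are assigned greedily to the currently
+        # smallest chunk, so one chunk ends up with the 5- and 1-test manifests
+        # and the other with the two 3-test manifests.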
+ tests_by_manifest.sort(reverse=True, key=lambda x: (len(x), x[0]["manifest"])) + + tests_by_chunk = [[] for i in range(self.total_chunks)] + for batch in tests_by_manifest: + # Sort to guarantee the chunk with the lowest score will always + # get the next batch of tests. + tests_by_chunk.sort( + key=lambda x: (len(x), x[0]["manifest"] if len(x) else "") + ) + tests_by_chunk[0].extend(batch) + + return (t for t in tests_by_chunk[self.this_chunk - 1]) + + +class chunk_by_runtime(InstanceFilter): + """ + Chunking algorithm that attempts to group tests into chunks based on their + average runtimes. It keeps manifests of tests together and pairs slow + running manifests with fast ones. + + :param this_chunk: the current chunk, 1 <= this_chunk <= total_chunks + :param total_chunks: the total number of chunks + :param runtimes: dictionary of manifest runtime data, of the form + {: } + """ + + def __init__(self, this_chunk, total_chunks, runtimes): + InstanceFilter.__init__(self, this_chunk, total_chunks, runtimes) + self.this_chunk = this_chunk + self.total_chunks = total_chunks + self.runtimes = {normsep(m): r for m, r in runtimes.items()} + self.logger = Logger() + + @classmethod + def get_manifest(cls, test): + manifest = normsep(test.get("ancestor_manifest", "")) + + # Ignore ancestor_manifests that live at the root (e.g, don't have a + # path separator). The only time this should happen is when they are + # generated by the build system and we shouldn't count generated + # manifests for chunking purposes. + if not manifest or "/" not in manifest: + manifest = normsep(test["manifest_relpath"]) + return manifest + + def get_chunked_manifests(self, manifests): + # Find runtimes for all relevant manifests. + runtimes = [(self.runtimes[m], m) for m in manifests if m in self.runtimes] + + # Compute the average to use as a default for manifests that don't exist. + times = [r[0] for r in runtimes] + # pylint --py3k W1619 + # pylint: disable=W1633 + avg = round(sum(times) / len(times), 2) if times else 0 + missing = sorted([m for m in manifests if m not in self.runtimes]) + self.logger.debug( + "Applying average runtime of {}s to the following missing manifests:\n{}".format( + avg, " " + "\n ".join(missing) + ) + ) + runtimes.extend([(avg, m) for m in missing]) + + # Each chunk is of the form [, ]. + chunks = [[0, []] for i in range(self.total_chunks)] + + # Sort runtimes from slowest -> fastest. + for runtime, manifest in sorted(runtimes, reverse=True): + # Sort chunks from fastest -> slowest. This guarantees the fastest + # chunk will be assigned the slowest remaining manifest. + chunks.sort(key=lambda x: (x[0], len(x[1]), x[1])) + chunks[0][0] += runtime + chunks[0][1].append(manifest) + + # Sort one last time so we typically get chunks ordered from fastest to + # slowest. + chunks.sort(key=lambda x: (x[0], len(x[1]))) + return chunks + + def __call__(self, tests, values): + tests = list(tests) + manifests = set(self.get_manifest(t) for t in tests) + chunks = self.get_chunked_manifests(manifests) + runtime, this_manifests = chunks[self.this_chunk - 1] + # pylint --py3k W1619 + # pylint: disable=W1633 + self.logger.debug( + "Cumulative test runtime is around {} minutes (average is {} minutes)".format( + round(runtime / 60), + round(sum([c[0] for c in chunks]) / (60 * len(chunks))), + ) + ) + return (t for t in tests if self.get_manifest(t) in this_manifests) + + +class tags(InstanceFilter): + """ + Removes tests that don't contain any of the given tags. 
This overrides + InstanceFilter's __eq__ method, so multiple instances can be added. + Multiple tag filters is equivalent to joining tags with the AND operator. + + To define a tag in a manifest, add a `tags` attribute to a test or DEFAULT + section. Tests can have multiple tags, in which case they should be + whitespace delimited. For example: + + .. code-block:: toml + + ['test_foobar.html'] + tags = 'foo bar' + + :param tags: A tag or list of tags to filter tests on + """ + + unique = False + + def __init__(self, tags): + InstanceFilter.__init__(self, tags) + if isinstance(tags, str): + tags = [tags] + self.tags = tags + + def __call__(self, tests, values): + for test in tests: + if "tags" not in test: + continue + + test_tags = [t.strip() for t in test["tags"].split()] + if any(t in self.tags for t in test_tags): + yield test + + +class failures(InstanceFilter): + """ + .. code-block:: toml + + ['test_fooar.html'] + fail-if = [ + "keyword", # + ] + + :param keywords: A keyword to filter tests on + """ + + def __init__(self, keyword): + InstanceFilter.__init__(self, keyword) + self.keyword = keyword.strip('"') + + def __call__(self, tests, values): + for test in tests: + for key in ["skip-if", "fail-if"]: + if key not in test: + continue + + matched = [ + self.keyword in e and parse(e, **values) + for e in test[key].splitlines() + if e + ] + if any(matched): + test["expected"] = "fail" + yield test + + +class pathprefix(InstanceFilter): + """ + Removes tests that don't start with any of the given test paths. + + :param paths: A list of test paths (or manifests) to filter on + """ + + def __init__(self, paths): + InstanceFilter.__init__(self, paths) + if isinstance(paths, str): + paths = [paths] + self.paths = paths + self.missing = set() + + def __call__(self, tests, values): + seen = set() + for test in tests: + for testpath in self.paths: + tp = os.path.normpath(testpath) + + if tp.endswith(".ini") or tp.endswith(".toml"): + mpaths = [test["manifest_relpath"]] + if "ancestor_manifest" in test: + mpaths.append(test["ancestor_manifest"]) + + if os.path.isabs(tp): + root = test["manifest"][: -len(test["manifest_relpath"]) - 1] + mpaths = [os.path.join(root, m) for m in mpaths] + + # only return tests that are in this manifest + if not any(os.path.normpath(m) == tp for m in mpaths): + continue + else: + # only return tests that start with this path + path = test["relpath"] + if os.path.isabs(tp): + path = test["path"] + + if not os.path.normpath(path).startswith(tp): + continue + + # any test path that points to a single file will be run no + # matter what, even if it's disabled + if "disabled" in test and os.path.normpath(test["relpath"]) == tp: + del test["disabled"] + + seen.add(tp) + yield test + break + + self.missing = set(self.paths) - seen + + +# filter container + +DEFAULT_FILTERS = ( + skip_if, + run_if, + fail_if, +) +""" +By default :func:`~.active_tests` will run the :func:`~.skip_if`, +:func:`~.run_if` and :func:`~.fail_if` filters. +""" + + +class filterlist(MutableSequence): + """ + A MutableSequence that raises TypeError when adding a non-callable and + ValueError if the item is already added. 
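+
+    A minimal usage sketch with the built-in filters defined above::
+
+        filters = filterlist([skip_if, run_if])
+        filters.append(fail_if)
+        filters.append(fail_if)  # raises ValueError: already applied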
+ """ + + def __init__(self, items=None): + self.items = [] + if items: + self.items = list(items) + + def _validate(self, item): + if not callable(item): + raise TypeError("Filters must be callable!") + if item in self: + raise ValueError("Filter {} is already applied!".format(item)) + + def __getitem__(self, key): + return self.items[key] + + def __setitem__(self, key, value): + self._validate(value) + self.items[key] = value + + def __delitem__(self, key): + del self.items[key] + + def __len__(self): + return len(self.items) + + def insert(self, index, value): + self._validate(value) + self.items.insert(index, value) diff --git a/testing/mozbase/manifestparser/manifestparser/ini.py b/testing/mozbase/manifestparser/manifestparser/ini.py new file mode 100644 index 0000000000..b5ffe7a2f0 --- /dev/null +++ b/testing/mozbase/manifestparser/manifestparser/ini.py @@ -0,0 +1,208 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +import io +import os +import sys + +__all__ = ["read_ini", "combine_fields"] + + +class IniParseError(Exception): + def __init__(self, fp, linenum, msg): + if isinstance(fp, str): + path = fp + elif hasattr(fp, "name"): + path = fp.name + else: + path = getattr(fp, "path", "unknown") + msg = "Error parsing manifest file '{}', line {}: {}".format(path, linenum, msg) + super(IniParseError, self).__init__(msg) + + +def read_ini( + fp, + defaults=None, + default="DEFAULT", + comments=None, + separators=None, + strict=True, + handle_defaults=True, + document=False, +): + """ + read an .ini file and return a list of [(section, values)] + - fp : file pointer or path to read + - defaults : default set of variables + - default : name of the section for the default section + - comments : characters that if they start a line denote a comment + - separators : strings that denote key, value separation in order + - strict : whether to be strict about parsing + - handle_defaults : whether to incorporate defaults into each section + """ + + # variables + defaults = defaults or {} + default_section = {} + comments = comments or ("#",) + separators = separators or ("=", ":") + sections = [] + key = value = None + section_names = set() + if isinstance(fp, str): + fp = io.open(fp, encoding="utf-8") + + # read the lines + section = default + current_section = {} + current_section_name = "" + key_indent = 0 + for linenum, line in enumerate(fp.read().splitlines(), start=1): + stripped = line.strip() + + # ignore blank lines + if not stripped: + # reset key and value to avoid continuation lines + key = value = None + continue + + # ignore comment lines + if any(stripped.startswith(c) for c in comments): + continue + + # strip inline comments (borrowed from configparser) + comment_start = sys.maxsize + inline_prefixes = {p: -1 for p in comments} + while comment_start == sys.maxsize and inline_prefixes: + next_prefixes = {} + for prefix, i in inline_prefixes.items(): + index = stripped.find(prefix, i + 1) + if index == -1: + continue + next_prefixes[prefix] = index + if index == 0 or (index > 0 and stripped[index - 1].isspace()): + comment_start = min(comment_start, index) + inline_prefixes = next_prefixes + + if comment_start != sys.maxsize: + stripped = stripped[:comment_start].rstrip() + + # check for a new section + if len(stripped) > 2 and stripped[0] == "[" and stripped[-1] == "]": + section = stripped[1:-1].strip() + key = value = 
None + key_indent = 0 + + # deal with DEFAULT section + if section.lower() == default.lower(): + if strict: + assert default not in section_names + section_names.add(default) + current_section = default_section + current_section_name = "DEFAULT" + continue + + if strict: + # make sure this section doesn't already exist + assert ( + section not in section_names + ), "Section '%s' already found in '%s'" % (section, section_names) + + section_names.add(section) + current_section = {} + current_section_name = section + sections.append((section, current_section)) + continue + + # if there aren't any sections yet, something bad happen + if not section_names: + raise IniParseError( + fp, + linenum, + "Expected a comment or section, " "instead found '{}'".format(stripped), + ) + + # continuation line ? + line_indent = len(line) - len(line.lstrip(" ")) + if key and line_indent > key_indent: + value = "%s%s%s" % (value, os.linesep, stripped) + if strict: + # make sure the value doesn't contain assignments + if " = " in value: + raise IniParseError( + fp, + linenum, + "Should not assign in {} condition for {}".format( + key, current_section_name + ), + ) + current_section[key] = value + continue + + # (key, value) pair + for separator in separators: + if separator in stripped: + key, value = stripped.split(separator, 1) + key = key.strip() + value = value.strip() + key_indent = line_indent + + # make sure this key isn't already in the section + if key: + assert ( + key not in current_section + ), f"Found duplicate key {key} in section {section}" + + if strict: + # make sure this key isn't empty + assert key + # make sure the value doesn't contain assignments + if " = " in value: + raise IniParseError( + fp, + linenum, + "Should not assign in {} condition for {}".format( + key, current_section_name + ), + ) + + current_section[key] = value + break + else: + # something bad happened! + raise IniParseError(fp, linenum, "Unexpected line '{}'".format(stripped)) + + # merge global defaults with the DEFAULT section + defaults = combine_fields(defaults, default_section) + if handle_defaults: + # merge combined defaults into each section + sections = [(i, combine_fields(defaults, j)) for i, j in sections] + return sections, defaults, None + + +def combine_fields(global_vars, local_vars): + """ + Combine the given manifest entries according to the semantics of specific fields. + This is used to combine manifest level defaults with a per-test definition. + """ + if not global_vars: + return local_vars + if not local_vars: + return global_vars.copy() + field_patterns = { + "args": "%s %s", + "prefs": "%s %s", + "skip-if": "%s\n%s", # consider implicit logical OR: "%s ||\n%s" + "support-files": "%s %s", + } + final_mapping = global_vars.copy() + for field_name, value in local_vars.items(): + if field_name not in field_patterns or field_name not in global_vars: + final_mapping[field_name] = value + continue + global_value = global_vars[field_name] + pattern = field_patterns[field_name] + final_mapping[field_name] = pattern % (global_value, value) + + return final_mapping diff --git a/testing/mozbase/manifestparser/manifestparser/logger.py b/testing/mozbase/manifestparser/manifestparser/logger.py new file mode 100644 index 0000000000..807f959098 --- /dev/null +++ b/testing/mozbase/manifestparser/manifestparser/logger.py @@ -0,0 +1,76 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. 
If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +import os + + +class Logger(object): + """ + ManifestParser needs to ensure a singleton for mozlog as documented here: + + https://firefox-source-docs.mozilla.org/mozbase/mozlog.html#mozlog-structured-logging-for-test-output + + Logging is threadsafe, with access to handlers protected by a + threading.Lock. However it is not process-safe. This means that + applications using multiple processes, e.g. via the multiprocessing + module, should arrange for all logging to happen in a single process. + + The test: + `testing/mochitest/tests/python/test_mochitest_integration.py::test_output_testfile_in_dupe_manifests` + creates two ManifestParser instances and runs them at the same + tripping over the condition (above) resulting in this exception: + + [task 2023-08-02T17:16:41.636Z] File "/builds/worker/checkouts/gecko/testing/mozbase/mozlog/mozlog/handlers/base.py", line 113, in __call__ + [task 2023-08-02T17:16:41.636Z] self.stream.write(formatted) + [task 2023-08-02T17:16:41.636Z] ValueError: I/O operation on closed file + """ + + logger = None + CI = False # True if we are running in CI + + def __init__(self): + "Lazily will create an instance of mozlog" + pass + + def _initialize(self): + "Creates an instance of mozlog, if needed" + if "TASK_ID" in os.environ: + Logger.CI = True # We are running in CI + else: + Logger.CI = False + if Logger.logger is None: + component = "manifestparser" + import mozlog + + Logger.logger = mozlog.get_default_logger(component) + if Logger.logger is None: + Logger.logger = mozlog.unstructured.getLogger(component) + + def critical(self, *args, **kwargs): + self._initialize() + Logger.logger.critical(*args, **kwargs) + + def debug(self, *args, **kwargs): + self._initialize() + Logger.logger.debug(*args, **kwargs) + + def debug_ci(self, *args, **kwargs): + "Log to INFO level in CI else DEBUG level" + self._initialize() + if Logger.CI: + Logger.logger.info(*args, **kwargs) + else: + Logger.logger.debug(*args, **kwargs) + + def error(self, *args, **kwargs): + self._initialize() + Logger.logger.error(*args, **kwargs) + + def info(self, *args, **kwargs): + self._initialize() + Logger.logger.info(*args, **kwargs) + + def warning(self, *args, **kwargs): + self._initialize() + Logger.logger.warning(*args, **kwargs) diff --git a/testing/mozbase/manifestparser/manifestparser/manifestparser.py b/testing/mozbase/manifestparser/manifestparser/manifestparser.py new file mode 100644 index 0000000000..63eaeefe05 --- /dev/null +++ b/testing/mozbase/manifestparser/manifestparser/manifestparser.py @@ -0,0 +1,938 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. 
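+#
+# Typical read-and-query usage, as a sketch (the manifest path and the filter
+# values below are illustrative):
+#
+#   from manifestparser import TestManifest
+#   manifest = TestManifest(manifests=("mochitest.toml",), strict=False)
+#   tests = manifest.active_tests(os="linux", debug=True)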
+ +import codecs +import fnmatch +import io +import json +import os +import shutil +import sys +import types +from io import StringIO + +from .filters import DEFAULT_FILTERS, enabled, filterlist +from .filters import exists as _exists +from .ini import read_ini +from .logger import Logger +from .toml import read_toml + +__all__ = ["ManifestParser", "TestManifest", "convert"] + +relpath = os.path.relpath + + +# path normalization + + +def normalize_path(path): + """normalize a relative path""" + if sys.platform.startswith("win"): + return path.replace("/", os.path.sep) + return path + + +def denormalize_path(path): + """denormalize a relative path""" + if sys.platform.startswith("win"): + return path.replace(os.path.sep, "/") + return path + + +# objects for parsing manifests + + +class ManifestParser(object): + """read .ini manifests""" + + def __init__( + self, + manifests=(), + defaults=None, + strict=True, + rootdir=None, + finder=None, + handle_defaults=True, + use_toml=True, + document=False, + ): + """Creates a ManifestParser from the given manifest files. + + :param manifests: An iterable of file paths or file objects corresponding + to manifests. If a file path refers to a manifest file that + does not exist, an IOError is raised. + :param defaults: Variables to pre-define in the environment for evaluating + expressions in manifests. + :param strict: If False, the provided manifests may contain references to + listed (test) files that do not exist without raising an + IOError during reading, and certain errors in manifests + are not considered fatal. Those errors include duplicate + section names, redefining variables, and defining empty + variables. + :param rootdir: The directory used as the basis for conversion to and from + relative paths during manifest reading. + :param finder: If provided, this finder object will be used for filesystem + interactions. Finder objects are part of the mozpack package, + documented at + http://firefox-source-docs.mozilla.org/python/mozpack.html#module-mozpack.files + :param handle_defaults: If not set, do not propagate manifest defaults to individual + test objects. Callers are expected to manage per-manifest + defaults themselves via the manifest_defaults member + variable in this case. + :param use_toml: If True *.toml configration files will be used iff present in the same location as *.ini files (applies to included files as well). If False only *.ini files will be considered. (defaults to True) + :param document: If True *.toml configration will preserve the parsed document from `tomlkit` in self.source_documents[filename] (defaults to False) + """ + self._defaults = defaults or {} + self.tests = [] + self.manifest_defaults = {} + self.source_files = set() + self.source_documents = {} # source document for each filename (optional) + self.strict = strict + self.rootdir = rootdir + self._root = None + self.finder = finder + self._handle_defaults = handle_defaults + self.use_toml = use_toml + self.document = document + self.logger = Logger() + if manifests: + self.read(*manifests) + + def path_exists(self, path): + if self.finder: + return self.finder.get(path) is not None + return os.path.exists(path) + + @property + def root(self): + if not self._root: + if self.rootdir is None: + self._root = "" + else: + assert os.path.isabs(self.rootdir) + self._root = self.rootdir + os.path.sep + return self._root + + def relative_to_root(self, path): + # Microoptimization, because relpath is quite expensive. 
+ # We know that rootdir is an absolute path or empty. If path + # starts with rootdir, then path is also absolute and the tail + # of the path is the relative path (possibly non-normalized, + # when here is unknown). + # For this to work rootdir needs to be terminated with a path + # separator, so that references to sibling directories with + # a common prefix don't get misscomputed (e.g. /root and + # /rootbeer/file). + # When the rootdir is unknown, the relpath needs to be left + # unchanged. We use an empty string as rootdir in that case, + # which leaves relpath unchanged after slicing. + if path.startswith(self.root): + return path[len(self.root) :] + else: + return relpath(path, self.root) + + # methods for reading manifests + def _get_fp_filename(self, filename): + # get directory of this file if not file-like object + if isinstance(filename, str): + # If we're using mercurial as our filesystem via a finder + # during manifest reading, the getcwd() calls that happen + # with abspath calls will not be meaningful, so absolute + # paths are required. + if self.finder: + assert os.path.isabs(filename) + filename = os.path.abspath(filename) + if self.finder: + fp = codecs.getreader("utf-8")(self.finder.get(filename).open()) + else: + fp = io.open(filename, encoding="utf-8") + else: + fp = filename + if hasattr(fp, "name"): + filename = os.path.abspath(fp.name) + else: + filename = None + return fp, filename + + def _read(self, root, filename, defaults, parentmanifest=None): + """ + Internal recursive method for reading and parsing manifests. + Stores all found tests in self.tests + :param root: The base path + :param filename: File object or string path for the base manifest file + :param defaults: Options that apply to all items + :param parentmanifest: Filename of the parent manifest, relative to rootdir (default None) + """ + + def read_file(type): + include_file = section.split(type, 1)[-1] + include_file = normalize_path(include_file) + if not os.path.isabs(include_file): + include_file = os.path.join(here, include_file) + file_base, file_ext = os.path.splitext(include_file) + if file_ext == ".ini": + toml_name = file_base + ".toml" + if self.path_exists(toml_name): + if self.use_toml: + include_file = toml_name + else: + self.logger.debug_ci( + f"NOTE TOML include file present, but not used: {toml_name}" + ) + elif file_ext != ".toml": + raise IOError( + f"manfestparser file extension not supported: {include_file}" + ) + if not self.path_exists(include_file): + message = "Included file '%s' does not exist" % include_file + if self.strict: + raise IOError(message) + else: + sys.stderr.write("%s\n" % message) + return + return include_file + + # assume we are reading an INI file + read_fn = read_ini + fp, filename = self._get_fp_filename(filename) + manifest_defaults_filename = filename # does not change if TOML is present + if filename is None: + filename_rel = None + here = root + file_base = file_ext = None + else: + self.source_files.add(filename) + filename_rel = self.relative_to_root(filename) + here = os.path.dirname(filename) + file_base, file_ext = os.path.splitext(filename) + if file_ext == ".ini": + toml_name = file_base + ".toml" + if self.path_exists(toml_name): + if self.use_toml: + fp, filename = self._get_fp_filename(toml_name) + read_fn = read_toml + else: + self.logger.debug_ci( + f"NOTE TOML present, but not used: {toml_name}" + ) + elif file_ext == ".toml": + read_fn = read_toml + else: + raise IOError(f"manfestparser file extension not supported: {filename}") + 
defaults["here"] = here + + # read the configuration + sections, defaults, document = read_fn( + fp=fp, + defaults=defaults, + strict=self.strict, + handle_defaults=self._handle_defaults, + document=self.document, + ) + if filename is not None: + self.source_documents[filename] = document + if parentmanifest and filename: + # A manifest can be read multiple times, via "include:", optionally + # with section-specific variables. These variables only apply to + # the included manifest when included via the same parent manifest, + # so they must be associated with (parentmanifest, filename). + # + # |defaults| is a combination of variables, in the following order: + # - The defaults of the ancestor manifests if self._handle_defaults + # is True. + # - Any variables from the "[include:...]" section. + # - The defaults of the included manifest. + self.manifest_defaults[ + (parentmanifest, manifest_defaults_filename) + ] = defaults + if manifest_defaults_filename != filename: + self.manifest_defaults[(parentmanifest, filename)] = defaults + else: + self.manifest_defaults[manifest_defaults_filename] = defaults + if manifest_defaults_filename != filename: + self.manifest_defaults[filename] = defaults + + # get the tests + for section, data in sections: + # a file to include + # TODO: keep track of included file structure: + # self.manifests = {'manifest.ini': 'relative/path.ini'} + if section.startswith("include:"): + include_file = read_file("include:") + if include_file: + include_defaults = data.copy() + self._read( + root, + include_file, + include_defaults, + parentmanifest=filename_rel, + ) + continue + + # otherwise an item + test = data.copy() + test["name"] = section + + # Will be None if the manifest being read is a file-like object. + test["manifest"] = filename + test["manifest_relpath"] = None + if filename: + test["manifest_relpath"] = filename_rel + + # determine the path + path = test.get("path", section) + _relpath = path + if "://" not in path: # don't futz with URLs + path = normalize_path(path) + if here and not os.path.isabs(path): + # Profiling indicates 25% of manifest parsing is spent + # in this call to normpath, but almost all calls return + # their argument unmodified, so we avoid the call if + # '..' if not present in the path. + path = os.path.join(here, path) + if ".." in path: + path = os.path.normpath(path) + _relpath = self.relative_to_root(path) + + test["path"] = path + test["relpath"] = _relpath + + if parentmanifest is not None: + # If a test was included by a parent manifest we may need to + # indicate that in the test object for the sake of identifying + # a test, particularly in the case a test file is included by + # multiple manifests. 
+ test["ancestor_manifest"] = parentmanifest + + # append the item + self.tests.append(test) + + def read(self, *filenames, **defaults): + """ + read and add manifests from file paths or file-like objects + + filenames -- file paths or file-like objects to read as manifests + defaults -- default variables + """ + + # ensure all files exist + missing = [ + filename + for filename in filenames + if isinstance(filename, str) and not self.path_exists(filename) + ] + if missing: + raise IOError("Missing files: %s" % ", ".join(missing)) + + # default variables + _defaults = defaults.copy() or self._defaults.copy() + _defaults.setdefault("here", None) + + # process each file + for filename in filenames: + # set the per file defaults + defaults = _defaults.copy() + here = None + if isinstance(filename, str): + here = os.path.dirname(os.path.abspath(filename)) + elif hasattr(filename, "name"): + here = os.path.dirname(os.path.abspath(filename.name)) + if here: + defaults["here"] = here # directory of master .ini file + + if self.rootdir is None: + # set the root directory + # == the directory of the first manifest given + self.rootdir = here + + self._read(here, filename, defaults) + + # methods for querying manifests + + def query(self, *checks, **kw): + """ + general query function for tests + - checks : callable conditions to test if the test fulfills the query + """ + tests = kw.get("tests", None) + if tests is None: + tests = self.tests + retval = [] + for test in tests: + for check in checks: + if not check(test): + break + else: + retval.append(test) + return retval + + def get(self, _key=None, inverse=False, tags=None, tests=None, **kwargs): + # TODO: pass a dict instead of kwargs since you might hav + # e.g. 'inverse' as a key in the dict + + # TODO: tags should just be part of kwargs with None values + # (None == any is kinda weird, but probably still better) + + # fix up tags + if tags: + tags = set(tags) + else: + tags = set() + + # make some check functions + if inverse: + + def has_tags(test): + return not tags.intersection(test.keys()) + + def dict_query(test): + for key, value in list(kwargs.items()): + if test.get(key) == value: + return False + return True + + else: + + def has_tags(test): + return tags.issubset(test.keys()) + + def dict_query(test): + for key, value in list(kwargs.items()): + if test.get(key) != value: + return False + return True + + # query the tests + tests = self.query(has_tags, dict_query, tests=tests) + + # if a key is given, return only a list of that key + # useful for keys like 'name' or 'path' + if _key: + return [test[_key] for test in tests] + + # return the tests + return tests + + def manifests(self, tests=None): + """ + return manifests in order in which they appear in the tests + If |tests| is not set, the order of the manifests is unspecified. + """ + if tests is None: + manifests = [] + # Make sure to return all the manifests, even ones without tests. 
+ for m in list(self.manifest_defaults.keys()): + if isinstance(m, tuple): + _parentmanifest, manifest = m + else: + manifest = m + if manifest not in manifests: + manifests.append(manifest) + return manifests + + manifests = [] + for test in tests: + manifest = test.get("manifest") + if not manifest: + continue + if manifest not in manifests: + manifests.append(manifest) + return manifests + + def paths(self): + return [i["path"] for i in self.tests] + + # methods for auditing + + def missing(self, tests=None): + """ + return list of tests that do not exist on the filesystem + """ + if tests is None: + tests = self.tests + existing = list(_exists(tests, {})) + return [t for t in tests if t not in existing] + + def check_missing(self, tests=None): + missing = self.missing(tests=tests) + if missing: + missing_paths = [test["path"] for test in missing] + if self.strict: + raise IOError( + "Strict mode enabled, test paths must exist. " + "The following test(s) are missing: %s" + % json.dumps(missing_paths, indent=2) + ) + print( + "Warning: The following test(s) are missing: %s" + % json.dumps(missing_paths, indent=2), + file=sys.stderr, + ) + return missing + + def verifyDirectory(self, directories, pattern=None, extensions=None): + """ + checks what is on the filesystem vs what is in a manifest + returns a 2-tuple of sets: + (missing_from_filesystem, missing_from_manifest) + """ + + files = set([]) + if isinstance(directories, str): + directories = [directories] + + # get files in directories + for directory in directories: + for dirpath, _dirnames, fnames in os.walk(directory, topdown=True): + filenames = fnames + # only add files that match a pattern + if pattern: + filenames = fnmatch.filter(filenames, pattern) + + # only add files that have one of the extensions + if extensions: + filenames = [ + filename + for filename in filenames + if os.path.splitext(filename)[-1] in extensions + ] + + files.update( + [os.path.join(dirpath, filename) for filename in filenames] + ) + + paths = set(self.paths()) + missing_from_filesystem = paths.difference(files) + missing_from_manifest = files.difference(paths) + return (missing_from_filesystem, missing_from_manifest) + + # methods for output + + def write( + self, + fp=sys.stdout, + rootdir=None, + global_tags=None, + global_kwargs=None, + local_tags=None, + local_kwargs=None, + ): + """ + write a manifest given a query + global and local options will be munged to do the query + globals will be written to the top of the file + locals (if given) will be written per test + """ + + # open file if `fp` given as string + close = False + if isinstance(fp, str): + fp = open(fp, "w") + close = True + + # root directory + if rootdir is None: + rootdir = self.rootdir + + # sanitize input + global_tags = global_tags or set() + local_tags = local_tags or set() + global_kwargs = global_kwargs or {} + local_kwargs = local_kwargs or {} + + # create the query + tags = set([]) + tags.update(global_tags) + tags.update(local_tags) + kwargs = {} + kwargs.update(global_kwargs) + kwargs.update(local_kwargs) + + # get matching tests + tests = self.get(tags=tags, **kwargs) + + # print the .ini manifest + if global_tags or global_kwargs: + print("[DEFAULT]", file=fp) + for tag in global_tags: + print("%s =" % tag, file=fp) + for key, value in list(global_kwargs.items()): + print("%s = %s" % (key, value), file=fp) + print(file=fp) + + for t in tests: + test = t.copy() # don't overwrite + + path = test["name"] + if not os.path.isabs(path): + path = test["path"] + if 
self.rootdir: + path = relpath(test["path"], self.rootdir) + path = denormalize_path(path) + print("[%s]" % path, file=fp) + + # reserved keywords: + reserved = [ + "path", + "name", + "here", + "manifest", + "manifest_relpath", + "relpath", + "ancestor_manifest", + ] + for key in sorted(test.keys()): + if key in reserved: + continue + if key in global_kwargs: + continue + if key in global_tags and not test[key]: + continue + print("%s = %s" % (key, test[key]), file=fp) + print(file=fp) + + if close: + # close the created file + fp.close() + + def __str__(self): + fp = StringIO() + self.write(fp=fp) + value = fp.getvalue() + return value + + def copy(self, directory, rootdir=None, *tags, **kwargs): + """ + copy the manifests and associated tests + - directory : directory to copy to + - rootdir : root directory to copy to (if not given from manifests) + - tags : keywords the tests must have + - kwargs : key, values the tests must match + """ + # XXX note that copy does *not* filter the tests out of the + # resulting manifest; it just stupidly copies them over. + # ideally, it would reread the manifests and filter out the + # tests that don't match *tags and **kwargs + + # destination + if not os.path.exists(directory): + os.path.makedirs(directory) + else: + # sanity check + assert os.path.isdir(directory) + + # tests to copy + tests = self.get(tags=tags, **kwargs) + if not tests: + return # nothing to do! + + # root directory + if rootdir is None: + rootdir = self.rootdir + + # copy the manifests + tests + manifests = [relpath(manifest, rootdir) for manifest in self.manifests()] + for manifest in manifests: + destination = os.path.join(directory, manifest) + dirname = os.path.dirname(destination) + if not os.path.exists(dirname): + os.makedirs(dirname) + else: + # sanity check + assert os.path.isdir(dirname) + shutil.copy(os.path.join(rootdir, manifest), destination) + + missing = self.check_missing(tests) + tests = [test for test in tests if test not in missing] + for test in tests: + if os.path.isabs(test["name"]): + continue + source = test["path"] + destination = os.path.join(directory, relpath(test["path"], rootdir)) + shutil.copy(source, destination) + # TODO: ensure that all of the tests are below the from_dir + + def update(self, from_dir, rootdir=None, *tags, **kwargs): + """ + update the tests as listed in a manifest from a directory + - from_dir : directory where the tests live + - rootdir : root directory to copy to (if not given from manifests) + - tags : keys the tests must have + - kwargs : key, values the tests must match + """ + + # get the tests + tests = self.get(tags=tags, **kwargs) + + # get the root directory + if not rootdir: + rootdir = self.rootdir + + # copy them! + for test in tests: + if not os.path.isabs(test["name"]): + _relpath = relpath(test["path"], rootdir) + source = os.path.join(from_dir, _relpath) + if not os.path.exists(source): + message = "Missing test: '%s' does not exist!" 
+ if self.strict: + raise IOError(message) + print(message + " Skipping.", file=sys.stderr) + continue + destination = os.path.join(rootdir, _relpath) + shutil.copy(source, destination) + + # directory importers + + @classmethod + def _walk_directories(cls, directories, callback, pattern=None, ignore=()): + """ + internal function to import directories + """ + + if isinstance(pattern, str): + patterns = [pattern] + else: + patterns = pattern + ignore = set(ignore) + + if not patterns: + + def accept_filename(filename): + return True + + else: + + def accept_filename(filename): + for pattern in patterns: + if fnmatch.fnmatch(filename, pattern): + return True + + if not ignore: + + def accept_dirname(dirname): + return True + + else: + + def accept_dirname(dirname): + return dirname not in ignore + + rootdirectories = directories[:] + seen_directories = set() + for rootdirectory in rootdirectories: + # let's recurse directories using list + directories = [os.path.realpath(rootdirectory)] + while directories: + directory = directories.pop(0) + if directory in seen_directories: + # eliminate possible infinite recursion due to + # symbolic links + continue + seen_directories.add(directory) + + files = [] + subdirs = [] + for name in sorted(os.listdir(directory)): + path = os.path.join(directory, name) + if os.path.isfile(path): + # os.path.isfile follow symbolic links, we don't + # need to handle them here. + if accept_filename(name): + files.append(name) + continue + elif os.path.islink(path): + # eliminate symbolic links + path = os.path.realpath(path) + + # we must have a directory here + if accept_dirname(name): + subdirs.append(name) + # this subdir is added for recursion + directories.insert(0, path) + + # here we got all subdirs and files filtered, we can + # call the callback function if directory is not empty + if subdirs or files: + callback(rootdirectory, directory, subdirs, files) + + @classmethod + def populate_directory_manifests( + cls, directories, filename, pattern=None, ignore=(), overwrite=False + ): + """ + walks directories and writes manifests of name `filename` in-place; + returns `cls` instance populated with the given manifests + + filename -- filename of manifests to write + pattern -- shell pattern (glob) or patterns of filenames to match + ignore -- directory names to ignore + overwrite -- whether to overwrite existing files of given name + """ + + manifest_dict = {} + + if os.path.basename(filename) != filename: + raise IOError("filename should not include directory name") + + # no need to hit directories more than once + _directories = directories + directories = [] + for directory in _directories: + if directory not in directories: + directories.append(directory) + + def callback(directory, dirpath, dirnames, filenames): + """write a manifest for each directory""" + + manifest_path = os.path.join(dirpath, filename) + if (dirnames or filenames) and not ( + os.path.exists(manifest_path) and overwrite + ): + with open(manifest_path, "w") as manifest: + for dirname in dirnames: + print( + "[include:%s]" % os.path.join(dirname, filename), + file=manifest, + ) + for _filename in filenames: + print("[%s]" % _filename, file=manifest) + + # add to list of manifests + manifest_dict.setdefault(directory, manifest_path) + + # walk the directories to gather files + cls._walk_directories(directories, callback, pattern=pattern, ignore=ignore) + # get manifests + manifests = [manifest_dict[directory] for directory in _directories] + + # create a `cls` instance with the manifests 
+ return cls(manifests=manifests) + + @classmethod + def from_directories( + cls, directories, pattern=None, ignore=(), write=None, relative_to=None + ): + """ + convert directories to a simple manifest; returns ManifestParser instance + + pattern -- shell pattern (glob) or patterns of filenames to match + ignore -- directory names to ignore + write -- filename or file-like object of manifests to write; + if `None` then a StringIO instance will be created + relative_to -- write paths relative to this path; + if false then the paths are absolute + """ + + # determine output + opened_manifest_file = None # name of opened manifest file + absolute = not relative_to # whether to output absolute path names as names + if isinstance(write, str): + opened_manifest_file = write + write = open(write, "w") + if write is None: + write = StringIO() + + # walk the directories, generating manifests + def callback(directory, dirpath, dirnames, filenames): + # absolute paths + filenames = [os.path.join(dirpath, filename) for filename in filenames] + # ensure new manifest isn't added + filenames = [ + filename for filename in filenames if filename != opened_manifest_file + ] + # normalize paths + if not absolute and relative_to: + filenames = [relpath(filename, relative_to) for filename in filenames] + + # write to manifest + write_content = "\n".join( + ["[{}]".format(denormalize_path(filename)) for filename in filenames] + ) + print(write_content, file=write) + + cls._walk_directories(directories, callback, pattern=pattern, ignore=ignore) + + if opened_manifest_file: + # close file + write.close() + manifests = [opened_manifest_file] + else: + # manifests/write is a file-like object; + # rewind buffer + write.flush() + write.seek(0) + manifests = [write] + + # make a ManifestParser instance + return cls(manifests=manifests) + + +convert = ManifestParser.from_directories + + +class TestManifest(ManifestParser): + """ + apply logic to manifests; this is your integration layer :) + specific harnesses may subclass from this if they need more logic + """ + + def __init__(self, *args, **kwargs): + ManifestParser.__init__(self, *args, **kwargs) + self.filters = filterlist(DEFAULT_FILTERS) + self.last_used_filters = [] + + def active_tests( + self, exists=True, disabled=True, filters=None, noDefaultFilters=False, **values + ): + """ + Run all applied filters on the set of tests. + + :param exists: filter out non-existing tests (default True) + :param disabled: whether to return disabled tests (default True) + :param values: keys and values to filter on (e.g. 
`os = linux mac`) + :param filters: list of filters to apply to the tests + :returns: list of test objects that were not filtered out + """ + tests = [i.copy() for i in self.tests] # shallow copy + + # mark all tests as passing + for test in tests: + test["expected"] = test.get("expected", "pass") + + # make a copy so original doesn't get modified + if noDefaultFilters: + fltrs = [] + else: + fltrs = self.filters[:] + + if exists: + if self.strict: + self.check_missing(tests) + else: + fltrs.append(_exists) + + if not disabled: + fltrs.append(enabled) + + if filters: + fltrs += filters + + self.last_used_filters = fltrs[:] + for fn in fltrs: + tests = fn(tests, values) + return list(tests) + + def test_paths(self): + return [test["path"] for test in self.active_tests()] + + def fmt_filters(self, filters=None): + filters = filters or self.last_used_filters + names = [] + for f in filters: + if isinstance(f, types.FunctionType): + names.append(f.__name__) + else: + names.append(str(f)) + return ", ".join(names) diff --git a/testing/mozbase/manifestparser/manifestparser/toml.py b/testing/mozbase/manifestparser/manifestparser/toml.py new file mode 100644 index 0000000000..e028a4b0d7 --- /dev/null +++ b/testing/mozbase/manifestparser/manifestparser/toml.py @@ -0,0 +1,321 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +import io +import os +import re + +from .ini import combine_fields + +__all__ = ["read_toml", "alphabetize_toml_str", "add_skip_if", "sort_paths"] + +FILENAME_REGEX = r"^([A-Za-z0-9_./-]*)([Bb][Uu][Gg])([-_]*)([0-9]+)([A-Za-z0-9_./-]*)$" +DEFAULT_SECTION = "DEFAULT" + + +def sort_paths_keyfn(k): + sort_paths_keyfn.rx = getattr(sort_paths_keyfn, "rx", None) # static + if sort_paths_keyfn.rx is None: + sort_paths_keyfn.rx = re.compile(FILENAME_REGEX) + name = str(k) + if name == DEFAULT_SECTION: + return "" + m = sort_paths_keyfn.rx.findall(name) + if len(m) == 1 and len(m[0]) == 5: + prefix = m[0][0] # text before "Bug" + bug = m[0][1] # the word "Bug" + underbar = m[0][2] # underbar or dash (optional) + num = m[0][3] # the bug id + suffix = m[0][4] # text after the bug id + name = f"{prefix}{bug.lower()}{underbar}{int(num):09d}{suffix}" + return name + return name + + +def sort_paths(paths): + """ + Returns a list of paths (tests) in a manifest in alphabetical order. + Ensures DEFAULT is first and filenames with a bug number are + in the proper order. 
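The directory importers and TestManifest.active_tests() above are typically used together: from_directories() (exposed as convert()) builds a manifest from a directory tree, and active_tests() filters the resulting tests against a set of key/value pairs. A minimal usage sketch, not part of the patch itself; the paths, manifest name and filter values are invented for illustration:

from manifestparser import TestManifest, convert

# Build an in-memory manifest from a directory tree; convert() is the
# ManifestParser.from_directories classmethod.
parser = convert(["tests/unit"], pattern="test_*.js", relative_to="tests/unit")
print([t["name"] for t in parser.tests])

# To write manifest files alongside the tests instead, use
# ManifestParser.populate_directory_manifests(["tests/unit"], filename="manifest.toml").

# TestManifest layers filtering on top; the keyword values are matched
# against each test's skip-if/run-if expressions by the default filters.
manifest = TestManifest(manifests=("manifest.toml",))
active = manifest.active_tests(exists=False, disabled=False, os="linux", debug=True)
for test in active:
    print(test["path"], test["expected"])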
+ """ + return sorted(paths, key=sort_paths_keyfn) + + +def parse_toml_str(contents): + """ + Parse TOML contents using toml + """ + import toml + + error = None + manifest = None + try: + manifest = toml.loads(contents) + except toml.TomlDecodeError as pe: + error = str(pe) + return error, manifest + + +def parse_tomlkit_str(contents): + """ + Parse TOML contents using tomlkit + """ + import tomlkit + from tomlkit.exceptions import TOMLKitError + + error = None + manifest = None + try: + manifest = tomlkit.parse(contents) + except TOMLKitError as pe: + error = str(pe) + return error, manifest + + +def read_toml( + fp, + defaults=None, + default=DEFAULT_SECTION, + _comments=None, + _separators=None, + strict=True, + handle_defaults=True, + document=False, +): + """ + read a .toml file and return a list of [(section, values)] + - fp : file pointer or path to read + - defaults : default set of variables + - default : name of the section for the default section + - comments : characters that if they start a line denote a comment + - separators : strings that denote key, value separation in order + - strict : whether to be strict about parsing + - handle_defaults : whether to incorporate defaults into each section + - document: read TOML with tomlkit and return source in test["document"] + """ + + # variables + defaults = defaults or {} + default_section = {} + sections = [] + if isinstance(fp, str): + filename = fp + fp = io.open(fp, encoding="utf-8") + elif hasattr(fp, "name"): + filename = fp.name + else: + filename = "unknown" + contents = fp.read() + inline_comment_rx = re.compile(r"\s#.*$") + + if document: # Use tomlkit to parse the file contents + error, manifest = parse_tomlkit_str(contents) + else: + error, manifest = parse_toml_str(contents) + if error: + raise IOError(f"Error parsing TOML manifest file {filename}: {error}") + + # handle each section of the manifest + for section in manifest.keys(): + current_section = {} + for key in manifest[section].keys(): + val = manifest[section][key] + if isinstance(val, bool): # must coerce to lowercase string + if val: + val = "true" + else: + val = "false" + elif isinstance(val, list): + new_vals = "" + for v in val: + if len(new_vals) > 0: + new_vals += os.linesep + new_val = str(v).strip() # coerce to str + comment_found = inline_comment_rx.search(new_val) + if comment_found: + new_val = new_val[0 : comment_found.span()[0]] + if " = " in new_val: + raise Exception( + f"Should not assign in {key} condition for {section}" + ) + new_vals += new_val + val = new_vals + else: + val = str(val).strip() # coerce to str + comment_found = inline_comment_rx.search(val) + if comment_found: + val = val[0 : comment_found.span()[0]] + if " = " in val: + raise Exception( + f"Should not assign in {key} condition for {section}" + ) + current_section[key] = val + if section.lower() == default.lower(): + default_section = current_section + # DEFAULT does NOT appear in the output + else: + sections.append((section, current_section)) + + # merge global defaults with the DEFAULT section + defaults = combine_fields(defaults, default_section) + if handle_defaults: + # merge combined defaults into each section + sections = [(i, combine_fields(defaults, j)) for i, j in sections] + + if not document: + manifest = None + return sections, defaults, manifest + + +def alphabetize_toml_str(manifest): + """ + Will take a TOMLkit manifest document (i.e. 
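A short sketch of how read_toml() and sort_paths() above behave; this is illustrative only and not part of the patch, and the file name is invented:

from manifestparser.toml import read_toml, sort_paths

# read_toml() accepts a path or file object and returns the parsed sections,
# the merged defaults, and (only when document=True) the tomlkit document.
sections, defaults, _doc = read_toml("example.toml")
for name, values in sections:
    print(name, values.get("skip-if", ""))

# sort_paths() keeps DEFAULT first and zero-pads bug numbers, so a section
# for bug 9 sorts before one for bug 100:
print(sort_paths(["test_bug100.js", "DEFAULT", "test_bug9.js"]))
# -> ['DEFAULT', 'test_bug9.js', 'test_bug100.js']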
from a previous invocation + of read_toml(..., document=True) and accessing the document + from mp.source_documents[filename]) and return it as a string + in sorted order by section (i.e. test file name, taking bug ids into consideration). + """ + + from tomlkit import document, dumps, table + from tomlkit.items import Table + + preamble = "" + new_manifest = document() + first_section = False + sections = {} + + for k, v in manifest.body: + if k is None: + preamble += v.as_string() + continue + if not isinstance(v, Table): + raise Exception(f"MP TOML illegal keyval in preamble: {k} = {v}") + section = None + if not first_section: + if k == DEFAULT_SECTION: + new_manifest.add(k, v) + else: + new_manifest.add(DEFAULT_SECTION, table()) + first_section = True + else: + values = v.items() + if len(values) == 1: + for kk, vv in values: + if isinstance(vv, Table): # unquoted, dotted key + section = f"{k}.{kk}" + sections[section] = vv + if section is None: + section = str(k).strip("'\"") + sections[section] = v + + if not first_section: + new_manifest.add(DEFAULT_SECTION, table()) + + for section in sort_paths([k for k in sections.keys() if k != DEFAULT_SECTION]): + new_manifest.add(section, sections[section]) + + manifest_str = dumps(new_manifest) + # tomlkit fixups + manifest_str = preamble + manifest_str.replace('"",]', "]") + while manifest_str.endswith("\n\n"): + manifest_str = manifest_str[:-1] + return manifest_str + + +def _simplify_comment(comment): + """Remove any leading #, but preserve leading whitespace in comment""" + + length = len(comment) + i = 0 + j = -1 # remove exactly one space + while i < length and comment[i] in " #": + i += 1 + if comment[i] == " ": + j += 1 + comment = comment[i:] + if j > 0: + comment = " " * j + comment + return comment.rstrip() + + +def add_skip_if(manifest, filename, condition, bug=None): + """ + Will take a TOMLkit manifest document (i.e. from a previous invocation + of read_toml(..., document=True) and accessing the document + from mp.source_documents[filename]) and return it as a string + in sorted order by section (i.e. test file name, taking bug ids into consideration). 
+ """ + from tomlkit import array + from tomlkit.items import Comment, String, Whitespace + + if filename not in manifest: + raise Exception(f"TOML manifest does not contain section: {filename}") + keyvals = manifest[filename] + first = None + first_comment = "" + skip_if = None + existing = False # this condition is already present + if "skip-if" in keyvals: + skip_if = keyvals["skip-if"] + if len(skip_if) == 1: + for e in skip_if._iter_items(): + if not first: + if not isinstance(e, Whitespace): + first = e.as_string().strip('"') + else: + c = e.as_string() + if c != ",": + first_comment += c + if skip_if.trivia is not None: + first_comment += skip_if.trivia.comment + mp_array = array() + if skip_if is None: # add the first one line entry to the table + mp_array.add_line(condition, indent="", add_comma=False, newline=False) + if bug is not None: + mp_array.comment(bug) + skip_if = {"skip-if": mp_array} + keyvals.update(skip_if) + else: + if first is not None: + if first == condition: + existing = True + if first_comment is not None: + mp_array.add_line( + first, indent=" ", comment=_simplify_comment(first_comment) + ) + else: + mp_array.add_line(first, indent=" ") + if len(skip_if) > 1: + e_condition = None + e_comment = None + for e in skip_if._iter_items(): + if isinstance(e, String): + if e_condition is not None: + if e_comment is not None: + mp_array.add_line( + e_condition, indent=" ", comment=e_comment + ) + e_comment = None + else: + mp_array.add_line(e_condition, indent=" ") + e_condition = None + if len(e) > 0: + e_condition = e.as_string().strip('"') + if e_condition == condition: + existing = True + elif isinstance(e, Comment): + e_comment = _simplify_comment(e.as_string()) + if e_condition is not None: + if e_comment is not None: + mp_array.add_line(e_condition, indent=" ", comment=e_comment) + else: + mp_array.add_line(e_condition, indent=" ") + if not existing: + if bug is not None: + mp_array.add_line(condition, indent=" ", comment=bug) + else: + mp_array.add_line(condition, indent=" ") + mp_array.add_line("", indent="") # fixed in write_toml_str + skip_if = {"skip-if": mp_array} + del keyvals["skip-if"] + keyvals.update(skip_if) diff --git a/testing/mozbase/manifestparser/manifestparser/util.py b/testing/mozbase/manifestparser/manifestparser/util.py new file mode 100644 index 0000000000..6cfe57de5c --- /dev/null +++ b/testing/mozbase/manifestparser/manifestparser/util.py @@ -0,0 +1,51 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import ast +import os + + +def normsep(path): + """ + Normalize path separators, by using forward slashes instead of whatever + :py:const:`os.sep` is. + """ + if os.sep != "/": + # Python 2 is happy to do things like byte_string.replace(u'foo', + # u'bar'), but not Python 3. + if isinstance(path, bytes): + path = path.replace(os.sep.encode("ascii"), b"/") + else: + path = path.replace(os.sep, "/") + if os.altsep and os.altsep != "/": + if isinstance(path, bytes): + path = path.replace(os.altsep.encode("ascii"), b"/") + else: + path = path.replace(os.altsep, "/") + return path + + +def evaluate_list_from_string(list_string): + """ + This is a utility function for converting a string obtained from a manifest + into a list. If the string is not a valid list when converted, an error will be + raised from `ast.eval_literal`. 
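add_skip_if() and alphabetize_toml_str() above operate on a tomlkit document, i.e. the third value returned by read_toml(..., document=True). A minimal editing sketch, not part of the patch; the manifest path, test name, condition and bug reference are all invented for the example:

from manifestparser.toml import add_skip_if, alphabetize_toml_str, read_toml

path = "browser.toml"
sections, defaults, doc = read_toml(path, document=True)
# Append a condition to the test's skip-if array (created if absent),
# annotated with a trailing comment.
add_skip_if(doc, "test_foo.html", "os == 'win' && debug", bug="Bug 1234567")
with open(path, "w") as fh:
    # Re-serialize with DEFAULT first and sections in bug-aware sorted order.
    fh.write(alphabetize_toml_str(doc))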
For example, you can convert entries like this + into a list: + ``` + test_settings= + ["hello", "world"], + [1, 10, 100], + values= + 5, + 6, + 7, + 8, + ``` + """ + parts = [ + x.strip(",") + for x in list_string.strip(",").replace("\r", "").split("\n") + if x.strip() + ] + return ast.literal_eval("[" + ",".join(parts) + "]") diff --git a/testing/mozbase/manifestparser/setup.py b/testing/mozbase/manifestparser/setup.py new file mode 100644 index 0000000000..57dea37d08 --- /dev/null +++ b/testing/mozbase/manifestparser/setup.py @@ -0,0 +1,38 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +from setuptools import setup + +PACKAGE_NAME = "manifestparser" +PACKAGE_VERSION = "2.4.0" + +DEPS = [ + "mozlog >= 6.0", + "toml >= 0.10.2", + "tomlkit >= 0.12.3", +] +setup( + name=PACKAGE_NAME, + version=PACKAGE_VERSION, + description="Library to create and manage test manifests", + long_description="see https://firefox-source-docs.mozilla.org/mozbase/index.html", + classifiers=[ + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.5", + ], + # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers + keywords="mozilla manifests", + author="Mozilla Automation and Testing Team", + author_email="tools@lists.mozilla.org", + url="https://wiki.mozilla.org/Auto-tools/Projects/Mozbase", + license="MPL", + zip_safe=False, + packages=["manifestparser"], + install_requires=DEPS, + entry_points=""" + [console_scripts] + manifestparser = manifestparser.cli:main + """, +) diff --git a/testing/mozbase/manifestparser/tests/broken-skip-if.toml b/testing/mozbase/manifestparser/tests/broken-skip-if.toml new file mode 100644 index 0000000000..c8b6b19998 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/broken-skip-if.toml @@ -0,0 +1,4 @@ +[DEFAULT] +skip-if = [ + "os = 'win'", +] diff --git a/testing/mozbase/manifestparser/tests/comment-example.toml b/testing/mozbase/manifestparser/tests/comment-example.toml new file mode 100644 index 0000000000..8562d83ef4 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/comment-example.toml @@ -0,0 +1,11 @@ +# See https://bugzilla.mozilla.org/show_bug.cgi?id=813674 + +["test_0180_fileInUse_xp_win_complete.js"] +["test_0181_fileInUse_xp_win_partial.js"] +["test_0182_rmrfdirFileInUse_xp_win_complete.js"] +["test_0183_rmrfdirFileInUse_xp_win_partial.js"] +["test_0184_fileInUse_xp_win_complete.js"] +["test_0185_fileInUse_xp_win_partial.js"] +["test_0186_rmrfdirFileInUse_xp_win_complete.js"] +["test_0187_rmrfdirFileInUse_xp_win_partial.js"] +# [test_0202_app_launch_apply_update_dirlocked.js] # Test disabled, bug 757632 diff --git a/testing/mozbase/manifestparser/tests/default-skipif.toml b/testing/mozbase/manifestparser/tests/default-skipif.toml new file mode 100644 index 0000000000..e986f29b8c --- /dev/null +++ b/testing/mozbase/manifestparser/tests/default-skipif.toml @@ -0,0 +1,32 @@ +[DEFAULT] +skip-if = [ + "os == 'win' && debug", # a pesky comment +] + +[test1] +skip-if = [ + "debug", +] + +[test2] +skip-if = [ + "os == 'linux'", +] + +[test3] +skip-if = [ + "os == 'win'", +] + +[test4] +skip-if = [ + "os == 'win' && debug", +] + +[test5] +foo = "bar" + +[test6] +skip-if = [ + "debug # a second pesky inline comment", # inline comments are discouraged +] diff --git a/testing/mozbase/manifestparser/tests/default-subsuite.toml 
b/testing/mozbase/manifestparser/tests/default-subsuite.toml new file mode 100644 index 0000000000..3c897c05e4 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/default-subsuite.toml @@ -0,0 +1,5 @@ +[test1] +subsuite = "baz" + +[test2] +subsuite = "foo" diff --git a/testing/mozbase/manifestparser/tests/default-suppfiles.toml b/testing/mozbase/manifestparser/tests/default-suppfiles.toml new file mode 100644 index 0000000000..52dd7c68a7 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/default-suppfiles.toml @@ -0,0 +1,8 @@ +[DEFAULT] +support-files = "foo.js" # a comment + +[test7] +[test8] +support-files = "bar.js" # another comment +[test9] +foo = "bar" diff --git a/testing/mozbase/manifestparser/tests/edit-manifest-after.toml b/testing/mozbase/manifestparser/tests/edit-manifest-after.toml new file mode 100644 index 0000000000..1e3099b008 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/edit-manifest-after.toml @@ -0,0 +1,37 @@ +# This is an example of comment at the top of a manifest + +[DEFAULT] + +["bug_3.js"] +# This is a comment about Bug 3 +# DO NOT ADD MORE TESTS HERE +skip-if = [ + "os == 'linux'", + "verify", # Bug 33333 +] + +["bug_20.js"] +skip-if = [ + "os == 'mac'", # Bug 20 + "os == 'windows'", # Bug 20 +] + +["bug_100.js"] +skip-if = [ + "debug", # Bug 100 + "apple_catalina", # Bug 200 +] + +["test_bar.html"] +skip-if = [ + "os == 'mac'", # Bug 111 + "os == 'linux'", # Bug 222 + "os == 'win'", # Bug 333 + "tsan", # Bug 444 +] + +["test_foo.html"] +skip-if = [ + "os == 'mac' && !debug", # bug 31415 + "os == 'mac' && debug", +] diff --git a/testing/mozbase/manifestparser/tests/edit-manifest-before.toml b/testing/mozbase/manifestparser/tests/edit-manifest-before.toml new file mode 100644 index 0000000000..bd48666903 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/edit-manifest-before.toml @@ -0,0 +1,23 @@ +# This is an example of comment at the top of a manifest + +["bug_100.js"] +skip-if = [ + "debug", # Bug 100 +] + +["bug_3.js"] +# This is a comment about Bug 3 +skip-if = ["os == 'linux'"] +# DO NOT ADD MORE TESTS HERE + +['bug_20.js'] + +["test_foo.html"] +skip-if = ["os == 'mac' && !debug"] # bug 31415 + +["test_bar.html"] +skip-if = [ + "os == 'mac'", # Bug 111 + "os == 'linux'", # Bug 222 + "os == 'win'", # Bug 333 +] diff --git a/testing/mozbase/manifestparser/tests/filter-example.toml b/testing/mozbase/manifestparser/tests/filter-example.toml new file mode 100644 index 0000000000..044470e4cc --- /dev/null +++ b/testing/mozbase/manifestparser/tests/filter-example.toml @@ -0,0 +1,20 @@ +# illustrate test filters based on various categories + +[windowstest] +skip-if = [ + "os != 'win'", +] + +[fleem] +skip-if = [ + "os == 'mac'", +] + +[linuxtest] +skip-if = [ + "os == 'mac'", + "os == 'win'", +] +fail-if = [ + "os == 'mac'", +] diff --git a/testing/mozbase/manifestparser/tests/fleem b/testing/mozbase/manifestparser/tests/fleem new file mode 100644 index 0000000000..744817b823 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/fleem @@ -0,0 +1 @@ +# dummy spot for "fleem" test diff --git a/testing/mozbase/manifestparser/tests/include-example.toml b/testing/mozbase/manifestparser/tests/include-example.toml new file mode 100644 index 0000000000..e8865d4915 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/include-example.toml @@ -0,0 +1,11 @@ +[DEFAULT] +foo = "bar" + +["include:include/bar.toml"] + +[fleem] + +["include:include/foo.toml"] +red = "roses" +blue = "violets" +yellow = "daffodils" diff --git 
a/testing/mozbase/manifestparser/tests/include-invalid.toml b/testing/mozbase/manifestparser/tests/include-invalid.toml new file mode 100644 index 0000000000..35534e3e90 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/include-invalid.toml @@ -0,0 +1 @@ +["include:invalid.toml"] diff --git a/testing/mozbase/manifestparser/tests/include/bar.ini b/testing/mozbase/manifestparser/tests/include/bar.ini new file mode 100644 index 0000000000..bcb312d1db --- /dev/null +++ b/testing/mozbase/manifestparser/tests/include/bar.ini @@ -0,0 +1,4 @@ +[DEFAULT] +foo = fleem + +[crash-handling] \ No newline at end of file diff --git a/testing/mozbase/manifestparser/tests/include/bar.toml b/testing/mozbase/manifestparser/tests/include/bar.toml new file mode 100644 index 0000000000..b6fb12e3fd --- /dev/null +++ b/testing/mozbase/manifestparser/tests/include/bar.toml @@ -0,0 +1,4 @@ +[DEFAULT] +foo = "fleem" + +[crash-handling] diff --git a/testing/mozbase/manifestparser/tests/include/crash-handling b/testing/mozbase/manifestparser/tests/include/crash-handling new file mode 100644 index 0000000000..8e19a63751 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/include/crash-handling @@ -0,0 +1 @@ +# dummy spot for "crash-handling" test diff --git a/testing/mozbase/manifestparser/tests/include/flowers b/testing/mozbase/manifestparser/tests/include/flowers new file mode 100644 index 0000000000..a25acfbe21 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/include/flowers @@ -0,0 +1 @@ +# dummy spot for "flowers" test diff --git a/testing/mozbase/manifestparser/tests/include/foo.ini b/testing/mozbase/manifestparser/tests/include/foo.ini new file mode 100644 index 0000000000..cfc90ace83 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/include/foo.ini @@ -0,0 +1,5 @@ +[DEFAULT] +blue = ocean + +[flowers] +yellow = submarine \ No newline at end of file diff --git a/testing/mozbase/manifestparser/tests/include/foo.toml b/testing/mozbase/manifestparser/tests/include/foo.toml new file mode 100644 index 0000000000..ac2454e31d --- /dev/null +++ b/testing/mozbase/manifestparser/tests/include/foo.toml @@ -0,0 +1,5 @@ +[DEFAULT] +blue = "ocean" + +[flowers] +yellow = "submarine" diff --git a/testing/mozbase/manifestparser/tests/just-defaults.toml b/testing/mozbase/manifestparser/tests/just-defaults.toml new file mode 100644 index 0000000000..cbf1eb1927 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/just-defaults.toml @@ -0,0 +1,2 @@ +[DEFAULT] +foo = "bar" diff --git a/testing/mozbase/manifestparser/tests/manifest.toml b/testing/mozbase/manifestparser/tests/manifest.toml new file mode 100644 index 0000000000..bb992ad9af --- /dev/null +++ b/testing/mozbase/manifestparser/tests/manifest.toml @@ -0,0 +1,23 @@ +[DEFAULT] +subsuite = "mozbase" + +["test_chunking.py"] + +["test_convert_directory.py"] + +["test_convert_symlinks.py"] +disabled = "https://bugzilla.mozilla.org/show_bug.cgi?id=920938" + +["test_default_overrides.py"] + +["test_expressionparser.py"] + +["test_filters.py"] + +["test_manifestparser.py"] + +["test_read_ini.py"] + +["test_testmanifest.py"] + +["test_util.py"] diff --git a/testing/mozbase/manifestparser/tests/missing-path.toml b/testing/mozbase/manifestparser/tests/missing-path.toml new file mode 100644 index 0000000000..919d8e04da --- /dev/null +++ b/testing/mozbase/manifestparser/tests/missing-path.toml @@ -0,0 +1,2 @@ +[foo] +[bar] diff --git a/testing/mozbase/manifestparser/tests/mozmill-example.toml b/testing/mozbase/manifestparser/tests/mozmill-example.toml new 
file mode 100644 index 0000000000..6ea81d102e --- /dev/null +++ b/testing/mozbase/manifestparser/tests/mozmill-example.toml @@ -0,0 +1,80 @@ +["testAddons/testDisableEnablePlugin.js"] +["testAddons/testGetAddons.js"] +["testAddons/testSearchAddons.js"] +["testAwesomeBar/testAccessLocationBar.js"] +["testAwesomeBar/testCheckItemHighlight.js"] +["testAwesomeBar/testEscapeAutocomplete.js"] +["testAwesomeBar/testFaviconInAutocomplete.js"] +["testAwesomeBar/testGoButton.js"] +["testAwesomeBar/testLocationBarSearches.js"] +["testAwesomeBar/testPasteLocationBar.js"] +["testAwesomeBar/testSuggestHistoryBookmarks.js"] +["testAwesomeBar/testVisibleItemsMax.js"] +["testBookmarks/testAddBookmarkToMenu.js"] +["testCookies/testDisableCookies.js"] +["testCookies/testEnableCookies.js"] +["testCookies/testRemoveAllCookies.js"] +["testCookies/testRemoveCookie.js"] +["testDownloading/testCloseDownloadManager.js"] +["testDownloading/testDownloadStates.js"] +["testDownloading/testOpenDownloadManager.js"] +["testFindInPage/testFindInPage.js"] +["testFormManager/testAutoCompleteOff.js"] +["testFormManager/testBasicFormCompletion.js"] +["testFormManager/testClearFormHistory.js"] +["testFormManager/testDisableFormManager.js"] +["testGeneral/testGoogleSuggestions.js"] +["testGeneral/testStopReloadButtons.js"] +["testInstallation/testBreakpadInstalled.js"] +["testLayout/testNavigateFTP.js"] +["testPasswordManager/testPasswordNotSaved.js"] +["testPasswordManager/testPasswordSavedAndDeleted.js"] +["testPopups/testPopupsAllowed.js"] +["testPopups/testPopupsBlocked.js"] +["testPreferences/testPaneRetention.js"] +["testPreferences/testPreferredLanguage.js"] +["testPreferences/testRestoreHomepageToDefault.js"] +["testPreferences/testSetToCurrentPage.js"] +["testPreferences/testSwitchPanes.js"] +["testPrivateBrowsing/testAboutPrivateBrowsing.js"] +["testPrivateBrowsing/testCloseWindow.js"] +["testPrivateBrowsing/testDisabledElements.js"] +["testPrivateBrowsing/testDisabledPermissions.js"] +["testPrivateBrowsing/testDownloadManagerClosed.js"] +["testPrivateBrowsing/testGeolocation.js"] +["testPrivateBrowsing/testStartStopPBMode.js"] +["testPrivateBrowsing/testTabRestoration.js"] +["testPrivateBrowsing/testTabsDismissedOnStop.js"] +["testSearch/testAddMozSearchProvider.js"] +["testSearch/testFocusAndSearch.js"] +["testSearch/testGetMoreSearchEngines.js"] +["testSearch/testOpenSearchAutodiscovery.js"] +["testSearch/testRemoveSearchEngine.js"] +["testSearch/testReorderSearchEngines.js"] +["testSearch/testRestoreDefaults.js"] +["testSearch/testSearchSelection.js"] +["testSearch/testSearchSuggestions.js"] +["testSecurity/testBlueLarry.js"] +["testSecurity/testDefaultPhishingEnabled.js"] +["testSecurity/testDefaultSecurityPrefs.js"] +["testSecurity/testEncryptedPageWarning.js"] +["testSecurity/testGreenLarry.js"] +["testSecurity/testGreyLarry.js"] +["testSecurity/testIdentityPopupOpenClose.js"] +["testSecurity/testSSLDisabledErrorPage.js"] +["testSecurity/testSafeBrowsingNotificationBar.js"] +["testSecurity/testSafeBrowsingWarningPages.js"] +["testSecurity/testSecurityInfoViaMoreInformation.js"] +["testSecurity/testSecurityNotification.js"] +["testSecurity/testSubmitUnencryptedInfoWarning.js"] +["testSecurity/testUnknownIssuer.js"] +["testSecurity/testUntrustedConnectionErrorPage.js"] +["testSessionStore/testUndoTabFromContextMenu.js"] +["testTabbedBrowsing/testBackgroundTabScrolling.js"] +["testTabbedBrowsing/testCloseTab.js"] +["testTabbedBrowsing/testNewTab.js"] +["testTabbedBrowsing/testNewWindow.js"] 
+["testTabbedBrowsing/testOpenInBackground.js"] +["testTabbedBrowsing/testOpenInForeground.js"] +["testTechnicalTools/testAccessPageInfoDialog.js"] +["testToolbar/testBackForwardButtons.js"] diff --git a/testing/mozbase/manifestparser/tests/mozmill-restart-example.toml b/testing/mozbase/manifestparser/tests/mozmill-restart-example.toml new file mode 100644 index 0000000000..5e08bdb45e --- /dev/null +++ b/testing/mozbase/manifestparser/tests/mozmill-restart-example.toml @@ -0,0 +1,26 @@ +[DEFAULT] +type = "restart" + +["restartTests/testExtensionInstallUninstall/test2.js"] +foo = "bar" + +["restartTests/testExtensionInstallUninstall/test1.js"] +foo = "baz" + +["restartTests/testExtensionInstallUninstall/test3.js"] +["restartTests/testSoftwareUpdateAutoProxy/test2.js"] +["restartTests/testSoftwareUpdateAutoProxy/test1.js"] +["restartTests/testPrimaryPassword/test1.js"] +["restartTests/testExtensionInstallGetAddons/test2.js"] +["restartTests/testExtensionInstallGetAddons/test1.js"] +["restartTests/testMultipleExtensionInstallation/test2.js"] +["restartTests/testMultipleExtensionInstallation/test1.js"] +["restartTests/testThemeInstallUninstall/test2.js"] +["restartTests/testThemeInstallUninstall/test1.js"] +["restartTests/testThemeInstallUninstall/test3.js"] +["restartTests/testDefaultBookmarks/test1.js"] +["softwareUpdate/testFallbackUpdate/test2.js"] +["softwareUpdate/testFallbackUpdate/test1.js"] +["softwareUpdate/testFallbackUpdate/test3.js"] +["softwareUpdate/testDirectUpdate/test2.js"] +["softwareUpdate/testDirectUpdate/test1.js"] diff --git a/testing/mozbase/manifestparser/tests/no-tests.toml b/testing/mozbase/manifestparser/tests/no-tests.toml new file mode 100644 index 0000000000..cbf1eb1927 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/no-tests.toml @@ -0,0 +1,2 @@ +[DEFAULT] +foo = "bar" diff --git a/testing/mozbase/manifestparser/tests/parent/include/first/manifest.ini b/testing/mozbase/manifestparser/tests/parent/include/first/manifest.ini new file mode 100644 index 0000000000..828525c18f --- /dev/null +++ b/testing/mozbase/manifestparser/tests/parent/include/first/manifest.ini @@ -0,0 +1,3 @@ +[parent:../manifest.ini] + +[testFirst.js] diff --git a/testing/mozbase/manifestparser/tests/parent/include/first/manifest.toml b/testing/mozbase/manifestparser/tests/parent/include/first/manifest.toml new file mode 100644 index 0000000000..e58d36b8f5 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/parent/include/first/manifest.toml @@ -0,0 +1,3 @@ +["parent:../manifest.ini"] + +['testFirst.js'] diff --git a/testing/mozbase/manifestparser/tests/parent/include/manifest.ini b/testing/mozbase/manifestparser/tests/parent/include/manifest.ini new file mode 100644 index 0000000000..fb9756d6af --- /dev/null +++ b/testing/mozbase/manifestparser/tests/parent/include/manifest.ini @@ -0,0 +1,8 @@ +[DEFAULT] +top = data + +[include:first/manifest.ini] +disabled = YES + +[include:second/manifest.ini] +disabled = NO diff --git a/testing/mozbase/manifestparser/tests/parent/include/manifest.toml b/testing/mozbase/manifestparser/tests/parent/include/manifest.toml new file mode 100644 index 0000000000..e48011f5fb --- /dev/null +++ b/testing/mozbase/manifestparser/tests/parent/include/manifest.toml @@ -0,0 +1,8 @@ +[DEFAULT] +top = "data" + +["include:first/manifest.ini"] +disabled = "YES" + +["include:second/manifest.ini"] +disabled = "NO" diff --git a/testing/mozbase/manifestparser/tests/parent/include/second/manifest.ini 
b/testing/mozbase/manifestparser/tests/parent/include/second/manifest.ini new file mode 100644 index 0000000000..31f0537566 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/parent/include/second/manifest.ini @@ -0,0 +1,3 @@ +[parent:../manifest.ini] + +[testSecond.js] diff --git a/testing/mozbase/manifestparser/tests/parent/include/second/manifest.toml b/testing/mozbase/manifestparser/tests/parent/include/second/manifest.toml new file mode 100644 index 0000000000..1990ee13ab --- /dev/null +++ b/testing/mozbase/manifestparser/tests/parent/include/second/manifest.toml @@ -0,0 +1,3 @@ +["parent:../manifest.ini"] + +['testSecond.js'] diff --git a/testing/mozbase/manifestparser/tests/parent/level_1/level_1.ini b/testing/mozbase/manifestparser/tests/parent/level_1/level_1.ini new file mode 100644 index 0000000000..ac7c370c3e --- /dev/null +++ b/testing/mozbase/manifestparser/tests/parent/level_1/level_1.ini @@ -0,0 +1,5 @@ +[DEFAULT] +x = level_1 + +[test_1] +[test_2] diff --git a/testing/mozbase/manifestparser/tests/parent/level_1/level_1.toml b/testing/mozbase/manifestparser/tests/parent/level_1/level_1.toml new file mode 100644 index 0000000000..13e92e1eaf --- /dev/null +++ b/testing/mozbase/manifestparser/tests/parent/level_1/level_1.toml @@ -0,0 +1,5 @@ +[DEFAULT] +x = "level_1" + +[test_1] +[test_2] diff --git a/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_2.ini b/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_2.ini new file mode 100644 index 0000000000..ada6a510d7 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_2.ini @@ -0,0 +1,3 @@ +[parent:../level_1.ini] + +[test_2] diff --git a/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_2.toml b/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_2.toml new file mode 100644 index 0000000000..5e78db3b2e --- /dev/null +++ b/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_2.toml @@ -0,0 +1,3 @@ +["parent:../level_1.ini"] + +[test_2] diff --git a/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/level_3.ini b/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/level_3.ini new file mode 100644 index 0000000000..2edd647fcc --- /dev/null +++ b/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/level_3.ini @@ -0,0 +1,3 @@ +[parent:../level_2.ini] + +[test_3] diff --git a/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/level_3.toml b/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/level_3.toml new file mode 100644 index 0000000000..ff3e0a466a --- /dev/null +++ b/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/level_3.toml @@ -0,0 +1,3 @@ +["parent:../level_2.ini"] + +[test_3] diff --git a/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/level_3_default.ini b/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/level_3_default.ini new file mode 100644 index 0000000000..d6aae60ae1 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/level_3_default.ini @@ -0,0 +1,6 @@ +[parent:../level_2.ini] + +[DEFAULT] +x = level_3 + +[test_3] diff --git a/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/level_3_default.toml b/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/level_3_default.toml new file mode 100644 index 0000000000..786139d888 --- /dev/null +++ 
b/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/level_3_default.toml @@ -0,0 +1,6 @@ +["parent:../level_2.ini"] + +[DEFAULT] +x = "level_3" + +[test_3] diff --git a/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/test_3 b/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/test_3 new file mode 100644 index 0000000000..f5de587529 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/parent/level_1/level_2/level_3/test_3 @@ -0,0 +1 @@ +# dummy spot for "test_3" test diff --git a/testing/mozbase/manifestparser/tests/parent/level_1/level_2/test_2 b/testing/mozbase/manifestparser/tests/parent/level_1/level_2/test_2 new file mode 100644 index 0000000000..5b77e04f31 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/parent/level_1/level_2/test_2 @@ -0,0 +1 @@ +# dummy spot for "test_2" test diff --git a/testing/mozbase/manifestparser/tests/parent/level_1/test_1 b/testing/mozbase/manifestparser/tests/parent/level_1/test_1 new file mode 100644 index 0000000000..dccbf04e4d --- /dev/null +++ b/testing/mozbase/manifestparser/tests/parent/level_1/test_1 @@ -0,0 +1 @@ +# dummy spot for "test_1" test diff --git a/testing/mozbase/manifestparser/tests/parent/root/dummy b/testing/mozbase/manifestparser/tests/parent/root/dummy new file mode 100644 index 0000000000..e69de29bb2 diff --git a/testing/mozbase/manifestparser/tests/parse-error.toml b/testing/mozbase/manifestparser/tests/parse-error.toml new file mode 100644 index 0000000000..93b2bad268 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/parse-error.toml @@ -0,0 +1 @@ +xyz = "123" diff --git a/testing/mozbase/manifestparser/tests/path-example.toml b/testing/mozbase/manifestparser/tests/path-example.toml new file mode 100644 index 0000000000..fcd4967082 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/path-example.toml @@ -0,0 +1,2 @@ +[foo] +path = "fleem" diff --git a/testing/mozbase/manifestparser/tests/relative-path.toml b/testing/mozbase/manifestparser/tests/relative-path.toml new file mode 100644 index 0000000000..032f699fd3 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/relative-path.toml @@ -0,0 +1,5 @@ +[foo] +path = "../fleem" + +[bar] +path = "../testsSIBLING/example" diff --git a/testing/mozbase/manifestparser/tests/subsuite.toml b/testing/mozbase/manifestparser/tests/subsuite.toml new file mode 100644 index 0000000000..1fc81cf837 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/subsuite.toml @@ -0,0 +1,13 @@ +[test1] +subsuite='bar,foo=="bar"' # this has a comment + +[test2] +subsuite='bar,foo=="bar"' + +[test3] +subsuite='baz' + +[test4] +[test5] +[test6] +subsuite='bar,foo=="szy" || foo=="bar"' diff --git a/testing/mozbase/manifestparser/tests/test_chunking.py b/testing/mozbase/manifestparser/tests/test_chunking.py new file mode 100644 index 0000000000..87b30fa6c7 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/test_chunking.py @@ -0,0 +1,308 @@ +#!/usr/bin/env python + +import os +import random +from collections import defaultdict +from itertools import chain +from unittest import TestCase + +import mozunit +from manifestparser.filters import chunk_by_dir, chunk_by_runtime, chunk_by_slice + +here = os.path.dirname(os.path.abspath(__file__)) + + +class ChunkBySlice(TestCase): + """Test chunking related filters""" + + def generate_tests(self, num, disabled=None): + disabled = disabled or [] + tests = [] + for i in range(num): + test = {"name": "test%i" % i} + if i in disabled: + test["disabled"] = "" + tests.append(test) + return 
tests + + def run_all_combos(self, num_tests, disabled=None): + tests = self.generate_tests(num_tests, disabled=disabled) + + for total in range(1, num_tests + 1): + res = [] + res_disabled = [] + for chunk in range(1, total + 1): + f = chunk_by_slice(chunk, total) + res.append(list(f(tests, {}))) + if disabled: + f.disabled = True + res_disabled.append(list(f(tests, {}))) + + lengths = [len([t for t in c if "disabled" not in t]) for c in res] + # the chunk with the most tests should have at most one more test + # than the chunk with the least tests + self.assertLessEqual(max(lengths) - min(lengths), 1) + + # chaining all chunks back together should equal the original list + # of tests + self.assertEqual(list(chain.from_iterable(res)), list(tests)) + + if disabled: + lengths = [len(c) for c in res_disabled] + self.assertLessEqual(max(lengths) - min(lengths), 1) + self.assertEqual(list(chain.from_iterable(res_disabled)), list(tests)) + + def test_chunk_by_slice(self): + chunk = chunk_by_slice(1, 1) + self.assertEqual(list(chunk([], {})), []) + + self.run_all_combos(num_tests=1) + self.run_all_combos(num_tests=10, disabled=[1, 2]) + + num_tests = 67 + disabled = list(i for i in range(num_tests) if i % 4 == 0) + self.run_all_combos(num_tests=num_tests, disabled=disabled) + + def test_two_times_more_chunks_than_tests(self): + # test case for bug 1182817 + tests = self.generate_tests(5) + + total_chunks = 10 + for i in range(1, total_chunks + 1): + # ensure IndexError is not raised + chunk_by_slice(i, total_chunks)(tests, {}) + + +class ChunkByDir(TestCase): + """Test chunking related filters""" + + def generate_tests(self, dirs): + """ + :param dirs: dict of the form, + { : } + """ + i = 0 + for d, num in dirs.items(): + for _ in range(num): + i += 1 + name = "test%i" % i + test = {"name": name, "relpath": os.path.join(d, name)} + yield test + + def run_all_combos(self, dirs): + tests = list(self.generate_tests(dirs)) + + deepest = max(len(t["relpath"].split(os.sep)) - 1 for t in tests) + for depth in range(1, deepest + 1): + + def num_groups(tests): + unique = set() + for rp in [t["relpath"] for t in tests]: + p = rp.split(os.sep) + p = p[: min(depth, len(p) - 1)] + unique.add(os.sep.join(p)) + return len(unique) + + for total in range(1, num_groups(tests) + 1): + res = [] + for this in range(1, total + 1): + f = chunk_by_dir(this, total, depth) + res.append(list(f(tests, {}))) + + lengths = list(map(num_groups, res)) + # the chunk with the most dirs should have at most one more + # dir than the chunk with the least dirs + self.assertLessEqual(max(lengths) - min(lengths), 1) + + all_chunks = list(chain.from_iterable(res)) + # chunk_by_dir will mess up order, but chained chunks should + # contain all of the original tests and be the same length + self.assertEqual(len(all_chunks), len(tests)) + for t in tests: + self.assertIn(t, all_chunks) + + def test_chunk_by_dir(self): + chunk = chunk_by_dir(1, 1, 1) + self.assertEqual(list(chunk([], {})), []) + + dirs = { + "a": 2, + } + self.run_all_combos(dirs) + + dirs = { + "": 1, + "foo": 1, + "bar": 0, + "/foobar": 1, + } + self.run_all_combos(dirs) + + dirs = { + "a": 1, + "b": 1, + "a/b": 2, + "a/c": 1, + } + self.run_all_combos(dirs) + + dirs = { + "a": 5, + "a/b": 4, + "a/b/c": 7, + "a/b/c/d": 1, + "a/b/c/e": 3, + "b/c": 2, + "b/d": 5, + "b/d/e": 6, + "c": 8, + "c/d/e/f/g/h/i/j/k/l": 5, + "c/d/e/f/g/i/j/k/l/m/n": 2, + "c/e": 1, + } + self.run_all_combos(dirs) + + +class ChunkByRuntime(TestCase): + """Test chunking related filters""" + + def 
generate_tests(self, dirs): + """ + :param dirs: dict of the form, + { : } + """ + i = 0 + for d, num in dirs.items(): + for _ in range(num): + i += 1 + name = "test%i" % i + manifest = os.path.join(d, "manifest.toml") + test = { + "name": name, + "relpath": os.path.join(d, name), + "manifest": manifest, + "manifest_relpath": manifest, + } + yield test + + def get_runtimes(self, tests): + runtimes = defaultdict(int) + for test in tests: + runtimes[test["manifest_relpath"]] += random.randint(0, 100) + return runtimes + + def chunk_by_round_robin(self, tests, total, runtimes): + tests_by_manifest = [] + for manifest, runtime in runtimes.items(): + mtests = [t for t in tests if t["manifest_relpath"] == manifest] + tests_by_manifest.append((runtime, mtests)) + tests_by_manifest.sort(key=lambda x: x[0], reverse=False) + + chunks = [[] for i in range(total)] + d = 1 # direction + i = 0 + for runtime, batch in tests_by_manifest: + chunks[i].extend(batch) + + # "draft" style (last pick goes first in the next round) + if (i == 0 and d == -1) or (i == total - 1 and d == 1): + d = -d + else: + i += d + + # make sure this test algorithm is valid + all_chunks = list(chain.from_iterable(chunks)) + self.assertEqual(len(all_chunks), len(tests)) + for t in tests: + self.assertIn(t, all_chunks) + return chunks + + def run_all_combos(self, dirs): + tests = list(self.generate_tests(dirs)) + runtimes = self.get_runtimes(tests) + + for total in range(1, len(dirs) + 1): + chunks = [] + for this in range(1, total + 1): + f = chunk_by_runtime(this, total, runtimes) + ret = list(f(tests, {})) + chunks.append(ret) + + # chunk_by_runtime will mess up order, but chained chunks should + # contain all of the original tests and be the same length + all_chunks = list(chain.from_iterable(chunks)) + self.assertEqual(len(all_chunks), len(tests)) + for t in tests: + self.assertIn(t, all_chunks) + + # calculate delta between slowest and fastest chunks + def runtime_delta(chunks): + totals = [] + for chunk in chunks: + manifests = set([t["manifest_relpath"] for t in chunk]) + total = sum(runtimes[m] for m in manifests) + totals.append(total) + return max(totals) - min(totals) + + delta = runtime_delta(chunks) + + # redo the chunking a second time using a round robin style + # algorithm + chunks = self.chunk_by_round_robin(tests, total, runtimes) + # sanity check the round robin algorithm + all_chunks = list(chain.from_iterable(chunks)) + self.assertEqual(len(all_chunks), len(tests)) + for t in tests: + self.assertIn(t, all_chunks) + + # since chunks will never have exactly equal runtimes, it's hard + # to tell if they were chunked optimally. Make sure it at least + # beats a naive round robin approach. 
+ self.assertLessEqual(delta, runtime_delta(chunks)) + + def test_chunk_by_runtime(self): + random.seed(42) + + chunk = chunk_by_runtime(1, 1, {}) + self.assertEqual(list(chunk([], {})), []) + + dirs = { + "a": 2, + } + self.run_all_combos(dirs) + + dirs = { + "": 1, + "foo": 1, + "bar": 0, + "/foobar": 1, + } + self.run_all_combos(dirs) + + dirs = { + "a": 1, + "b": 1, + "a/b": 2, + "a/c": 1, + } + self.run_all_combos(dirs) + + dirs = { + "a": 5, + "a/b": 4, + "a/b/c": 7, + "a/b/c/d": 1, + "a/b/c/e": 3, + "b/c": 2, + "b/d": 5, + "b/d/e": 6, + "c": 8, + "c/d/e/f/g/h/i/j/k/l": 5, + "c/d/e/f/g/i/j/k/l/m/n": 2, + "c/e": 1, + } + self.run_all_combos(dirs) + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/manifestparser/tests/test_convert_directory.py b/testing/mozbase/manifestparser/tests/test_convert_directory.py new file mode 100755 index 0000000000..cebb804ec1 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/test_convert_directory.py @@ -0,0 +1,277 @@ +#!/usr/bin/env python + +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +import os +import shutil +import tempfile +import unittest + +import mozunit +from manifestparser import ManifestParser, convert + +here = os.path.dirname(os.path.abspath(__file__)) + +# In some cases tempfile.mkdtemp() may returns a path which contains +# symlinks. Some tests here will then break, as the manifestparser.convert +# function returns paths that does not contains symlinks. +# +# Workaround is to use the following function, if absolute path of temp dir +# must be compared. + + +def create_realpath_tempdir(): + """ + Create a tempdir without symlinks. + """ + return os.path.realpath(tempfile.mkdtemp()) + + +class TestDirectoryConversion(unittest.TestCase): + """test conversion of a directory tree to a manifest structure""" + + def create_stub(self, directory=None): + """stub out a directory with files in it""" + + files = ("foo", "bar", "fleem") + if directory is None: + directory = create_realpath_tempdir() + for i in files: + open(os.path.join(directory, i), "w").write(i) + subdir = os.path.join(directory, "subdir") + os.mkdir(subdir) + open(os.path.join(subdir, "subfile"), "w").write("baz") + return directory + + def test_directory_to_manifest(self): + """ + Test our ability to convert a static directory structure to a + manifest. 
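The chunking filters exercised above are normally handed to TestManifest.active_tests() so that each CI job runs only its slice of a suite. A minimal sketch, not part of the patch; the manifest path, chunk numbers and filter values are invented:

from manifestparser import TestManifest
from manifestparser.filters import chunk_by_slice

manifest = TestManifest(manifests=("manifest.toml",))
this_chunk, total_chunks = 2, 4
tests = manifest.active_tests(
    exists=False,
    disabled=False,
    filters=[chunk_by_slice(this_chunk, total_chunks)],
    os="linux",  # values consumed by the default skip-if/run-if filters
)
print("chunk %d/%d runs %d tests" % (this_chunk, total_chunks, len(tests)))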
+ """ + + # create a stub directory + stub = self.create_stub() + try: + stub = stub.replace(os.path.sep, "/") + self.assertTrue(os.path.exists(stub) and os.path.isdir(stub)) + + # Make a manifest for it + manifest = convert([stub]) + out_tmpl = """[%(stub)s/bar] + +[%(stub)s/fleem] + +[%(stub)s/foo] + +[%(stub)s/subdir/subfile] + +""" # noqa + self.assertEqual(str(manifest), out_tmpl % dict(stub=stub)) + except BaseException: + raise + finally: + shutil.rmtree(stub) # cleanup + + def test_convert_directory_manifests_in_place(self): + """ + keep the manifests in place + """ + + stub = self.create_stub() + try: + ManifestParser.populate_directory_manifests([stub], filename="manifest.ini") + self.assertEqual( + sorted(os.listdir(stub)), + ["bar", "fleem", "foo", "manifest.ini", "subdir"], + ) + parser = ManifestParser() + parser.read(os.path.join(stub, "manifest.ini")) + self.assertEqual( + [i["name"] for i in parser.tests], ["subfile", "bar", "fleem", "foo"] + ) + parser = ManifestParser() + parser.read(os.path.join(stub, "subdir", "manifest.ini")) + self.assertEqual(len(parser.tests), 1) + self.assertEqual(parser.tests[0]["name"], "subfile") + except BaseException: + raise + finally: + shutil.rmtree(stub) + + def test_convert_directory_manifests_in_place_toml(self): + """ + keep the manifests in place (TOML) + """ + + stub = self.create_stub() + try: + ManifestParser.populate_directory_manifests([stub], filename="manifest.ini") + self.assertEqual( + sorted(os.listdir(stub)), + ["bar", "fleem", "foo", "manifest.ini", "subdir"], + ) + parser = ManifestParser(use_toml=True) + parser.read(os.path.join(stub, "manifest.ini")) + self.assertEqual( + [i["name"] for i in parser.tests], ["subfile", "bar", "fleem", "foo"] + ) + parser = ManifestParser(use_toml=True) + parser.read(os.path.join(stub, "subdir", "manifest.ini")) + self.assertEqual(len(parser.tests), 1) + self.assertEqual(parser.tests[0]["name"], "subfile") + except BaseException: + raise + finally: + shutil.rmtree(stub) + + def test_manifest_ignore(self): + """test manifest `ignore` parameter for ignoring directories""" + + stub = self.create_stub() + try: + ManifestParser.populate_directory_manifests( + [stub], filename="manifest.ini", ignore=("subdir",) + ) + parser = ManifestParser(use_toml=False) + parser.read(os.path.join(stub, "manifest.ini")) + self.assertEqual([i["name"] for i in parser.tests], ["bar", "fleem", "foo"]) + self.assertFalse( + os.path.exists(os.path.join(stub, "subdir", "manifest.ini")) + ) + except BaseException: + raise + finally: + shutil.rmtree(stub) + + def test_manifest_ignore_toml(self): + """test manifest `ignore` parameter for ignoring directories (TOML)""" + + stub = self.create_stub() + try: + ManifestParser.populate_directory_manifests( + [stub], filename="manifest.ini", ignore=("subdir",) + ) + parser = ManifestParser(use_toml=True) + parser.read(os.path.join(stub, "manifest.ini")) + self.assertEqual([i["name"] for i in parser.tests], ["bar", "fleem", "foo"]) + self.assertFalse( + os.path.exists(os.path.join(stub, "subdir", "manifest.ini")) + ) + except BaseException: + raise + finally: + shutil.rmtree(stub) + + def test_pattern(self): + """test directory -> manifest with a file pattern""" + + stub = self.create_stub() + try: + parser = convert([stub], pattern="f*", relative_to=stub) + self.assertEqual([i["name"] for i in parser.tests], ["fleem", "foo"]) + + # test multiple patterns + parser = convert([stub], pattern=("f*", "s*"), relative_to=stub) + self.assertEqual( + [i["name"] for i in parser.tests], 
["fleem", "foo", "subdir/subfile"] + ) + except BaseException: + raise + finally: + shutil.rmtree(stub) + + def test_update(self): + """ + Test our ability to update tests from a manifest and a directory of + files + """ + + # boilerplate + tempdir = create_realpath_tempdir() + for i in range(10): + open(os.path.join(tempdir, str(i)), "w").write(str(i)) + + # otherwise empty directory with a manifest file + newtempdir = create_realpath_tempdir() + manifest_file = os.path.join(newtempdir, "manifest.ini") + manifest_contents = str(convert([tempdir], relative_to=tempdir)) + with open(manifest_file, "w") as f: + f.write(manifest_contents) + + # get the manifest + manifest = ManifestParser(manifests=(manifest_file,), use_toml=False) + + # All of the tests are initially missing: + paths = [str(i) for i in range(10)] + self.assertEqual([i["name"] for i in manifest.missing()], paths) + + # But then we copy one over: + self.assertEqual(manifest.get("name", name="1"), ["1"]) + manifest.update(tempdir, name="1") + self.assertEqual(sorted(os.listdir(newtempdir)), ["1", "manifest.ini"]) + + # Update that one file and copy all the "tests": + open(os.path.join(tempdir, "1"), "w").write("secret door") + manifest.update(tempdir) + self.assertEqual( + sorted(os.listdir(newtempdir)), + ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "manifest.ini"], + ) + self.assertEqual( + open(os.path.join(newtempdir, "1")).read().strip(), "secret door" + ) + + # clean up: + shutil.rmtree(tempdir) + shutil.rmtree(newtempdir) + + def test_update_toml(self): + """ + Test our ability to update tests from a manifest and a directory of + files (TOML) + """ + + # boilerplate + tempdir = create_realpath_tempdir() + for i in range(10): + open(os.path.join(tempdir, str(i)), "w").write(str(i)) + + # otherwise empty directory with a manifest file + newtempdir = create_realpath_tempdir() + manifest_file = os.path.join(newtempdir, "manifest.toml") + manifest_contents = str(convert([tempdir], relative_to=tempdir)) + with open(manifest_file, "w") as f: + f.write(manifest_contents) + + # get the manifest + manifest = ManifestParser(manifests=(manifest_file,), use_toml=True) + + # All of the tests are initially missing: + paths = [str(i) for i in range(10)] + self.assertEqual([i["name"] for i in manifest.missing()], paths) + + # But then we copy one over: + self.assertEqual(manifest.get("name", name="1"), ["1"]) + manifest.update(tempdir, name="1") + self.assertEqual(sorted(os.listdir(newtempdir)), ["1", "manifest.toml"]) + + # Update that one file and copy all the "tests": + open(os.path.join(tempdir, "1"), "w").write("secret door") + manifest.update(tempdir) + self.assertEqual( + sorted(os.listdir(newtempdir)), + ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "manifest.toml"], + ) + self.assertEqual( + open(os.path.join(newtempdir, "1")).read().strip(), "secret door" + ) + + # clean up: + shutil.rmtree(tempdir) + shutil.rmtree(newtempdir) + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/manifestparser/tests/test_convert_symlinks.py b/testing/mozbase/manifestparser/tests/test_convert_symlinks.py new file mode 100755 index 0000000000..61054c8b78 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/test_convert_symlinks.py @@ -0,0 +1,137 @@ +#!/usr/bin/env python + +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. 
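The test_update cases above exercise ManifestParser.missing() and update(), which copy the files a manifest lists from a source directory into the manifest's own directory. A minimal sketch of that workflow, not part of the patch; the paths are invented:

from manifestparser import ManifestParser

parser = ManifestParser(manifests=("dest/manifest.toml",))
print([t["name"] for t in parser.missing()])  # listed in the manifest, absent on disk
parser.update("src")                          # copy the listed files over from src/
print([t["name"] for t in parser.missing()])  # should now be empty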
+ +import os +import shutil +import tempfile +import unittest + +import mozunit +from manifestparser import ManifestParser, convert + + +class TestSymlinkConversion(unittest.TestCase): + """ + test conversion of a directory tree with symlinks to a manifest structure + """ + + def create_stub(self, directory=None): + """stub out a directory with files in it""" + + files = ("foo", "bar", "fleem") + if directory is None: + directory = tempfile.mkdtemp() + for i in files: + open(os.path.join(directory, i), "w").write(i) + subdir = os.path.join(directory, "subdir") + os.mkdir(subdir) + open(os.path.join(subdir, "subfile"), "w").write("baz") + return directory + + def test_relpath(self): + """test convert `relative_to` functionality""" + + oldcwd = os.getcwd() + stub = self.create_stub() + try: + # subdir with in-memory manifest + files = ["../bar", "../fleem", "../foo", "subfile"] + subdir = os.path.join(stub, "subdir") + os.chdir(subdir) + parser = convert([stub], relative_to=".") + self.assertEqual([i["name"] for i in parser.tests], files) + except BaseException: + raise + finally: + shutil.rmtree(stub) + os.chdir(oldcwd) + + @unittest.skipIf( + not hasattr(os, "symlink"), "symlinks unavailable on this platform" + ) + def test_relpath_symlink(self): + """ + Ensure `relative_to` works in a symlink. + Not available on windows. + """ + + oldcwd = os.getcwd() + workspace = tempfile.mkdtemp() + try: + tmpdir = os.path.join(workspace, "directory") + os.makedirs(tmpdir) + linkdir = os.path.join(workspace, "link") + os.symlink(tmpdir, linkdir) + self.create_stub(tmpdir) + + # subdir with in-memory manifest + files = ["../bar", "../fleem", "../foo", "subfile"] + subdir = os.path.join(linkdir, "subdir") + os.chdir(os.path.realpath(subdir)) + for directory in (tmpdir, linkdir): + parser = convert([directory], relative_to=".") + self.assertEqual([i["name"] for i in parser.tests], files) + finally: + shutil.rmtree(workspace) + os.chdir(oldcwd) + + # a more complicated example + oldcwd = os.getcwd() + workspace = tempfile.mkdtemp() + try: + tmpdir = os.path.join(workspace, "directory") + os.makedirs(tmpdir) + linkdir = os.path.join(workspace, "link") + os.symlink(tmpdir, linkdir) + self.create_stub(tmpdir) + files = ["../bar", "../fleem", "../foo", "subfile"] + subdir = os.path.join(linkdir, "subdir") + subsubdir = os.path.join(subdir, "sub") + os.makedirs(subsubdir) + linksubdir = os.path.join(linkdir, "linky") + linksubsubdir = os.path.join(subsubdir, "linky") + os.symlink(subdir, linksubdir) + os.symlink(subdir, linksubsubdir) + for dest in (subdir,): + os.chdir(dest) + for directory in (tmpdir, linkdir): + parser = convert([directory], relative_to=".") + self.assertEqual([i["name"] for i in parser.tests], files) + finally: + shutil.rmtree(workspace) + os.chdir(oldcwd) + + @unittest.skipIf( + not hasattr(os, "symlink"), "symlinks unavailable on this platform" + ) + def test_recursion_symlinks(self): + workspace = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, workspace) + + # create two dirs + os.makedirs(os.path.join(workspace, "dir1")) + os.makedirs(os.path.join(workspace, "dir2")) + + # create cyclical symlinks + os.symlink(os.path.join("..", "dir1"), os.path.join(workspace, "dir2", "ldir1")) + os.symlink(os.path.join("..", "dir2"), os.path.join(workspace, "dir1", "ldir2")) + + # create one file in each dir + open(os.path.join(workspace, "dir1", "f1.txt"), "a").close() + open(os.path.join(workspace, "dir1", "ldir2", "f2.txt"), "a").close() + + data = [] + + def callback(rootdirectory, directory, 
subdirs, files): + for f in files: + data.append(f) + + ManifestParser._walk_directories([workspace], callback) + self.assertEqual(sorted(data), ["f1.txt", "f2.txt"]) + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/manifestparser/tests/test_default_overrides.py b/testing/mozbase/manifestparser/tests/test_default_overrides.py new file mode 100755 index 0000000000..8b648cf6cd --- /dev/null +++ b/testing/mozbase/manifestparser/tests/test_default_overrides.py @@ -0,0 +1,138 @@ +#!/usr/bin/env python + +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +import os +import re +import unittest + +import mozunit +from manifestparser import ManifestParser, combine_fields + +here = os.path.dirname(os.path.abspath(__file__)) + + +def deepstrip(txt): + "Collapses all repeated blanks to one blank, and strips" + return re.sub(r" +", " ", txt).strip() + + +class TestDefaultSkipif(unittest.TestCase): + """Tests applying a skip-if condition in [DEFAULT] and || with the value for the test""" + + def test_defaults_toml(self): + default = os.path.join(here, "default-skipif.toml") + parser = ManifestParser(manifests=(default,), use_toml=True) + for test in parser.tests: + if test["name"] == "test1": + self.assertEqual( + deepstrip(test["skip-if"]), "os == 'win' && debug\ndebug" + ) + elif test["name"] == "test2": + self.assertEqual( + deepstrip(test["skip-if"]), "os == 'win' && debug\nos == 'linux'" + ) + elif test["name"] == "test3": + self.assertEqual( + deepstrip(test["skip-if"]), "os == 'win' && debug\nos == 'win'" + ) + elif test["name"] == "test4": + self.assertEqual( + deepstrip(test["skip-if"]), + "os == 'win' && debug\nos == 'win' && debug", + ) + elif test["name"] == "test5": + self.assertEqual(deepstrip(test["skip-if"]), "os == 'win' && debug") + elif test["name"] == "test6": + self.assertEqual( + deepstrip(test["skip-if"]), "os == 'win' && debug\ndebug" + ) + + +class TestDefaultSupportFiles(unittest.TestCase): + """Tests combining support-files field in [DEFAULT] with the value for a test""" + + def test_defaults_toml(self): + default = os.path.join(here, "default-suppfiles.toml") + parser = ManifestParser(manifests=(default,), use_toml=True) + expected_supp_files = { + "test7": "foo.js", + "test8": "foo.js bar.js", + "test9": "foo.js", + } + for test in parser.tests: + expected = expected_supp_files[test["name"]] + self.assertEqual(test["support-files"], expected) + + +class TestOmitDefaults(unittest.TestCase): + """Tests passing omit-defaults prevents defaults from propagating to definitions.""" + + def test_defaults_toml(self): + manifests = ( + os.path.join(here, "default-suppfiles.toml"), + os.path.join(here, "default-skipif.toml"), + ) + parser = ManifestParser( + manifests=manifests, handle_defaults=False, use_toml=True + ) + expected_supp_files = { + "test8": "bar.js", + } + expected_skip_ifs = { + "test1": "debug", + "test2": "os == 'linux'", + "test3": "os == 'win'", + "test4": "os == 'win' && debug", + "test6": "debug", + } + for test in parser.tests: + for field, expectations in ( + ("support-files", expected_supp_files), + ("skip-if", expected_skip_ifs), + ): + expected = expectations.get(test["name"]) + if not expected: + self.assertNotIn(field, test) + else: + self.assertEqual(test[field].strip(), expected) + + expected_defaults = { + os.path.join(here, "default-suppfiles.toml"): { + "support-files": 
"foo.js", + }, + os.path.join(here, "default-skipif.toml"): { + "skip-if": "os == 'win' && debug", + }, + } + for path, defaults in expected_defaults.items(): + self.assertIn(path, parser.manifest_defaults) + actual_defaults = parser.manifest_defaults[path] + for key, value in defaults.items(): + self.assertIn(key, actual_defaults) + self.assertEqual(value, actual_defaults[key].strip()) + + +class TestSubsuiteDefaults(unittest.TestCase): + """Test that subsuites are handled correctly when managing defaults + outside of the manifest parser.""" + + def test_subsuite_defaults_toml(self): + manifest = os.path.join(here, "default-subsuite.toml") + parser = ManifestParser( + manifests=(manifest,), handle_defaults=False, use_toml=True + ) + expected_subsuites = { + "test1": "baz", + "test2": "foo", + } + defaults = parser.manifest_defaults[manifest] + for test in parser.tests: + value = combine_fields(defaults, test) + self.assertEqual(expected_subsuites[value["name"]], value["subsuite"]) + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/manifestparser/tests/test_expressionparser.py b/testing/mozbase/manifestparser/tests/test_expressionparser.py new file mode 100755 index 0000000000..2d0eb1be07 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/test_expressionparser.py @@ -0,0 +1,154 @@ +#!/usr/bin/env python + +import unittest + +import mozunit +from manifestparser import parse + + +class ExpressionParserTest(unittest.TestCase): + """Test the conditional expression parser.""" + + def test_basic(self): + self.assertEqual(parse("1"), 1) + self.assertEqual(parse("100"), 100) + self.assertEqual(parse("true"), True) + self.assertEqual(parse("false"), False) + self.assertEqual("", parse('""')) + self.assertEqual(parse('"foo bar"'), "foo bar") + self.assertEqual(parse("'foo bar'"), "foo bar") + self.assertEqual(parse("foo", foo=1), 1) + self.assertEqual(parse("bar", bar=True), True) + self.assertEqual(parse("abc123", abc123="xyz"), "xyz") + + def test_equality(self): + self.assertTrue(parse("true == true")) + self.assertTrue(parse("false == false")) + self.assertTrue(parse("1 == 1")) + self.assertTrue(parse("100 == 100")) + self.assertTrue(parse('"some text" == "some text"')) + self.assertTrue(parse("true != false")) + self.assertTrue(parse("1 != 2")) + self.assertTrue(parse('"text" != "other text"')) + self.assertTrue(parse("foo == true", foo=True)) + self.assertTrue(parse("foo == 1", foo=1)) + self.assertTrue(parse('foo == "bar"', foo="bar")) + self.assertTrue(parse("foo == bar", foo=True, bar=True)) + self.assertTrue(parse("true == foo", foo=True)) + self.assertTrue(parse("foo != true", foo=False)) + self.assertTrue(parse("foo != 2", foo=1)) + self.assertTrue(parse('foo != "bar"', foo="abc")) + self.assertTrue(parse("foo != bar", foo=True, bar=False)) + self.assertTrue(parse("true != foo", foo=False)) + self.assertTrue(parse("!false")) + + def test_conjunctures(self): + self.assertTrue(parse("true && true")) + self.assertTrue(parse("true || false")) + self.assertFalse(parse("false || false")) + self.assertFalse(parse("true && false")) + self.assertTrue(parse("true || false && false")) + + def test_parentheses(self): + self.assertTrue(parse("(true)")) + self.assertEqual(parse("(10)"), 10) + self.assertEqual(parse('("foo")'), "foo") + self.assertEqual(parse("(foo)", foo=1), 1) + self.assertTrue(parse("(true == true)"), True) + self.assertTrue(parse("(true != false)")) + self.assertTrue(parse("(true && true)")) + self.assertTrue(parse("(true || false)")) + 
self.assertTrue(parse("(true && true || false)")) + self.assertFalse(parse("(true || false) && false")) + self.assertTrue(parse("(true || false) && true")) + self.assertTrue(parse("true && (true || false)")) + self.assertTrue(parse("true && (true || false)")) + self.assertTrue(parse("(true && false) || (true && (true || false))")) + + def test_comments(self): + # comments in expressions work accidentally, via an implementation + # detail - the '#' character doesn't match any of the regular + # expressions we specify as tokens, and thus are ignored. + # However, having explicit tests for them means that should the + # implementation ever change, comments continue to work, even if that + # means a new implementation must handle them explicitly. + self.assertTrue(parse("true == true # it does!")) + self.assertTrue(parse("false == false # it does")) + self.assertTrue(parse("false != true # it doesnt")) + self.assertTrue(parse('"string with #" == "string with #" # really, it does')) + self.assertTrue( + parse('"string with #" != "string with # but not the same" # no match!') + ) + + def test_not(self): + """ + Test the ! operator. + """ + self.assertTrue(parse("!false")) + self.assertTrue(parse("!(false)")) + self.assertFalse(parse("!true")) + self.assertFalse(parse("!(true)")) + self.assertTrue(parse("!true || true)")) + self.assertTrue(parse("true || !true)")) + self.assertFalse(parse("!true && true")) + self.assertFalse(parse("true && !true")) + + def test_lesser_than(self): + """ + Test the < operator. + """ + self.assertTrue(parse("1 < 2")) + self.assertFalse(parse("3 < 2")) + self.assertTrue(parse("false || (1 < 2)")) + self.assertTrue(parse("1 < 2 && true")) + self.assertTrue(parse("true && 1 < 2")) + self.assertTrue(parse("!(5 < 1)")) + self.assertTrue(parse("'abc' < 'def'")) + self.assertFalse(parse("1 < 1")) + self.assertFalse(parse("'abc' < 'abc'")) + + def test_greater_than(self): + """ + Test the > operator. + """ + self.assertTrue(parse("2 > 1")) + self.assertFalse(parse("2 > 3")) + self.assertTrue(parse("false || (2 > 1)")) + self.assertTrue(parse("2 > 1 && true")) + self.assertTrue(parse("true && 2 > 1")) + self.assertTrue(parse("!(1 > 5)")) + self.assertTrue(parse("'def' > 'abc'")) + self.assertFalse(parse("1 > 1")) + self.assertFalse(parse("'abc' > 'abc'")) + + def test_lesser_or_equals_than(self): + """ + Test the <= operator. + """ + self.assertTrue(parse("1 <= 2")) + self.assertFalse(parse("3 <= 2")) + self.assertTrue(parse("false || (1 <= 2)")) + self.assertTrue(parse("1 < 2 && true")) + self.assertTrue(parse("true && 1 <= 2")) + self.assertTrue(parse("!(5 <= 1)")) + self.assertTrue(parse("'abc' <= 'def'")) + self.assertTrue(parse("1 <= 1")) + self.assertTrue(parse("'abc' <= 'abc'")) + + def test_greater_or_equals_than(self): + """ + Test the > operator. 
+ """ + self.assertTrue(parse("2 >= 1")) + self.assertFalse(parse("2 >= 3")) + self.assertTrue(parse("false || (2 >= 1)")) + self.assertTrue(parse("2 >= 1 && true")) + self.assertTrue(parse("true && 2 >= 1")) + self.assertTrue(parse("!(1 >= 5)")) + self.assertTrue(parse("'def' >= 'abc'")) + self.assertTrue(parse("1 >= 1")) + self.assertTrue(parse("'abc' >= 'abc'")) + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/manifestparser/tests/test_filters.py b/testing/mozbase/manifestparser/tests/test_filters.py new file mode 100644 index 0000000000..158741205e --- /dev/null +++ b/testing/mozbase/manifestparser/tests/test_filters.py @@ -0,0 +1,333 @@ +#!/usr/bin/env python + +import os +from copy import deepcopy +from pprint import pprint + +import mozpack.path as mozpath +import mozunit +import pytest +from manifestparser.filters import ( + enabled, + fail_if, + failures, + filterlist, + pathprefix, + run_if, + skip_if, + subsuite, + tags, +) + +here = os.path.dirname(os.path.abspath(__file__)) + + +def test_data_model(): + def foo(x, y): + return x + + def bar(x, y): + return x + + def baz(x, y): + return x + + fl = filterlist() + + fl.extend([foo, bar]) + assert len(fl) == 2 + assert foo in fl + + fl.append(baz) + assert fl[2] == baz + + fl.remove(baz) + assert baz not in fl + + item = fl.pop() + assert item == bar + + assert fl.index(foo) == 0 + + del fl[0] + assert foo not in fl + with pytest.raises(IndexError): + fl[0] + + +def test_add_non_callable_to_list(): + fl = filterlist() + with pytest.raises(TypeError): + fl.append("foo") + + +def test_add_duplicates_to_list(): + def foo(x, y): + return x + + def bar(x, y): + return x + + sub = subsuite("foo") + fl = filterlist([foo, bar, sub]) + assert len(fl) == 3 + assert fl[0] == foo + + with pytest.raises(ValueError): + fl.append(foo) + + with pytest.raises(ValueError): + fl.append(subsuite("bar")) + + +def test_add_two_tags_filters(): + tag1 = tags("foo") + tag2 = tags("bar") + fl = filterlist([tag1]) + + with pytest.raises(ValueError): + fl.append(tag1) + + fl.append(tag2) + assert len(fl) == 2 + + +def test_filters_run_in_order(): + def a(x, y): + return x + + def b(x, y): + return x + + def c(x, y): + return x + + def d(x, y): + return x + + def e(x, y): + return x + + def f(x, y): + return x + + fl = filterlist([a, b]) + fl.append(c) + fl.extend([d, e]) + fl += [f] + assert [i for i in fl] == [a, b, c, d, e, f] + + +@pytest.fixture(scope="module") +def create_tests(): + def inner(*paths, **defaults): + tests = [] + for p in paths: + path = p + if isinstance(path, tuple): + path, kwargs = path + else: + kwargs = {} + + path = mozpath.normpath(path) + manifest = kwargs.pop( + "manifest", + defaults.pop( + "manifest", mozpath.join(mozpath.dirname(path), "manifest.ini") + ), + ) + test = { + "name": mozpath.basename(path), + "path": "/root/" + path, + "relpath": path, + "manifest": "/root/" + manifest, + "manifest_relpath": manifest, + } + test.update(**defaults) + test.update(**kwargs) + tests.append(test) + + # dump tests to stdout for easier debugging on failure + print("The 'create_tests' fixture returned:") + pprint(tests, indent=2) + return tests + + return inner + + +@pytest.fixture +def tests(create_tests): + return create_tests( + "test0", + ("test1", {"skip-if": "foo == 'bar'\nintermittent&&!debug"}), + ("test2", {"run-if": "foo == 'bar'"}), + ("test3", {"fail-if": "foo == 'bar'"}), + ("test4", {"disabled": "some reason"}), + ("test5", {"subsuite": "baz"}), + ("test6", {"subsuite": "baz,foo == 'bar'"}), + 
("test7", {"tags": "foo bar"}), + ( + "test8", + {"skip-if": "\nbaz\nfoo == 'bar'\nfoo == 'baz'\nintermittent && debug"}, + ), + ) + + +def test_skip_if(tests): + ref = deepcopy(tests) + tests = list(skip_if(tests, {})) + assert len(tests) == len(ref) + + tests = deepcopy(ref) + tests = list(skip_if(tests, {"foo": "bar"})) + assert "disabled" in tests[1] + assert "disabled" in tests[8] + + +def test_run_if(tests): + ref = deepcopy(tests) + tests = list(run_if(tests, {})) + assert "disabled" in tests[2] + + tests = deepcopy(ref) + tests = list(run_if(tests, {"foo": "bar"})) + assert "disabled" not in tests[2] + + +def test_fail_if(tests): + ref = deepcopy(tests) + tests = list(fail_if(tests, {})) + assert "expected" not in tests[3] + + tests = deepcopy(ref) + tests = list(fail_if(tests, {"foo": "bar"})) + assert tests[3]["expected"] == "fail" + + +def test_enabled(tests): + ref = deepcopy(tests) + tests = list(enabled(tests, {})) + assert ref[4] not in tests + + +def test_subsuite(tests): + sub1 = subsuite() + sub2 = subsuite("baz") + + ref = deepcopy(tests) + tests = list(sub1(tests, {})) + assert ref[5] not in tests + assert len(tests) == len(ref) - 1 + + tests = deepcopy(ref) + tests = list(sub2(tests, {})) + assert len(tests) == 1 + assert ref[5] in tests + + +def test_subsuite_condition(tests): + sub1 = subsuite() + sub2 = subsuite("baz") + + ref = deepcopy(tests) + + tests = list(sub1(tests, {"foo": "bar"})) + assert ref[5] not in tests + assert ref[6] not in tests + + tests = deepcopy(ref) + tests = list(sub2(tests, {"foo": "bar"})) + assert len(tests) == 2 + assert tests[0]["name"] == "test5" + assert tests[1]["name"] == "test6" + + +def test_tags(tests): + ftags1 = tags([]) + ftags2 = tags(["bar", "baz"]) + + ref = deepcopy(tests) + tests = list(ftags1(tests, {})) + assert len(tests) == 0 + + tests = deepcopy(ref) + tests = list(ftags2(tests, {})) + assert len(tests) == 1 + assert ref[7] in tests + + +def test_failures(tests): + ref = deepcopy(tests) + fail1 = failures("intermittent") + tests = list(fail1(tests, {"intermittent": True, "debug": True})) + assert len(tests) == 1 + + tests = deepcopy(ref) + tests = list(fail1(tests, {"intermittent": True})) + assert len(tests) == 1 + + tests = deepcopy(ref) + tests = list(fail1(tests, {})) + assert len(tests) == 0 + + tests = deepcopy(ref) + tests = list(fail1(tests, {"intermittent": False, "debug": True})) + assert len(tests) == 0 + + +def test_pathprefix(create_tests): + tests = create_tests( + "test0", + "subdir/test1", + "subdir/test2", + ("subdir/test3", {"manifest": "manifest.ini"}), + ( + "other/test4", + { + "manifest": "manifest-common.toml", + "ancestor_manifest": "other/manifest.ini", + }, + ), + ) + + def names(items): + return sorted(i["name"] for i in items) + + # relative directory + prefix = pathprefix("subdir") + filtered = prefix(tests, {}) + assert names(filtered) == ["test1", "test2", "test3"] + + # absolute directory + prefix = pathprefix(["/root/subdir"]) + filtered = prefix(tests, {}) + assert names(filtered) == ["test1", "test2", "test3"] + + # relative manifest + prefix = pathprefix(["subdir/manifest.ini"]) + filtered = prefix(tests, {}) + assert names(filtered) == ["test1", "test2"] + + # absolute manifest + prefix = pathprefix(["/root/subdir/manifest.ini"]) + filtered = prefix(tests, {}) + assert names(filtered) == ["test1", "test2"] + + # mixed test and manifest + prefix = pathprefix(["subdir/test2", "manifest.ini"]) + filtered = prefix(tests, {}) + assert names(filtered) == ["test0", "test2", "test3"] + + # 
relative ancestor manifest + prefix = pathprefix(["other/manifest.ini"]) + filtered = prefix(tests, {}) + assert names(filtered) == ["test4"] + + # absolute ancestor manifest + prefix = pathprefix(["/root/other/manifest.ini"]) + filtered = prefix(tests, {}) + assert names(filtered) == ["test4"] + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/manifestparser/tests/test_manifestparser.py b/testing/mozbase/manifestparser/tests/test_manifestparser.py new file mode 100755 index 0000000000..f1774cfffb --- /dev/null +++ b/testing/mozbase/manifestparser/tests/test_manifestparser.py @@ -0,0 +1,627 @@ +#!/usr/bin/env python + +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +import os +import shutil +import tempfile +import unittest +from io import StringIO + +import manifestparser.toml +import mozunit +from manifestparser import ManifestParser +from tomlkit import TOMLDocument + +here = os.path.dirname(os.path.abspath(__file__)) + + +class TestManifestParser(unittest.TestCase): + """ + Test the manifest parser + + You must have manifestparser installed before running these tests. + Run ``python manifestparser.py setup develop`` with setuptools installed. + """ + + def test_sanity_toml(self): + """Ensure basic parser is sane (TOML)""" + + parser = ManifestParser(use_toml=True) + mozmill_example = os.path.join(here, "mozmill-example.toml") + parser.read(mozmill_example) + tests = parser.tests + self.assertEqual( + len(tests), len(open(mozmill_example).read().strip().splitlines()) + ) + + # Ensure that capitalization and order aren't an issue: + lines = ['["%s"]' % test["name"] for test in tests] + self.assertEqual(lines, open(mozmill_example).read().strip().splitlines()) + + # Show how you select subsets of tests: + mozmill_restart_example = os.path.join(here, "mozmill-restart-example.toml") + parser.read(mozmill_restart_example) + restart_tests = parser.get(type="restart") + self.assertTrue(len(restart_tests) < len(parser.tests)) + self.assertEqual( + len(restart_tests), len(parser.get(manifest=mozmill_restart_example)) + ) + self.assertFalse( + [ + test + for test in restart_tests + if test["manifest"] + != os.path.join(here, "mozmill-restart-example.toml") + ] + ) + self.assertEqual( + parser.get("name", tags=["foo"]), + [ + "restartTests/testExtensionInstallUninstall/test2.js", + "restartTests/testExtensionInstallUninstall/test1.js", + ], + ) + self.assertEqual( + parser.get("name", foo="bar"), + ["restartTests/testExtensionInstallUninstall/test2.js"], + ) + + def test_include(self): + """Illustrate how include works""" + + include_example = os.path.join(here, "include-example.toml") + parser = ManifestParser(manifests=(include_example,), use_toml=False) + + # All of the tests should be included, in order: + self.assertEqual(parser.get("name"), ["crash-handling", "fleem", "flowers"]) + self.assertEqual( + [ + (test["name"], os.path.basename(test["manifest"])) + for test in parser.tests + ], + [ + ("crash-handling", "bar.toml"), + ("fleem", "include-example.toml"), + ("flowers", "foo.toml"), + ], + ) + + # The including manifest is always reported as a part of the generated test object. 
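+        # e.g. "crash-handling" and "flowers" are pulled in through [include:...]
+        # sections, so they carry ancestor_manifest; "fleem" is defined directly
+        # in include-example.toml and carries no such key.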
+ self.assertTrue( + all( + [ + t["ancestor_manifest"] == "include-example.toml" + for t in parser.tests + if t["name"] != "fleem" + ] + ) + ) + + # The manifests should be there too: + self.assertEqual(len(parser.manifests()), 3) + + # We already have the root directory: + self.assertEqual(here, parser.rootdir) + + # DEFAULT values should persist across includes, unless they're + # overwritten. In this example, include-example.toml sets foo=bar, but + # it's overridden to fleem in bar.toml + self.assertEqual(parser.get("name", foo="bar"), ["fleem", "flowers"]) + self.assertEqual(parser.get("name", foo="fleem"), ["crash-handling"]) + + # Passing parameters in the include section allows defining variables in + # the submodule scope: + self.assertEqual(parser.get("name", tags=["red"]), ["flowers"]) + + # However, this should be overridable from the DEFAULT section in the + # included file and that overridable via the key directly connected to + # the test: + self.assertEqual(parser.get(name="flowers")[0]["blue"], "ocean") + self.assertEqual(parser.get(name="flowers")[0]["yellow"], "submarine") + + # You can query multiple times if you need to: + flowers = parser.get(foo="bar") + self.assertEqual(len(flowers), 2) + + # Using the inverse flag should invert the set of tests returned: + self.assertEqual( + parser.get("name", inverse=True, tags=["red"]), ["crash-handling", "fleem"] + ) + + # All of the included tests actually exist: + self.assertEqual([i["name"] for i in parser.missing()], []) + + # Write the output to a manifest: + buffer = StringIO() + parser.write(fp=buffer, global_kwargs={"foo": "bar"}) + expected_output = """[DEFAULT] +foo = bar + +[fleem] + +[include/flowers] +blue = ocean +red = roses +yellow = submarine""" # noqa + + self.assertEqual(buffer.getvalue().strip(), expected_output) + + def test_include_toml(self): + """Illustrate how include works (TOML)""" + + include_example = os.path.join(here, "include-example.toml") + parser = ManifestParser(manifests=(include_example,), use_toml=True) + + # All of the tests should be included, in order: + self.assertEqual(parser.get("name"), ["crash-handling", "fleem", "flowers"]) + self.assertEqual( + [ + (test["name"], os.path.basename(test["manifest"])) + for test in parser.tests + ], + [ + ("crash-handling", "bar.toml"), + ("fleem", "include-example.toml"), + ("flowers", "foo.toml"), + ], + ) + + # The including manifest is always reported as a part of the generated test object. + self.assertTrue( + all( + [ + t["ancestor_manifest"] == "include-example.toml" + for t in parser.tests + if t["name"] != "fleem" + ] + ) + ) + + # The manifests should be there too: + self.assertEqual(len(parser.manifests()), 3) + + # We already have the root directory: + self.assertEqual(here, parser.rootdir) + + # DEFAULT values should persist across includes, unless they're + # overwritten. 
In this example, include-example.toml sets foo=bar, but + # it's overridden to fleem in bar.toml + self.assertEqual(parser.get("name", foo="bar"), ["fleem", "flowers"]) + self.assertEqual(parser.get("name", foo="fleem"), ["crash-handling"]) + + # Passing parameters in the include section allows defining variables in + # the submodule scope: + self.assertEqual(parser.get("name", tags=["red"]), ["flowers"]) + + # However, this should be overridable from the DEFAULT section in the + # included file and that overridable via the key directly connected to + # the test: + self.assertEqual(parser.get(name="flowers")[0]["blue"], "ocean") + self.assertEqual(parser.get(name="flowers")[0]["yellow"], "submarine") + + # You can query multiple times if you need to: + flowers = parser.get(foo="bar") + self.assertEqual(len(flowers), 2) + + # Using the inverse flag should invert the set of tests returned: + self.assertEqual( + parser.get("name", inverse=True, tags=["red"]), ["crash-handling", "fleem"] + ) + + # All of the included tests actually exist: + self.assertEqual([i["name"] for i in parser.missing()], []) + + # Write the output to a manifest: + buffer = StringIO() + parser.write(fp=buffer, global_kwargs={"foo": "bar"}) + expected_output = """[DEFAULT] +foo = bar + +[fleem] + +[include/flowers] +blue = ocean +red = roses +yellow = submarine""" # noqa + + self.assertEqual(buffer.getvalue().strip(), expected_output) + + def test_include_manifest_defaults_toml(self): + """ + Test that manifest_defaults and manifests() are correctly populated + when includes are used. (TOML) + """ + + include_example = os.path.join(here, "include-example.toml") + noinclude_example = os.path.join(here, "just-defaults.toml") + bar_path = os.path.join(here, "include", "bar.toml") + foo_path = os.path.join(here, "include", "foo.toml") + + parser = ManifestParser( + manifests=(include_example, noinclude_example), rootdir=here, use_toml=True + ) + + # Standalone manifests must be appear as-is. + self.assertTrue(include_example in parser.manifest_defaults) + self.assertTrue(noinclude_example in parser.manifest_defaults) + + # Included manifests must only appear together with the parent manifest + # that included the manifest. + self.assertFalse(bar_path in parser.manifest_defaults) + self.assertFalse(foo_path in parser.manifest_defaults) + ancestor_toml = os.path.relpath(include_example, parser.rootdir) + self.assertTrue((ancestor_toml, bar_path) in parser.manifest_defaults) + self.assertTrue((ancestor_toml, foo_path) in parser.manifest_defaults) + + # manifests() must only return file paths (strings). + manifests = parser.manifests() + self.assertEqual(len(manifests), 4) + self.assertIn(foo_path, manifests) + self.assertIn(bar_path, manifests) + self.assertIn(include_example, manifests) + self.assertIn(noinclude_example, manifests) + + def test_include_handle_defaults_False_toml(self): + """ + Test that manifest_defaults and manifests() are correct even when + handle_defaults is set to False. 
(TOML) + """ + manifest = os.path.join(here, "include-example.toml") + foo_path = os.path.join(here, "include", "foo.toml") + + parser = ManifestParser( + manifests=(manifest,), handle_defaults=False, rootdir=here, use_toml=True + ) + ancestor_ini = os.path.relpath(manifest, parser.rootdir) + + self.assertIn(manifest, parser.manifest_defaults) + self.assertNotIn(foo_path, parser.manifest_defaults) + self.assertIn((ancestor_ini, foo_path), parser.manifest_defaults) + self.assertEqual( + parser.manifest_defaults[manifest], + { + "foo": "bar", + "here": here, + }, + ) + self.assertEqual( + parser.manifest_defaults[(ancestor_ini, foo_path)], + { + "here": os.path.join(here, "include"), + "red": "roses", + "blue": "ocean", + "yellow": "daffodils", + }, + ) + + def test_include_repeated_toml(self): + """ + Test that repeatedly included manifests are independent of each other. (TOML) + """ + include_example = os.path.join(here, "include-example.toml") + included_foo = os.path.join(here, "include", "foo.toml") + + # In the expected output, blue and yellow have the values from foo.toml + # (ocean, submarine) instead of the ones from include-example.toml + # (violets, daffodils), because the defaults in the included file take + # precedence over the values from the parent. + include_output = """[include/crash-handling] +foo = fleem + +[fleem] +foo = bar + +[include/flowers] +blue = ocean +foo = bar +red = roses +yellow = submarine + +""" + included_output = """[include/flowers] +blue = ocean +yellow = submarine + +""" + + parser = ManifestParser( + manifests=(include_example, included_foo), rootdir=here, use_toml=True + ) + self.assertEqual( + parser.get("name"), ["crash-handling", "fleem", "flowers", "flowers"] + ) + self.assertEqual( + [ + (test["name"], os.path.basename(test["manifest"])) + for test in parser.tests + ], + [ + ("crash-handling", "bar.toml"), + ("fleem", "include-example.toml"), + ("flowers", "foo.toml"), + ("flowers", "foo.toml"), + ], + ) + self.check_included_repeat( + parser, + parser.tests[3], + parser.tests[2], + "%s%s" % (include_output, included_output), + True, + ) + + # Same tests, but with the load order of the manifests swapped. 
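+        # Loading foo.toml on its own first, and then again via the include in
+        # include-example.toml, should simply prepend an extra, independent
+        # "flowers" entry to the test list.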
+ parser = ManifestParser( + manifests=(included_foo, include_example), rootdir=here, use_toml=True + ) + self.assertEqual( + parser.get("name"), ["flowers", "crash-handling", "fleem", "flowers"] + ) + self.assertEqual( + [ + (test["name"], os.path.basename(test["manifest"])) + for test in parser.tests + ], + [ + ("flowers", "foo.toml"), + ("crash-handling", "bar.toml"), + ("fleem", "include-example.toml"), + ("flowers", "foo.toml"), + ], + ) + self.check_included_repeat( + parser, + parser.tests[0], + parser.tests[3], + "%s%s" % (included_output, include_output), + True, + ) + + def check_included_repeat( + self, parser, isolated_test, included_test, expected_output, use_toml=False + ): + if use_toml: + include_example_filename = "include-example.toml" + foo_filename = "foo.toml" + else: + include_example_filename = "include-example.toml" + foo_filename = "foo.toml" + include_example = os.path.join(here, include_example_filename) + included_foo = os.path.join(here, "include", foo_filename) + ancestor_ini = os.path.relpath(include_example, parser.rootdir) + manifest_default_key = (ancestor_ini, included_foo) + + self.assertFalse("ancestor_manifest" in isolated_test) + self.assertEqual(included_test["ancestor_manifest"], include_example_filename) + + self.assertTrue(include_example in parser.manifest_defaults) + self.assertTrue(included_foo in parser.manifest_defaults) + self.assertTrue(manifest_default_key in parser.manifest_defaults) + self.assertEqual( + parser.manifest_defaults[manifest_default_key], + { + "foo": "bar", + "here": os.path.join(here, "include"), + "red": "roses", + "blue": "ocean", + "yellow": "daffodils", + }, + ) + + buffer = StringIO() + parser.write(fp=buffer) + self.assertEqual(buffer.getvalue(), expected_output) + + def test_invalid_path_toml(self): + """ + Test invalid path should not throw when not strict (TOML) + """ + manifest = os.path.join(here, "include-invalid.toml") + ManifestParser(manifests=(manifest,), strict=False, use_toml=True) + + def test_copy_toml(self): + """Test our ability to copy a set of manifests (TOML)""" + + tempdir = tempfile.mkdtemp() + include_example = os.path.join(here, "include-example.toml") + manifest = ManifestParser(manifests=(include_example,), use_toml=True) + manifest.copy(tempdir) + self.assertEqual( + sorted(os.listdir(tempdir)), ["fleem", "include", "include-example.toml"] + ) + self.assertEqual( + sorted(os.listdir(os.path.join(tempdir, "include"))), + ["bar.toml", "crash-handling", "flowers", "foo.toml"], + ) + from_manifest = ManifestParser(manifests=(include_example,), use_toml=True) + to_manifest = os.path.join(tempdir, "include-example.toml") + to_manifest = ManifestParser(manifests=(to_manifest,), use_toml=True) + self.assertEqual(to_manifest.get("name"), from_manifest.get("name")) + shutil.rmtree(tempdir) + + def test_path_override_toml(self): + """You can override the path in the section too. + This shows that you can use a relative path""" + path_example = os.path.join(here, "path-example.toml") + manifest = ManifestParser(manifests=(path_example,), use_toml=True) + self.assertEqual(manifest.tests[0]["path"], os.path.join(here, "fleem")) + + def test_relative_path_toml(self): + """ + Relative test paths are correctly calculated. 
(TOML) + """ + relative_path = os.path.join(here, "relative-path.toml") + manifest = ManifestParser(manifests=(relative_path,), use_toml=True) + self.assertEqual( + manifest.tests[0]["path"], os.path.join(os.path.dirname(here), "fleem") + ) + self.assertEqual(manifest.tests[0]["relpath"], os.path.join("..", "fleem")) + self.assertEqual( + manifest.tests[1]["relpath"], os.path.join("..", "testsSIBLING", "example") + ) + + def test_path_from_fd(self): + """ + Test paths are left untouched when manifest is a file-like object. + """ + fp = StringIO("[section]\npath=fleem") + manifest = ManifestParser(manifests=(fp,)) + self.assertEqual(manifest.tests[0]["path"], "fleem") + self.assertEqual(manifest.tests[0]["relpath"], "fleem") + self.assertEqual(manifest.tests[0]["manifest"], None) + + def test_comments_toml(self): + """ + ensure comments work, see + https://bugzilla.mozilla.org/show_bug.cgi?id=813674 + (TOML) + """ + comment_example = os.path.join(here, "comment-example.toml") + manifest = ManifestParser(manifests=(comment_example,), use_toml=True) + self.assertEqual(len(manifest.tests), 8) + names = [i["name"] for i in manifest.tests] + self.assertFalse("test_0202_app_launch_apply_update_dirlocked.js" in names) + + def test_verifyDirectory_toml(self): + directory = os.path.join(here, "verifyDirectory") + + # correct manifest + manifest_path = os.path.join(directory, "verifyDirectory.toml") + manifest = ManifestParser(manifests=(manifest_path,), use_toml=True) + missing = manifest.verifyDirectory(directory, extensions=(".js",)) + self.assertEqual(missing, (set(), set())) + + # manifest is missing test_1.js + test_1 = os.path.join(directory, "test_1.js") + manifest_path = os.path.join(directory, "verifyDirectory_incomplete.toml") + manifest = ManifestParser(manifests=(manifest_path,), use_toml=True) + missing = manifest.verifyDirectory(directory, extensions=(".js",)) + self.assertEqual(missing, (set(), set([test_1]))) + + # filesystem is missing test_notappearinginthisfilm.js + missing_test = os.path.join(directory, "test_notappearinginthisfilm.js") + manifest_path = os.path.join(directory, "verifyDirectory_toocomplete.toml") + manifest = ManifestParser(manifests=(manifest_path,), use_toml=True) + missing = manifest.verifyDirectory(directory, extensions=(".js",)) + self.assertEqual(missing, (set([missing_test]), set())) + + def test_just_defaults_toml(self): + """Ensure a manifest with just a DEFAULT section exposes that data. (TOML)""" + + parser = ManifestParser(use_toml=True) + manifest = os.path.join(here, "just-defaults.toml") + parser.read(manifest) + self.assertEqual(len(parser.tests), 0) + self.assertTrue(manifest in parser.manifest_defaults) + self.assertEqual(parser.manifest_defaults[manifest]["foo"], "bar") + + def test_manifest_list_toml(self): + """ + Ensure a manifest with just a DEFAULT section still returns + itself from the manifests() method. (TOML) + """ + + parser = ManifestParser(use_toml=True) + manifest = os.path.join(here, "no-tests.toml") + parser.read(manifest) + self.assertEqual(len(parser.tests), 0) + self.assertTrue(len(parser.manifests()) == 1) + + def test_manifest_with_invalid_condition_toml(self): + """ + Ensure a skip-if or similar condition with an assignment in it + causes errors. 
(TOML) + """ + + parser = ManifestParser(use_toml=True) + manifest = os.path.join(here, "broken-skip-if.toml") + with self.assertRaisesRegex( + Exception, "Should not assign in skip-if condition for DEFAULT" + ): + parser.read(manifest) + + def test_parse_error_toml(self): + """ + Verify handling of a mal-formed TOML file + """ + + parser = ManifestParser(use_toml=True) + manifest = os.path.join(here, "parse-error.toml") + with self.assertRaisesRegex( + Exception, + r".*'str' object has no attribute 'keys'.*", + ): + parser.read(manifest) + + def test_parse_error_tomlkit(self): + """ + Verify handling of a mal-formed TOML file + """ + + parser = ManifestParser(use_toml=True, document=True) + manifest = os.path.join(here, "parse-error.toml") + with self.assertRaisesRegex( + Exception, + r".*'String' object has no attribute 'keys'.*", + ): + parser.read(manifest) + + def test_edit_manifest(self): + """ + Verify reading and writing TOML manifest with tomlkit + """ + parser = ManifestParser(use_toml=True, document=True) + before = "edit-manifest-before.toml" + before_path = os.path.join(here, before) + parser.read(before_path) + assert before_path in parser.source_documents + manifest = parser.source_documents[before_path] + assert manifest is not None + assert isinstance(manifest, TOMLDocument) + + filename = "bug_20.js" + assert filename in manifest + condition1a = "os == 'mac'" + bug = "Bug 20" + manifestparser.toml.add_skip_if(manifest, filename, condition1a, bug) + condition1b = "os == 'windows'" + manifestparser.toml.add_skip_if(manifest, filename, condition1b, bug) + + filename2 = "test_foo.html" + assert filename2 in manifest + condition2 = "os == 'mac' && debug" + manifestparser.toml.add_skip_if(manifest, filename2, condition2) + + filename3 = "test_bar.html" + assert filename3 in manifest + condition3a = "tsan" + bug3a = "Bug 444" + manifestparser.toml.add_skip_if(manifest, filename3, condition3a, bug3a) + condition3b = "os == 'linux'" # pre-existing, should be ignored + bug3b = "Bug 555" + manifestparser.toml.add_skip_if(manifest, filename3, condition3b, bug3b) + + filename4 = "bug_100.js" + assert filename4 in manifest + condition4 = "apple_catalina" + bug4 = "Bug 200" + manifestparser.toml.add_skip_if(manifest, filename4, condition4, bug4) + + filename5 = "bug_3.js" + assert filename5 in manifest + condition5 = "verify" + bug5 = "Bug 33333" + manifestparser.toml.add_skip_if(manifest, filename5, condition5, bug5) + + manifest_str = manifestparser.toml.alphabetize_toml_str(manifest) + after = "edit-manifest-after.toml" + after_path = os.path.join(here, after) + after_str = open(after_path, "r", encoding="utf-8").read() + assert manifest_str == after_str + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/manifestparser/tests/test_read_ini.py b/testing/mozbase/manifestparser/tests/test_read_ini.py new file mode 100755 index 0000000000..0d5a3ee250 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/test_read_ini.py @@ -0,0 +1,134 @@ +#!/usr/bin/env python + +""" +test .ini parsing + +ensure our .ini parser is doing what we want; to be deprecated for +python's standard ConfigParser when 2.7 is reality so OrderedDict +is the default: + +http://docs.python.org/2/library/configparser.html +""" + +from io import StringIO +from textwrap import dedent + +import mozunit +import pytest +from manifestparser import read_ini + + +@pytest.fixture(scope="module") +def parse_manifest(): + def inner(string, **kwargs): + buf = StringIO() + buf.write(dedent(string)) + 
buf.seek(0) + return read_ini(buf, **kwargs)[0] + + return inner + + +def test_inline_comments(parse_manifest): + result = parse_manifest( + """ + [test_felinicity.py] + kittens = true # This test requires kittens + cats = false#but not cats + """ + )[0][1] + + # make sure inline comments get stripped out, but comments without a space in front don't + assert result["kittens"] == "true" + assert result["cats"] == "false#but not cats" + + +def test_line_continuation(parse_manifest): + result = parse_manifest( + """ + [test_caninicity.py] + breeds = + sheppard + retriever + terrier + + [test_cats_and_dogs.py] + cats=yep + dogs= + yep + yep + birds=nope + fish=nope + """ + ) + assert result[0][1]["breeds"].split() == ["sheppard", "retriever", "terrier"] + assert result[1][1]["cats"] == "yep" + assert result[1][1]["dogs"].split() == ["yep", "yep"] + assert result[1][1]["birds"].split() == ["nope", "fish=nope"] + + +def test_dupes_error(parse_manifest): + dupes = """ + [test_dupes.py] + foo = bar + foo = baz + """ + with pytest.raises(AssertionError): + parse_manifest(dupes, strict=True) + + with pytest.raises(AssertionError): + parse_manifest(dupes, strict=False) + + +def test_defaults_handling(parse_manifest): + manifest = """ + [DEFAULT] + flower = rose + skip-if = true + + [test_defaults] + """ + + result = parse_manifest(manifest)[0][1] + assert result["flower"] == "rose" + assert result["skip-if"] == "true" + + result = parse_manifest( + manifest, + defaults={ + "flower": "tulip", + "colour": "pink", + "skip-if": "false", + }, + )[0][1] + assert result["flower"] == "rose" + assert result["colour"] == "pink" + assert result["skip-if"] == "false\ntrue" + + result = parse_manifest(manifest.replace("DEFAULT", "default"))[0][1] + assert result["flower"] == "rose" + assert result["skip-if"] == "true" + + +def test_multiline_skip(parse_manifest): + manifest = """ + [test_multiline_skip] + skip-if = + os == "mac" # bug 123 + os == "linux" && debug # bug 456 + """ + + result = parse_manifest(manifest)[0][1] + assert ( + result["skip-if"].replace("\r\n", "\n") + == dedent( + """ + os == "mac" + os == "linux" && debug + """ + ).rstrip() + ) + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/manifestparser/tests/test_testmanifest.py b/testing/mozbase/manifestparser/tests/test_testmanifest.py new file mode 100644 index 0000000000..d51ec6c088 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/test_testmanifest.py @@ -0,0 +1,125 @@ +#!/usr/bin/env python + +import os +import shutil +import tempfile +import unittest + +import mozunit +from manifestparser import ParseError, TestManifest +from manifestparser.filters import subsuite + +here = os.path.dirname(os.path.abspath(__file__)) + + +class TestTestManifest(unittest.TestCase): + """Test the Test Manifest""" + + def test_testmanifest_toml(self): + # Test filtering based on platform: + filter_example = os.path.join(here, "filter-example.toml") + manifest = TestManifest( + manifests=(filter_example,), strict=False, use_toml=True + ) + self.assertEqual( + [ + i["name"] + for i in manifest.active_tests(os="win", disabled=False, exists=False) + ], + ["windowstest", "fleem"], + ) + self.assertEqual( + [ + i["name"] + for i in manifest.active_tests(os="linux", disabled=False, exists=False) + ], + ["fleem", "linuxtest"], + ) + + # Look for existing tests. 
There is only one: + self.assertEqual([i["name"] for i in manifest.active_tests()], ["fleem"]) + + # You should be able to expect failures: + last = manifest.active_tests(exists=False, os="linux")[-1] + self.assertEqual(last["name"], "linuxtest") + self.assertEqual(last["expected"], "pass") + last = manifest.active_tests(exists=False, os="mac")[-1] + self.assertEqual(last["expected"], "fail") + + def test_missing_paths_toml(self): + """ + Test paths that don't exist raise an exception in strict mode. (TOML) + """ + tempdir = tempfile.mkdtemp() + + missing_path = os.path.join(here, "missing-path.toml") + manifest = TestManifest(manifests=(missing_path,), strict=True, use_toml=True) + self.assertRaises(IOError, manifest.active_tests) + self.assertRaises(IOError, manifest.copy, tempdir) + self.assertRaises(IOError, manifest.update, tempdir) + + shutil.rmtree(tempdir) + + def test_comments_toml(self): + """ + ensure comments work, see + https://bugzilla.mozilla.org/show_bug.cgi?id=813674 + (TOML) + """ + comment_example = os.path.join(here, "comment-example.toml") + manifest = TestManifest(manifests=(comment_example,), use_toml=True) + self.assertEqual(len(manifest.tests), 8) + names = [i["name"] for i in manifest.tests] + self.assertFalse("test_0202_app_launch_apply_update_dirlocked.js" in names) + + def test_manifest_subsuites_toml(self): + """ + test subsuites and conditional subsuites (TOML) + """ + relative_path = os.path.join(here, "subsuite.toml") + manifest = TestManifest(manifests=(relative_path,), use_toml=True) + info = {"foo": "bar"} + + # 6 tests total + tests = manifest.active_tests(exists=False, **info) + self.assertEqual(len(tests), 6) + + # only 3 tests for subsuite bar when foo==bar + tests = manifest.active_tests(exists=False, filters=[subsuite("bar")], **info) + self.assertEqual(len(tests), 3) + + # only 1 test for subsuite baz, regardless of conditions + other = {"something": "else"} + tests = manifest.active_tests(exists=False, filters=[subsuite("baz")], **info) + self.assertEqual(len(tests), 1) + tests = manifest.active_tests(exists=False, filters=[subsuite("baz")], **other) + self.assertEqual(len(tests), 1) + + # 4 tests match when the condition doesn't match (all tests except + # the unconditional subsuite) + info = {"foo": "blah"} + tests = manifest.active_tests(exists=False, filters=[subsuite()], **info) + self.assertEqual(len(tests), 5) + + # test for illegal subsuite value + manifest.tests[0]["subsuite"] = 'subsuite=bar,foo=="bar",type="nothing"' + with self.assertRaises(ParseError): + manifest.active_tests(exists=False, filters=[subsuite("foo")], **info) + + def test_none_and_empty_manifest_toml(self): + """ + Test TestManifest for None and empty manifest, see + https://bugzilla.mozilla.org/show_bug.cgi?id=1087682 + (TOML) + """ + none_manifest = TestManifest(manifests=None, strict=False, use_toml=True) + self.assertEqual(len(none_manifest.test_paths()), 0) + self.assertEqual(len(none_manifest.active_tests()), 0) + + empty_manifest = TestManifest(manifests=[], strict=False) + self.assertEqual(len(empty_manifest.test_paths()), 0) + self.assertEqual(len(empty_manifest.active_tests()), 0) + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/manifestparser/tests/test_util.py b/testing/mozbase/manifestparser/tests/test_util.py new file mode 100644 index 0000000000..f2b37de43c --- /dev/null +++ b/testing/mozbase/manifestparser/tests/test_util.py @@ -0,0 +1,104 @@ +#!/usr/bin/env python + +""" +Test how our utility functions are working. 
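+
+Currently this exercises evaluate_list_from_string(), which turns the
+multi-line string values produced by read_ini() into real Python lists.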
+""" + +from io import StringIO +from textwrap import dedent + +import mozunit +import pytest +from manifestparser import read_ini +from manifestparser.util import evaluate_list_from_string + + +@pytest.fixture(scope="module") +def parse_manifest(): + def inner(string, **kwargs): + buf = StringIO() + buf.write(dedent(string)) + buf.seek(0) + return read_ini(buf, **kwargs)[0] + + return inner + + +@pytest.mark.parametrize( + "test_manifest, expected_list", + [ + [ + """ + [test_felinicity.py] + kittens = true + cats = + "I", + "Am", + "A", + "Cat", + """, + ["I", "Am", "A", "Cat"], + ], + [ + """ + [test_felinicity.py] + kittens = true + cats = + ["I", 1], + ["Am", 2], + ["A", 3], + ["Cat", 4], + """, + [ + ["I", 1], + ["Am", 2], + ["A", 3], + ["Cat", 4], + ], + ], + ], +) +def test_string_to_list_conversion(test_manifest, expected_list, parse_manifest): + parsed_tests = parse_manifest(test_manifest) + assert evaluate_list_from_string(parsed_tests[0][1]["cats"]) == expected_list + + +@pytest.mark.parametrize( + "test_manifest, failure", + [ + [ + """ + # This will fail since the elements are not enlosed in quotes + [test_felinicity.py] + kittens = true + cats = + I, + Am, + A, + Cat, + """, + ValueError, + ], + [ + """ + # This will fail since the syntax is incorrect + [test_felinicity.py] + kittens = true + cats = + ["I", 1, + ["Am", 2, + ["A", 3], + ["Cat", 4], + """, + SyntaxError, + ], + ], +) +def test_string_to_list_conversion_failures(test_manifest, failure, parse_manifest): + parsed_tests = parse_manifest(test_manifest) + with pytest.raises(failure): + evaluate_list_from_string(parsed_tests[0][1]["cats"]) + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/manifestparser/tests/verifyDirectory/subdir/manifest.ini b/testing/mozbase/manifestparser/tests/verifyDirectory/subdir/manifest.ini new file mode 100644 index 0000000000..509ebd62ef --- /dev/null +++ b/testing/mozbase/manifestparser/tests/verifyDirectory/subdir/manifest.ini @@ -0,0 +1 @@ +[test_sub.js] diff --git a/testing/mozbase/manifestparser/tests/verifyDirectory/subdir/manifest.toml b/testing/mozbase/manifestparser/tests/verifyDirectory/subdir/manifest.toml new file mode 100644 index 0000000000..54519cc275 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/verifyDirectory/subdir/manifest.toml @@ -0,0 +1 @@ +["test_sub.js"] diff --git a/testing/mozbase/manifestparser/tests/verifyDirectory/subdir/test_sub.js b/testing/mozbase/manifestparser/tests/verifyDirectory/subdir/test_sub.js new file mode 100644 index 0000000000..df48720d9d --- /dev/null +++ b/testing/mozbase/manifestparser/tests/verifyDirectory/subdir/test_sub.js @@ -0,0 +1 @@ +// test_sub.js diff --git a/testing/mozbase/manifestparser/tests/verifyDirectory/test_1.js b/testing/mozbase/manifestparser/tests/verifyDirectory/test_1.js new file mode 100644 index 0000000000..c5a966f46a --- /dev/null +++ b/testing/mozbase/manifestparser/tests/verifyDirectory/test_1.js @@ -0,0 +1 @@ +// test_1.js diff --git a/testing/mozbase/manifestparser/tests/verifyDirectory/test_2.js b/testing/mozbase/manifestparser/tests/verifyDirectory/test_2.js new file mode 100644 index 0000000000..d8648599c5 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/verifyDirectory/test_2.js @@ -0,0 +1 @@ +// test_2.js diff --git a/testing/mozbase/manifestparser/tests/verifyDirectory/test_3.js b/testing/mozbase/manifestparser/tests/verifyDirectory/test_3.js new file mode 100644 index 0000000000..794bc2c341 --- /dev/null +++ 
b/testing/mozbase/manifestparser/tests/verifyDirectory/test_3.js @@ -0,0 +1 @@ +// test_3.js diff --git a/testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory.ini b/testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory.ini new file mode 100644 index 0000000000..10e0c79c81 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory.ini @@ -0,0 +1,4 @@ +[test_1.js] +[test_2.js] +[test_3.js] +[include:subdir/manifest.ini] diff --git a/testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory.toml b/testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory.toml new file mode 100644 index 0000000000..b18ec4e482 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory.toml @@ -0,0 +1,4 @@ +["test_1.js"] +["test_2.js"] +["test_3.js"] +["include:subdir/manifest.toml"] diff --git a/testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory_incomplete.ini b/testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory_incomplete.ini new file mode 100644 index 0000000000..cde526acfc --- /dev/null +++ b/testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory_incomplete.ini @@ -0,0 +1,3 @@ +[test_2.js] +[test_3.js] +[include:subdir/manifest.ini] diff --git a/testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory_incomplete.toml b/testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory_incomplete.toml new file mode 100644 index 0000000000..d29be9b125 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory_incomplete.toml @@ -0,0 +1,3 @@ +["test_2.js"] +["test_3.js"] +["include:subdir/manifest.toml"] diff --git a/testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory_toocomplete.ini b/testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory_toocomplete.ini new file mode 100644 index 0000000000..88994ae26f --- /dev/null +++ b/testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory_toocomplete.ini @@ -0,0 +1,5 @@ +[test_1.js] +[test_2.js] +[test_3.js] +[test_notappearinginthisfilm.js] +[include:subdir/manifest.ini] diff --git a/testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory_toocomplete.toml b/testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory_toocomplete.toml new file mode 100644 index 0000000000..4c3cd3bb37 --- /dev/null +++ b/testing/mozbase/manifestparser/tests/verifyDirectory/verifyDirectory_toocomplete.toml @@ -0,0 +1,5 @@ +["test_1.js"] +["test_2.js"] +["test_3.js"] +["test_notappearinginthisfilm.js"] +["include:subdir/manifest.toml"] diff --git a/testing/mozbase/moz.build b/testing/mozbase/moz.build new file mode 100644 index 0000000000..8d44dbc852 --- /dev/null +++ b/testing/mozbase/moz.build @@ -0,0 +1,70 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
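+
+# Each manifest listed here is registered with the build system as a Python
+# unit test suite (typically run via `mach python-test`).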
+ +PYTHON_UNITTEST_MANIFESTS += [ + "manifestparser/tests/manifest.toml", + "mozcrash/tests/manifest.toml", + "mozdebug/tests/manifest.toml", + "mozdevice/tests/manifest.toml", + "mozfile/tests/manifest.toml", + "mozgeckoprofiler/tests/manifest.toml", + "mozhttpd/tests/manifest.toml", + "mozinfo/tests/manifest.toml", + "mozinstall/tests/manifest.toml", + "mozleak/tests/manifest.toml", + "mozlog/tests/manifest.toml", + "moznetwork/tests/manifest.toml", + "mozpower/tests/manifest.toml", + "mozprocess/tests/manifest.toml", + "mozprofile/tests/manifest.toml", + "mozproxy/tests/manifest.toml", + "mozrunner/tests/manifest.toml", + "mozsystemmonitor/tests/manifest.toml", + "moztest/tests/manifest.toml", + "mozversion/tests/manifest.toml", +] + +python_modules = [ + "manifestparser", + "mozcrash", + "mozdebug", + "mozdevice", + "mozfile", + "mozgeckoprofiler", + "mozhttpd", + "mozinfo", + "mozinstall", + "mozleak", + "mozlog", + "moznetwork", + "mozpower", + "mozprocess", + "mozprofile", + "mozproxy", + "mozrunner", + "mozscreenshot", + "mozserve", + "mozsystemmonitor", + "moztest", + "mozversion", +] + +TEST_HARNESS_FILES.mozbase += [m + "/**" for m in python_modules] + +TEST_HARNESS_FILES.mozbase += [ + "setup_development.py", +] + +SPHINX_TREES["/mozbase"] = "docs" + +with Files("docs/**"): + SCHEDULES.exclusive = ["docs"] + +with Files("**"): + BUG_COMPONENT = ("Testing", "Mozbase") + +with Files("rust/**"): + BUG_COMPONENT = ("Testing", "Mozbase Rust") diff --git a/testing/mozbase/mozcrash/mozcrash/__init__.py b/testing/mozbase/mozcrash/mozcrash/__init__.py new file mode 100644 index 0000000000..a6dfab2b24 --- /dev/null +++ b/testing/mozbase/mozcrash/mozcrash/__init__.py @@ -0,0 +1,9 @@ +# flake8: noqa +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. +""" +mozcrash is a library for getting a stack trace out of processes that have crashed +and left behind a minidump file using the Google Breakpad library. +""" +from .mozcrash import * diff --git a/testing/mozbase/mozcrash/mozcrash/mozcrash.py b/testing/mozbase/mozcrash/mozcrash/mozcrash.py new file mode 100644 index 0000000000..0589600019 --- /dev/null +++ b/testing/mozbase/mozcrash/mozcrash/mozcrash.py @@ -0,0 +1,865 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. 
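+
+# Overview: CrashInfo scans a dump directory for minidump files and runs
+# minidump-stackwalk over each one; check_for_crashes() and log_crashes()
+# turn the resulting StackInfo records into log output.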
+ +import glob +import json +import os +import re +import shutil +import signal +import subprocess +import sys +import tempfile +import traceback +import zipfile +from collections import namedtuple + +import mozfile +import mozinfo +import mozlog +import six +from redo import retriable + +__all__ = [ + "check_for_crashes", + "check_for_java_exception", + "kill_and_get_minidump", + "log_crashes", + "cleanup_pending_crash_reports", +] + + +StackInfo = namedtuple( + "StackInfo", + [ + "minidump_path", + "signature", + "stackwalk_stdout", + "stackwalk_stderr", + "stackwalk_retcode", + "stackwalk_errors", + "extra", + "process_type", + "pid", + "reason", + "java_stack", + ], +) + + +def get_logger(): + structured_logger = mozlog.get_default_logger("mozcrash") + if structured_logger is None: + return mozlog.unstructured.getLogger("mozcrash") + return structured_logger + + +def check_for_crashes( + dump_directory, + symbols_path=None, + stackwalk_binary=None, + dump_save_path=None, + test_name=None, + quiet=False, + keep=False, +): + """ + Print a stack trace for minidump files left behind by a crashing program. + + `dump_directory` will be searched for minidump files. Any minidump files found will + have `stackwalk_binary` executed on them, with `symbols_path` passed as an extra + argument. + + `stackwalk_binary` should be a path to the minidump-stackwalk binary. + If `stackwalk_binary` is not set, the MINIDUMP_STACKWALK environment variable + will be checked and its value used if it is not empty. If neither is set, then + ~/.mozbuild/minidump-stackwalk/minidump-stackwalk will be used. + + `symbols_path` should be a path to a directory containing symbols to use for + dump processing. This can either be a path to a directory containing Breakpad-format + symbols, or a URL to a zip file containing a set of symbols. + + If `dump_save_path` is set, it should be a path to a directory in which to copy minidump + files for safekeeping after a stack trace has been printed. If not set, the environment + variable MINIDUMP_SAVE_PATH will be checked and its value used if it is not empty. + + If `test_name` is set it will be used as the test name in log output. If not set the + filename of the calling function will be used. + + If `quiet` is set, no PROCESS-CRASH message will be printed to stdout if a + crash is detected. + + If `keep` is set, minidump files will not be removed after processing. + + Returns number of minidump files found. + """ + + # try to get the caller's filename if no test name is given + if test_name is None: + try: + test_name = os.path.basename(sys._getframe(1).f_code.co_filename) + except Exception: + test_name = "unknown" + + if not quiet: + print("mozcrash checking %s for minidumps..." 
% dump_directory) + + crash_info = CrashInfo( + dump_directory, + symbols_path, + dump_save_path=dump_save_path, + stackwalk_binary=stackwalk_binary, + keep=keep, + ) + + crash_count = 0 + for info in crash_info: + crash_count += 1 + output = None + if info.java_stack: + output = "PROCESS-CRASH | {name} | {stack}".format( + name=test_name, stack=info.java_stack + ) + elif not quiet: + stackwalk_output = ["Crash dump filename: {}".format(info.minidump_path)] + stackwalk_output.append("Process type: {}".format(info.process_type)) + stackwalk_output.append("Process pid: {}".format(info.pid or "unknown")) + if info.reason: + stackwalk_output.append("Mozilla crash reason: %s" % info.reason) + if info.stackwalk_stderr: + stackwalk_output.append("stderr from minidump-stackwalk:") + stackwalk_output.append(info.stackwalk_stderr) + elif info.stackwalk_stdout is not None: + stackwalk_output.append(info.stackwalk_stdout) + if info.stackwalk_retcode is not None and info.stackwalk_retcode != 0: + stackwalk_output.append( + "minidump-stackwalk exited with return code {}".format( + info.stackwalk_retcode + ) + ) + signature = info.signature if info.signature else "unknown top frame" + + output = "PROCESS-CRASH | {reason} [{sig}] | {name}\n{out}\n{err}".format( + reason=info.reason, + name=test_name, + sig=signature, + out="\n".join(stackwalk_output), + err="\n".join(info.stackwalk_errors), + ) + if output is not None: + if six.PY2 and sys.stdout.encoding != "UTF-8": + output = output.encode("utf-8") + print(output) + + return crash_count + + +def log_crashes( + logger, + dump_directory, + symbols_path, + process=None, + test=None, + stackwalk_binary=None, + dump_save_path=None, + quiet=False, +): + """Log crashes using a structured logger""" + crash_count = 0 + for info in CrashInfo( + dump_directory, + symbols_path, + dump_save_path=dump_save_path, + stackwalk_binary=stackwalk_binary, + ): + crash_count += 1 + if not quiet: + kwargs = info._asdict() + kwargs.pop("extra") + logger.crash(process=process, test=test, **kwargs) + return crash_count + + +# Function signatures of abort functions which should be ignored when +# determining the appropriate frame for the crash signature. +ABORT_SIGNATURES = ( + "Abort(char const*)", + "RustMozCrash", + "NS_DebugBreak", + # This signature is part of Rust panic stacks on some platforms. On + # others, it includes a template parameter containing "core::panic::" and + # is automatically filtered out by that pattern. + "core::ops::function::Fn::call", + "gkrust_shared::panic_hook", + "mozglue_static::panic_hook", + "intentional_panic", + "mozalloc_abort", + "mozalloc_abort(char const* const)", + "static void Abort(const char *)", + "std::sys_common::backtrace::__rust_end_short_backtrace", + "rust_begin_unwind", + # This started showing up when we enabled dumping inlined functions + "MOZ_Crash(char const*, int, char const*)", + " as core::ops::function::Fn>::call", +) + +# Similar to above, but matches if the substring appears anywhere in the +# frame's signature. +ABORT_SUBSTRINGS = ( + # On some platforms, Rust panic frames unfortunately appear without the + # std::panicking or core::panic namespaces. + "_panic_", + "core::panic::", + "core::panicking::", + "core::result::unwrap_failed", + "std::panicking::", +) + + +class CrashInfo(object): + """Get information about a crash based on dump files. + + Typical usage is to iterate over the CrashInfo object. This returns StackInfo + objects, one for each crash dump file that is found in the dump_directory. 
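+
+    A minimal illustrative sketch (the dump directory below is hypothetical)::
+
+        crash_info = CrashInfo("/tmp/dumps", symbols_path=None)
+        for info in crash_info:
+            print(info.signature)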
+ + :param dump_directory: Path to search for minidump files + :param symbols_path: Path to a path to a directory containing symbols to use for + dump processing. This can either be a path to a directory + containing Breakpad-format symbols, or a URL to a zip file + containing a set of symbols. + :param dump_save_path: Path to which to save the dump files. If this is None, + the MINIDUMP_SAVE_PATH environment variable will be used. + :param stackwalk_binary: Path to the minidump-stackwalk binary. If this is None, + the MINIDUMP_STACKWALK environment variable will be used + as the path to the minidump binary. If neither is set, + then ~/.mozbuild/minidump-stackwalk/minidump-stackwalk + will be used.""" + + def __init__( + self, + dump_directory, + symbols_path, + dump_save_path=None, + stackwalk_binary=None, + keep=False, + ): + self.dump_directory = dump_directory + self.symbols_path = symbols_path + self.remove_symbols = False + self.brief_output = False + self.keep = keep + + if dump_save_path is None: + dump_save_path = os.environ.get("MINIDUMP_SAVE_PATH", None) + self.dump_save_path = dump_save_path + + if stackwalk_binary is None: + stackwalk_binary = os.environ.get("MINIDUMP_STACKWALK", None) + if stackwalk_binary is None: + # Location of minidump-stackwalk installed by "mach bootstrap". + executable_name = "minidump-stackwalk" + state_dir = os.environ.get( + "MOZBUILD_STATE_PATH", + os.path.expanduser(os.path.join("~", ".mozbuild")), + ) + stackwalk_binary = os.path.join(state_dir, executable_name, executable_name) + if mozinfo.isWin and not stackwalk_binary.endswith(".exe"): + stackwalk_binary += ".exe" + if os.path.exists(stackwalk_binary): + # If we reach this point, then we're almost certainly + # running on a local user's machine. Full minidump-stackwalk + # output is a bit noisy and verbose for that use-case, + # so we should use the --brief output. + self.brief_output = True + + self.stackwalk_binary = stackwalk_binary + + self.logger = get_logger() + self._dump_files = None + + @retriable(attempts=5, sleeptime=5, sleepscale=2) + def _get_symbols(self): + if not self.symbols_path: + self.logger.warning( + "No local symbols_path provided, only http symbols will be used." + ) + + # This updates self.symbols_path so we only download once. + if mozfile.is_url(self.symbols_path): + self.remove_symbols = True + self.logger.info("Downloading symbols from: %s" % self.symbols_path) + # Get the symbols and write them to a temporary zipfile + data = six.moves.urllib.request.urlopen(self.symbols_path) + with tempfile.TemporaryFile() as symbols_file: + symbols_file.write(data.read()) + # extract symbols to a temporary directory (which we'll delete after + # processing all crashes) + self.symbols_path = tempfile.mkdtemp() + with zipfile.ZipFile(symbols_file, "r") as zfile: + mozfile.extract_zip(zfile, self.symbols_path) + + @property + def dump_files(self): + """List of tuple (path_to_dump_file, path_to_extra_file) for each dump + file in self.dump_directory. 
The extra files may not exist.""" + if self._dump_files is None: + paths = [self.dump_directory] + if mozinfo.isWin: + # Add the hard-coded paths used for minidumps recorded by + # Windows Error Reporting in automation + paths += [ + "C:\\error-dumps\\", + "Z:\\error-dumps\\", + ] + self._dump_files = [] + for path in paths: + self._dump_files += [ + (minidump_path, os.path.splitext(minidump_path)[0] + ".extra") + for minidump_path in reversed( + sorted(glob.glob(os.path.join(path, "*.dmp"))) + ) + ] + max_dumps = 10 + if len(self._dump_files) > max_dumps: + self.logger.warning( + "Found %d dump files -- limited to %d!" + % (len(self._dump_files), max_dumps) + ) + del self._dump_files[max_dumps:] + + return self._dump_files + + @property + def has_dumps(self): + """Boolean indicating whether any crash dump files were found in the + current directory""" + return len(self.dump_files) > 0 + + def __iter__(self): + for path, extra in self.dump_files: + rv = self._process_dump_file(path, extra) + yield rv + + if self.remove_symbols: + mozfile.remove(self.symbols_path) + + def _process_dump_file(self, path, extra): + """Process a single dump file using self.stackwalk_binary, and return a + tuple containing properties of the crash dump. + + :param path: Path to the minidump file to analyse + :return: A StackInfo tuple with the fields:: + minidump_path: Path of the dump file + signature: The top frame of the stack trace, or None if it + could not be determined. + stackwalk_stdout: String of stdout data from stackwalk + stackwalk_stderr: String of stderr data from stackwalk or + None if it succeeded + stackwalk_retcode: Return code from stackwalk + stackwalk_errors: List of errors in human-readable form that prevented + stackwalk being launched. + reason: The reason provided by a MOZ_CRASH() invokation (optional) + java_stack: The stack trace of a Java exception (optional) + process_type: The type of process that crashed + pid: The PID of the crashed process + """ + self._get_symbols() + + errors = [] + signature = None + out = None + err = None + retcode = None + reason = None + java_stack = None + annotations = None + pid = None + process_type = "unknown" + if ( + self.stackwalk_binary + and os.path.exists(self.stackwalk_binary) + and os.access(self.stackwalk_binary, os.X_OK) + ): + # Now build up the actual command + command = [self.stackwalk_binary] + + # Fallback to the symbols server for unknown symbols on automation + # (mostly for system libraries). + if ( + "MOZ_AUTOMATION" in os.environ + or "MOZ_STACKWALK_SYMBOLS_SERVER" in os.environ + ): + command.append("--symbols-url=https://symbols.mozilla.org/") + + with tempfile.TemporaryDirectory() as json_dir: + crash_id = os.path.basename(path)[:-4] + json_output = os.path.join(json_dir, "{}.trace".format(crash_id)) + # Specify the kind of output + command.append("--cyborg={}".format(json_output)) + if self.brief_output: + command.append("--brief") + + # The minidump path and symbols_path values are positional and come last + # (in practice the CLI parsers are more permissive, but best not to + # unecessarily play with fire). 
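+                # The assembled invocation therefore looks roughly like:
+                #   minidump-stackwalk [--symbols-url=...] --cyborg=<json> [--brief] <dump> [symbols_path]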
+ command.append(path) + + if self.symbols_path: + command.append(self.symbols_path) + + self.logger.info("Copy/paste: {}".format(" ".join(command))) + # run minidump-stackwalk + p = subprocess.Popen( + command, stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) + (out, err) = p.communicate() + retcode = p.returncode + if six.PY3: + out = six.ensure_str(out) + err = six.ensure_str(err) + + if retcode == 0: + processed_crash = self._process_json_output(json_output) + signature = processed_crash.get("signature") + pid = processed_crash.get("pid") + + else: + if not self.stackwalk_binary: + errors.append( + "MINIDUMP_STACKWALK not set, can't process dump. Either set " + "MINIDUMP_STACKWALK or use mach bootstrap --no-system-changes " + "to install minidump-stackwalk." + ) + elif self.stackwalk_binary and not os.path.exists(self.stackwalk_binary): + errors.append( + "MINIDUMP_STACKWALK binary not found: %s. Use mach bootstrap " + "--no-system-changes to install minidump-stackwalk." + % self.stackwalk_binary + ) + elif not os.access(self.stackwalk_binary, os.X_OK): + errors.append("This user cannot execute the MINIDUMP_STACKWALK binary.") + + if os.path.exists(extra): + annotations = self._parse_extra_file(extra) + + if annotations: + reason = annotations.get("MozCrashReason") + java_stack = annotations.get("JavaStackTrace") + process_type = annotations.get("ProcessType") or "main" + + if self.dump_save_path: + self._save_dump_file(path, extra) + + if os.path.exists(path) and not self.keep: + mozfile.remove(path) + if os.path.exists(extra) and not self.keep: + mozfile.remove(extra) + + return StackInfo( + path, + signature, + out, + err, + retcode, + errors, + extra, + process_type, + pid, + reason, + java_stack, + ) + + def _process_json_output(self, json_path): + signature = None + pid = None + + try: + json_file = open(json_path, "r") + crash_json = json.load(json_file) + json_file.close() + + signature = self._generate_signature(crash_json) + pid = crash_json.get("pid") + + except Exception as e: + traceback.print_exc() + signature = "an error occurred while processing JSON output: {}".format(e) + + return { + "pid": pid, + "signature": signature, + } + + def _generate_signature(self, crash_json): + signature = None + + try: + crashing_thread = crash_json.get("crashing_thread") or {} + frames = crashing_thread.get("frames") or [] + + flattened_frames = [] + for frame in frames: + for inline in frame.get("inlines") or []: + flattened_frames.append(inline.get("function")) + + flattened_frames.append( + frame.get("function") + or "{} + {}".format(frame.get("module"), frame.get("module_offset")) + ) + + for func in flattened_frames: + if not func: + continue + + signature = "@ %s" % func + + if not ( + func in ABORT_SIGNATURES + or any(pat in func for pat in ABORT_SUBSTRINGS) + ): + break + except Exception as e: + traceback.print_exc() + signature = "an error occurred while generating the signature: {}".format(e) + + # Strip parameters from signature + if signature: + pmatch = re.search(r"(.*)\(.*\)", signature) + if pmatch: + signature = pmatch.group(1) + + return signature + + def _parse_extra_file(self, path): + with open(path) as file: + try: + return json.load(file) + except ValueError: + self.logger.warning(".extra file does not contain proper json") + return None + + def _save_dump_file(self, path, extra): + if os.path.isfile(self.dump_save_path): + os.unlink(self.dump_save_path) + if not os.path.isdir(self.dump_save_path): + try: + os.makedirs(self.dump_save_path) + except OSError: + 
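+                # The directory may already exist or be unwritable; any real
+                # problem will surface when the dump is moved below.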
pass + + shutil.move(path, self.dump_save_path) + self.logger.info( + "Saved minidump as {}".format( + os.path.join(self.dump_save_path, os.path.basename(path)) + ) + ) + + if os.path.isfile(extra): + shutil.move(extra, self.dump_save_path) + self.logger.info( + "Saved app info as {}".format( + os.path.join(self.dump_save_path, os.path.basename(extra)) + ) + ) + + +def check_for_java_exception(logcat, test_name=None, quiet=False): + """ + Print a summary of a fatal Java exception, if present in the provided + logcat output. + + Today, exceptions in geckoview are usually noted in the minidump .extra file, allowing + java exceptions to be reported by the "normal" minidump processing, like log_crashes(); + therefore, this function may be extraneous (but maintained for now, while exception + handling is evolving). + + Example: + PROCESS-CRASH | | java-exception java.lang.NullPointerException at org.mozilla.gecko.GeckoApp$21.run(GeckoApp.java:1833) # noqa + + `logcat` should be a list of strings. + + If `test_name` is set it will be used as the test name in log output. If not set the + filename of the calling function will be used. + + If `quiet` is set, no PROCESS-CRASH message will be printed to stdout if a + crash is detected. + + Returns True if a fatal Java exception was found, False otherwise. + """ + + # try to get the caller's filename if no test name is given + if test_name is None: + try: + test_name = os.path.basename(sys._getframe(1).f_code.co_filename) + except Exception: + test_name = "unknown" + + found_exception = False + + for i, line in enumerate(logcat): + # Logs will be of form: + # + # 01-30 20:15:41.937 E/GeckoAppShell( 1703): >>> REPORTING UNCAUGHT EXCEPTION FROM THREAD 9 ("GeckoBackgroundThread") # noqa + # 01-30 20:15:41.937 E/GeckoAppShell( 1703): java.lang.NullPointerException + # 01-30 20:15:41.937 E/GeckoAppShell( 1703): at org.mozilla.gecko.GeckoApp$21.run(GeckoApp.java:1833) # noqa + # 01-30 20:15:41.937 E/GeckoAppShell( 1703): at android.os.Handler.handleCallback(Handler.java:587) # noqa + if "REPORTING UNCAUGHT EXCEPTION" in line: + # Strip away the date, time, logcat tag and pid from the next two lines and + # concatenate the remainder to form a concise summary of the exception. + found_exception = True + if len(logcat) >= i + 3: + logre = re.compile(r".*\): \t?(.*)") + m = logre.search(logcat[i + 1]) + if m and m.group(1): + exception_type = m.group(1) + m = logre.search(logcat[i + 2]) + if m and m.group(1): + exception_location = m.group(1) + if not quiet: + output = ( + "PROCESS-CRASH | {name} | java-exception {type} {loc}".format( + name=test_name, type=exception_type, loc=exception_location + ) + ) + print(output.encode("utf-8")) + else: + print( + "Automation Error: java exception in logcat at line " + "{0} of {1}: {2}".format(i, len(logcat), line) + ) + break + + return found_exception + + +if mozinfo.isWin: + import ctypes + import uuid + + kernel32 = ctypes.windll.kernel32 + OpenProcess = kernel32.OpenProcess + CloseHandle = kernel32.CloseHandle + + def write_minidump(pid, dump_directory, utility_path): + """ + Write a minidump for a process. + + :param pid: PID of the process to write a minidump for. + :param dump_directory: Directory in which to write the minidump. 
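+        :param utility_path: Directory containing a minidumpwriter.exe helper
+            (may be None); only consulted when this Python's architecture does
+            not match the target process, as in the implementation below.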
+ """ + PROCESS_QUERY_INFORMATION = 0x0400 + PROCESS_VM_READ = 0x0010 + GENERIC_READ = 0x80000000 + GENERIC_WRITE = 0x40000000 + CREATE_ALWAYS = 2 + FILE_ATTRIBUTE_NORMAL = 0x80 + INVALID_HANDLE_VALUE = -1 + + log = get_logger() + file_name = os.path.join(dump_directory, str(uuid.uuid4()) + ".dmp") + + if not os.path.exists(dump_directory): + # `kernal32.CreateFileW` can fail to create the dmp file if the dump + # directory was deleted or doesn't exist (error code 3). + os.makedirs(dump_directory) + + if mozinfo.info["bits"] != ctypes.sizeof(ctypes.c_voidp) * 8 and utility_path: + # We're not going to be able to write a minidump with ctypes if our + # python process was compiled for a different architecture than + # firefox, so we invoke the minidumpwriter utility program. + + minidumpwriter = os.path.normpath( + os.path.join(utility_path, "minidumpwriter.exe") + ) + log.info( + "Using {} to write a dump to {} for [{}]".format( + minidumpwriter, file_name, pid + ) + ) + if not os.path.exists(minidumpwriter): + log.error("minidumpwriter not found in {}".format(utility_path)) + return + + status = subprocess.Popen([minidumpwriter, str(pid), file_name]).wait() + if status: + log.error("minidumpwriter exited with status: %d" % status) + return + + log.info("Writing a dump to {} for [{}]".format(file_name, pid)) + + proc_handle = OpenProcess(PROCESS_QUERY_INFORMATION | PROCESS_VM_READ, 0, pid) + if not proc_handle: + err = kernel32.GetLastError() + log.warning("unable to get handle for pid %d: %d" % (pid, err)) + return + + if not isinstance(file_name, six.text_type): + # Convert to unicode explicitly so our path will be valid as input + # to CreateFileW + file_name = six.text_type(file_name, sys.getfilesystemencoding()) + + file_handle = kernel32.CreateFileW( + file_name, + GENERIC_READ | GENERIC_WRITE, + 0, + None, + CREATE_ALWAYS, + FILE_ATTRIBUTE_NORMAL, + None, + ) + if file_handle != INVALID_HANDLE_VALUE: + if not ctypes.windll.dbghelp.MiniDumpWriteDump( + proc_handle, + pid, + file_handle, + # Dump type - MiniDumpNormal + 0, + # Exception parameter + None, + # User stream parameter + None, + # Callback parameter + None, + ): + err = kernel32.GetLastError() + log.warning("unable to dump minidump file for pid %d: %d" % (pid, err)) + CloseHandle(file_handle) + else: + err = kernel32.GetLastError() + log.warning("unable to create minidump file for pid %d: %d" % (pid, err)) + CloseHandle(proc_handle) + + def kill_pid(pid): + """ + Terminate a process with extreme prejudice. + + :param pid: PID of the process to terminate. + """ + PROCESS_TERMINATE = 0x0001 + SYNCHRONIZE = 0x00100000 + WAIT_OBJECT_0 = 0x0 + WAIT_FAILED = -1 + logger = get_logger() + handle = OpenProcess(PROCESS_TERMINATE | SYNCHRONIZE, 0, pid) + if handle: + if kernel32.TerminateProcess(handle, 1): + # TerminateProcess is async; wait up to 30 seconds for process to + # actually terminate, then give up so that clients are not kept + # waiting indefinitely for hung processes. 
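+                # WaitForSingleObject returns WAIT_OBJECT_0 once the process
+                # handle is signaled, i.e. the process has actually exited.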
+ status = kernel32.WaitForSingleObject(handle, 30000) + if status == WAIT_FAILED: + err = kernel32.GetLastError() + logger.warning( + "kill_pid(): wait failed (%d) terminating pid %d: error %d" + % (status, pid, err) + ) + elif status != WAIT_OBJECT_0: + logger.warning( + "kill_pid(): wait failed (%d) terminating pid %d" + % (status, pid) + ) + else: + err = kernel32.GetLastError() + logger.warning( + "kill_pid(): unable to terminate pid %d: %d" % (pid, err) + ) + CloseHandle(handle) + else: + err = kernel32.GetLastError() + logger.warning( + "kill_pid(): unable to get handle for pid %d: %d" % (pid, err) + ) + +else: + + def kill_pid(pid): + """ + Terminate a process with extreme prejudice. + + :param pid: PID of the process to terminate. + """ + os.kill(pid, signal.SIGKILL) + + +def kill_and_get_minidump(pid, dump_directory, utility_path=None): + """ + Attempt to kill a process and leave behind a minidump describing its + execution state. + + :param pid: The PID of the process to kill. + :param dump_directory: The directory where a minidump should be written on + Windows, where the dump will be written from outside the process. + + On Windows a dump will be written using the MiniDumpWriteDump function + from DbgHelp.dll. On Linux and OS X the process will be sent a SIGABRT + signal to trigger minidump writing via a Breakpad signal handler. On other + platforms the process will simply be killed via SIGKILL. + + If the process is hung in such a way that it cannot respond to SIGABRT + it may still be running after this function returns. In that case it + is the caller's responsibility to deal with killing it. + """ + needs_killing = True + if mozinfo.isWin: + write_minidump(pid, dump_directory, utility_path) + elif mozinfo.isLinux or mozinfo.isMac: + os.kill(pid, signal.SIGABRT) + needs_killing = False + if needs_killing: + kill_pid(pid) + + +def cleanup_pending_crash_reports(): + """ + Delete any pending crash reports. + + The presence of pending crash reports may be reported by the browser, + affecting test results; it is best to ensure that these are removed + before starting any browser tests. + + Firefox stores pending crash reports in "/Crash Reports". + If the browser is not running, it cannot provide , so this + code tries to anticipate its value. + + See dom/system/OSFileConstants.cpp for platform variations of . 
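+
+    The concrete per-platform locations checked are the ones hard-coded in the
+    implementation below (for example ~/.mozilla/firefox/Crash Reports on
+    Linux).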
+ """ + if mozinfo.isWin: + location = os.path.expanduser( + "~\\AppData\\Roaming\\Mozilla\\Firefox\\Crash Reports" + ) + elif mozinfo.isMac: + location = os.path.expanduser( + "~/Library/Application Support/firefox/Crash Reports" + ) + else: + location = os.path.expanduser("~/.mozilla/firefox/Crash Reports") + logger = get_logger() + if os.path.exists(location): + try: + mozfile.remove(location) + logger.info("Removed pending crash reports at '%s'" % location) + except Exception: + pass + + +if __name__ == "__main__": + import argparse + + parser = argparse.ArgumentParser() + parser.add_argument("--stackwalk-binary", "-b") + parser.add_argument("--dump-save-path", "-o") + parser.add_argument("--test-name", "-n") + parser.add_argument("--keep", action="store_true") + parser.add_argument("dump_directory") + parser.add_argument("symbols_path") + args = parser.parse_args() + + check_for_crashes( + args.dump_directory, + args.symbols_path, + stackwalk_binary=args.stackwalk_binary, + dump_save_path=args.dump_save_path, + test_name=args.test_name, + keep=args.keep, + ) diff --git a/testing/mozbase/mozcrash/setup.cfg b/testing/mozbase/mozcrash/setup.cfg new file mode 100644 index 0000000000..3c6e79cf31 --- /dev/null +++ b/testing/mozbase/mozcrash/setup.cfg @@ -0,0 +1,2 @@ +[bdist_wheel] +universal=1 diff --git a/testing/mozbase/mozcrash/setup.py b/testing/mozbase/mozcrash/setup.py new file mode 100644 index 0000000000..d67060149b --- /dev/null +++ b/testing/mozbase/mozcrash/setup.py @@ -0,0 +1,33 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +from setuptools import setup + +PACKAGE_NAME = "mozcrash" +PACKAGE_VERSION = "2.2.0" + +# dependencies +deps = ["mozfile >= 1.0", "mozlog >= 6.0"] + +setup( + name=PACKAGE_NAME, + version=PACKAGE_VERSION, + description="Library for printing stack traces from minidumps " + "left behind by crashed processes", + long_description="see https://firefox-source-docs.mozilla.org/mozbase/index.html", + classifiers=[ + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3.5", + ], + # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers + keywords="mozilla", + author="Mozilla Automation and Tools team", + author_email="tools@lists.mozilla.org", + url="https://wiki.mozilla.org/Auto-tools/Projects/Mozbase", + license="MPL", + packages=["mozcrash"], + include_package_data=True, + zip_safe=False, + install_requires=deps, +) diff --git a/testing/mozbase/mozcrash/tests/conftest.py b/testing/mozbase/mozcrash/tests/conftest.py new file mode 100644 index 0000000000..7b6dcea496 --- /dev/null +++ b/testing/mozbase/mozcrash/tests/conftest.py @@ -0,0 +1,127 @@ +# coding=UTF-8 + +import uuid + +import mozcrash +import pytest +from py._path.common import fspath + + +@pytest.fixture(scope="session") +def stackwalk(tmpdir_factory): + stackwalk = tmpdir_factory.mktemp("stackwalk_binary").join("stackwalk") + stackwalk.write("fake binary") + stackwalk.chmod(0o744) + return stackwalk + + +@pytest.fixture +def check_for_crashes(tmpdir, stackwalk, monkeypatch): + monkeypatch.delenv("MINIDUMP_SAVE_PATH", raising=False) + + def wrapper( + dump_directory=fspath(tmpdir), + symbols_path="symbols_path", + stackwalk_binary=fspath(stackwalk), + dump_save_path=None, + test_name=None, + quiet=True, + ): + return mozcrash.check_for_crashes( + dump_directory, + symbols_path, + stackwalk_binary, + 
dump_save_path, + test_name, + quiet, + ) + + return wrapper + + +@pytest.fixture +def check_for_java_exception(): + def wrapper(logcat=None, test_name=None, quiet=True): + return mozcrash.check_for_java_exception(logcat, test_name, quiet) + + return wrapper + + +def minidump_files(request, tmpdir): + files = [] + + for i in range(getattr(request, "param", 1)): + name = uuid.uuid4() + + dmp = tmpdir.join("{}.dmp".format(name)) + dmp.write("foo") + + extra = tmpdir.join("{}.extra".format(name)) + + extra.write_text( + """ +{ + "ContentSandboxLevel":"2", + "TelemetryEnvironment":"{🍪}", + "EMCheckCompatibility":"true", + "ProductName":"Firefox", + "ContentSandboxCapabilities":"119", + "TelemetryClientId":"", + "Vendor":"Mozilla", + "InstallTime":"1000000000", + "Theme":"classic/1.0", + "ReleaseChannel":"default", + "ServerURL":"https://crash-reports.mozilla.com", + "SafeMode":"0", + "ContentSandboxCapable":"1", + "useragent_locale":"en-US", + "Version":"55.0a1", + "BuildID":"20170512114708", + "ProductID":"{ec8030f7-c20a-464f-9b0e-13a3a9e97384}", + "MozCrashReason": "MOZ_CRASH()", + "TelemetryServerURL":"", + "DOMIPCEnabled":"1", + "Add-ons":"", + "CrashTime":"1494582646", + "UptimeTS":"14.9179586", + "ContentSandboxEnabled":"1", + "ProcessType":"content", + "StartupTime":"1000000000", + "URL":"about:home" +} + + """, + encoding="utf-8", + ) + + files.append({"dmp": dmp, "extra": extra}) + + return files + + +@pytest.fixture(name="minidump_files") +def minidump_files_fixture(request, tmpdir): + return minidump_files(request, tmpdir) + + +@pytest.fixture(autouse=True) +def mock_popen(monkeypatch): + """Generate a class that can mock subprocess.Popen. + + :param stdouts: Iterable that should return an iterable for the + stdout of each process in turn. + """ + + class MockPopen(object): + def __init__(self, args, *args_rest, **kwargs): + # all_popens.append(self) + self.args = args + self.returncode = 0 + + def communicate(self): + return ("Stackwalk command: {}".format(" ".join(self.args)), "") + + def wait(self): + return self.returncode + + monkeypatch.setattr(mozcrash.mozcrash.subprocess, "Popen", MockPopen) diff --git a/testing/mozbase/mozcrash/tests/manifest.toml b/testing/mozbase/mozcrash/tests/manifest.toml new file mode 100644 index 0000000000..20e31fbc9d --- /dev/null +++ b/testing/mozbase/mozcrash/tests/manifest.toml @@ -0,0 +1,12 @@ +[DEFAULT] +subsuite = "mozbase" + +["test_basic.py"] + +["test_java_exception.py"] + +["test_save_path.py"] + +["test_stackwalk.py"] + +["test_symbols_path.py"] diff --git a/testing/mozbase/mozcrash/tests/test_basic.py b/testing/mozbase/mozcrash/tests/test_basic.py new file mode 100644 index 0000000000..384aba62dc --- /dev/null +++ b/testing/mozbase/mozcrash/tests/test_basic.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python +# coding=UTF-8 + +import mozunit +import pytest +from conftest import fspath + + +def test_no_dump_files(check_for_crashes): + """Test that check_for_crashes returns 0 if no dumps are present.""" + assert 0 == check_for_crashes() + + +@pytest.mark.parametrize("minidump_files", [3], indirect=True) +def test_dump_count(check_for_crashes, minidump_files): + """Test that check_for_crashes returns the number of crash dumps.""" + assert 3 == check_for_crashes() + + +def test_dump_directory_unicode(request, check_for_crashes, tmpdir, capsys): + """Test that check_for_crashes can handle unicode in dump_directory.""" + from conftest import minidump_files + + tmpdir = tmpdir.ensure("🍪", dir=1) + minidump_files = minidump_files(request, tmpdir) + + 
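+    # The fixture created exactly one dump inside the 🍪 directory, so one
+    # crash should be reported and its path echoed to stdout.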
assert 1 == check_for_crashes(dump_directory=fspath(tmpdir), quiet=False) + + out, _ = capsys.readouterr() + assert fspath(minidump_files[0]["dmp"]) in out + assert "🍪" in out + + +def test_test_name_unicode(check_for_crashes, minidump_files, capsys): + """Test that check_for_crashes can handle unicode in dump_directory.""" + assert 1 == check_for_crashes(test_name="🍪", quiet=False) + + out, err = capsys.readouterr() + assert "| 🍪" in out + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/mozcrash/tests/test_java_exception.py b/testing/mozbase/mozcrash/tests/test_java_exception.py new file mode 100644 index 0000000000..00c8d3c46a --- /dev/null +++ b/testing/mozbase/mozcrash/tests/test_java_exception.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python +# coding=UTF-8 + +import mozunit +import pytest + + +@pytest.fixture +def test_log(): + return [ + "01-30 20:15:41.937 E/GeckoAppShell( 1703): >>> " + 'REPORTING UNCAUGHT EXCEPTION FROM THREAD 9 ("GeckoBackgroundThread")', + "01-30 20:15:41.937 E/GeckoAppShell( 1703): java.lang.NullPointerException", + "01-30 20:15:41.937 E/GeckoAppShell( 1703):" + " at org.mozilla.gecko.GeckoApp$21.run(GeckoApp.java:1833)", + "01-30 20:15:41.937 E/GeckoAppShell( 1703):" + " at android.os.Handler.handleCallback(Handler.java:587)", + ] + + +def test_uncaught_exception(check_for_java_exception, test_log): + """Test for an exception which should be caught.""" + assert 1 == check_for_java_exception(test_log) + + +def test_truncated_exception(check_for_java_exception, test_log): + """Test for an exception which should be caught which was truncated.""" + truncated_log = list(test_log) + truncated_log[0], truncated_log[1] = truncated_log[1], truncated_log[0] + + assert 1 == check_for_java_exception(truncated_log) + + +def test_unchecked_exception(check_for_java_exception, test_log): + """Test for an exception which should not be caught.""" + passable_log = list(test_log) + passable_log[0] = ( + "01-30 20:15:41.937 E/GeckoAppShell( 1703):" + ' >>> NOT-SO-BAD EXCEPTION FROM THREAD 9 ("GeckoBackgroundThread")' + ) + + assert 0 == check_for_java_exception(passable_log) + + +def test_test_name_unicode(check_for_java_exception, test_log): + """Test that check_for_crashes can handle unicode in dump_directory.""" + assert 1 == check_for_java_exception(test_log, test_name="🍪", quiet=False) + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/mozcrash/tests/test_save_path.py b/testing/mozbase/mozcrash/tests/test_save_path.py new file mode 100644 index 0000000000..fad83ab71b --- /dev/null +++ b/testing/mozbase/mozcrash/tests/test_save_path.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python + +import os + +import mozunit +import pytest +from conftest import fspath + + +def test_save_path_not_present(check_for_crashes, minidump_files, tmpdir): + """Test that dump_save_path works when the directory doesn't exist.""" + save_path = tmpdir.join("saved") + + assert 1 == check_for_crashes(dump_save_path=fspath(save_path)) + + assert save_path.join(minidump_files[0]["dmp"].basename).check() + assert save_path.join(minidump_files[0]["extra"].basename).check() + + +def test_save_path(check_for_crashes, minidump_files, tmpdir): + """Test that dump_save_path works.""" + save_path = tmpdir.mkdir("saved") + + assert 1 == check_for_crashes(dump_save_path=fspath(save_path)) + + assert save_path.join(minidump_files[0]["dmp"].basename).check() + assert save_path.join(minidump_files[0]["extra"].basename).check() + + +def test_save_path_isfile(check_for_crashes, 
minidump_files, tmpdir): + """Test that dump_save_path works when the path is a file and not a directory.""" + save_path = tmpdir.join("saved") + save_path.write("junk") + + assert 1 == check_for_crashes(dump_save_path=fspath(save_path)) + + assert save_path.join(minidump_files[0]["dmp"].basename).check() + assert save_path.join(minidump_files[0]["extra"].basename).check() + + +def test_save_path_envvar(check_for_crashes, minidump_files, tmpdir): + """Test that the MINDUMP_SAVE_PATH environment variable works.""" + save_path = tmpdir.mkdir("saved") + + os.environ["MINIDUMP_SAVE_PATH"] = fspath(save_path) + try: + assert 1 == check_for_crashes(dump_save_path=None) + finally: + del os.environ["MINIDUMP_SAVE_PATH"] + + assert save_path.join(minidump_files[0]["dmp"].basename).check() + assert save_path.join(minidump_files[0]["extra"].basename).check() + + +@pytest.mark.parametrize("minidump_files", [3], indirect=True) +def test_save_multiple(check_for_crashes, minidump_files, tmpdir): + """Test that all minidumps are saved.""" + save_path = tmpdir.mkdir("saved") + + assert 3 == check_for_crashes(dump_save_path=fspath(save_path)) + + for i in range(3): + assert save_path.join(minidump_files[i]["dmp"].basename).check() + assert save_path.join(minidump_files[i]["extra"].basename).check() + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/mozcrash/tests/test_stackwalk.py b/testing/mozbase/mozcrash/tests/test_stackwalk.py new file mode 100644 index 0000000000..3292e4fdf1 --- /dev/null +++ b/testing/mozbase/mozcrash/tests/test_stackwalk.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python +# coding=UTF-8 + +import os + +import mozunit +from conftest import fspath + + +def test_stackwalk_not_found(check_for_crashes, minidump_files, tmpdir, capsys): + """Test that check_for_crashes can handle unicode in dump_directory.""" + stackwalk = tmpdir.join("stackwalk") + + assert 1 == check_for_crashes(stackwalk_binary=fspath(stackwalk), quiet=False) + + out, _ = capsys.readouterr() + assert "MINIDUMP_STACKWALK binary not found" in out + + +def test_stackwalk_envvar(check_for_crashes, minidump_files, stackwalk): + """Test that check_for_crashes uses the MINIDUMP_STACKWALK environment var.""" + os.environ["MINIDUMP_STACKWALK"] = fspath(stackwalk) + try: + assert 1 == check_for_crashes(stackwalk_binary=None) + finally: + del os.environ["MINIDUMP_STACKWALK"] + + +def test_stackwalk_unicode(check_for_crashes, minidump_files, tmpdir, capsys): + """Test that check_for_crashes can handle unicode in dump_directory.""" + stackwalk = tmpdir.mkdir("🍪").join("stackwalk") + stackwalk.write("fake binary") + stackwalk.chmod(0o744) + + assert 1 == check_for_crashes(stackwalk_binary=fspath(stackwalk), quiet=False) + + out, err = capsys.readouterr() + assert fspath(stackwalk) in out + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/mozcrash/tests/test_symbols_path.py b/testing/mozbase/mozcrash/tests/test_symbols_path.py new file mode 100644 index 0000000000..644302c947 --- /dev/null +++ b/testing/mozbase/mozcrash/tests/test_symbols_path.py @@ -0,0 +1,97 @@ +#!/usr/bin/env python +# coding=UTF-8 + +import zipfile + +import mozhttpd +import mozunit +from conftest import fspath +from six import BytesIO +from six.moves.urllib.parse import urlunsplit + + +def test_symbols_path_not_present(check_for_crashes, minidump_files): + """Test that no symbols path let mozcrash try to find the symbols.""" + assert 1 == check_for_crashes(symbols_path=None) + + +def 
test_symbols_path_unicode(check_for_crashes, minidump_files, tmpdir, capsys): + """Test that check_for_crashes can handle unicode in dump_directory.""" + symbols_path = tmpdir.mkdir("🍪") + + assert 1 == check_for_crashes(symbols_path=fspath(symbols_path), quiet=False) + + out, _ = capsys.readouterr() + assert fspath(symbols_path) in out + + +def test_symbols_path_url(check_for_crashes, minidump_files): + """Test that passing a URL as symbols_path correctly fetches the URL.""" + data = {"retrieved": False} + + def make_zipfile(): + zdata = BytesIO() + z = zipfile.ZipFile(zdata, "w") + z.writestr("symbols.txt", "abc/xyz") + z.close() + return zdata.getvalue() + + def get_symbols(req): + data["retrieved"] = True + + headers = {} + return (200, headers, make_zipfile()) + + httpd = mozhttpd.MozHttpd( + port=0, + urlhandlers=[{"method": "GET", "path": "/symbols", "function": get_symbols}], + ) + httpd.start() + symbol_url = urlunsplit( + ("http", "%s:%d" % httpd.httpd.server_address, "/symbols", "", "") + ) + + assert 1 == check_for_crashes(symbols_path=symbol_url) + assert data["retrieved"] + + +def test_symbols_retry(check_for_crashes, minidump_files): + """Test that passing a URL as symbols_path succeeds on retry after temporary HTTP failure.""" + data = {"retrieved": False} + get_symbols_calls = 0 + + def make_zipfile(): + zdata = BytesIO() + z = zipfile.ZipFile(zdata, "w") + z.writestr("symbols.txt", "abc/xyz") + z.close() + return zdata.getvalue() + + def get_symbols(req): + nonlocal get_symbols_calls + data["retrieved"] = True + if get_symbols_calls > 0: + ret = 200 + else: + ret = 504 + get_symbols_calls += 1 + + headers = {} + return (ret, headers, make_zipfile()) + + httpd = mozhttpd.MozHttpd( + port=0, + urlhandlers=[{"method": "GET", "path": "/symbols", "function": get_symbols}], + ) + httpd.start() + symbol_url = urlunsplit( + ("http", "%s:%d" % httpd.httpd.server_address, "/symbols", "", "") + ) + + assert 1 == check_for_crashes(symbols_path=symbol_url) + assert data["retrieved"] + assert 2 == get_symbols_calls + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/mozdebug/mozdebug/__init__.py b/testing/mozbase/mozdebug/mozdebug/__init__.py new file mode 100644 index 0000000000..bb8711e2c4 --- /dev/null +++ b/testing/mozbase/mozdebug/mozdebug/__init__.py @@ -0,0 +1,30 @@ +# flake8: noqa +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +""" +This module contains a set of function to gather information about the +debugging capabilities of the platform. It allows to look for a specific +debugger or to query the system for a compatible/default debugger. + +The following simple example looks for the default debugger on the +current platform and launches a debugger process with the correct +debugger-specific arguments: + +:: + + import mozdebug + + debugger = mozdebug.get_default_debugger_name() + debuggerInfo = mozdebug.get_debugger_info(debugger) + + debuggeePath = "toDebug" + + processArgs = [self.debuggerInfo.path] + self.debuggerInfo.args + processArgs.append(debuggeePath) + + run_process(args, ...) 
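+
+(``run_process`` above is only a placeholder for whatever process-launching
+helper the caller already uses; it is not part of mozdebug's API.)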
+ +""" +from .mozdebug import * diff --git a/testing/mozbase/mozdebug/mozdebug/mozdebug.py b/testing/mozbase/mozdebug/mozdebug/mozdebug.py new file mode 100755 index 0000000000..beecc2cd9d --- /dev/null +++ b/testing/mozbase/mozdebug/mozdebug/mozdebug.py @@ -0,0 +1,315 @@ +#!/usr/bin/env python + +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +import json +import os +import sys +from collections import namedtuple +from distutils.spawn import find_executable +from subprocess import check_output + +import mozinfo + +__all__ = [ + "get_debugger_info", + "get_default_debugger_name", + "DebuggerSearch", + "get_default_valgrind_args", + "DebuggerInfo", +] + +""" +Map of debugging programs to information about them, like default arguments +and whether or not they are interactive. + +To add support for a new debugger, simply add the relative entry in +_DEBUGGER_INFO and optionally update the _DEBUGGER_PRIORITIES. +""" +_DEBUGGER_INFO = { + # gdb requires that you supply the '--args' flag in order to pass arguments + # after the executable name to the executable. + "gdb": {"interactive": True, "args": ["-q", "--args"]}, + "cgdb": {"interactive": True, "args": ["-q", "--args"]}, + "rust-gdb": {"interactive": True, "args": ["-q", "--args"]}, + "lldb": {"interactive": True, "args": ["--"], "requiresEscapedArgs": True}, + # Visual Studio Debugger Support. + "devenv.exe": {"interactive": True, "args": ["-debugexe"]}, + # Visual C++ Express Debugger Support. + "wdexpress.exe": {"interactive": True, "args": ["-debugexe"]}, + # Windows Development Kit super-debugger. + "windbg.exe": { + "interactive": True, + }, +} + +# Maps each OS platform to the preferred debugger programs found in _DEBUGGER_INFO. +_DEBUGGER_PRIORITIES = { + "win": ["devenv.exe", "wdexpress.exe"], + "linux": ["gdb", "cgdb", "lldb"], + "mac": ["lldb", "gdb"], + "android": ["lldb"], + "unknown": ["gdb"], +} + + +DebuggerInfo = namedtuple( + "DebuggerInfo", ["path", "interactive", "args", "requiresEscapedArgs"] +) + + +def _windbg_installation_paths(): + programFilesSuffixes = ["", " (x86)"] + programFiles = "C:/Program Files" + # Try the most recent versions first. + windowsKitsVersions = ["10", "8.1", "8"] + + for suffix in programFilesSuffixes: + windowsKitsPrefix = os.path.join(programFiles + suffix, "Windows Kits") + for version in windowsKitsVersions: + yield os.path.join( + windowsKitsPrefix, version, "Debuggers", "x64", "windbg.exe" + ) + + +def _vswhere_path(): + try: + import buildconfig + + path = os.path.join(buildconfig.topsrcdir, "build", "win32", "vswhere.exe") + if os.path.isfile(path): + return path + except ImportError: + pass + # Hope it's available on PATH! + return "vswhere.exe" + + +def get_debugger_path(debugger): + """ + Get the full path of the debugger. + + :param debugger: The name of the debugger. + """ + + if mozinfo.os == "mac" and debugger == "lldb": + # On newer OSX versions System Integrity Protections prevents us from + # setting certain env vars for a process such as DYLD_LIBRARY_PATH if + # it's in a protected directory such as /usr/bin. This is the case for + # lldb, so we try to find an instance under the Xcode install instead. + + # Attempt to use the xcrun util to find the path. 
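+        # A successful lookup typically prints an Xcode-internal location such
+        # as .../Xcode.app/Contents/Developer/usr/bin/lldb (illustrative).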
+ try: + path = check_output( + ["xcrun", "--find", "lldb"], universal_newlines=True + ).strip() + if path: + return path + except Exception: + # Just default to find_executable instead. + pass + + if mozinfo.os == "win" and debugger == "devenv.exe": + # Attempt to use vswhere to find the path. + try: + encoding = "mbcs" if sys.platform == "win32" else "utf-8" + vswhere = _vswhere_path() + vsinfo = check_output([vswhere, "-format", "json", "-latest"]) + vsinfo = json.loads(vsinfo.decode(encoding, "replace")) + return os.path.join( + vsinfo[0]["installationPath"], "Common7", "IDE", "devenv.exe" + ) + except Exception: + # Just default to find_executable instead. + pass + + return find_executable(debugger) + + +def get_debugger_info(debugger, debuggerArgs=None, debuggerInteractive=False): + """ + Get the information about the requested debugger. + + Returns a dictionary containing the ``path`` of the debugger executable, + if it will run in ``interactive`` mode, its arguments and whether it needs + to escape arguments it passes to the debugged program (``requiresEscapedArgs``). + If the debugger cannot be found in the system, returns ``None``. + + :param debugger: The name of the debugger. + :param debuggerArgs: If specified, it's the arguments to pass to the debugger, + as a string. Any debugger-specific separator arguments are appended after + these arguments. + :param debuggerInteractive: If specified, forces the debugger to be interactive. + """ + + debuggerPath = None + + if debugger: + # Append '.exe' to the debugger on Windows if it's not present, + # so things like '--debugger=devenv' work. + if os.name == "nt" and not debugger.lower().endswith(".exe"): + debugger += ".exe" + + debuggerPath = get_debugger_path(debugger) + + if not debuggerPath: + # windbg is not installed with the standard set of tools, and it's + # entirely possible that the user hasn't added the install location to + # PATH, so we have to be a little more clever than normal to locate it. + # Just try to look for it in the standard installed location(s). + if debugger == "windbg.exe": + for candidate in _windbg_installation_paths(): + if os.path.exists(candidate): + debuggerPath = candidate + break + else: + if os.path.exists(debugger): + debuggerPath = debugger + + if not debuggerPath: + print("Error: Could not find debugger %s." % debugger) + print("Is it installed? Is it in your PATH?") + return None + + debuggerName = os.path.basename(debuggerPath).lower() + + def get_debugger_info(type, default): + if debuggerName in _DEBUGGER_INFO and type in _DEBUGGER_INFO[debuggerName]: + return _DEBUGGER_INFO[debuggerName][type] + return default + + # Define a namedtuple to access the debugger information from the outside world. + debugger_arguments = [] + + if debuggerArgs: + # Append the provided debugger arguments at the end of the arguments list. + debugger_arguments += debuggerArgs.split() + + debugger_arguments += get_debugger_info("args", []) + + # Override the default debugger interactive mode if needed. + debugger_interactive = get_debugger_info("interactive", False) + if debuggerInteractive: + debugger_interactive = debuggerInteractive + + d = DebuggerInfo( + debuggerPath, + debugger_interactive, + debugger_arguments, + get_debugger_info("requiresEscapedArgs", False), + ) + + return d + + +# Defines the search policies to use in get_default_debugger_name. 
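+#
+# Illustrative use (a sketch; assumes a debugger such as gdb or lldb is
+# actually installed on the host):
+#
+#   name = get_default_debugger_name(DebuggerSearch.KeepLooking)
+#   info = get_debugger_info(name) if name else None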
+ + +class DebuggerSearch: + OnlyFirst = 1 + KeepLooking = 2 + + +def get_default_debugger_name(search=DebuggerSearch.OnlyFirst): + """ + Get the debugger name for the default debugger on current platform. + + :param search: If specified, stops looking for the debugger if the + default one is not found (``DebuggerSearch.OnlyFirst``) or keeps + looking for other compatible debuggers (``DebuggerSearch.KeepLooking``). + """ + + mozinfo.find_and_update_from_json() + os = mozinfo.info["os"] + + # Find out which debuggers are preferred for use on this platform. + debuggerPriorities = _DEBUGGER_PRIORITIES[ + os if os in _DEBUGGER_PRIORITIES else "unknown" + ] + + # Finally get the debugger information. + for debuggerName in debuggerPriorities: + debuggerPath = get_debugger_path(debuggerName) + if debuggerPath: + return debuggerName + elif not search == DebuggerSearch.KeepLooking: + return None + + return None + + +# Defines default values for Valgrind flags. +# +# --smc-check=all-non-file is required to deal with code generation and +# patching by the various JITS. Note that this is only necessary on +# x86 and x86_64, but not on ARM. This flag is only necessary for +# Valgrind versions prior to 3.11. +# +# --vex-iropt-register-updates=allregs-at-mem-access is required so that +# Valgrind generates correct register values whenever there is a +# segfault that is caught and handled. In particular OdinMonkey +# requires this. More recent Valgrinds (3.11 and later) provide +# --px-default=allregs-at-mem-access and +# --px-file-backed=unwindregs-at-mem-access +# which provide a significantly cheaper alternative, by restricting the +# precise exception behaviour to JIT generated code only. +# +# --trace-children=yes is required to get Valgrind to follow into +# content and other child processes. The resulting output can be +# difficult to make sense of, and --child-silent-after-fork=yes +# helps by causing Valgrind to be silent for the child in the period +# after fork() but before its subsequent exec(). +# +# --trace-children-skip lists processes that we are not interested +# in tracing into. +# +# --leak-check=full requests full stack traces for all leaked blocks +# detected at process exit. +# +# --show-possibly-lost=no requests blocks for which only an interior +# pointer was found to be considered not leaked. +# +# +# TODO: pass in the user supplied args for V (--valgrind-args=) and +# use this to detect if a different tool has been selected. If so +# adjust tool-specific args appropriately. +# +# TODO: pass in the path to the Valgrind to be used (--valgrind=), and +# check what flags it accepts. Possible args that might be beneficial: +# +# --num-transtab-sectors=24 [reduces re-jitting overheads in long runs] +# --px-default=allregs-at-mem-access +# --px-file-backed=unwindregs-at-mem-access +# [these reduce PX overheads as described above] +# + + +def get_default_valgrind_args(): + return [ + "--fair-sched=yes", + "--smc-check=all-non-file", + "--vex-iropt-register-updates=allregs-at-mem-access", + "--trace-children=yes", + "--child-silent-after-fork=yes", + ( + "--trace-children-skip=" + + "/usr/bin/hg,/bin/rm,*/bin/certutil,*/bin/pk12util," + + "*/bin/ssltunnel,*/bin/uname,*/bin/which,*/bin/ps," + + "*/bin/grep,*/bin/java,*/bin/lsb_release" + ), + ] + get_default_valgrind_tool_specific_args() + + +# The default tool is Memcheck. Feeding these arguments to a different +# Valgrind tool will cause it to fail at startup, so don't do that! 
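+#
+# As a usage note only: callers needing extra Memcheck options (for example
+# --suppressions=<file>) typically append them to the list returned by
+# get_default_valgrind_args().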
+ + +def get_default_valgrind_tool_specific_args(): + return [ + "--partial-loads-ok=yes", + "--leak-check=summary", + "--show-possibly-lost=no", + "--show-mismatched-frees=no", + ] diff --git a/testing/mozbase/mozdebug/setup.cfg b/testing/mozbase/mozdebug/setup.cfg new file mode 100644 index 0000000000..3c6e79cf31 --- /dev/null +++ b/testing/mozbase/mozdebug/setup.cfg @@ -0,0 +1,2 @@ +[bdist_wheel] +universal=1 diff --git a/testing/mozbase/mozdebug/setup.py b/testing/mozbase/mozdebug/setup.py new file mode 100644 index 0000000000..2e28924fad --- /dev/null +++ b/testing/mozbase/mozdebug/setup.py @@ -0,0 +1,31 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +from setuptools import setup + +PACKAGE_VERSION = "0.3.0" +DEPS = ["mozinfo"] + + +setup( + name="mozdebug", + version=PACKAGE_VERSION, + description="Utilities for running applications under native code debuggers " + "intended for use in Mozilla testing", + long_description="see https://firefox-source-docs.mozilla.org/mozbase/index.html", + classifiers=[ + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3.6", + ], + # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers + keywords="mozilla", + author="Mozilla Automation and Testing Team", + author_email="tools@lists.mozilla.org", + url="https://wiki.mozilla.org/Auto-tools/Projects/Mozbase", + license="MPL", + packages=["mozdebug"], + include_package_data=True, + zip_safe=False, + install_requires=DEPS, +) diff --git a/testing/mozbase/mozdebug/tests/fake_debuggers/cgdb/cgdb b/testing/mozbase/mozdebug/tests/fake_debuggers/cgdb/cgdb new file mode 100755 index 0000000000..e69de29bb2 diff --git a/testing/mozbase/mozdebug/tests/fake_debuggers/devenv/devenv.exe b/testing/mozbase/mozdebug/tests/fake_debuggers/devenv/devenv.exe new file mode 100755 index 0000000000..e69de29bb2 diff --git a/testing/mozbase/mozdebug/tests/fake_debuggers/gdb/gdb b/testing/mozbase/mozdebug/tests/fake_debuggers/gdb/gdb new file mode 100755 index 0000000000..e69de29bb2 diff --git a/testing/mozbase/mozdebug/tests/fake_debuggers/lldb/lldb b/testing/mozbase/mozdebug/tests/fake_debuggers/lldb/lldb new file mode 100755 index 0000000000..e69de29bb2 diff --git a/testing/mozbase/mozdebug/tests/fake_debuggers/wdexpress/wdexpress.exe b/testing/mozbase/mozdebug/tests/fake_debuggers/wdexpress/wdexpress.exe new file mode 100755 index 0000000000..e69de29bb2 diff --git a/testing/mozbase/mozdebug/tests/manifest.toml b/testing/mozbase/mozdebug/tests/manifest.toml new file mode 100644 index 0000000000..147e23872e --- /dev/null +++ b/testing/mozbase/mozdebug/tests/manifest.toml @@ -0,0 +1,4 @@ +[DEFAULT] +subsuite = "mozbase" + +["test.py"] diff --git a/testing/mozbase/mozdebug/tests/test.py b/testing/mozbase/mozdebug/tests/test.py new file mode 100644 index 0000000000..57bbfec95d --- /dev/null +++ b/testing/mozbase/mozdebug/tests/test.py @@ -0,0 +1,65 @@ +#!/usr/bin/env python + +import os + +import mozunit +import pytest +from mozdebug.mozdebug import ( + _DEBUGGER_PRIORITIES, + DebuggerSearch, + get_default_debugger_name, +) + +here = os.path.abspath(os.path.dirname(__file__)) + + +@pytest.fixture +def set_debuggers(monkeypatch): + debugger_dir = os.path.join(here, "fake_debuggers") + + def _set_debuggers(*debuggers): + dirs = [] + for d in debuggers: + if d.endswith(".exe"): + d = d[: -len(".exe")] + 
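+            # Each fake debugger lives in a directory named after it without
+            # the .exe suffix, e.g. fake_debuggers/devenv/devenv.exe.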
dirs.append(os.path.join(debugger_dir, d)) + monkeypatch.setenv("PATH", os.pathsep.join(dirs)) + + return _set_debuggers + + +@pytest.mark.parametrize("os_name", ["android", "linux", "mac", "win", "unknown"]) +def test_default_debugger_name(os_name, set_debuggers, monkeypatch): + import sys + + import mozinfo + + def update_os_name(*args, **kwargs): + mozinfo.info["os"] = os_name + + monkeypatch.setattr(mozinfo, "find_and_update_from_json", update_os_name) + + if sys.platform == "win32": + # This is used so distutils.spawn.find_executable doesn't add '.exe' + # suffixes to all our dummy binaries on Windows. + monkeypatch.setattr(sys, "platform", "linux") + + debuggers = _DEBUGGER_PRIORITIES[os_name][:] + debuggers.reverse() + first = True + while len(debuggers) > 0: + set_debuggers(*debuggers) + + if first: + assert get_default_debugger_name() == debuggers[-1] + first = False + else: + assert get_default_debugger_name() is None + assert ( + get_default_debugger_name(DebuggerSearch.KeepLooking) == debuggers[-1] + ) + debuggers = debuggers[:-1] + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/mozdevice/mozdevice/__init__.py b/testing/mozbase/mozdevice/mozdevice/__init__.py new file mode 100644 index 0000000000..e8e4965b92 --- /dev/null +++ b/testing/mozbase/mozdevice/mozdevice/__init__.py @@ -0,0 +1,181 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +"""mozdevice provides a Python interface to the Android Debug Bridge (adb) for Android Devices. + +mozdevice exports the following classes: + +ADBProcess is a class which is used by ADBCommand to execute commands +via subprocess.Popen. + +ADBCommand is an internal only class which provides the basics of +the interfaces for connecting to a device, and executing commands +either on the host or device using ADBProcess. + +ADBHost is a Python class used to execute commands which are not +necessarily associated with a specific device. It is intended to be +used directly. + +ADBDevice is a Python class used to execute commands which will +interact with a specific connected Android device. + +ADBAndroid inherits directly from ADBDevice and is essentially a +synonym for ADBDevice. It is included for backwards compatibility only +and should not be used in new code. + +ADBDeviceFactory is a Python function used to create instances of +ADBDevice. ADBDeviceFactory is preferred over using ADBDevice to +create new instances of ADBDevice since it will only create one +instance of ADBDevice for each connected device. + +mozdevice exports the following exceptions: + +:: + + Exception - + |- ADBTimeoutError + |- ADBDeviceFactoryError + |- ADBError + |- ADBProcessError + |- ADBListDevicesError + +ADBTimeoutError is a special exception that is not part of the +ADBError class hierarchy. It is raised when a command has failed to +complete within the specified timeout period. Since this typically is +due to a failure in the usb connection to the device and is not +recoverable, it is implemented separately from ADBError so that it +will not be caught by normal except clause handling of expected error +conditions and is considered to be treated as a *fatal* error. + +ADBDeviceFactoryError is also a special exception that is not part +of the ADBError class hierarchy. 
It is raised by ADBDeviceFactory +when the state of the internal ADBDevices object is in an +inconsistent state and is considered to be a *fatal* error. + +ADBListDevicesError is an instance of ADBError which is +raised only by the ADBHost.devices() method to signify that +``adb devices`` reports that the device state has no permissions and can +not be contacted via adb. + +ADBProcessError is an instance of ADBError which is raised when a +process executed via ADBProcess has exited with a non-zero exit +code. It is raised by the ADBCommand.command method and the methods +that call it. + +ADBError is a generic exception class to signify that some error +condition has occured which may be handled depending on the semantics +of the executing code. + +Example: + +:: + + from mozdevice import ADBHost, ADBDeviceFactory, ADBError + + adbhost = ADBHost() + try: + adbhost.kill_server() + adbhost.start_server() + except ADBError as e: + print('Unable to restart the adb server: {}'.format(str(e))) + + device = ADBDeviceFactory() + try: + sdcard_contents = device.ls('/sdcard/') # List the contents of the sdcard on the device. + print('sdcard contains {}'.format(' '.join(sdcard_contents)) + except ADBError as e: + print('Unable to list the sdcard: {}'.format(str(e))) + +Android devices use a security model based upon user permissions much +like that used in Linux upon which it is based. The adb shell executes +commands on the device as the shell user whose access to the files and +directories on the device are limited by the directory and file +permissions set in the device's file system. + +Android apps run under their own user accounts and are restricted by +the app's requested permissions in terms of what commands and files +and directories they may access. + +Like Linux, Android supports a root user who has unrestricted access +to the command and content stored on the device. + +Most commercially released Android devices do not allow adb to run +commands as the root user. Typically, only Android emulators running +certain system images, devices which have AOSP debug or engineering +Android builds or devices which have been *rooted* can run commands as +the root user. + +ADBDevice supports using both unrooted and rooted devices by laddering +its capabilities depending on the specific circumstances where it is +used. + +ADBDevice uses a special location on the device, called the +*test_root*, where it places content to be tested. This can include +binary executables and libraries, configuration files and log +files. Since the special location /data/local/tmp is usually +accessible by the shell user, the test_root is located at +/data/local/tmp/test_root by default. /data/local/tmp is used instead +of the sdcard due to recent Scoped Storage restrictions on access to +the sdcard in Android 10 and later. + +If the device supports running adbd as root, or if the device has been +rooted and supports the use of the su command to run commands as root, +ADBDevice will default to running all shell commands under the root +user and the test_root will remain set to /data/local/tmp/test_root +unless changed. + +If the device does not support running shell commands under the root +user, and a *debuggable* app is set in ADBDevice property +run_as_package, then ADBDevice will set the test_root to +/data/data//test_root and will run shell commands as +the app user when accessing content located in the app's data +directory. 
+Content can be pushed to the app's data directory or pulled
+from the app's data directory by using the command run-as to access
+the app's data.
+
+If a device does not support running commands as root and a
+*debuggable* app is not being used, command line programs can still be
+executed by pushing them to the /data/local/tmp directory which is
+accessible to the shell user.
+
+If for some reason the device is not rooted and /data/local/tmp is
+not accessible to the shell user, then ADBDevice will fail to
+initialize and will not be usable for that device.
+
+NOTE: ADBDeviceFactory will clear the contents of the test_root when it
+first creates an instance of ADBDevice.
+
+When the run_as_package property is set in an ADBDevice instance, it
+will clear the contents of the current test_root before changing the
+test_root to point to the new location
+/data/data/<app-package>/test_root which will then be cleared of
+any existing content.
+
+"""
+
+from .adb import (
+    ADBCommand,
+    ADBDevice,
+    ADBDeviceFactory,
+    ADBError,
+    ADBHost,
+    ADBProcess,
+    ADBProcessError,
+    ADBTimeoutError,
+)
+from .adb_android import ADBAndroid
+from .remote_process_monitor import RemoteProcessMonitor
+
+__all__ = [
+    "ADBError",
+    "ADBProcessError",
+    "ADBTimeoutError",
+    "ADBProcess",
+    "ADBCommand",
+    "ADBHost",
+    "ADBDevice",
+    "ADBAndroid",
+    "ADBDeviceFactory",
+    "RemoteProcessMonitor",
+]
diff --git a/testing/mozbase/mozdevice/mozdevice/adb.py b/testing/mozbase/mozdevice/mozdevice/adb.py
new file mode 100644
index 0000000000..bf3029c2f4
--- /dev/null
+++ b/testing/mozbase/mozdevice/mozdevice/adb.py
@@ -0,0 +1,4438 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import io
+import os
+import pipes
+import posixpath
+import re
+import shlex
+import shutil
+import signal
+import subprocess
+import sys
+import tempfile
+import time
+import traceback
+from shutil import copytree
+from threading import Thread
+
+import six
+from six.moves import range
+
+from . import version_codes
+
+_TEST_ROOT = None
+
+
+class ADBProcess(object):
+    """ADBProcess encapsulates the data related to executing the adb process."""
+
+    def __init__(self, args, use_stdout_pipe=False, timeout=None):
+        #: command argument list.
+        self.args = args
+        Popen_args = {}
+
+        #: Temporary file handle to be used for stdout.
+        if use_stdout_pipe:
+            self.stdout_file = subprocess.PIPE
+            # Reading utf-8 from the stdout pipe
+            if sys.version_info >= (3, 6):
+                Popen_args["encoding"] = "utf-8"
+            else:
+                Popen_args["universal_newlines"] = True
+        else:
+            self.stdout_file = tempfile.NamedTemporaryFile(mode="w+b")
+            Popen_args["stdout"] = self.stdout_file
+
+        #: boolean indicating if the command timed out.
+        self.timedout = None
+
+        #: exitcode of the process.
+        self.exitcode = None
+
+        #: subprocess Process object used to execute the command.
+        Popen_args["stderr"] = subprocess.STDOUT
+        self.proc = subprocess.Popen(args, **Popen_args)
+
+        # If a timeout is set, then create a thread responsible for killing the
+        # process, as well as updating the exitcode and timedout status.
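+        # The helper below polls proc.poll() roughly every millisecond until
+        # either the process exits or the deadline passes; on a timeout it
+        # kills the process and records timedout = True so callers can tell
+        # a timeout apart from an ordinary non-zero exit.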
+ def timeout_thread(adb_process, timeout): + start_time = time.time() + polling_interval = 0.001 + adb_process.exitcode = adb_process.proc.poll() + while (time.time() - start_time) <= float( + timeout + ) and adb_process.exitcode is None: + time.sleep(polling_interval) + adb_process.exitcode = adb_process.proc.poll() + + if adb_process.exitcode is None: + adb_process.proc.kill() + adb_process.timedout = True + adb_process.exitcode = adb_process.proc.poll() + + if timeout: + Thread(target=timeout_thread, args=(self, timeout), daemon=True).start() + + @property + def stdout(self): + """Return the contents of stdout.""" + assert not self.stdout_file == subprocess.PIPE + if not self.stdout_file or self.stdout_file.closed: + content = "" + else: + self.stdout_file.seek(0, os.SEEK_SET) + content = six.ensure_str(self.stdout_file.read().rstrip()) + return content + + def __str__(self): + # Remove -s from the error message to allow bug suggestions + # to be independent of the individual failing device. + arg_string = " ".join(self.args) + arg_string = re.sub(r" -s [\w-]+", "", arg_string) + return "args: %s, exitcode: %s, stdout: %s" % ( + arg_string, + self.exitcode, + self.stdout, + ) + + def __iter__(self): + assert self.stdout_file == subprocess.PIPE + return self + + def __next__(self): + assert self.stdout_file == subprocess.PIPE + try: + return next(self.proc.stdout) + except StopIteration: + # Wait until the process ends. + while self.exitcode is None or self.timedout: + time.sleep(0.001) + raise StopIteration + + +# ADBError and ADBTimeoutError are treated differently in order that +# ADBTimeoutErrors can be handled distinctly from ADBErrors. + + +class ADBError(Exception): + """ADBError is raised in situations where a command executed on a + device either exited with a non-zero exitcode or when an + unexpected error condition has occurred. Generally, ADBErrors can + be handled and the device can continue to be used. + """ + + pass + + +class ADBProcessError(ADBError): + """ADBProcessError is raised when an associated ADBProcess is + available and relevant. + """ + + def __init__(self, adb_process): + ADBError.__init__(self, str(adb_process)) + self.adb_process = adb_process + + +class ADBListDevicesError(ADBError): + """ADBListDevicesError is raised when errors are found listing the + devices, typically not any permissions. + + The devices information is stocked with the *devices* member. + """ + + def __init__(self, msg, devices): + ADBError.__init__(self, msg) + self.devices = devices + + +class ADBTimeoutError(Exception): + """ADBTimeoutError is raised when either a host command or shell + command takes longer than the specified timeout to execute. The + timeout value is set in the ADBCommand constructor and is 300 seconds by + default. This error is typically fatal since the host is having + problems communicating with the device. You may be able to recover + by rebooting, but this is not guaranteed. + + Recovery options are: + + * Killing and restarting the adb server via + :: + + adb kill-server; adb start-server + + * Rebooting the device manually. + * Rebooting the host. + """ + + pass + + +class ADBDeviceFactoryError(Exception): + """ADBDeviceFactoryError is raised when the ADBDeviceFactory is in + an inconsistent state. + """ + + pass + + +class ADBCommand(object): + """ADBCommand provides a basic interface to adb commands + which is used to provide the 'command' methods for the + classes ADBHost and ADBDevice. 
+ + ADBCommand should only be used as the base class for other + classes and should not be instantiated directly. To enforce this + restriction calling ADBCommand's constructor will raise a + NonImplementedError exception. + + :param str adb: path to adb executable. Defaults to 'adb'. + :param str adb_host: host of the adb server. + :param int adb_port: port of the adb server. + :param str logger_name: logging logger name. Defaults to 'adb'. + :param int timeout: The default maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. This timeout is per adb call. The + total time spent may exceed this value. If it is not + specified, the value defaults to 300. + :param bool verbose: provide verbose output + :param bool use_root: Use root if available on device + :raises: :exc:`ADBError` + :exc:`ADBTimeoutError` + + :: + + from mozdevice import ADBCommand + + try: + adbcommand = ADBCommand() + except NotImplementedError: + print "ADBCommand can not be instantiated." + """ + + def __init__( + self, + adb="adb", + adb_host=None, + adb_port=None, + logger_name="adb", + timeout=300, + verbose=False, + use_root=True, + ): + if self.__class__ == ADBCommand: + raise NotImplementedError + + self._logger = self._get_logger(logger_name, verbose) + self._verbose = verbose + self._use_root = use_root + self._adb_path = adb + self._adb_host = adb_host + self._adb_port = adb_port + self._timeout = timeout + self._polling_interval = 0.001 + self._adb_version = "" + + self._logger.debug("%s: %s" % (self.__class__.__name__, self.__dict__)) + + # catch early a missing or non executable adb command + # and get the adb version while we are at it. + try: + output = subprocess.Popen( + [adb, "version"], stdout=subprocess.PIPE, stderr=subprocess.PIPE + ).communicate() + re_version = re.compile(r"Android Debug Bridge version (.*)") + if isinstance(output[0], six.binary_type): + self._adb_version = re_version.match( + output[0].decode("utf-8", "replace") + ).group(1) + else: + self._adb_version = re_version.match(output[0]).group(1) + + if self._adb_version < "1.0.36": + raise ADBError( + "adb version %s less than minimum 1.0.36" % self._adb_version + ) + + except Exception as exc: + raise ADBError("%s: %s is not executable." % (exc, adb)) + + def _get_logger(self, logger_name, verbose): + logger = None + level = "DEBUG" if verbose else "INFO" + try: + import mozlog + + logger = mozlog.get_default_logger(logger_name) + if not logger: + if sys.__stdout__.isatty(): + defaults = {"mach": sys.stdout} + else: + defaults = {"tbpl": sys.stdout} + logger = mozlog.commandline.setup_logging( + logger_name, {}, defaults, formatter_defaults={"level": level} + ) + except ImportError: + pass + + if logger is None: + import logging + + logger = logging.getLogger(logger_name) + logger.setLevel(level) + return logger + + # Host Command methods + + def command(self, cmds, device_serial=None, timeout=None): + """Executes an adb command on the host. + + :param list cmds: The command and its arguments to be + executed. + :param str device_serial: The device's + serial number if the adb command is to be executed against + a specific device. If it is not specified, ANDROID_SERIAL + from the environment will be used if it is set. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. This timeout is per adb call. The + total time spent may exceed this value. 
If it is not + specified, the value set in the ADBCommand constructor is used. + :return: :class:`ADBProcess` + + command() provides a low level interface for executing + commands on the host via adb. + + command() executes on the host in such a fashion that stdout + of the adb process is a file handle on the host and + the exit code is available as the exit code of the adb + process. + + The caller provides a list containing commands, as well as a + timeout period in seconds. + + A subprocess is spawned to execute adb with stdout and stderr + directed to a temporary file. If the process takes longer than + the specified timeout, the process is terminated. + + It is the caller's responsibilty to clean up by closing + the stdout temporary file. + """ + args = [self._adb_path] + device_serial = device_serial or os.environ.get("ANDROID_SERIAL") + if self._adb_host: + args.extend(["-H", self._adb_host]) + if self._adb_port: + args.extend(["-P", str(self._adb_port)]) + if device_serial: + args.extend(["-s", device_serial, "wait-for-device"]) + args.extend(cmds) + + adb_process = ADBProcess(args) + + if timeout is None: + timeout = self._timeout + + start_time = time.time() + adb_process.exitcode = adb_process.proc.poll() + while (time.time() - start_time) <= float( + timeout + ) and adb_process.exitcode is None: + time.sleep(self._polling_interval) + adb_process.exitcode = adb_process.proc.poll() + if adb_process.exitcode is None: + adb_process.proc.kill() + adb_process.timedout = True + adb_process.exitcode = adb_process.proc.poll() + + adb_process.stdout_file.seek(0, os.SEEK_SET) + + return adb_process + + def command_output(self, cmds, device_serial=None, timeout=None): + """Executes an adb command on the host returning stdout. + + :param list cmds: The command and its arguments to be + executed. + :param str device_serial: The device's + serial number if the adb command is to be executed against + a specific device. If it is not specified, ANDROID_SERIAL + from the environment will be used if it is set. + :param int timeout: The maximum time in seconds + for any spawned adb process to complete before throwing + an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADBCommand constructor is used. + :return: str - content of stdout. + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + adb_process = None + try: + # Need to force the use of the ADBCommand class's command + # since ADBDevice will redefine command and call its + # own version otherwise. + adb_process = ADBCommand.command( + self, cmds, device_serial=device_serial, timeout=timeout + ) + if adb_process.timedout: + raise ADBTimeoutError("%s" % adb_process) + if adb_process.exitcode: + raise ADBProcessError(adb_process) + output = adb_process.stdout + if self._verbose: + self._logger.debug( + "command_output: %s, " + "timeout: %s, " + "timedout: %s, " + "exitcode: %s, output: %s" + % ( + " ".join(adb_process.args), + timeout, + adb_process.timedout, + adb_process.exitcode, + output, + ) + ) + + return output + finally: + if adb_process and isinstance(adb_process.stdout_file, io.IOBase): + adb_process.stdout_file.close() + + +class ADBHost(ADBCommand): + """ADBHost provides a basic interface to adb host commands + which do not target a specific device. + + :param str adb: path to adb executable. Defaults to 'adb'. + :param str adb_host: host of the adb server. + :param int adb_port: port of the adb server. 
+ :param logger_name: logging logger name. Defaults to 'adb'. + :param int timeout: The default maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. This timeout is per adb call. The + total time spent may exceed this value. If it is not + specified, the value defaults to 300. + :param bool verbose: provide verbose output + :raises: :exc:`ADBError` + :exc:`ADBTimeoutError` + + :: + + from mozdevice import ADBHost + + adbhost = ADBHost() + adbhost.start_server() + """ + + def __init__( + self, + adb="adb", + adb_host=None, + adb_port=None, + logger_name="adb", + timeout=300, + verbose=False, + ): + ADBCommand.__init__( + self, + adb=adb, + adb_host=adb_host, + adb_port=adb_port, + logger_name=logger_name, + timeout=timeout, + verbose=verbose, + use_root=True, + ) + + def command(self, cmds, timeout=None): + """Executes an adb command on the host. + + :param list cmds: The command and its arguments to be + executed. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. This timeout is per adb call. The + total time spent may exceed this value. If it is not + specified, the value set in the ADBHost constructor is used. + :return: :class:`ADBProcess` + + command() provides a low level interface for executing + commands on the host via adb. + + command() executes on the host in such a fashion that stdout + of the adb process is a file handle on the host and + the exit code is available as the exit code of the adb + process. + + The caller provides a list containing commands, as well as a + timeout period in seconds. + + A subprocess is spawned to execute adb with stdout and stderr + directed to a temporary file. If the process takes longer than + the specified timeout, the process is terminated. + + It is the caller's responsibilty to clean up by closing + the stdout temporary file. + """ + return ADBCommand.command(self, cmds, timeout=timeout) + + def command_output(self, cmds, timeout=None): + """Executes an adb command on the host returning stdout. + + :param list cmds: The command and its arguments to be + executed. + :param int timeout: The maximum time in seconds + for any spawned adb process to complete before throwing + an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADBHost constructor is used. + :return: str - content of stdout. + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + return ADBCommand.command_output(self, cmds, timeout=timeout) + + def start_server(self, timeout=None): + """Starts the adb server. + + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. This timeout is per adb call. The + total time spent may exceed this value. If it is not + specified, the value set in the ADBHost constructor is used. + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + + Attempting to use start_server with any adb_host value other than None + will fail with an ADBError exception. + + You will need to start the server on the remote host via the command: + + .. code-block:: shell + + adb -a fork-server server + + If you wish the remote adb server to restart automatically, you can + enclose the command in a loop as in: + + .. 
code-block:: shell + + while true; do + adb -a fork-server server + done + """ + self.command_output(["start-server"], timeout=timeout) + + def kill_server(self, timeout=None): + """Kills the adb server. + + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. This timeout is per adb call. The + total time spent may exceed this value. If it is not + specified, the value set in the ADBHost constructor is used. + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + self.command_output(["kill-server"], timeout=timeout) + + def devices(self, timeout=None): + """Executes adb devices -l and returns a list of objects describing attached devices. + + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. This timeout is per adb call. The + total time spent may exceed this value. If it is not + specified, the value set in the ADBHost constructor is used. + :return: an object contain + :raises: :exc:`ADBTimeoutError` + :exc:`ADBListDevicesError` + :exc:`ADBError` + + The output of adb devices -l + + :: + + $ adb devices -l + List of devices attached + b313b945 device usb:1-7 product:d2vzw model:SCH_I535 device:d2vzw + + is parsed and placed into an object as in + + :: + + [{'device_serial': 'b313b945', 'state': 'device', 'product': 'd2vzw', + 'usb': '1-7', 'device': 'd2vzw', 'model': 'SCH_I535' }] + """ + # b313b945 device usb:1-7 product:d2vzw model:SCH_I535 device:d2vzw + # from Android system/core/adb/transport.c statename() + re_device_info = re.compile( + r"([^\s]+)\s+(offline|bootloader|device|host|recovery|sideload|" + "no permissions|unauthorized|unknown)" + ) + devices = [] + lines = self.command_output(["devices", "-l"], timeout=timeout).splitlines() + for line in lines: + if line == "List of devices attached ": + continue + match = re_device_info.match(line) + if match: + device = {"device_serial": match.group(1), "state": match.group(2)} + remainder = line[match.end(2) :].strip() + if remainder: + try: + device.update( + dict([j.split(":") for j in remainder.split(" ")]) + ) + except ValueError: + self._logger.warning( + "devices: Unable to parse " "remainder for device %s" % line + ) + devices.append(device) + for device in devices: + if device["state"] == "no permissions": + raise ADBListDevicesError( + "No permissions to detect devices. You should restart the" + " adb server as root:\n" + "\n# adb kill-server\n# adb start-server\n" + "\nor maybe configure your udev rules.", + devices, + ) + return devices + + +ADBDEVICES = {} + + +def ADBDeviceFactory( + device=None, + adb="adb", + adb_host=None, + adb_port=None, + test_root=None, + logger_name="adb", + timeout=300, + verbose=False, + device_ready_retry_wait=20, + device_ready_retry_attempts=3, + use_root=True, + share_test_root=True, + run_as_package=None, +): + """ADBDeviceFactory provides a factory for :class:`ADBDevice` + instances that enforces the requirement that only one + :class:`ADBDevice` be created for each attached device. It uses + the identical arguments as the :class:`ADBDevice` + constructor. This is also used to ensure that the device's + test_root is initialized to an empty directory before tests are + run on the device. 
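+
+    A minimal usage sketch, assuming a single attached device (the factory
+    accepts the same keyword arguments as the :class:`ADBDevice`
+    constructor):
+
+    ::
+
+        from mozdevice import ADBDeviceFactory
+
+        device = ADBDeviceFactory()
+        # A second call for the same device returns the cached instance
+        # rather than creating a new ADBDevice.
+        same_device = ADBDeviceFactory()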
+ + :return: :class:`ADBDevice` + :raises: :exc:`ADBDeviceFactoryError` + :exc:`ADBError` + :exc:`ADBTimeoutError` + + """ + device = device or os.environ.get("ANDROID_SERIAL") + if device is not None and device in ADBDEVICES: + # We have already created an ADBDevice for this device, just re-use it. + adbdevice = ADBDEVICES[device] + elif device is None and ADBDEVICES: + # We did not specify the device serial number and we have + # already created an ADBDevice which means we must only have + # one device connected and we can re-use the existing ADBDevice. + devices = list(ADBDEVICES.keys()) + assert ( + len(devices) == 1 + ), "Only one device may be connected if the device serial number is not specified." + adbdevice = ADBDEVICES[devices[0]] + elif ( + device is not None + and device not in ADBDEVICES + or device is None + and not ADBDEVICES + ): + # The device has not had an ADBDevice created yet. + adbdevice = ADBDevice( + device=device, + adb=adb, + adb_host=adb_host, + adb_port=adb_port, + test_root=test_root, + logger_name=logger_name, + timeout=timeout, + verbose=verbose, + device_ready_retry_wait=device_ready_retry_wait, + device_ready_retry_attempts=device_ready_retry_attempts, + use_root=use_root, + share_test_root=share_test_root, + run_as_package=run_as_package, + ) + ADBDEVICES[adbdevice._device_serial] = adbdevice + else: + raise ADBDeviceFactoryError( + "Inconsistent ADBDeviceFactory: device: %s, ADBDEVICES: %s" + % (device, ADBDEVICES) + ) + # Clean the test root before testing begins. + if test_root: + adbdevice.rm( + posixpath.join(adbdevice.test_root, "*"), + recursive=True, + force=True, + timeout=timeout, + ) + # Sync verbose and update the logger configuration in case it has + # changed since the initial initialization + if verbose != adbdevice._verbose: + adbdevice._verbose = verbose + adbdevice._logger = adbdevice._get_logger(adbdevice._logger.name, verbose) + return adbdevice + + +class ADBDevice(ADBCommand): + """ADBDevice provides methods which can be used to interact with the + associated Android-based device. + + :param str device: When a string is passed in device, it + is interpreted as the device serial number. This form is not + compatible with devices containing a ":" in the serial; in + this case ValueError will be raised. When a dictionary is + passed it must have one or both of the keys "device_serial" + and "usb". This is compatible with the dictionaries in the + list returned by ADBHost.devices(). If the value of + device_serial is a valid serial not containing a ":" it will + be used to identify the device, otherwise the value of the usb + key, prefixed with "usb:" is used. If None is passed and + there is exactly one device attached to the host, that device + is used. If None is passed and ANDROID_SERIAL is set in the environment, + that device is used. If there is more than one device attached and + device is None and ANDROID_SERIAL is not set in the environment, ValueError + is raised. If no device is attached the constructor will block + until a device is attached or the timeout is reached. + :param str adb_host: host of the adb server to connect to. + :param int adb_port: port of the adb server to connect to. + :param str test_root: value containing the test root to be + used on the device. This value will be shared among all + instances of ADBDevice if share_test_root is True. + :param str logger_name: logging logger name. 
Defaults to 'adb' + :param int timeout: The default maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. This timeout is per adb call. The + total time spent may exceed this value. If it is not + specified, the value defaults to 300. + :param bool verbose: provide verbose output + :param int device_ready_retry_wait: number of seconds to wait + between attempts to check if the device is ready after a + reboot. + :param integer device_ready_retry_attempts: number of attempts when + checking if a device is ready. + :param bool use_root: Use root if it is available on device + :param bool share_test_root: True if instance should share the + same test_root value with other ADBInstances. Defaults to True. + :param str run_as_package: Name of package to be used in run-as in liew of + using su. + :raises: :exc:`ADBError` + :exc:`ADBTimeoutError` + :exc:`ValueError` + + :: + + from mozdevice import ADBDevice + + adbdevice = ADBDevice() + print(adbdevice.list_files("/mnt/sdcard")) + if adbdevice.process_exist("org.mozilla.geckoview.test_runner"): + print("org.mozilla.geckoview.test_runner is running") + """ + + SOCKET_DIRECTION_REVERSE = "reverse" + + SOCKET_DIRECTION_FORWARD = "forward" + + # BUILTINS is used to determine which commands can not be executed + # via su or run-as. This set of possible builtin commands was + # obtained from `man builtin` on Linux. + BUILTINS = set( + [ + "alias", + "bg", + "bind", + "break", + "builtin", + "caller", + "cd", + "command", + "compgen", + "complete", + "compopt", + "continue", + "declare", + "dirs", + "disown", + "echo", + "enable", + "eval", + "exec", + "exit", + "export", + "false", + "fc", + "fg", + "getopts", + "hash", + "help", + "history", + "jobs", + "kill", + "let", + "local", + "logout", + "mapfile", + "popd", + "printf", + "pushd", + "pwd", + "read", + "readonly", + "return", + "set", + "shift", + "shopt", + "source", + "suspend", + "test", + "times", + "trap", + "true", + "type", + "typeset", + "ulimit", + "umask", + "unalias", + "unset", + "wait", + ] + ) + + def __init__( + self, + device=None, + adb="adb", + adb_host=None, + adb_port=None, + test_root=None, + logger_name="adb", + timeout=300, + verbose=False, + device_ready_retry_wait=20, + device_ready_retry_attempts=3, + use_root=True, + share_test_root=True, + run_as_package=None, + ): + global _TEST_ROOT + + ADBCommand.__init__( + self, + adb=adb, + adb_host=adb_host, + adb_port=adb_port, + logger_name=logger_name, + timeout=timeout, + verbose=verbose, + use_root=use_root, + ) + self._logger.info("Using adb %s" % self._adb_version) + self._device_serial = self._get_device_serial(device) + self._initial_test_root = test_root + self._share_test_root = share_test_root + if share_test_root and not _TEST_ROOT: + _TEST_ROOT = test_root + self._test_root = None + self._run_as_package = None + # Cache packages debuggable state. + self._debuggable_packages = {} + self._device_ready_retry_wait = device_ready_retry_wait + self._device_ready_retry_attempts = device_ready_retry_attempts + self._have_root_shell = False + self._have_su = False + self._have_android_su = False + self._selinux = None + self._re_internal_storage = None + + self._wait_for_boot_completed(timeout=timeout) + + # Record the start time of the ADBDevice initialization so we can + # determine if we should abort with an ADBTimeoutError if it is + # taking too long. 
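+        # The same start_time also bounds the detection loops that follow
+        # (sdk version, ls, cp and pidof probing), so ADBDevice initialization
+        # as a whole is limited by the constructor's timeout value.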
+ start_time = time.time() + + # Attempt to get the Android version as early as possible in order + # to work around differences in determining adb command exit codes + # in Android before and after Android 7. + self.version = 0 + while self.version < 1 and (time.time() - start_time) <= float(timeout): + try: + version = self.get_prop("ro.build.version.sdk", timeout=timeout) + self.version = int(version) + except ValueError: + self._logger.info("unexpected ro.build.version.sdk: '%s'" % version) + time.sleep(2) + if self.version < 1: + # note slightly different meaning to the ADBTimeoutError here (and above): + # failed to get valid (numeric) version string in all attempts in allowed time + raise ADBTimeoutError( + "ADBDevice: unable to determine ro.build.version.sdk." + ) + + self._mkdir_p = None + # Force the use of /system/bin/ls or /system/xbin/ls in case + # there is /sbin/ls which embeds ansi escape codes to colorize + # the output. Detect if we are using busybox ls. We want each + # entry on a single line and we don't want . or .. + ls_dir = "/system" + + # Using self.is_file is problematic on emulators either + # using ls or test to check for their existence. + # Executing the command to detect its existence works around + # any issues with ls or test. + boot_completed = False + while not boot_completed and (time.time() - start_time) <= float(timeout): + try: + self.shell_output("/system/bin/ls /system/bin/ls", timeout=timeout) + boot_completed = True + self._ls = "/system/bin/ls" + except ADBError as e1: + self._logger.debug("detect /system/bin/ls {}".format(e1)) + try: + self.shell_output( + "/system/xbin/ls /system/xbin/ls", timeout=timeout + ) + boot_completed = True + self._ls = "/system/xbin/ls" + except ADBError as e2: + self._logger.debug("detect /system/xbin/ls : {}".format(e2)) + if not boot_completed: + time.sleep(2) + if not boot_completed: + raise ADBError("ADBDevice.__init__: ls could not be found") + + # A race condition can occur especially with emulators where + # the device appears to be available but it has not completed + # mounting the sdcard. We can work around this by checking if + # the sdcard is missing when we attempt to ls it and retrying + # if it is not yet available. + boot_completed = False + while not boot_completed and (time.time() - start_time) <= float(timeout): + try: + self.shell_output("{} -1A {}".format(self._ls, ls_dir), timeout=timeout) + boot_completed = True + self._ls += " -1A" + except ADBError as e: + self._logger.debug("detect ls -1A: {}".format(e)) + if "No such file or directory" not in str(e): + boot_completed = True + self._ls += " -a" + if not boot_completed: + time.sleep(2) + if not boot_completed: + raise ADBTimeoutError("ADBDevice: /sdcard not found.") + + self._logger.info("%s supported" % self._ls) + + # builtin commands which do not exist as separate programs can + # not be executed using su or run-as. Remove builtin commands + # from self.BUILTINS which also exist as separate programs so + # that we will be able to execute them using su or run-as if + # necessary. + remove_builtins = set() + for builtin in self.BUILTINS: + try: + self.ls("/system/*bin/%s" % builtin, timeout=timeout) + self._logger.debug("Removing %s from BUILTINS" % builtin) + remove_builtins.add(builtin) + except ADBError: + pass + self.BUILTINS.difference_update(remove_builtins) + + # Do we have cp? 
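+        # Probe by running `cp --help`: a "not found" error means there is no
+        # cp binary, while usage output or an option complaint means cp
+        # exists; either outcome ends the loop and sets self._have_cp.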
+ boot_completed = False + while not boot_completed and (time.time() - start_time) <= float(timeout): + try: + self.shell_output("cp --help", timeout=timeout) + boot_completed = True + self._have_cp = True + except ADBError as e: + if "not found" in str(e): + self._have_cp = False + boot_completed = True + elif "known option" in str(e): + self._have_cp = True + boot_completed = True + elif "invalid option" in str(e): + self._have_cp = True + boot_completed = True + if not boot_completed: + time.sleep(2) + if not boot_completed: + raise ADBTimeoutError("ADBDevice: cp not found.") + self._logger.info("Native cp support: %s" % self._have_cp) + + # Do we have chmod -R? + try: + self._chmod_R = False + re_recurse = re.compile(r"[-]R") + chmod_output = self.shell_output("chmod --help", timeout=timeout) + match = re_recurse.search(chmod_output) + if match: + self._chmod_R = True + except ADBError as e: + self._logger.debug("Check chmod -R: {}".format(e)) + match = re_recurse.search(str(e)) + if match: + self._chmod_R = True + self._logger.info("Native chmod -R support: {}".format(self._chmod_R)) + + # Do we have chown -R? + try: + self._chown_R = False + chown_output = self.shell_output("chown --help", timeout=timeout) + match = re_recurse.search(chown_output) + if match: + self._chown_R = True + except ADBError as e: + self._logger.debug("Check chown -R: {}".format(e)) + self._logger.info("Native chown -R support: {}".format(self._chown_R)) + + try: + cleared = self.shell_bool('logcat -P ""', timeout=timeout) + except ADBError: + cleared = False + if not cleared: + self._logger.info("Unable to turn off logcat chatty") + + # Do we have pidof? + if self.version < version_codes.N: + # unexpected pidof behavior observed on Android 6 in bug 1514363 + self._have_pidof = False + else: + boot_completed = False + while not boot_completed and (time.time() - start_time) <= float(timeout): + try: + self.shell_output("pidof --help", timeout=timeout) + boot_completed = True + self._have_pidof = True + except ADBError as e: + if "not found" in str(e): + self._have_pidof = False + boot_completed = True + elif "known option" in str(e): + self._have_pidof = True + boot_completed = True + if not boot_completed: + time.sleep(2) + if not boot_completed: + raise ADBTimeoutError("ADBDevice: pidof not found.") + # Bug 1529960 observed pidof intermittently returning no results for a + # running process on the 7.0 x86_64 emulator. + + characteristics = self.get_prop("ro.build.characteristics", timeout=timeout) + + abi = self.get_prop("ro.product.cpu.abi", timeout=timeout) + self._have_flaky_pidof = ( + self.version == version_codes.N + and abi == "x86_64" + and "emulator" in characteristics + ) + self._logger.info( + "Native {} pidof support: {}".format( + "flaky" if self._have_flaky_pidof else "normal", self._have_pidof + ) + ) + + if self._use_root: + # Detect if root is available, but do not fail if it is not. + # Catch exceptions due to the potential for segfaults + # calling su when using an improperly rooted device. + + self._check_adb_root(timeout=timeout) + + if not self._have_root_shell: + # To work around bug 1525401 where su -c id will return an + # exitcode of 1 if selinux permissive is not already in effect, + # we need su to turn off selinux prior to checking for su. + # We can use shell() directly to prevent the non-zero exitcode + # from raising an ADBError. + # Note: We are assuming su -c is supported and do not attempt to + # use su 0. 
+ adb_process = self.shell("su -c setenforce 0") + self._logger.info( + "su -c setenforce 0 exitcode %s, stdout: %s" + % (adb_process.proc.poll(), adb_process.proc.stdout) + ) + + uid = "uid=0" + # Do we have a 'Superuser' sh like su? + try: + if self.shell_output("su -c id", timeout=timeout).find(uid) != -1: + self._have_su = True + self._logger.info("su -c supported") + except ADBError as e: + self._logger.debug("Check for su -c failed: {}".format(e)) + + # Check if Android's su 0 command works. + # If we already have detected su -c support, we can skip this check. + try: + if ( + not self._have_su + and self.shell_output("su 0 id", timeout=timeout).find(uid) + != -1 + ): + self._have_android_su = True + self._logger.info("su 0 supported") + except ADBError as e: + self._logger.debug("Check for su 0 failed: {}".format(e)) + + # Guarantee that /data/local/tmp exists and is accessible to all. + # It is a fatal error if /data/local/tmp does not exist and can not be created. + if not self.exists("/data/local/tmp", timeout=timeout): + # parents=True is required on emulator, where exist() may be flaky + self.mkdir("/data/local/tmp", parents=True, timeout=timeout) + + # Beginning in Android 8.1 /data/anr/traces.txt no longer contains + # a single file traces.txt but instead will contain individual files + # for each stack. + # See https://github.com/aosp-mirror/platform_build/commit/ + # fbba7fe06312241c7eb8c592ec2ac630e4316d55 + stack_trace_dir = self.shell_output( + "getprop dalvik.vm.stack-trace-dir", timeout=timeout + ) + if not stack_trace_dir: + stack_trace_file = self.shell_output( + "getprop dalvik.vm.stack-trace-file", timeout=timeout + ) + if stack_trace_file: + stack_trace_dir = posixpath.dirname(stack_trace_file) + else: + stack_trace_dir = "/data/anr" + self.stack_trace_dir = stack_trace_dir + self.enforcing = "Permissive" + self.run_as_package = run_as_package + + self._logger.debug("ADBDevice: %s" % self.__dict__) + + @property + def is_rooted(self): + return self._have_root_shell or self._have_su or self._have_android_su + + def _wait_for_boot_completed(self, timeout=None): + """Internal method to wait for boot to complete. + + Wait for sys.boot_completed=1 and raise ADBError if boot does + not complete within retry attempts. + + :param int timeout: The default maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. This timeout is per adb call. The + total time spent may exceed this value. If it is not + specified, the value defaults to 300. + :raises: :exc:`ADBError` + """ + for attempt in range(self._device_ready_retry_attempts): + sys_boot_completed = self.shell_output( + "getprop sys.boot_completed", timeout=timeout + ) + if sys_boot_completed == "1": + break + time.sleep(self._device_ready_retry_wait) + if sys_boot_completed != "1": + raise ADBError("Failed to complete boot in time") + + def _get_device_serial(self, device): + device = device or os.environ.get("ANDROID_SERIAL") + if device is None: + devices = ADBHost( + adb=self._adb_path, adb_host=self._adb_host, adb_port=self._adb_port + ).devices() + if len(devices) > 1: + raise ValueError( + "ADBDevice called with multiple devices " + "attached and no device specified" + ) + if len(devices) == 0: + raise ADBError("No connected devices found.") + device = devices[0] + + # Allow : in device serial if it matches a tcpip device serial. 
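+        # e.g. a tcpip serial looks like "192.168.1.5:5555", as produced by
+        # `adb connect`.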
+ re_device_serial_tcpip = re.compile(r"[^:]+:[0-9]+$") + + def is_valid_serial(serial): + return ( + serial.startswith("usb:") + or re_device_serial_tcpip.match(serial) is not None + or ":" not in serial + ) + + if isinstance(device, six.string_types): + # Treat this as a device serial + if not is_valid_serial(device): + raise ValueError( + "Device serials containing ':' characters are " + "invalid. Pass the output from " + "ADBHost.devices() for the device instead" + ) + return device + + serial = device.get("device_serial") + if serial is not None and is_valid_serial(serial): + return serial + usb = device.get("usb") + if usb is not None: + return "usb:%s" % usb + + raise ValueError("Unable to get device serial") + + def _check_root_user(self, timeout=None): + uid = "uid=0" + # Is shell already running as root? + try: + if self.shell_output("id", timeout=timeout).find(uid) != -1: + self._logger.info("adbd running as root") + return True + except ADBError: + self._logger.debug("Check for root user failed") + return False + + def _check_adb_root(self, timeout=None): + self._have_root_shell = self._check_root_user(timeout=timeout) + + # Exclude these devices from checking for a root shell due to + # potential hangs. + exclude_set = set() + exclude_set.add("E5823") # Sony Xperia Z5 Compact (E5823) + # Do we need to run adb root to get a root shell? + if not self._have_root_shell: + if self.get_prop("ro.product.model") in exclude_set: + self._logger.warning( + "your device was excluded from attempting adb root." + ) + else: + try: + self.command_output(["root"], timeout=timeout) + self._have_root_shell = self._check_root_user(timeout=timeout) + if self._have_root_shell: + self._logger.info("adbd restarted as root") + else: + self._logger.info("adbd not restarted as root") + except ADBError: + self._logger.debug("Check for root adbd failed") + + def pidof(self, app_name, timeout=None): + """ + Return a list of pids for all extant processes running within the + specified application package. + + :param str app_name: The name of the application package to examine + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. This timeout is per + adb call. The total time spent may exceed this + value. If it is not specified, the value set + in the ADBDevice constructor is used. + :return: List of integers containing the pid(s) of the various processes. + :raises: :exc:`ADBTimeoutError` + """ + if self._have_pidof: + try: + pid_output = self.shell_output("pidof %s" % app_name, timeout=timeout) + re_pids = re.compile(r"[0-9]+") + pids = re_pids.findall(pid_output) + if self._have_flaky_pidof and not pids: + time.sleep(0.1) + pid_output = self.shell_output( + "pidof %s" % app_name, timeout=timeout + ) + pids = re_pids.findall(pid_output) + except ADBError: + pids = [] + else: + procs = self.get_process_list(timeout=timeout) + # limit the comparion to the first 75 characters due to a + # limitation in processname length in android. 
+ pids = [proc[0] for proc in procs if proc[1] == app_name[:75]] + + return [int(pid) for pid in pids] + + def _sync(self, timeout=None): + """Sync the file system using shell_output in order that exceptions + are raised to the caller.""" + self.shell_output("sync", timeout=timeout) + + @staticmethod + def _should_quote(arg): + """Utility function if command argument should be quoted.""" + if not arg: + return False + if arg[0] == "'" and arg[-1] == "'" or arg[0] == '"' and arg[-1] == '"': + # Already quoted + return False + re_quotable_chars = re.compile(r"[ ()\"&'\];]") + return re_quotable_chars.search(arg) + + @staticmethod + def _quote(arg): + """Utility function to return quoted version of command argument.""" + if hasattr(shlex, "quote"): + quote = shlex.quote + elif hasattr(pipes, "quote"): + quote = pipes.quote + else: + + def quote(arg): + arg = arg or "" + re_unsafe = re.compile(r"[^\w@%+=:,./-]") + if re_unsafe.search(arg): + arg = "'" + arg.replace("'", "'\"'\"'") + "'" + return arg + + return quote(arg) + + @staticmethod + def _escape_command_line(cmds): + """Utility function which takes a list of command arguments and returns + escaped and quoted version of the command as a string. + """ + assert isinstance(cmds, list) + # This is identical to shlex.join in Python 3.8. We can + # replace it should we ever get Python 3.8 as a minimum. + quoted_cmd = " ".join([ADBDevice._quote(arg) for arg in cmds]) + + return quoted_cmd + + @staticmethod + def _get_exitcode(file_obj): + """Get the exitcode from the last line of the file_obj for shell + commands executed on Android prior to Android 7. + """ + re_returncode = re.compile(r"adb_returncode=([0-9]+)") + file_obj.seek(0, os.SEEK_END) + + line = "" + length = file_obj.tell() + offset = 1 + while length - offset >= 0: + file_obj.seek(-offset, os.SEEK_END) + char = six.ensure_str(file_obj.read(1)) + if not char: + break + if char != "\r" and char != "\n": + line = char + line + elif line: + # we have collected everything up to the beginning of the line + break + offset += 1 + match = re_returncode.match(line) + if match: + exitcode = int(match.group(1)) + # Set the position in the file to the position of the + # adb_returncode and truncate it from the output. + file_obj.seek(-1, os.SEEK_CUR) + file_obj.truncate() + else: + exitcode = None + # We may have a situation where the adb_returncode= is not + # at the end of the output. This happens at least in the + # failure jit-tests on arm. To work around this + # possibility, we can search the entire output for the + # appropriate match. + file_obj.seek(0, os.SEEK_SET) + for line in file_obj: + line = six.ensure_str(line) + match = re_returncode.search(line) + if match: + exitcode = int(match.group(1)) + break + # Reset the position in the file to the end. + file_obj.seek(0, os.SEEK_END) + + return exitcode + + def is_path_internal_storage(self, path, timeout=None): + """ + Return True if the path matches an internal storage path + as defined by either '/sdcard', '/mnt/sdcard', or any of the + .*_STORAGE environment variables on the device otherwise False. + + :param str path: The path to test. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. This timeout is per adb call. The + total time spent may exceed this value. If it is not + specified, the value set in the ADBDevice constructor is used. 
+ :return: boolean + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + if not self._re_internal_storage: + storage_dirs = set(["/mnt/sdcard", "/sdcard"]) + re_STORAGE = re.compile("([^=]+STORAGE)=(.*)") + lines = self.shell_output("set", timeout=timeout).split() + for line in lines: + m = re_STORAGE.match(line.strip()) + if m and m.group(2): + storage_dirs.add(m.group(2)) + self._re_internal_storage = re.compile("/|".join(list(storage_dirs)) + "/") + return self._re_internal_storage.match(path) is not None + + def is_package_debuggable(self, package): + if not package: + return False + + if not self.is_app_installed(package): + self._logger.warning( + "Can not check if package %s is debuggable as it is not installed." + % package + ) + return False + + if package in self._debuggable_packages: + return self._debuggable_packages[package] + + try: + self.shell_output("run-as %s ls /system" % package) + self._debuggable_packages[package] = True + except ADBError as e: + self._debuggable_packages[package] = False + self._logger.warning("Package %s is not debuggable: %s" % (package, str(e))) + return self._debuggable_packages[package] + + @property + def package_dir(self): + if not self._run_as_package: + return None + # If we have a debuggable app and can use its directory to + # locate the test_root, this returns the location of the app's + # directory. If it is not located in the default location this + # will not be correct. + return "/data/data/%s" % self._run_as_package + + @property + def run_as_package(self): + """Returns the name of the package which will be used in run-as to change + the effective user executing a command.""" + return self._run_as_package + + @run_as_package.setter + def run_as_package(self, value): + if self._have_root_shell or self._have_su or self._have_android_su: + # When we have root available, use that instead of run-as. + return + + if self._run_as_package == value: + # Do nothing if the value doesn't change. + return + + if not value: + if self._test_root: + # Make sure the old test_root is clean without using + # the test_root property getter. + self.rm( + posixpath.join(self._test_root, "*"), recursive=True, force=True + ) + self._logger.info( + "Setting run_as_package to None. Resetting test root from %s to %s" + % (self._test_root, self._initial_test_root) + ) + self._run_as_package = None + # We must set _run_as_package to None before assigning to + # self.test_root in order to prevent attempts to use + # run-as. + self.test_root = self._initial_test_root + if self._test_root: + # Make sure the new test_root is clean. + self.rm( + posixpath.join(self._test_root, "*"), recursive=True, force=True + ) + return + + if not self.is_package_debuggable(value): + self._logger.warning( + "Can not set run_as_package to %s since it is not debuggable." % value + ) + # Since we are attempting to set run_as_package assume + # that we are not rooted and do not include + # /data/local/tmp as an option when checking for possible + # test_root paths using external storage. + paths = [ + "/storage/emulated/0/Android/data/%s/test_root" % value, + "/sdcard/test_root", + "/mnt/sdcard/test_root", + ] + self._try_test_root_candidates(paths) + return + + # Require these devices to have Verify bytecode turned off due to failures with run-as. 
+ include_set = set() + include_set.add("SM-G973F") # Samsung S10g SM-G973F + + if ( + self.get_prop("ro.product.model") in include_set + and self.shell_output("settings get global art_verifier_verify_debuggable") + == "1" + ): + self._logger.warning( + """Your device has Verify bytecode of debuggable apps set which + causes problems attempting to use run-as to delegate command execution to debuggable + apps. You must turn this setting off in Developer options on your device. + """ + ) + raise ADBError( + "Verify bytecode of debuggable apps must be turned off to use run-as" + ) + + self._logger.info("Setting run_as_package to %s" % value) + + self._run_as_package = value + old_test_root = self._test_root + new_test_root = posixpath.join(self.package_dir, "test_root") + if old_test_root != new_test_root: + try: + # Make sure the old test_root is clean. + if old_test_root: + self.rm( + posixpath.join(old_test_root, "*"), recursive=True, force=True + ) + self.test_root = posixpath.join(self.package_dir, "test_root") + # Make sure the new test_root is clean. + self.rm(posixpath.join(self.test_root, "*"), recursive=True, force=True) + except ADBError as e: + # There was a problem using run-as to initialize + # the new test_root in the app's directory. + # Restore the old test root and raise an ADBError. + self._run_as_package = None + self.test_root = old_test_root + self._logger.warning( + "Exception %s setting test_root to %s. " + "Resetting test_root to %s." + % (str(e), new_test_root, old_test_root) + ) + raise ADBError( + "Unable to initialize test root while setting run_as_package %s" + % value + ) + + def enable_run_as_for_path(self, path): + return self._run_as_package is not None and path.startswith(self.package_dir) + + @property + def test_root(self): + """ + The test_root property returns the directory on the device where + temporary test files are stored. + + The first time test_root it is called it determines and caches a value + for the test root on the device. It determines the appropriate test + root by attempting to create a 'proof' directory on each of a list of + directories and returning the first successful directory as the + test_root value. The cached value for the test_root will be shared + by subsequent instances of ADBDevice if self._share_test_root is True. + + The default list of directories checked by test_root are: + + If the device is rooted: + - /data/local/tmp/test_root + + If run_as_package is not available and the device is not rooted: + + - /data/local/tmp/test_root + - /sdcard/test_root + - /storage/sdcard/test_root + - /mnt/sdcard/test_root + + You may override the default list by providing a test_root argument to + the :class:`ADBDevice` constructor which will then be used when + attempting to create the 'proof' directory. 
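+
+        For example, a sketch of supplying an explicit test_root (the path
+        shown is only an illustration and must be creatable and writable on
+        the device):
+
+        ::
+
+            device = ADBDeviceFactory(test_root='/data/local/tmp/my_test_root')
+            print(device.test_root)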
+ + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + if self._test_root is not None: + self._logger.debug("Using cached test_root %s" % self._test_root) + return self._test_root + + if self.run_as_package is not None: + raise ADBError( + "run_as_package is %s however test_root is None" % self.run_as_package + ) + + if self._share_test_root and _TEST_ROOT: + self._logger.debug( + "Attempting to use shared test_root %s" % self._test_root + ) + paths = [_TEST_ROOT] + elif self._initial_test_root is not None: + self._logger.debug( + "Attempting to use initial test_root %s" % self._test_root + ) + paths = [self._initial_test_root] + else: + # Android 10's scoped storage means we can no longer + # reliably host profiles and tests on the sdcard though it + # depends on the device. See + # https://developer.android.com/training/data-storage#scoped-storage + # Also see RunProgram in + # python/mozbuild/mozbuild/mach_commands.py where they + # choose /data/local/tmp as the default location for the + # profile because GeckoView only takes its configuration + # file from /data/local/tmp. Since we have not specified + # a run_as_package yet, assume we may be attempting to use + # a shell program which creates files owned by the shell + # user and which would work using /data/local/tmp/ even if + # the device is not rooted. Fall back to external storage + # if /data/local/tmp is not available. + paths = ["/data/local/tmp/test_root"] + if not self.is_rooted: + # Note that /sdcard may be accessible while + # /mnt/sdcard is not. + paths.extend( + [ + "/sdcard/test_root", + "/storage/sdcard/test_root", + "/mnt/sdcard/test_root", + ] + ) + + return self._try_test_root_candidates(paths) + + @test_root.setter + def test_root(self, value): + # Cache the requested test root so that + # other invocations of ADBDevice will pick + # up the same value. + global _TEST_ROOT + if self._test_root == value: + return + self._logger.debug("Setting test_root from %s to %s" % (self._test_root, value)) + old_test_root = self._test_root + self._test_root = value + if self._share_test_root: + _TEST_ROOT = value + if not value: + return + if not self._try_test_root(value): + self._test_root = old_test_root + raise ADBError("Unable to set test_root to %s" % value) + readme = posixpath.join(value, "README") + if not self.is_file(readme): + tmpf = tempfile.NamedTemporaryFile(mode="w", delete=False) + tmpf.write( + "This directory is used by mozdevice to contain all content " + "related to running tests on this device.\n" + ) + tmpf.close() + try: + self.push(tmpf.name, readme) + finally: + if tmpf: + os.unlink(tmpf.name) + + def _try_test_root_candidates(self, paths): + max_attempts = 3 + for test_root in paths: + for attempt in range(1, max_attempts + 1): + self._logger.debug( + "Setting test root to %s attempt %d of %d" + % (test_root, attempt, max_attempts) + ) + + if self._try_test_root(test_root): + if not self._test_root: + # Cache the detected test_root so that we can + # restore the value without having re-run + # _try_test_root. 
+ self._initial_test_root = test_root + self._test_root = test_root + self._logger.info("Setting test_root to %s" % self._test_root) + return self._test_root + + self._logger.debug( + "_setup_test_root: " + "Attempt %d of %d failed to set test_root to %s" + % (attempt, max_attempts, test_root) + ) + + if attempt != max_attempts: + time.sleep(20) + + raise ADBError( + "Unable to set up test root using paths: [%s]" % ", ".join(paths) + ) + + def _try_test_root(self, test_root): + try: + if not self.is_dir(test_root): + self.mkdir(test_root, parents=True) + proof_dir = posixpath.join(test_root, "proof") + if self.is_dir(proof_dir): + self.rm(proof_dir, recursive=True) + self.mkdir(proof_dir) + self.rm(proof_dir, recursive=True) + except ADBError as e: + self._logger.warning("%s is not writable: %s" % (test_root, str(e))) + return False + + return True + + # Host Command methods + + def command(self, cmds, timeout=None): + """Executes an adb command on the host against the device. + + :param list cmds: The command and its arguments to be + executed. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. This timeout is per adb call. The + total time spent may exceed this value. If it is not + specified, the value set in the ADBDevice constructor is used. + :return: :class:`ADBProcess` + + command() provides a low level interface for executing + commands for a specific device on the host via adb. + + command() executes on the host in such a fashion that stdout + of the adb process are file handles on the host and + the exit code is available as the exit code of the adb + process. + + For executing shell commands on the device, use + ADBDevice.shell(). The caller provides a list containing + commands, as well as a timeout period in seconds. + + A subprocess is spawned to execute adb for the device with + stdout and stderr directed to a temporary file. If the process + takes longer than the specified timeout, the process is + terminated. + + It is the caller's responsibilty to clean up by closing + the stdout temporary file. + """ + + return ADBCommand.command( + self, cmds, device_serial=self._device_serial, timeout=timeout + ) + + def command_output(self, cmds, timeout=None): + """Executes an adb command on the host against the device returning + stdout. + + :param list cmds: The command and its arguments to be executed. + :param int timeout: The maximum time in seconds + for any spawned adb process to complete before throwing + an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADBDevice constructor is used. + :return: str - content of stdout. + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + return ADBCommand.command_output( + self, cmds, device_serial=self._device_serial, timeout=timeout + ) + + # Networking methods + + def _validate_port(self, port, is_local=True): + """Validate a port forwarding specifier. Raises ValueError on failure. + + :param str port: The port specifier to validate + :param bool is_local: Flag indicating whether the port represents a local port. 
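+
+        The accepted specifiers follow the usual ``adb forward``/``adb
+        reverse`` syntax of ``<prefix>:<value>``; a few illustrative
+        examples (the values are arbitrary):
+
+        ::
+
+            tcp:8080
+            localabstract:my_socket_name
+            localfilesystem:/dev/socket/my_socket
+            jdwp:1234   (remote side only)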
+ """ + prefixes = ["tcp", "localabstract", "localreserved", "localfilesystem", "dev"] + + if not is_local: + prefixes += ["jdwp"] + + parts = port.split(":", 1) + if len(parts) != 2 or parts[0] not in prefixes: + raise ValueError("Invalid port specifier %s" % port) + + def _validate_direction(self, direction): + """Validate direction of the socket connection. Raises ValueError on failure. + + :param str direction: The socket direction specifier to validate + :raises: :exc:`ValueError` + """ + if direction not in [ + self.SOCKET_DIRECTION_FORWARD, + self.SOCKET_DIRECTION_REVERSE, + ]: + raise ValueError("Invalid direction specifier {}".format(direction)) + + def create_socket_connection( + self, direction, local, remote, allow_rebind=True, timeout=None + ): + """Sets up a socket connection in the specified direction. + + :param str direction: Direction of the socket connection + :param str local: Local port + :param str remote: Remote port + :param bool allow_rebind: Do not fail if port is already bound + :param int timeout: The maximum time in seconds + for any spawned adb process to complete before throwing + an ADBTimeoutError. If it is not specified, the value + set in the ADBDevice constructor is used. + :return: When forwarding from "tcp:0", an int containing the port number + of the local port assigned by adb, otherwise None. + :raises: :exc:`ValueError` + :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + # validate socket direction, and local and remote port formatting. + self._validate_direction(direction) + for port, is_local in [(local, True), (remote, False)]: + self._validate_port(port, is_local=is_local) + + cmd = [direction, local, remote] + + if not allow_rebind: + cmd.insert(1, "--no-rebind") + + # execute commands to establish socket connection. + cmd_output = self.command_output(cmd, timeout=timeout) + + # If we want to forward using local port "tcp:0", then we're letting + # adb assign the port for us, so we need to return that assignment. + if ( + direction == self.SOCKET_DIRECTION_FORWARD + and local == "tcp:0" + and cmd_output + ): + return int(cmd_output) + + return None + + def list_socket_connections(self, direction, timeout=None): + """Return a list of tuples specifying active socket connectionss. + + Return values are of the form (device, local, remote). + + :param str direction: 'forward' to list forward socket connections + 'reverse' to list reverse socket connections + :param int timeout: The maximum time in seconds + for any spawned adb process to complete before throwing + an ADBTimeoutError. If it is not specified, the value + set in the ADBDevice constructor is used. + :raises: :exc:`ValueError` + :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + self._validate_direction(direction) + + cmd = [direction, "--list"] + output = self.command_output(cmd, timeout=timeout) + return [tuple(line.split(" ")) for line in output.splitlines() if line.strip()] + + def remove_socket_connections(self, direction, local=None, timeout=None): + """Remove existing socket connections for a given direction. + + :param str direction: 'forward' to remove forward socket connection + 'reverse' to remove reverse socket connection + :param str local: local port specifier as for ADBDevice.forward. If local + is not specified removes all forwards. + :param int timeout: The maximum time in seconds + for any spawned adb process to complete before throwing + an ADBTimeoutError. If it is not specified, the value + set in the ADBDevice constructor is used. 
+ :raises: :exc:`ValueError`
+ :exc:`ADBTimeoutError`
+ :exc:`ADBError`
+ """
+ self._validate_direction(direction)
+
+ cmd = [direction]
+
+ if local is None:
+ cmd.extend(["--remove-all"])
+ else:
+ self._validate_port(local, is_local=True)
+ cmd.extend(["--remove", local])
+
+ self.command_output(cmd, timeout=timeout)
+
+ # Legacy port forward methods
+
+ def forward(self, local, remote, allow_rebind=True, timeout=None):
+ """Forward a local port to a specific port on the device.
+
+ :return: When forwarding from "tcp:0", an int containing the port number
+ of the local port assigned by adb, otherwise None.
+
+ See `ADBDevice.create_socket_connection`.
+ """
+ return self.create_socket_connection(
+ self.SOCKET_DIRECTION_FORWARD, local, remote, allow_rebind, timeout
+ )
+
+ def list_forwards(self, timeout=None):
+ """Return a list of tuples specifying active forwards.
+
+ See `ADBDevice.list_socket_connections`.
+ """
+ return self.list_socket_connections(self.SOCKET_DIRECTION_FORWARD, timeout)
+
+ def remove_forwards(self, local=None, timeout=None):
+ """Remove existing port forwards.
+
+ See `ADBDevice.remove_socket_connections`.
+ """
+ self.remove_socket_connections(self.SOCKET_DIRECTION_FORWARD, local, timeout)
+
+ # Legacy port reverse methods
+
+ def reverse(self, local, remote, allow_rebind=True, timeout=None):
+ """Sets up a reverse socket connection from device to host.
+
+ See `ADBDevice.create_socket_connection`.
+ """
+ self.create_socket_connection(
+ self.SOCKET_DIRECTION_REVERSE, local, remote, allow_rebind, timeout
+ )
+
+ def list_reverses(self, timeout=None):
+ """Returns a list of tuples showing active reverse socket connections.
+
+ See `ADBDevice.list_socket_connections`.
+ """
+ return self.list_socket_connections(self.SOCKET_DIRECTION_REVERSE, timeout)
+
+ def remove_reverses(self, local=None, timeout=None):
+ """Remove existing reverse socket connections.
+
+ See `ADBDevice.remove_socket_connections`.
+ """
+ self.remove_socket_connections(self.SOCKET_DIRECTION_REVERSE, local, timeout)
+
+ # Device Shell methods
+
+ def shell(
+ self,
+ cmd,
+ env=None,
+ cwd=None,
+ timeout=None,
+ stdout_callback=None,
+ yield_stdout=None,
+ enable_run_as=False,
+ ):
+ """Executes a shell command on the device.
+
+ :param str cmd: The command to be executed.
+ :param dict env: Contains the environment variables and
+ their values.
+ :param str cwd: The directory from which to execute.
+ :param int timeout: The maximum time in
+ seconds for any spawned adb process to complete before
+ throwing an ADBTimeoutError. This timeout is per adb call. The
+ total time spent may exceed this value. If it is not
+ specified, the value set in the ADBDevice constructor is used.
+ :param function stdout_callback: Function called for each line of output.
+ :param bool yield_stdout: Flag used to make the returned process
+ iterable. The returned process can be used in a loop to get the output,
+ and the loop exits when the process ends.
+ :param bool enable_run_as: Flag used to temporarily enable use
+ of run-as to execute the command.
+ :return: :class:`ADBProcess`
+
+ shell() provides a low level interface for executing commands
+ on the device via adb shell.
+
+ shell() executes on the host in such a fashion that stdout
+ contains the stdout and stderr of the host adb process
+ combined with the stdout and stderr of the shell command
+ on the device.
The exit code of shell() is the exit code of + the adb command if it was non-zero or the extracted exit code + from the output of the shell command executed on the + device. + + The caller provides a flag indicating if the command is to be + executed as root, a string for any requested working + directory, a hash defining the environment, a string + containing shell commands, as well as a timeout period in + seconds. + + The command line to be executed is created to set the current + directory, set the required environment variables, optionally + execute the command using su and to output the return code of + the command to stdout. The command list is created as a + command sequence separated by && which will terminate the + command sequence on the first command which returns a non-zero + exit code. + + A subprocess is spawned to execute adb shell for the device + with stdout and stderr directed to a temporary file. If the + process takes longer than the specified timeout, the process + is terminated. The return code is extracted from the stdout + and is then removed from the file. + + It is the caller's responsibilty to clean up by closing + the stdout temporary files. + + If the yield_stdout flag is set, then the returned ADBProcess + can be iterated over to get the output as it is produced by + adb command. The iterator ends when the process timed out or + if it exited. This flag is incompatible with stdout_callback. + + """ + + def _timed_read_line_handler(signum, frame): + raise IOError("ReadLineTimeout") + + def _timed_read_line(filehandle, timeout=None): + """ + Attempt to readline from filehandle. If readline does not return + within timeout seconds, raise IOError('ReadLineTimeout'). + On Windows, required signal facilities are usually not available; + as a result, the timeout is not respected and some reads may + block on Windows. + """ + if not hasattr(signal, "SIGALRM"): + return filehandle.readline() + if timeout is None: + timeout = 5 + line = "" + default_alarm_handler = signal.getsignal(signal.SIGALRM) + signal.signal(signal.SIGALRM, _timed_read_line_handler) + signal.alarm(int(timeout)) + try: + line = filehandle.readline() + finally: + signal.alarm(0) + signal.signal(signal.SIGALRM, default_alarm_handler) + return line + + first_word = cmd.split(" ")[0] + if first_word in self.BUILTINS: + # Do not attempt to use su or run-as with builtin commands + pass + elif self._have_root_shell: + pass + elif self._have_android_su: + cmd = "su 0 %s" % cmd + elif self._have_su: + cmd = "su -c %s" % ADBDevice._quote(cmd) + elif self._run_as_package and enable_run_as: + cmd = "run-as %s %s" % (self._run_as_package, cmd) + else: + pass + + # prepend cwd and env to command if necessary + if cwd: + cmd = "cd %s && %s" % (cwd, cmd) + if env: + envstr = "&& ".join(["export %s=%s" % (x[0], x[1]) for x in env.items()]) + cmd = envstr + "&& " + cmd + # Before Android 7, an exitcode 0 for the process on the host + # did not mean that the exitcode of the Android process was + # also 0. We therefore used the echo adb_returncode=$? hack to + # obtain it there. However Android 7 and later intermittently + # do not emit the adb_returncode in stdout using this hack. In + # Android 7 and later the exitcode of the host process does + # match the exitcode of the Android process and we can use it + # directly. + if ( + self._device_serial.startswith("emulator") + or not hasattr(self, "version") + or self.version < version_codes.N + ): + cmd += "; echo adb_returncode=$?" 
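# A minimal illustrative sketch (not from the patched module) of how the
# final device-side command string above is assembled: cd into cwd, export
# the environment, then append the exit-code echo that pre-Android-7
# devices (and emulators) need because the host adb exit code is not
# reliable there. Paths and variables are placeholders.
def build_device_command(cmd, cwd=None, env=None, need_exitcode_echo=True):
    if cwd:
        cmd = "cd %s && %s" % (cwd, cmd)
    if env:
        exports = "&& ".join("export %s=%s" % (k, v) for k, v in env.items())
        cmd = exports + "&& " + cmd
    if need_exitcode_echo:
        cmd += "; echo adb_returncode=$?"
    return cmd


print(build_device_command("ls tests", cwd="/data/local/tmp", env={"MOZ_LOG": "all:5"}))
# -> export MOZ_LOG=all:5&& cd /data/local/tmp && ls tests; echo adb_returncode=$?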
+ + args = [self._adb_path] + if self._adb_host: + args.extend(["-H", self._adb_host]) + if self._adb_port: + args.extend(["-P", str(self._adb_port)]) + if self._device_serial: + args.extend(["-s", self._device_serial]) + args.extend(["wait-for-device", "shell", cmd]) + + if timeout is None: + timeout = self._timeout + + if yield_stdout: + # When using yield_stdout, rely on the timeout implemented in + # ADBProcess instead of relying on our own here. + assert not stdout_callback + return ADBProcess(args, use_stdout_pipe=yield_stdout, timeout=timeout) + else: + adb_process = ADBProcess(args) + + start_time = time.time() + exitcode = adb_process.proc.poll() + if not stdout_callback: + while ((time.time() - start_time) <= float(timeout)) and exitcode is None: + time.sleep(self._polling_interval) + exitcode = adb_process.proc.poll() + else: + stdout2 = io.open(adb_process.stdout_file.name, "rb") + partial = b"" + while ((time.time() - start_time) <= float(timeout)) and exitcode is None: + try: + line = _timed_read_line(stdout2) + if line and len(line) > 0: + if line.endswith(b"\n") or line.endswith(b"\r"): + line = partial + line + partial = b"" + line = line.rstrip() + if self._verbose: + self._logger.info(six.ensure_str(line)) + stdout_callback(line) + else: + # no more output available now, but more to come? + partial = partial + line + else: + # no new output, so sleep and poll + time.sleep(self._polling_interval) + except IOError: + pass + exitcode = adb_process.proc.poll() + if exitcode is None: + adb_process.proc.kill() + adb_process.timedout = True + adb_process.exitcode = adb_process.proc.poll() + elif exitcode == 0: + if ( + not self._device_serial.startswith("emulator") + and hasattr(self, "version") + and self.version >= version_codes.N + ): + adb_process.exitcode = 0 + else: + adb_process.exitcode = self._get_exitcode(adb_process.stdout_file) + else: + adb_process.exitcode = exitcode + + if stdout_callback: + line = stdout2.readline() + while line: + if line.endswith(b"\n") or line.endswith(b"\r"): + line = partial + line + partial = b"" + stdout_callback(line.rstrip()) + else: + # no more output available now, but more to come? + partial = partial + line + line = stdout2.readline() + if partial: + stdout_callback(partial) + stdout2.close() + + adb_process.stdout_file.seek(0, os.SEEK_SET) + + return adb_process + + def shell_bool(self, cmd, env=None, cwd=None, timeout=None, enable_run_as=False): + """Executes a shell command on the device returning True on success + and False on failure. + + :param str cmd: The command to be executed. + :param dict env: Contains the environment variables and + their values. + :param str cwd: The directory from which to execute. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADBDevice constructor is used. + :param bool enable_run_as: Flag used to temporarily enable use + of run-as to execute the command. 
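# A minimal illustrative sketch (not from the patched module) of the
# SIGALRM-guarded readline used by _timed_read_line() in shell() above.
# It only works where SIGALRM exists (not on Windows) and in the main thread.
import signal


def timed_readline(filehandle, timeout=5):
    """Read one line, giving up with IOError after `timeout` seconds."""

    def _handler(signum, frame):
        raise IOError("ReadLineTimeout")

    if not hasattr(signal, "SIGALRM"):
        # No alarm support: fall back to a potentially blocking read.
        return filehandle.readline()
    previous = signal.signal(signal.SIGALRM, _handler)
    signal.alarm(int(timeout))
    try:
        return filehandle.readline()
    finally:
        signal.alarm(0)
        signal.signal(signal.SIGALRM, previous)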
+ :return: bool + :raises: :exc:`ADBTimeoutError` + """ + adb_process = None + try: + adb_process = self.shell( + cmd, env=env, cwd=cwd, timeout=timeout, enable_run_as=enable_run_as + ) + if adb_process.timedout: + raise ADBTimeoutError("%s" % adb_process) + return adb_process.exitcode == 0 + finally: + if adb_process: + if self._verbose: + output = adb_process.stdout + self._logger.debug( + "shell_bool: %s, " + "timeout: %s, " + "timedout: %s, " + "exitcode: %s, " + "output: %s" + % ( + " ".join(adb_process.args), + timeout, + adb_process.timedout, + adb_process.exitcode, + output, + ) + ) + + adb_process.stdout_file.close() + + def shell_output(self, cmd, env=None, cwd=None, timeout=None, enable_run_as=False): + """Executes an adb shell on the device returning stdout. + + :param str cmd: The command to be executed. + :param dict env: Contains the environment variables and their values. + :param str cwd: The directory from which to execute. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. This timeout is per + adb call. The total time spent may exceed this + value. If it is not specified, the value set + in the ADBDevice constructor is used. + :param bool enable_run_as: Flag used to temporarily enable use + of run-as to execute the command. + :return: str - content of stdout. + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + adb_process = None + try: + adb_process = self.shell( + cmd, env=env, cwd=cwd, timeout=timeout, enable_run_as=enable_run_as + ) + if adb_process.timedout: + raise ADBTimeoutError("%s" % adb_process) + if adb_process.exitcode: + raise ADBProcessError(adb_process) + output = adb_process.stdout + if self._verbose: + self._logger.debug( + "shell_output: %s, " + "timeout: %s, " + "timedout: %s, " + "exitcode: %s, " + "output: %s" + % ( + " ".join(adb_process.args), + timeout, + adb_process.timedout, + adb_process.exitcode, + output, + ) + ) + + return output + finally: + if adb_process and isinstance(adb_process.stdout_file, io.IOBase): + adb_process.stdout_file.close() + + # Informational methods + + def _get_logcat_buffer_args(self, buffers): + valid_buffers = set(["radio", "main", "events"]) + invalid_buffers = set(buffers).difference(valid_buffers) + if invalid_buffers: + raise ADBError( + "Invalid logcat buffers %s not in %s " + % (list(invalid_buffers), list(valid_buffers)) + ) + args = [] + for b in buffers: + args.extend(["-b", b]) + return args + + def clear_logcat(self, timeout=None, buffers=[]): + """Clears logcat via adb logcat -c. + + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. This timeout is per + adb call. The total time spent may exceed this + value. If it is not specified, the value set + in the ADBDevice constructor is used. + :param list buffers: Log buffers to clear. Valid buffers are + "radio", "events", and "main". Defaults to "main". + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + buffers = self._get_logcat_buffer_args(buffers) + cmds = ["logcat", "-c"] + buffers + try: + self.command_output(cmds, timeout=timeout) + self.shell_output("log logcat cleared", timeout=timeout) + except ADBTimeoutError: + raise + except ADBProcessError as e: + if "failed to clear" not in str(e): + raise + self._logger.warning( + "retryable logcat clear error?: {}. 
Retrying...".format(str(e)) + ) + try: + self.command_output(cmds, timeout=timeout) + self.shell_output("log logcat cleared", timeout=timeout) + except ADBProcessError as e2: + if "failed to clear" not in str(e): + raise + self._logger.warning( + "Ignoring failure to clear logcat: {}.".format(str(e2)) + ) + + def get_logcat( + self, + filter_specs=[ + "dalvikvm:I", + "ConnectivityService:S", + "WifiMonitor:S", + "WifiStateTracker:S", + "wpa_supplicant:S", + "NetworkStateTracker:S", + "EmulatedCamera_Camera:S", + "EmulatedCamera_Device:S", + "EmulatedCamera_FakeCamera:S", + "EmulatedCamera_FakeDevice:S", + "EmulatedCamera_CallbackNotifier:S", + "GnssLocationProvider:S", + "Hyphenator:S", + "BatteryStats:S", + ], + format="time", + filter_out_regexps=[], + timeout=None, + buffers=[], + ): + """Returns the contents of the logcat file as a list of strings. + + :param list filter_specs: Optional logcat messages to + be included. + :param str format: Optional logcat format. + :param list filter_out_regexps: Optional logcat messages to be + excluded. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADBDevice constructor is used. + :param list buffers: Log buffers to retrieve. Valid buffers are + "radio", "events", and "main". Defaults to "main". + :return: list of lines logcat output. + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + buffers = self._get_logcat_buffer_args(buffers) + cmds = ["logcat", "-v", format, "-d"] + buffers + filter_specs + lines = self.command_output(cmds, timeout=timeout).splitlines() + + for regex in filter_out_regexps: + lines = [line for line in lines if not re.search(regex, line)] + + return lines + + def get_prop(self, prop, timeout=None): + """Gets value of a property from the device via adb shell getprop. + + :param str prop: The propery name. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADBDevice constructor is used. + :return: str value of property. + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + output = self.shell_output("getprop %s" % prop, timeout=timeout) + return output + + def get_state(self, timeout=None): + """Returns the device's state via adb get-state. + + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before throwing + an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADBDevice constructor is used. + :return: str value of adb get-state. + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + output = self.command_output(["get-state"], timeout=timeout).strip() + return output + + def get_ip_address(self, interfaces=None, timeout=None): + """Returns the device's ip address, or None if it doesn't have one + + :param list interfaces: Interfaces to allow, or None to allow any + non-loopback interface. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before throwing + an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADBDevice constructor is used. 
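# A minimal illustrative sketch (not from the patched module) of the
# argument list get_logcat() builds and its regexp-based post filtering.
# The filter spec and sample lines are made up.
import re


def filter_logcat_lines(lines, filter_out_regexps):
    for regex in filter_out_regexps:
        lines = [line for line in lines if not re.search(regex, line)]
    return lines


cmds = ["logcat", "-v", "time", "-d", "-b", "main", "GeckoConsole:I"]
sample = [
    "01-01 12:00:00.000 I/GeckoConsole(1234): page loaded",
    "01-01 12:00:01.000 W/WifiMonitor( 567): scan results",
]
print(filter_logcat_lines(sample, [r"WifiMonitor"]))
# -> only the GeckoConsole line remains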
+ :return: str ip address of the device or None if it could not
+ be found.
+ :raises: :exc:`ADBTimeoutError`
+ :exc:`ADBError`
+ """
+ if not self.is_rooted:
+ self._logger.warning("Device not rooted. Can not obtain ip address.")
+ return None
+ self._logger.debug("get_ip_address: interfaces: %s" % interfaces)
+ if not interfaces:
+ interfaces = ["wlan0", "eth0"]
+ wifi_interface = self.get_prop("wifi.interface", timeout=timeout)
+ self._logger.debug("get_ip_address: wifi_interface: %s" % wifi_interface)
+ if wifi_interface and wifi_interface not in interfaces:
+ interfaces = interfaces + [wifi_interface]
+
+ # ifconfig interface
+ # can return two different formats:
+ # eth0: ip 192.168.1.139 mask 255.255.255.0 flags [up broadcast running multicast]
+ # or
+ # wlan0 Link encap:Ethernet HWaddr 00:9A:CD:B8:39:65
+ # inet addr:192.168.1.38 Bcast:192.168.1.255 Mask:255.255.255.0
+ # inet6 addr: fe80::29a:cdff:feb8:3965/64 Scope: Link
+ # UP BROADCAST RUNNING MULTICAST MTU:1500 Metric:1
+ # RX packets:180 errors:0 dropped:0 overruns:0 frame:0
+ # TX packets:218 errors:0 dropped:0 overruns:0 carrier:0
+ # collisions:0 txqueuelen:1000
+ # RX bytes:84577 TX bytes:31202
+
+ re1_ip = re.compile(r"(\w+): ip ([0-9.]+) mask.*")
+ # re1_ip will match output of the first format
+ # with group 1 returning the interface and group 2 returning the ip address.
+
+ # re2_interface will match the interface line in the second format
+ # while re2_ip will match the inet addr line of the second format.
+ re2_interface = re.compile(r"(\w+)\s+Link")
+ re2_ip = re.compile(r"\s+inet addr:([0-9.]+)")
+
+ matched_interface = None
+ matched_ip = None
+ re_bad_addr = re.compile(r"127.0.0.1|0.0.0.0")
+
+ self._logger.debug("get_ip_address: ifconfig")
+ for interface in interfaces:
+ try:
+ output = self.shell_output("ifconfig %s" % interface, timeout=timeout)
+ except ADBError as e:
+ self._logger.warning(
+ "get_ip_address ifconfig %s: %s" % (interface, str(e))
+ )
+ output = ""
+
+ for line in output.splitlines():
+ if not matched_interface:
+ match = re1_ip.match(line)
+ if match:
+ matched_interface, matched_ip = match.groups()
+ else:
+ match = re2_interface.match(line)
+ if match:
+ matched_interface = match.group(1)
+ else:
+ match = re2_ip.match(line)
+ if match:
+ matched_ip = match.group(1)
+
+ if matched_ip:
+ if not re_bad_addr.match(matched_ip):
+ self._logger.debug(
+ "get_ip_address: found: %s %s"
+ % (matched_interface, matched_ip)
+ )
+ return matched_ip
+ matched_interface = None
+ matched_ip = None
+
+ self._logger.debug("get_ip_address: netcfg")
+ # Fall back on netcfg if ifconfig does not work.
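# A minimal illustrative sketch (not from the patched module) applying the
# two ifconfig regular expressions above to the output shapes quoted in
# the comments; the addresses are the same example values.
import re

re1_ip = re.compile(r"(\w+): ip ([0-9.]+) mask.*")
re2_interface = re.compile(r"(\w+)\s+Link")
re2_ip = re.compile(r"\s+inet addr:([0-9.]+)")

print(re1_ip.match("eth0: ip 192.168.1.139 mask 255.255.255.0 flags [up]").groups())
# -> ('eth0', '192.168.1.139')
print(re2_interface.match("wlan0     Link encap:Ethernet HWaddr 00:9A:CD:B8:39:65").group(1))
# -> 'wlan0'
print(re2_ip.match("          inet addr:192.168.1.38  Bcast:192.168.1.255").group(1))
# -> '192.168.1.38'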
+ # $ adb shell netcfg + # lo UP 127.0.0.1/8 0x00000049 00:00:00:00:00:00 + # dummy0 DOWN 0.0.0.0/0 0x00000082 8e:cd:67:48:b7:c2 + # rmnet0 DOWN 0.0.0.0/0 0x00000000 00:00:00:00:00:00 + # rmnet1 DOWN 0.0.0.0/0 0x00000000 00:00:00:00:00:00 + # rmnet2 DOWN 0.0.0.0/0 0x00000000 00:00:00:00:00:00 + # rmnet3 DOWN 0.0.0.0/0 0x00000000 00:00:00:00:00:00 + # rmnet4 DOWN 0.0.0.0/0 0x00000000 00:00:00:00:00:00 + # rmnet5 DOWN 0.0.0.0/0 0x00000000 00:00:00:00:00:00 + # rmnet6 DOWN 0.0.0.0/0 0x00000000 00:00:00:00:00:00 + # rmnet7 DOWN 0.0.0.0/0 0x00000000 00:00:00:00:00:00 + # sit0 DOWN 0.0.0.0/0 0x00000080 00:00:00:00:00:00 + # vip0 DOWN 0.0.0.0/0 0x00001012 00:01:00:00:00:01 + # wlan0 UP 192.168.1.157/24 0x00001043 38:aa:3c:1c:f6:94 + + re3_netcfg = re.compile( + r"(\w+)\s+UP\s+([1-9]\d{0,2}\.\d{1,3}\.\d{1,3}\.\d{1,3})" + ) + try: + output = self.shell_output("netcfg", timeout=timeout) + except ADBError as e: + self._logger.warning("get_ip_address netcfg: %s" % str(e)) + output = "" + for line in output.splitlines(): + match = re3_netcfg.search(line) + if match: + matched_interface, matched_ip = match.groups() + if matched_interface == "lo" or re_bad_addr.match(matched_ip): + matched_interface = None + matched_ip = None + elif matched_ip and matched_interface in interfaces: + self._logger.debug( + "get_ip_address: found: %s %s" % (matched_interface, matched_ip) + ) + return matched_ip + self._logger.debug("get_ip_address: not found") + return matched_ip + + # File management methods + + def remount(self, timeout=None): + """Remount /system/ in read/write mode + + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before throwing + an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADBDevice constructor is used. + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + + rv = self.command_output(["remount"], timeout=timeout) + if "remount succeeded" not in rv: + raise ADBError("Unable to remount device") + + def batch_execute(self, commands, timeout=None, enable_run_as=False): + """Writes commands to a temporary file then executes on the device. + + :param list commands_list: List of commands to be run by the shell. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before throwing + an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADBDevice constructor is used. + :param bool enable_run_as: Flag used to temporarily enable use + of run-as to execute the command. + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + try: + tmpf = tempfile.NamedTemporaryFile(mode="w", delete=False) + tmpf.write("\n".join(commands)) + tmpf.close() + script = "/sdcard/{}".format(os.path.basename(tmpf.name)) + self.push(tmpf.name, script) + self.shell_output( + "sh {}".format(script), enable_run_as=enable_run_as, timeout=timeout + ) + finally: + if tmpf: + os.unlink(tmpf.name) + if script: + self.rm(script, timeout=timeout) + + def chmod(self, path, recursive=False, mask="777", timeout=None): + """Recursively changes the permissions of a directory on the + device. + + :param str path: The directory name on the device. + :param bool recursive: Flag specifying if the command should be + executed recursively. + :param str mask: The octal permissions. 
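# A minimal illustrative sketch (not from the patched module) of the
# batch_execute() pattern above: collect many shell commands into one
# temporary script so the device only pays for a single adb round trip.
# The push/shell steps are only printed here so the sketch runs locally.
import os
import tempfile


def plan_batch_execute(commands):
    tmpf = tempfile.NamedTemporaryFile(mode="w", delete=False)
    try:
        tmpf.write("\n".join(commands))
        tmpf.close()
        remote_script = "/sdcard/{}".format(os.path.basename(tmpf.name))
        # A real implementation would push tmpf.name to remote_script and
        # then run "sh <remote_script>" on the device.
        print("would push %s to %s and run: sh %s" % (tmpf.name, remote_script, remote_script))
        return remote_script
    finally:
        os.unlink(tmpf.name)


plan_batch_execute(["chmod 777 /sdcard/tests/a", "chmod 777 /sdcard/tests/b"])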
+ :param int timeout: The maximum time in + seconds for any spawned adb process to complete before throwing + an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADBDevice constructor is used. + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + # Note that on some tests such as webappstartup, an error + # occurs during recursive calls to chmod where a "No such file + # or directory" error will occur for the + # /data/data/org.mozilla.fennec/files/mozilla/*.webapp0/lock + # which is a symbolic link to a socket: lock -> + # 127.0.0.1:+. On Linux, chmod -R ignores symbolic + # links but it appear Android's version does not. We ignore + # this type of error, but pass on any other errors that are + # detected. + path = posixpath.normpath(path.strip()) + enable_run_as = self.enable_run_as_for_path(path) + self._logger.debug( + "chmod: path=%s, recursive=%s, mask=%s" % (path, recursive, mask) + ) + if self.is_path_internal_storage(path, timeout=timeout): + # External storage on Android is case-insensitive and permissionless + # therefore even with the proper privileges it is not possible + # to change modes. + self._logger.debug("Ignoring attempt to chmod external storage") + return + + # build up the command to be run based on capabilities. + command = ["chmod"] + + if recursive and self._chmod_R: + command.append("-R") + + command.append(mask) + + if recursive and not self._chmod_R: + paths = self.ls(path, recursive=True, timeout=timeout) + base = " ".join(command) + commands = [" ".join([base, entry]) for entry in paths] + self.batch_execute(commands, timeout=timeout, enable_run_as=enable_run_as) + else: + command.append(path) + try: + self.shell_output( + cmd=" ".join(command), timeout=timeout, enable_run_as=enable_run_as + ) + except ADBProcessError as e: + if "No such file or directory" not in str(e): + # It appears that chmod -R with symbolic links will exit with + # exit code 1 but the files apart from the symbolic links + # were transfered. + raise + + def chown(self, path, owner, group=None, recursive=False, timeout=None): + """Run the chown command on the provided path. + + :param str path: path name on the device. + :param str owner: new owner of the path. + :param str group: optional parameter specifying the new group the path + should belong to. + :param bool recursive: optional value specifying whether the command should + operate on files and directories recursively. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before throwing + an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADBDevice constructor is used. + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + path = posixpath.normpath(path.strip()) + enable_run_as = self.enable_run_as_for_path(path) + if self.is_path_internal_storage(path, timeout=timeout): + self._logger.warning("Ignoring attempt to chown external storage") + return + + # build up the command to be run based on capabilities. + command = ["chown"] + + if recursive and self._chown_R: + command.append("-R") + + if group: + # officially supported notation is : but . has been checked with + # sdk 17 and it works. + command.append("{owner}.{group}".format(owner=owner, group=group)) + else: + command.append(owner) + + if recursive and not self._chown_R: + # recursive desired, but chown -R is not supported natively. 
+ # like with chmod, get the list of subpaths, put them into a script + # then run it with adb with one call. + paths = self.ls(path, recursive=True, timeout=timeout) + base = " ".join(command) + commands = [" ".join([base, entry]) for entry in paths] + + self.batch_execute(commands, timeout=timeout, enable_run_as=enable_run_as) + else: + # recursive or not, and chown -R is supported natively. + # command can simply be run as provided by the user. + command.append(path) + self.shell_output( + cmd=" ".join(command), timeout=timeout, enable_run_as=enable_run_as + ) + + def _test_path(self, argument, path, timeout=None): + """Performs path and file type checking. + + :param str argument: Command line argument to the test command. + :param str path: The path or filename on the device. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADBDevice constructor is used. + :param bool root: Flag specifying if the command should be + executed as root. + :return: boolean - True if path or filename fulfills the + condition of the test. + :raises: :exc:`ADBTimeoutError` + """ + enable_run_as = self.enable_run_as_for_path(path) + if not enable_run_as and not self._device_serial.startswith("emulator"): + return self.shell_bool( + "test -{arg} {path}".format(arg=argument, path=path), + timeout=timeout, + enable_run_as=False, + ) + # Bug 1572563 - work around intermittent test path failures on emulators. + # The shell built-in test is not supported via run-as. + if argument == "f": + return self.exists(path, timeout=timeout) and not self.is_dir( + path, timeout=timeout + ) + if argument == "d": + return self.shell_bool( + "ls -a {}/".format(path), timeout=timeout, enable_run_as=enable_run_as + ) + if argument == "e": + return self.shell_bool( + "ls -a {}".format(path), timeout=timeout, enable_run_as=enable_run_as + ) + raise ADBError("_test_path: Unknown argument %s" % argument) + + def exists(self, path, timeout=None): + """Returns True if the path exists on the device. + + :param str path: The path name on the device. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADBDevice constructor is used. + :param bool root: Flag specifying if the command should be + executed as root. + :return: boolean - True if path exists. + :raises: :exc:`ADBTimeoutError` + """ + path = posixpath.normpath(path) + return self._test_path("e", path, timeout=timeout) + + def is_dir(self, path, timeout=None): + """Returns True if path is an existing directory on the device. + + :param str path: The directory on the device. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADBDevice constructor is used. + :return: boolean - True if path exists on the device and is a + directory. + :raises: :exc:`ADBTimeoutError` + """ + path = posixpath.normpath(path) + return self._test_path("d", path, timeout=timeout) + + def is_file(self, path, timeout=None): + """Returns True if path is an existing file on the device. 
+ + :param str path: The file name on the device. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADBDevice constructor is used. + :return: boolean - True if path exists on the device and is a + file. + :raises: :exc:`ADBTimeoutError` + """ + path = posixpath.normpath(path) + return self._test_path("f", path, timeout=timeout) + + def list_files(self, path, timeout=None): + """Return a list of files/directories contained in a directory + on the device. + + :param str path: The directory name on the device. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADBDevice constructor is used. + :return: list of files/directories contained in the directory. + :raises: :exc:`ADBTimeoutError` + """ + path = posixpath.normpath(path.strip()) + enable_run_as = self.enable_run_as_for_path(path) + data = [] + if self.is_dir(path, timeout=timeout): + try: + data = self.shell_output( + "%s %s" % (self._ls, path), + timeout=timeout, + enable_run_as=enable_run_as, + ).splitlines() + self._logger.debug("list_files: data: %s" % data) + except ADBError: + self._logger.error( + "Ignoring exception in ADBDevice.list_files\n%s" + % traceback.format_exc() + ) + data[:] = [item for item in data if item] + self._logger.debug("list_files: %s" % data) + return data + + def ls(self, path, recursive=False, timeout=None): + """Return a list of matching files/directories on the device. + + The ls method emulates the behavior of the ls shell command. + It differs from the list_files method by supporting wild cards + and returning matches even if the path is not a directory and + by allowing a recursive listing. + + ls /sdcard always returns /sdcard and not the contents of the + sdcard path. The ls method makes the behavior consistent with + others paths by adjusting /sdcard to /sdcard/. Note this is + also the case of other sdcard related paths such as + /storage/emulated/legacy but no adjustment is made in those + cases. + + The ls method works around a Nexus 4 bug which prevents + recursive listing of directories on the sdcard unless the path + ends with "/*" by adjusting sdcard paths ending in "/" to end + with "/*". This adjustment is only made on official Nexus 4 + builds with property ro.product.model "Nexus 4". Note that + this will fail to return any "hidden" files or directories + which begin with ".". + + :param str path: The directory name on the device. + :param bool recursive: Flag specifying if a recursive listing + is to be returned. If recursive is False, the returned + matches will be relative to the path. If recursive is True, + the returned matches will be absolute paths. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADBDevice constructor is used. + :return: list of files/directories contained in the directory. 
+ :raises: :exc:`ADBTimeoutError` + """ + path = posixpath.normpath(path.strip()) + enable_run_as = self.enable_run_as_for_path(path) + parent = "" + entries = {} + + if path == "/sdcard": + path += "/" + + # Android 2.3 and later all appear to support ls -R however + # Nexus 4 does not perform a recursive search on the sdcard + # unless the path is a directory with * wild card. + if not recursive: + recursive_flag = "" + else: + recursive_flag = "-R" + if path.startswith("/sdcard") and path.endswith("/"): + model = self.get_prop("ro.product.model", timeout=timeout) + if model == "Nexus 4": + path += "*" + lines = self.shell_output( + "%s %s %s" % (self._ls, recursive_flag, path), + timeout=timeout, + enable_run_as=enable_run_as, + ).splitlines() + for line in lines: + line = line.strip() + if not line: + parent = "" + continue + if line.endswith(":"): # This is a directory + parent = line.replace(":", "/") + entry = parent + # Remove earlier entry which is marked as a file. + if parent[:-1] in entries: + del entries[parent[:-1]] + elif parent: + entry = "%s%s" % (parent, line) + else: + entry = line + entries[entry] = 1 + entry_list = list(entries.keys()) + entry_list.sort() + return entry_list + + def mkdir(self, path, parents=False, timeout=None): + """Create a directory on the device. + + :param str path: The directory name on the device + to be created. + :param bool parents: Flag indicating if the parent directories are + also to be created. Think mkdir -p path. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADBDevice constructor is used. + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + + def verify_mkdir(path): + # Verify that the directory was actually created. On some devices + # (x86_64 emulator, v 29.0.11) the directory is sometimes not + # immediately visible, so retries are allowed. + retry = 0 + while retry < 10: + if self.is_dir(path, timeout=timeout): + return True + time.sleep(1) + retry += 1 + return False + + self._sync(timeout=timeout) + + path = posixpath.normpath(path) + enable_run_as = self.enable_run_as_for_path(path) + if parents: + if self._mkdir_p is None or self._mkdir_p: + # Use shell_bool to catch the possible + # non-zero exitcode if -p is not supported. + if self.shell_bool( + "mkdir -p %s" % path, timeout=timeout, enable_run_as=enable_run_as + ) or verify_mkdir(path): + self.chmod(path, recursive=True, timeout=timeout) + self._mkdir_p = True + self._sync(timeout=timeout) + return + # mkdir -p is not supported. create the parent + # directories individually. + if not self.is_dir(posixpath.dirname(path)): + parts = path.split("/") + name = "/" + for part in parts[:-1]: + if part != "": + name = posixpath.join(name, part) + if not self.is_dir(name): + # Use shell_output to allow any non-zero + # exitcode to raise an ADBError. + self.shell_output( + "mkdir %s" % name, + timeout=timeout, + enable_run_as=enable_run_as, + ) + self.chmod(name, recursive=True, timeout=timeout) + self._sync(timeout=timeout) + + # If parents is True and the directory does exist, we don't + # need to do anything. Otherwise we call mkdir. If the + # directory already exists or if it is a file instead of a + # directory, mkdir will fail and we will raise an ADBError. 
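# A minimal illustrative sketch (not from the patched module) of how the
# recursive "ls -R" output is folded into absolute entries above:
# "directory:" header lines become the prefix for the names that follow.
# The sample listing is made up.
def parse_ls_recursive(output):
    parent = ""
    entries = {}
    for line in output.splitlines():
        line = line.strip()
        if not line:
            parent = ""
            continue
        if line.endswith(":"):  # directory header
            parent = line.replace(":", "/")
            entry = parent
            if parent[:-1] in entries:  # drop the earlier file-style entry
                del entries[parent[:-1]]
        elif parent:
            entry = "%s%s" % (parent, line)
        else:
            entry = line
        entries[entry] = 1
    return sorted(entries)


sample = "/sdcard/tests:\nfoo.txt\nsub\n\n/sdcard/tests/sub:\nbar.txt\n"
print(parse_ls_recursive(sample))
# -> ['/sdcard/tests/', '/sdcard/tests/foo.txt',
#     '/sdcard/tests/sub/', '/sdcard/tests/sub/bar.txt']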
+ if not parents or not self.is_dir(path): + self.shell_output( + "mkdir %s" % path, timeout=timeout, enable_run_as=enable_run_as + ) + self._sync(timeout=timeout) + self.chmod(path, recursive=True, timeout=timeout) + if not verify_mkdir(path): + raise ADBError("mkdir %s Failed" % path) + + def push(self, local, remote, timeout=None): + """Pushes a file or directory to the device. + + :param str local: The name of the local file or + directory name. + :param str remote: The name of the remote file or + directory name. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADBDevice constructor is used. + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + self._sync(timeout=timeout) + + # remove trailing / + local = os.path.normpath(local) + remote = posixpath.normpath(remote) + copy_required = False + sdcard_remote = None + if os.path.isfile(local) and self.is_dir(remote): + # force push to use the correct filename in the remote directory + remote = posixpath.join(remote, os.path.basename(local)) + elif os.path.isdir(local): + copy_required = True + temp_parent = tempfile.mkdtemp() + remote_name = os.path.basename(remote) + new_local = os.path.join(temp_parent, remote_name) + copytree(local, new_local) + local = new_local + # See do_sync_push in + # https://android.googlesource.com/platform/system/core/+/master/adb/file_sync_client.cpp + # Work around change in behavior in adb 1.0.36 where if + # the remote destination directory exists, adb push will + # copy the source directory *into* the destination + # directory otherwise it will copy the source directory + # *onto* the destination directory. + if self.is_dir(remote): + remote = "/".join(remote.rstrip("/").split("/")[:-1]) + try: + if not self._run_as_package: + self.command_output(["push", local, remote], timeout=timeout) + self.chmod(remote, recursive=True, timeout=timeout) + else: + # When using run-as to work around the lack of root on a + # device, we can not push directly to the app's + # internal storage since the shell user under which + # the push runs does not have permission to write to + # the app's directory. Instead, we use a two stage + # operation where we first push to a temporary + # intermediate location under /data/local/tmp which + # should be writable by the shell user, then using + # run-as, copy the data into the app's internal + # storage. 
+ try: + with tempfile.NamedTemporaryFile(delete=True) as tmpf: + intermediate = posixpath.join( + "/data/local/tmp", os.path.basename(tmpf.name) + ) + self.command_output(["push", local, intermediate], timeout=timeout) + self.chmod(intermediate, recursive=True, timeout=timeout) + parent_dir = posixpath.dirname(remote) + if not self.is_dir(parent_dir, timeout=timeout): + self.mkdir(parent_dir, parents=True, timeout=timeout) + self.cp(intermediate, remote, recursive=True, timeout=timeout) + finally: + self.rm(intermediate, recursive=True, force=True, timeout=timeout) + except ADBProcessError as e: + if "remote secure_mkdirs failed" not in str(e): + raise + self._logger.warning( + "remote secure_mkdirs failed push('{}', '{}') {}".format( + local, remote, str(e) + ) + ) + # Work around change in Android where push creates + # directories which can not be written by "other" by first + # pushing the source to the sdcard which has no + # permissions issues, then moving it from the sdcard to + # the final destination. + self._logger.info("Falling back to using intermediate /sdcard in push.") + self.mkdir(posixpath.dirname(remote), parents=True, timeout=timeout) + with tempfile.NamedTemporaryFile(delete=True) as tmpf: + sdcard_remote = posixpath.join("/sdcard", os.path.basename(tmpf.name)) + self.command_output(["push", local, sdcard_remote], timeout=timeout) + self.cp(sdcard_remote, remote, recursive=True, timeout=timeout) + except BaseException: + raise + finally: + self._sync(timeout=timeout) + if copy_required: + shutil.rmtree(temp_parent) + if sdcard_remote: + self.rm(sdcard_remote, recursive=True, force=True, timeout=timeout) + + def pull(self, remote, local, timeout=None): + """Pulls a file or directory from the device. + + :param str remote: The path of the remote file or + directory. + :param str local: The path of the local file or + directory name. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADBDevice constructor is used. + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + self._sync(timeout=timeout) + + # remove trailing / + local = os.path.normpath(local) + remote = posixpath.normpath(remote) + copy_required = False + original_local = local + if os.path.isdir(local) and self.is_dir(remote): + # See do_sync_pull in + # https://android.googlesource.com/platform/system/core/+/master/adb/file_sync_client.cpp + # Work around change in behavior in adb 1.0.36 where if + # the local destination directory exists, adb pull will + # copy the source directory *into* the destination + # directory otherwise it will copy the source directory + # *onto* the destination directory. + # + # If the destination directory does exist, pull to its + # parent directory. If the source and destination leaf + # directory names are different, pull the source directory + # into a temporary directory and then copy the temporary + # directory onto the destination. + local_name = os.path.basename(local) + remote_name = os.path.basename(remote) + if local_name != remote_name: + copy_required = True + temp_parent = tempfile.mkdtemp() + local = os.path.join(temp_parent, remote_name) + else: + local = "/".join(local.rstrip("/").split("/")[:-1]) + try: + if not self._run_as_package: + # We must first make the remote directory readable. 
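# A minimal illustrative sketch (not from the patched module) of the
# two-stage push described above for unrooted devices: push to a
# world-writable intermediate under /data/local/tmp, then copy it into the
# app's internal storage via run-as, and clean up. The package name and
# paths are hypothetical, and the commands are only printed.
import posixpath


def plan_run_as_push(local, remote, package, intermediate_name="push.tmp"):
    intermediate = posixpath.join("/data/local/tmp", intermediate_name)
    return [
        ["adb", "push", local, intermediate],
        ["adb", "shell", "run-as", package, "cp", "-R", intermediate, remote],
        ["adb", "shell", "rm", "-rf", intermediate],
    ]


for step in plan_run_as_push(
    "profile",
    "/data/data/org.mozilla.geckoview.test/files/profile",
    package="org.mozilla.geckoview.test",
):
    print(" ".join(step))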
+ self.chmod(remote, recursive=True, timeout=timeout) + self.command_output(["pull", remote, local], timeout=timeout) + else: + # When using run-as to work around the lack of root on + # a device, we can not pull directly from the apps + # internal storage since the shell user under which + # the pull runs does not have permission to read from + # the app's directory. Instead, we use a two stage + # operation where we first use run-as to copy the data + # from the app's internal storage to a temporary + # intermediate location under /data/local/tmp which + # should be writable by the shell user, then using + # pull, to copy the data off of the device. + try: + with tempfile.NamedTemporaryFile(delete=True) as tmpf: + intermediate = posixpath.join( + "/data/local/tmp", os.path.basename(tmpf.name) + ) + # When using run-as , we must first use the + # shell to create the intermediate and chmod it + # before the app will be able to access it. + if self.is_dir(remote, timeout=timeout): + self.mkdir( + posixpath.join(intermediate, remote_name), + parents=True, + timeout=timeout, + ) + else: + self.shell_output("echo > %s" % intermediate, timeout=timeout) + self.chmod(intermediate, timeout=timeout) + self.cp(remote, intermediate, recursive=True, timeout=timeout) + self.command_output(["pull", intermediate, local], timeout=timeout) + except ADBError as e: + self._logger.error("pull %s %s: %s" % (intermediate, local, str(e))) + finally: + self.rm(intermediate, recursive=True, force=True, timeout=timeout) + finally: + if copy_required: + copytree(local, original_local, dirs_exist_ok=True) + shutil.rmtree(temp_parent) + + def get_file(self, remote, offset=None, length=None, timeout=None): + """Pull file from device and return the file's content + + :param str remote: The path of the remote file. + :param offset: If specified, return only content beyond this offset. + :param length: If specified, limit content length accordingly. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADBDevice constructor is used. + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + self._sync(timeout=timeout) + + with tempfile.NamedTemporaryFile() as tf: + self.pull(remote, tf.name, timeout=timeout) + with io.open(tf.name, mode="rb") as tf2: + # ADB pull does not support offset and length, but we can + # instead read only the requested portion of the local file + if offset is not None and length is not None: + tf2.seek(offset) + return tf2.read(length) + if offset is not None: + tf2.seek(offset) + return tf2.read() + return tf2.read() + + def rm(self, path, recursive=False, force=False, timeout=None): + """Delete files or directories on the device. + + :param str path: The path of the remote file or directory. + :param bool recursive: Flag specifying if the command is + to be applied recursively to the target. Default is False. + :param bool force: Flag which if True will not raise an + error when attempting to delete a non-existent file. Default + is False. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADBDevice constructor is used. 
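# A minimal illustrative sketch (not from the patched module) of the
# offset/length emulation used by get_file() above: adb pull has no such
# options, so the whole file is pulled and sliced locally. Demonstrated
# here on a throwaway local file.
import io
import os
import tempfile


def read_slice(path, offset=None, length=None):
    with io.open(path, mode="rb") as fh:
        if offset is not None:
            fh.seek(offset)
        if length is not None:
            return fh.read(length)
        return fh.read()


with tempfile.NamedTemporaryFile(delete=False) as tf:
    tf.write(b"0123456789")
    name = tf.name
print(read_slice(name, offset=2, length=4))  # -> b'2345'
print(read_slice(name, offset=6))            # -> b'6789'
os.unlink(name)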
+ :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + path = posixpath.normpath(path) + enable_run_as = self.enable_run_as_for_path(path) + self._sync(timeout=timeout) + + cmd = "rm" + if recursive: + cmd += " -r" + try: + self.shell_output( + "%s %s" % (cmd, path), timeout=timeout, enable_run_as=enable_run_as + ) + self._sync(timeout=timeout) + if self.exists(path, timeout=timeout): + raise ADBError('rm("%s") failed to remove path.' % path) + except ADBError as e: + if not force and "No such file or directory" in str(e): + raise + if "Directory not empty" in str(e): + raise + if self._verbose and "No such file or directory" not in str(e): + self._logger.error( + "rm %s recursive=%s force=%s timeout=%s enable_run_as=%s: %s" + % (path, recursive, force, timeout, enable_run_as, str(e)) + ) + + def rmdir(self, path, timeout=None): + """Delete empty directory on the device. + + :param str path: The directory name on the device. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADBDevice constructor is used. + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + path = posixpath.normpath(path) + enable_run_as = self.enable_run_as_for_path(path) + self.shell_output( + "rmdir %s" % path, timeout=timeout, enable_run_as=enable_run_as + ) + self._sync(timeout=timeout) + if self.is_dir(path, timeout=timeout): + raise ADBError('rmdir("%s") failed to remove directory.' % path) + + # Process management methods + + def get_process_list(self, timeout=None): + """Returns list of tuples (pid, name, user) for running + processes on device. + + :param int timeout: The maximum time + in seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, + the value set in the ADBDevice constructor is used. + :return: list of (pid, name, user) tuples for running processes + on the device. 
+ :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + adb_process = None + max_attempts = 2 + try: + for attempt in range(1, max_attempts + 1): + adb_process = self.shell("ps", timeout=timeout) + if adb_process.timedout: + raise ADBTimeoutError("%s" % adb_process) + if adb_process.exitcode: + raise ADBProcessError(adb_process) + # first line is the headers + header = six.ensure_str(adb_process.stdout_file.readline()) + pid_i = -1 + user_i = -1 + els = header.split() + for i in range(len(els)): + item = els[i].lower() + if item == "user": + user_i = i + elif item == "pid": + pid_i = i + if user_i != -1 and pid_i != -1: + break + # if this isn't the final attempt, don't print this as an error + if attempt < max_attempts: + self._logger.info( + "get_process_list: attempt: %d %s" % (attempt, header) + ) + else: + raise ADBError( + "get_process_list: Unknown format: %s: %s" + % (header, adb_process) + ) + ret = [] + line = six.ensure_str(adb_process.stdout_file.readline()) + while line: + els = line.split() + try: + ret.append([int(els[pid_i]), els[-1], els[user_i]]) + except ValueError: + self._logger.error( + "get_process_list: %s %s\n%s" + % (header, line, traceback.format_exc()) + ) + raise ADBError( + "get_process_list: %s: %s: %s" % (header, line, adb_process) + ) + except IndexError: + self._logger.error( + "get_process_list: %s %s els %s pid_i %s user_i %s\n%s" + % (header, line, els, pid_i, user_i, traceback.format_exc()) + ) + raise ADBError( + "get_process_list: %s: %s els %s pid_i %s user_i %s: %s" + % (header, line, els, pid_i, user_i, adb_process) + ) + line = six.ensure_str(adb_process.stdout_file.readline()) + self._logger.debug("get_process_list: %s" % ret) + return ret + finally: + if adb_process and isinstance(adb_process.stdout_file, io.IOBase): + adb_process.stdout_file.close() + + def kill(self, pids, sig=None, attempts=3, wait=5, timeout=None): + """Kills processes on the device given a list of process ids. + + :param list pids: process ids to be killed. + :param int sig: signal to be sent to the process. + :param integer attempts: number of attempts to try to + kill the processes. + :param integer wait: number of seconds to wait after each attempt. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADBDevice constructor is used. + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + pid_list = [str(pid) for pid in pids] + for attempt in range(attempts): + args = ["kill"] + if sig: + args.append("-%d" % sig) + args.extend(pid_list) + try: + self.shell_output(" ".join(args), timeout=timeout) + except ADBError as e: + if "No such process" not in str(e): + raise + pid_set = set(pid_list) + current_pid_set = set( + [str(proc[0]) for proc in self.get_process_list(timeout=timeout)] + ) + pid_list = list(pid_set.intersection(current_pid_set)) + if not pid_list: + break + self._logger.debug( + "Attempt %d of %d to kill processes %s failed" + % (attempt + 1, attempts, pid_list) + ) + time.sleep(wait) + + if pid_list: + raise ADBError("kill: processes %s not killed" % pid_list) + + def pkill(self, appname, sig=None, attempts=3, wait=5, timeout=None): + """Kills a processes on the device matching a name. + + :param str appname: The app name of the process to + be killed. Note that only the first 75 characters of the + process name are significant. 
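# A minimal illustrative sketch (not from the patched module) of the
# header-driven "ps" parsing above: locate the PID and USER columns from
# the header line so the parser copes with the column orders of different
# Android releases. The sample output is made up.
def parse_ps(output):
    lines = output.splitlines()
    header = [h.lower() for h in lines[0].split()]
    pid_i = header.index("pid")
    user_i = header.index("user")
    procs = []
    for line in lines[1:]:
        els = line.split()
        if els:
            procs.append((int(els[pid_i]), els[-1], els[user_i]))
    return procs


sample = (
    "USER     PID   PPID  VSIZE  RSS   WCHAN  PC  NAME\n"
    "root     1     0     8896   688   0      0   /init\n"
    "u0_a78   5779  1294  123456 65432 0      0   org.mozilla.firefox\n"
)
print(parse_ps(sample))
# -> [(1, '/init', 'root'), (5779, 'org.mozilla.firefox', 'u0_a78')]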
+ :param int sig: optional signal to be sent to the process. + :param integer attempts: number of attempts to try to + kill the processes. + :param integer wait: number of seconds to wait after each attempt. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADBDevice constructor is used. + :param bool root: Flag specifying if the command should + be executed as root. + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + pids = self.pidof(appname, timeout=timeout) + + if not pids: + return + + try: + self.kill(pids, sig, attempts=attempts, wait=wait, timeout=timeout) + except ADBError as e: + if self.process_exist(appname, timeout=timeout): + raise e + + def process_exist(self, process_name, timeout=None): + """Returns True if process with name process_name is running on + device. + + :param str process_name: The name of the process + to check. Note that only the first 75 characters of the + process name are significant. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADBDevice constructor is used. + :return: boolean - True if process exists. + + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + if not isinstance(process_name, six.string_types): + raise ADBError("Process name %s is not a string" % process_name) + + # Filter out extra spaces. + parts = [x for x in process_name.split(" ") if x != ""] + process_name = " ".join(parts) + + # Filter out the quoted env string if it exists + # ex: '"name=value;name2=value2;etc=..." process args' -> 'process args' + parts = process_name.split('"') + if len(parts) > 2: + process_name = " ".join(parts[2:]).strip() + + pieces = process_name.split(" ") + parts = pieces[0].split("/") + app = parts[-1] + + if self.pidof(app, timeout=timeout): + return True + return False + + def cp(self, source, destination, recursive=False, timeout=None): + """Copies a file or directory on the device. + + :param source: string containing the path of the source file or + directory. + :param destination: string containing the path of the destination file + or directory. + :param recursive: optional boolean indicating if a recursive copy is to + be performed. Required if the source is a directory. Defaults to + False. Think cp -R source destination. + :param int timeout: optional integer specifying the maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADBDevice constructor is used. 
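# A minimal illustrative sketch (not from the patched module) of the name
# normalisation process_exist() performs before calling pidof(): collapse
# extra spaces, drop a leading quoted environment string, and keep only
# the basename of the first token. The input is made up.
def normalize_process_name(process_name):
    parts = [x for x in process_name.split(" ") if x != ""]
    process_name = " ".join(parts)
    parts = process_name.split('"')
    if len(parts) > 2:  # '"name=value;..." cmd args' -> 'cmd args'
        process_name = " ".join(parts[2:]).strip()
    return process_name.split(" ")[0].split("/")[-1]


print(normalize_process_name('"MOZ_LOG=all:5" /system/bin/app_process  -Xzygote'))
# -> 'app_process'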
+ :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + source = posixpath.normpath(source) + destination = posixpath.normpath(destination) + enable_run_as = self.enable_run_as_for_path( + source + ) or self.enable_run_as_for_path(destination) + if self._have_cp: + r = "-R" if recursive else "" + self.shell_output( + "cp %s %s %s" % (r, source, destination), + timeout=timeout, + enable_run_as=enable_run_as, + ) + self.chmod(destination, recursive=recursive, timeout=timeout) + self._sync(timeout=timeout) + return + + # Emulate cp behavior depending on if source and destination + # already exists and whether they are a directory or file. + if not self.exists(source, timeout=timeout): + raise ADBError("cp: can't stat '%s': No such file or directory" % source) + + if self.is_file(source, timeout=timeout): + if self.is_dir(destination, timeout=timeout): + # Copy the source file into the destination directory + destination = posixpath.join(destination, os.path.basename(source)) + self.shell_output("dd if=%s of=%s" % (source, destination), timeout=timeout) + self.chmod(destination, recursive=recursive, timeout=timeout) + self._sync(timeout=timeout) + return + + if self.is_file(destination, timeout=timeout): + raise ADBError("cp: %s: Not a directory" % destination) + + if not recursive: + raise ADBError("cp: omitting directory '%s'" % source) + + if self.is_dir(destination, timeout=timeout): + # Copy the source directory into the destination directory. + destination_dir = posixpath.join(destination, os.path.basename(source)) + else: + # Copy the contents of the source directory into the + # destination directory. + destination_dir = destination + + try: + # Do not create parent directories since cp does not. + self.mkdir(destination_dir, timeout=timeout) + except ADBError as e: + if "File exists" not in str(e): + raise + + for i in self.list_files(source, timeout=timeout): + self.cp( + posixpath.join(source, i), + posixpath.join(destination_dir, i), + recursive=recursive, + timeout=timeout, + ) + self.chmod(destination_dir, recursive=True, timeout=timeout) + + def mv(self, source, destination, timeout=None): + """Moves a file or directory on the device. + + :param source: string containing the path of the source file or + directory. + :param destination: string containing the path of the destination file + or directory. + :param int timeout: optional integer specifying the maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADBDevice constructor is used. + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + source = posixpath.normpath(source) + destination = posixpath.normpath(destination) + enable_run_as = self.enable_run_as_for_path( + source + ) or self.enable_run_as_for_path(destination) + self.shell_output( + "mv %s %s" % (source, destination), + timeout=timeout, + enable_run_as=enable_run_as, + ) + + def reboot(self, timeout=None): + """Reboots the device. + + :param int timeout: optional integer specifying the maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADB constructor is used. 
+ :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + + reboot() reboots the device, issues an adb wait-for-device in order to + wait for the device to complete rebooting, then calls is_device_ready() + to determine if the device has completed booting. + + If the device supports running adbd as root, adbd will be + restarted running as root. Then, if the device supports + SELinux, setenforce Permissive will be called to change + SELinux to permissive. This must be done after adbd is + restarted in order for the SELinux Permissive setting to + persist. + + """ + self.command_output(["reboot"], timeout=timeout) + self._wait_for_boot_completed(timeout=timeout) + return self.is_device_ready(timeout=timeout) + + def get_sysinfo(self, timeout=None): + """ + Returns a detailed dictionary of information strings about the device. + + :param int timeout: optional integer specifying the maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADB constructor is used. + + :raises: :exc:`ADBTimeoutError` + """ + results = {"info": self.get_info(timeout=timeout)} + for service in ( + "meminfo", + "cpuinfo", + "dbinfo", + "procstats", + "usagestats", + "battery", + "batterystats", + "diskstats", + ): + results[service] = self.shell_output( + "dumpsys %s" % service, timeout=timeout + ) + return results + + def get_info(self, directive=None, timeout=None): + """ + Returns a dictionary of information strings about the device. + + :param directive: information you want to get. Options are: + - `battery` - battery charge as a percentage + - `disk` - total, free, available bytes on disk + - `id` - unique id of the device + - `os` - name of the os + - `process` - list of running processes (same as ps) + - `systime` - system time of the device + - `uptime` - uptime of the device + + If `directive` is `None`, will return all available information + :param int timeout: optional integer specifying the maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADB constructor is used. 
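A short sketch of querying device information with the two methods above (not part of the patch; the serial and sample values are assumptions):

from mozdevice import ADBDevice

device = ADBDevice(device="emulator-5554")  # assumed serial
print(device.get_info("os"))       # e.g. {'os': '<ro.build.display.id value>'}
print(device.get_info("battery"))  # e.g. {'battery': 100.0}
sysinfo = device.get_sysinfo()     # get_info() plus dumpsys meminfo/cpuinfo/...
print(sorted(sysinfo.keys()))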
+ :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + directives = ["battery", "disk", "id", "os", "process", "systime", "uptime"] + + if directive in directives: + directives = [directive] + + info = {} + if "battery" in directives: + info["battery"] = self.get_battery_percentage(timeout=timeout) + if "disk" in directives: + info["disk"] = self.shell_output( + "df /data /system /sdcard", timeout=timeout + ).splitlines() + if "id" in directives: + info["id"] = self.command_output(["get-serialno"], timeout=timeout) + if "os" in directives: + info["os"] = self.get_prop("ro.build.display.id", timeout=timeout) + if "process" in directives: + ps = self.shell_output("ps", timeout=timeout) + info["process"] = ps.splitlines() + if "systime" in directives: + info["systime"] = self.shell_output("date", timeout=timeout) + if "uptime" in directives: + uptime = self.shell_output("uptime", timeout=timeout) + if uptime: + m = re.match(r"up time: ((\d+) days, )*(\d{2}):(\d{2}):(\d{2})", uptime) + if m: + uptime = "%d days %d hours %d minutes %d seconds" % tuple( + [int(g or 0) for g in m.groups()[1:]] + ) + info["uptime"] = uptime + return info + + # Properties to manage SELinux on the device: + # https://source.android.com/devices/tech/security/selinux/index.html + # setenforce [ Enforcing | Permissive | 1 | 0 ] + # getenforce returns either Enforcing or Permissive + + @property + def selinux(self): + """Returns True if SELinux is supported, False otherwise.""" + if self._selinux is None: + self._selinux = self.enforcing != "" + return self._selinux + + @property + def enforcing(self): + try: + enforce = self.shell_output("getenforce") + except ADBError as e: + enforce = "" + self._logger.warning("Unable to get SELinux enforcing due to %s." % e) + return enforce + + @enforcing.setter + def enforcing(self, value): + """Set SELinux mode. + :param str value: The new SELinux mode. Should be one of + Permissive, 0, Enforcing, 1 but it is not validated. + """ + try: + self.shell_output("setenforce %s" % value) + self._logger.info("Setting SELinux %s" % value) + except ADBError as e: + self._logger.warning("Unable to set SELinux Permissive due to %s." % e) + + # Informational methods + + def get_battery_percentage(self, timeout=None): + """Returns the battery charge as a percentage. + + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADBDevice constructor is used. + :return: battery charge as a percentage. + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + level = None + scale = None + percentage = 0 + cmd = "dumpsys battery" + re_parameter = re.compile(r"\s+(\w+):\s+(\d+)") + lines = self.shell_output(cmd, timeout=timeout).splitlines() + for line in lines: + match = re_parameter.match(line) + if match: + parameter = match.group(1) + value = match.group(2) + if parameter == "level": + level = float(value) + elif parameter == "scale": + scale = float(value) + if parameter is not None and scale is not None: + # pylint --py3k W1619 + percentage = 100.0 * level / scale + break + return percentage + + def get_top_activity(self, timeout=None): + """Returns the name of the top activity (focused app) reported by dumpsys + + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. 
The total time spent + may exceed this value. If it is not specified, the value + set in the ADBDevice constructor is used. + :return: package name of top activity or None (cannot be determined) + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + if self.version < version_codes.Q: + return self._get_top_activity_P(timeout=timeout) + return self._get_top_activity_Q(timeout=timeout) + + def _get_top_activity_P(self, timeout=None): + """Returns the name of the top activity (focused app) reported by dumpsys + for Android 9 and earlier. + """ + package = None + data = None + cmd = "dumpsys window windows" + verbose = self._verbose + try: + self._verbose = False + data = self.shell_output(cmd, timeout=timeout) + except Exception as e: + # dumpsys intermittently fails on some platforms. + self._logger.info("_get_top_activity_P: Exception %s: %s" % (cmd, e)) + return package + finally: + self._verbose = verbose + m = re.search("mFocusedApp(.+)/", data) + if not m: + # alternative format seen on newer versions of Android + m = re.search("FocusedApplication(.+)/", data) + if m: + line = m.group(0) + # Extract package name: string of non-whitespace ending in forward slash + m = re.search(r"(\S+)/$", line) + if m: + package = m.group(1) + if self._verbose: + self._logger.debug("get_top_activity: %s" % str(package)) + return package + + def _get_top_activity_Q(self, timeout=None): + """Returns the name of the top activity (focused app) reported by dumpsys + for Android 10 and later. + """ + package = None + data = None + cmd = "dumpsys window" + verbose = self._verbose + try: + self._verbose = False + data = self.shell_output(cmd, timeout=timeout) + except Exception as e: + # dumpsys intermittently fails on some platforms (4.3 arm emulator) + self._logger.info("_get_top_activity_Q: Exception %s: %s" % (cmd, e)) + return package + finally: + self._verbose = verbose + m = re.search(r"mFocusedWindow=Window{\S+ \S+ (\S+)/\S+}", data) + if m: + package = m.group(1) + if self._verbose: + self._logger.debug("get_top_activity: %s" % str(package)) + return package + + # System control methods + + def is_device_ready(self, timeout=None): + """Checks if a device is ready for testing. + + This method uses the android only package manager to check for + readiness. + + :param int timeout: The maximum time + in seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADB constructor is used. + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + # command_output automatically inserts a 'wait-for-device' + # argument to adb. Issuing an empty command is the same as adb + # -s wait-for-device. We don't send an explicit + # 'wait-for-device' since that would add duplicate + # 'wait-for-device' arguments which is an error in newer + # versions of adb. 
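The two dumpsys parsers above differ mainly in the regular expression they apply. Here is a self-contained illustration of the Android 10+ form; the sample line is invented for the example and only the regular expression is taken from the code.

import re

sample = (
    "  mFocusedWindow=Window{1a2b3c u0 "
    "org.mozilla.geckoview_example/org.mozilla.geckoview_example.GeckoViewActivity}"
)
m = re.search(r"mFocusedWindow=Window{\S+ \S+ (\S+)/\S+}", sample)
print(m.group(1) if m else None)  # org.mozilla.geckoview_example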
+ self._wait_for_boot_completed(timeout=timeout) + pm_error_string = "Error: Could not access the Package Manager" + ready_path = os.path.join(self.test_root, "ready") + for attempt in range(self._device_ready_retry_attempts): + failure = "Unknown failure" + success = True + try: + state = self.get_state(timeout=timeout) + if state != "device": + failure = "Device state: %s" % state + success = False + else: + if self.enforcing != "Permissive": + self.enforcing = "Permissive" + if self.is_dir(ready_path, timeout=timeout): + self.rmdir(ready_path, timeout=timeout) + self.mkdir(ready_path, timeout=timeout) + self.rmdir(ready_path, timeout=timeout) + # Invoke the pm list packages command to see if it is up and + # running. + data = self.shell_output( + "pm list packages org.mozilla", timeout=timeout + ) + if pm_error_string in data: + failure = data + success = False + except ADBError as e: + success = False + failure = str(e) + + if not success: + self._logger.debug( + "Attempt %s of %s device not ready: %s" + % (attempt + 1, self._device_ready_retry_attempts, failure) + ) + time.sleep(self._device_ready_retry_wait) + + return success + + def power_on(self, timeout=None): + """Sets the device's power stayon value. + + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADB constructor is used. + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + try: + self.shell_output("svc power stayon true", timeout=timeout) + except ADBError as e: + # Executing this via adb shell errors, but not interactively. + # Any other exitcode is a real error. + if "exitcode: 137" not in str(e): + raise + self._logger.warning("Unable to set power stayon true: %s" % e) + + # Application management methods + + def add_change_device_settings(self, app_name, timeout=None): + """ + Allows the test to change Android device settings. + :param str: app_name: Name of application (e.g. `org.mozilla.fennec`) + """ + self.shell_output( + "appops set %s android:write_settings allow" % app_name, + timeout=timeout, + enable_run_as=False, + ) + + def add_mock_location(self, app_name, timeout=None): + """ + Allows the Android device to use mock locations. + :param str: app_name: Name of application (e.g. `org.mozilla.fennec`) + """ + self.shell_output( + "appops set %s android:mock_location allow" % app_name, + timeout=timeout, + enable_run_as=False, + ) + + def grant_runtime_permissions(self, app_name, timeout=None): + """ + Grant required runtime permissions to the specified app + (typically org.mozilla.fennec_$USER). + + :param str: app_name: Name of application (e.g. 
`org.mozilla.fennec`)
+        """
+        if self.version >= version_codes.M:
+            permissions = [
+                "android.permission.READ_EXTERNAL_STORAGE",
+                "android.permission.ACCESS_COARSE_LOCATION",
+                "android.permission.ACCESS_FINE_LOCATION",
+                "android.permission.CAMERA",
+                "android.permission.RECORD_AUDIO",
+            ]
+            if self.version < version_codes.R:
+                # WRITE_EXTERNAL_STORAGE is no longer available
+                # in Android 11+
+                permissions.append("android.permission.WRITE_EXTERNAL_STORAGE")
+            self._logger.info("Granting important runtime permissions to %s" % app_name)
+            for permission in permissions:
+                try:
+                    self.shell_output(
+                        "pm grant %s %s" % (app_name, permission),
+                        timeout=timeout,
+                        enable_run_as=False,
+                    )
+                except ADBError as e:
+                    self._logger.warning(
+                        "Unable to grant runtime permission %s to %s due to %s"
+                        % (permission, app_name, e)
+                    )
+
+    def install_app_bundle(self, bundletool, bundle_path, java_home=None, timeout=None):
+        """Installs an app bundle (AAB) on the device.
+
+        :param str bundletool: Path to the bundletool jar
+        :param str bundle_path: The aab file name to be installed.
+        :param int timeout: The maximum time in
+            seconds for any spawned adb process to complete before
+            throwing an ADBTimeoutError.
+            This timeout is per adb call. The total time spent
+            may exceed this value. If it is not specified, the value
+            set in the ADB constructor is used.
+        :param str java_home: Path to the JDK location. Will default to
+            $JAVA_HOME when not specified.
+        :raises: :exc:`ADBTimeoutError`
+                 :exc:`ADBError`
+        """
+        device_serial = self._device_serial or os.environ.get("ANDROID_SERIAL")
+        java_home = java_home or os.environ.get("JAVA_HOME")
+        with tempfile.TemporaryDirectory() as temporaryDirectory:
+            # bundletool doesn't come with a debug-key so we need to provide
+            # one ourselves.
+ keystore_path = os.path.join(temporaryDirectory, "debug.keystore") + keytool_path = os.path.join(java_home, "bin", "keytool") + key_gen = [ + keytool_path, + "-genkey", + "-v", + "-keystore", + keystore_path, + "-alias", + "androiddebugkey", + "-storepass", + "android", + "-keypass", + "android", + "-keyalg", + "RSA", + "-validity", + "14000", + "-dname", + "cn=Unknown, ou=Unknown, o=Unknown, c=Unknown", + ] + self._logger.info("key_gen: %s" % key_gen) + try: + subprocess.check_call(key_gen, timeout=timeout) + except subprocess.TimeoutExpired: + raise ADBTimeoutError("ADBDevice: unable to generate key") + + apks_path = "{}/tmp.apks".format(temporaryDirectory) + java_path = os.path.join(java_home, "bin", "java") + build_apks = [ + java_path, + "-jar", + bundletool, + "build-apks", + "--bundle={}".format(bundle_path), + "--output={}".format(apks_path), + "--connected-device", + "--device-id={}".format(device_serial), + "--adb={}".format(self._adb_path), + "--ks={}".format(keystore_path), + "--ks-key-alias=androiddebugkey", + "--key-pass=pass:android", + "--ks-pass=pass:android", + ] + self._logger.info("build_apks: %s" % build_apks) + + try: + subprocess.check_call(build_apks, timeout=timeout) + except subprocess.TimeoutExpired: + raise ADBTimeoutError("ADBDevice: unable to generate apks") + install_apks = [ + java_path, + "-jar", + bundletool, + "install-apks", + "--apks={}".format(apks_path), + "--device-id={}".format(device_serial), + "--adb={}".format(self._adb_path), + ] + self._logger.info("install_apks: %s" % install_apks) + + try: + subprocess.check_call(install_apks, timeout=timeout) + except subprocess.TimeoutExpired: + raise ADBTimeoutError("ADBDevice: unable to install apks") + + def install_app(self, apk_path, replace=False, timeout=None): + """Installs an app on the device. + + :param str apk_path: The apk file name to be installed. + :param bool replace: If True, replace existing application. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADB constructor is used. + :return: string - name of installed package. + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + dump_packages = "dumpsys package packages" + packages_before = set(self.shell_output(dump_packages).split("\n")) + cmd = ["install"] + if replace: + cmd.append("-r") + cmd.append(apk_path) + data = self.command_output(cmd, timeout=timeout) + if data.find("Success") == -1: + raise ADBError("install failed for %s. Got: %s" % (apk_path, data)) + packages_after = set(self.shell_output(dump_packages).split("\n")) + packages_diff = packages_after - packages_before + package_name = None + re_pkg = re.compile(r"\s+pkg=Package{[^ ]+ (.*)}") + for diff in packages_diff: + match = re_pkg.match(diff) + if match: + package_name = match.group(1) + break + return package_name + + def is_app_installed(self, app_name, timeout=None): + """Returns True if an app is installed on the device. + + :param str app_name: name of the app to be checked. + :param int timeout: maximum time in seconds for any spawned + adb process to complete before throwing an ADBTimeoutError. + This timeout is per adb call. If it is not specified, + the value set in the ADB constructor is used. 
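A usage sketch for plain APK installs via the methods above (not part of adb.py); the APK path and serial are placeholders.

from mozdevice import ADBDevice

device = ADBDevice(device="emulator-5554")  # assumed serial
package = device.install_app("/tmp/geckoview_example.apk", replace=True)
print("installed:", package)
print("present:", device.is_app_installed(package))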
+ + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + pm_error_string = "Error: Could not access the Package Manager" + data = self.shell_output( + "pm list package %s" % app_name, timeout=timeout, enable_run_as=False + ) + if pm_error_string in data: + raise ADBError(pm_error_string) + output = [line for line in data.splitlines() if line.strip()] + return any(["package:{}".format(app_name) == out for out in output]) + + def launch_application( + self, + app_name, + activity_name, + intent, + url=None, + extras=None, + wait=True, + fail_if_running=True, + grant_runtime_permissions=True, + timeout=None, + is_service=False, + ): + """Launches an Android application + + :param str app_name: Name of application (e.g. `com.android.chrome`) + :param str activity_name: Name of activity to launch (e.g. `.Main`) + :param str intent: Intent to launch application with + :param str url: URL to open + :param dict extras: Extra arguments for application. + :param bool wait: If True, wait for application to start before + returning. + :param bool fail_if_running: Raise an exception if instance of + application is already running. + :param bool grant_runtime_permissions: Grant special runtime + permissions. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADB constructor is used. + :param bool is_service: Whether we want to launch a service or not. + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + # If fail_if_running is True, we throw an exception here. Only one + # instance of an application can be running at once on Android, + # starting a new instance may not be what we want depending on what + # we want to do + if fail_if_running and self.process_exist(app_name, timeout=timeout): + raise ADBError( + "Only one instance of an application may be running " "at once" + ) + + if grant_runtime_permissions: + self.grant_runtime_permissions(app_name) + + acmd = ["am"] + ["startservice" if is_service else "start"] + if wait: + acmd.extend(["-W"]) + acmd.extend( + [ + "-n", + "%s/%s" % (app_name, activity_name), + ] + ) + if intent: + acmd.extend(["-a", intent]) + + # Note that isinstance(True, int) and isinstance(False, int) + # is True. This means we must test the type of the value + # against bool prior to testing it against int in order to + # prevent falsely identifying a bool value as an int. + if extras: + for key, val in extras.items(): + if isinstance(val, bool): + extra_type_param = "--ez" + elif isinstance(val, int): + extra_type_param = "--ei" + else: + extra_type_param = "--es" + acmd.extend([extra_type_param, str(key), str(val)]) + + if url: + acmd.extend(["-d", url]) + + cmd = self._escape_command_line(acmd) + self._logger.info("launch_application: %s" % cmd) + cmd_output = self.shell_output(cmd, timeout=timeout) + if "Error:" in cmd_output: + for line in cmd_output.split("\n"): + self._logger.info(line) + raise ADBError( + "launch_application %s/%s failed" % (app_name, activity_name) + ) + + def launch_fennec( + self, + app_name, + intent="android.intent.action.VIEW", + moz_env=None, + extra_args=None, + url=None, + wait=True, + fail_if_running=True, + timeout=None, + ): + """Convenience method to launch Fennec on Android with various + debugging arguments + + :param str app_name: Name of fennec application (e.g. 
+ `org.mozilla.fennec`) + :param str intent: Intent to launch application. + :param str moz_env: Mozilla specific environment to pass into + application. + :param str extra_args: Extra arguments to be parsed by fennec. + :param str url: URL to open + :param bool wait: If True, wait for application to start before + returning. + :param bool fail_if_running: Raise an exception if instance of + application is already running. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADB constructor is used. + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + extras = {} + + if moz_env: + # moz_env is expected to be a dictionary of environment variables: + # Fennec itself will set them when launched + for env_count, (env_key, env_val) in enumerate(moz_env.items()): + extras["env" + str(env_count)] = env_key + "=" + env_val + + # Additional command line arguments that fennec will read and use (e.g. + # with a custom profile) + if extra_args: + extras["args"] = " ".join(extra_args) + + self.launch_application( + app_name, + "org.mozilla.gecko.BrowserApp", + intent, + url=url, + extras=extras, + wait=wait, + fail_if_running=fail_if_running, + timeout=timeout, + ) + + def launch_service( + self, + app_name, + activity_name=None, + intent="android.intent.action.MAIN", + moz_env=None, + extra_args=None, + url=None, + e10s=False, + wait=True, + grant_runtime_permissions=False, + out_file=None, + timeout=None, + ): + """Convenience method to launch a service on Android with various + debugging arguments; convenient for geckoview apps. + + :param str app_name: Name of application (e.g. + `org.mozilla.geckoview_example` or `org.mozilla.geckoview.test_runner`) + :param str activity_name: Activity name, like `GeckoViewActivity`, or + `TestRunnerActivity`. + :param str intent: Intent to launch application. + :param str moz_env: Mozilla specific environment to pass into + application. + :param str extra_args: Extra arguments to be parsed by the app. + :param str url: URL to open + :param bool e10s: If True, run in multiprocess mode. + :param bool wait: If True, wait for application to start before + returning. + :param bool grant_runtime_permissions: Grant special runtime + permissions. + :param str out_file: File where to redirect the output to + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADB constructor is used. + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + extras = {} + + if moz_env: + # moz_env is expected to be a dictionary of environment variables: + # geckoview_example itself will set them when launched + for env_count, (env_key, env_val) in enumerate(moz_env.items()): + extras["env" + str(env_count)] = env_key + "=" + env_val + + # Additional command line arguments that the app will read and use (e.g. 
+ # with a custom profile) + if extra_args: + for arg_count, arg in enumerate(extra_args): + extras["arg" + str(arg_count)] = arg + + extras["use_multiprocess"] = e10s + extras["out_file"] = out_file + self.launch_application( + app_name, + "%s.%s" % (app_name, activity_name), + intent, + url=url, + extras=extras, + wait=wait, + grant_runtime_permissions=grant_runtime_permissions, + timeout=timeout, + is_service=True, + fail_if_running=False, + ) + + def launch_activity( + self, + app_name, + activity_name=None, + intent="android.intent.action.MAIN", + moz_env=None, + extra_args=None, + url=None, + e10s=False, + wait=True, + fail_if_running=True, + timeout=None, + ): + """Convenience method to launch an application on Android with various + debugging arguments; convenient for geckoview apps. + + :param str app_name: Name of application (e.g. + `org.mozilla.geckoview_example` or `org.mozilla.geckoview.test_runner`) + :param str activity_name: Activity name, like `GeckoViewActivity`, or + `TestRunnerActivity`. + :param str intent: Intent to launch application. + :param str moz_env: Mozilla specific environment to pass into + application. + :param str extra_args: Extra arguments to be parsed by the app. + :param str url: URL to open + :param bool e10s: If True, run in multiprocess mode. + :param bool wait: If True, wait for application to start before + returning. + :param bool fail_if_running: Raise an exception if instance of + application is already running. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADB constructor is used. + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + extras = {} + + if moz_env: + # moz_env is expected to be a dictionary of environment variables: + # geckoview_example itself will set them when launched + for env_count, (env_key, env_val) in enumerate(moz_env.items()): + extras["env" + str(env_count)] = env_key + "=" + env_val + + # Additional command line arguments that the app will read and use (e.g. + # with a custom profile) + if extra_args: + for arg_count, arg in enumerate(extra_args): + extras["arg" + str(arg_count)] = arg + + extras["use_multiprocess"] = e10s + self.launch_application( + app_name, + "%s.%s" % (app_name, activity_name), + intent, + url=url, + extras=extras, + wait=wait, + fail_if_running=fail_if_running, + timeout=timeout, + ) + + def stop_application(self, app_name, timeout=None): + """Stops the specified application + + For Android 3.0+, we use the "am force-stop" to do this, which + is reliable and does not require root. For earlier versions of + Android, we simply try to manually kill the processes started + by the app repeatedly until none is around any more. This is + less reliable and does require root. + + :param str app_name: Name of application (e.g. `com.android.chrome`) + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADB constructor is used. + :param bool root: Flag specifying if the command should be + executed as root. 
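As a hedged example of the launch helpers above, a GeckoView-style app could be started and stopped like this; the serial, activity name and URL are placeholders.

from mozdevice import ADBDevice

device = ADBDevice(device="emulator-5554")  # assumed serial
app = "org.mozilla.geckoview_example"
device.launch_activity(
    app,
    activity_name="GeckoViewActivity",
    url="https://example.org",
    e10s=True,
    fail_if_running=False,
)
device.stop_application(app)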
+ :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + if self.version >= version_codes.HONEYCOMB: + self.shell_output("am force-stop %s" % app_name, timeout=timeout) + else: + num_tries = 0 + max_tries = 5 + while self.process_exist(app_name, timeout=timeout): + if num_tries > max_tries: + raise ADBError( + "Couldn't successfully kill %s after %s " + "tries" % (app_name, max_tries) + ) + self.pkill(app_name, timeout=timeout) + num_tries += 1 + + # sleep for a short duration to make sure there are no + # additional processes in the process of being launched + # (this is not 100% guaranteed to work since it is inherently + # racey, but it's the best we can do) + time.sleep(1) + + def uninstall_app(self, app_name, reboot=False, timeout=None): + """Uninstalls an app on the device. + + :param str app_name: The name of the app to be + uninstalled. + :param bool reboot: Flag indicating that the device should + be rebooted after the app is uninstalled. No reboot occurs + if the app is not installed. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADB constructor is used. + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + if self.is_app_installed(app_name, timeout=timeout): + data = self.command_output(["uninstall", app_name], timeout=timeout) + if data.find("Success") == -1: + self._logger.debug("uninstall_app failed: %s" % data) + raise ADBError("uninstall failed for %s. Got: %s" % (app_name, data)) + self.run_as_package = None + if reboot: + self.reboot(timeout=timeout) + + def update_app(self, apk_path, timeout=None): + """Updates an app on the device and reboots. + + :param str apk_path: The apk file name to be + updated. + :param int timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. + This timeout is per adb call. The total time spent + may exceed this value. If it is not specified, the value + set in the ADB constructor is used. + :raises: :exc:`ADBTimeoutError` + :exc:`ADBError` + """ + cmd = ["install", "-r"] + if self.version >= version_codes.M: + cmd.append("-g") + cmd.append(apk_path) + output = self.command_output(cmd, timeout=timeout) + self.reboot(timeout=timeout) + return output diff --git a/testing/mozbase/mozdevice/mozdevice/adb_android.py b/testing/mozbase/mozdevice/mozdevice/adb_android.py new file mode 100644 index 0000000000..135fda4195 --- /dev/null +++ b/testing/mozbase/mozdevice/mozdevice/adb_android.py @@ -0,0 +1,13 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +from .adb import ADBDevice + + +class ADBAndroid(ADBDevice): + """ADBAndroid functionality is now provided by ADBDevice. New callers + should use ADBDevice. + """ + + pass diff --git a/testing/mozbase/mozdevice/mozdevice/remote_process_monitor.py b/testing/mozbase/mozdevice/mozdevice/remote_process_monitor.py new file mode 100644 index 0000000000..2934a9f3d1 --- /dev/null +++ b/testing/mozbase/mozdevice/mozdevice/remote_process_monitor.py @@ -0,0 +1,285 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
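ADBAndroid above is kept only as a backwards-compatible alias, so new code can use ADBDevice directly. A short sketch of the update/uninstall flow (the APK path and serial are placeholders; note update_app() reboots the device):

from mozdevice import ADBDevice

device = ADBDevice(device="emulator-5554")  # assumed serial
device.update_app("/tmp/geckoview_example.apk")  # installs with -r (and -g on M+), then reboots
device.uninstall_app("org.mozilla.geckoview_example", reboot=False)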
+ +import re +import time + +import six + +from .adb import ADBTimeoutError + + +class RemoteProcessMonitor: + """ + RemoteProcessMonitor provides a convenient way to run a remote process, + dump its log file, and wait for it to end. + """ + + def __init__( + self, + app_name, + device, + log, + message_logger, + remote_log_file, + remote_profile, + ): + self.app_name = app_name + self.device = device + self.log = log + self.remote_log_file = remote_log_file + self.remote_profile = remote_profile + self.counts = {} + self.counts["pass"] = 0 + self.counts["fail"] = 0 + self.counts["todo"] = 0 + self.last_test_seen = "RemoteProcessMonitor" + self.message_logger = message_logger + if self.device.is_file(self.remote_log_file): + self.device.rm(self.remote_log_file) + self.log.info("deleted remote log %s" % self.remote_log_file) + + def launch(self, app, debugger_info, test_url, extra_args, env, e10s): + """ + Start the remote activity. + """ + if self.app_name and self.device.process_exist(self.app_name): + self.log.info("%s is already running. Stopping..." % self.app_name) + self.device.stop_application(self.app_name) + args = [] + if debugger_info: + args.extend(debugger_info.args) + args.append(app) + args.extend(extra_args) + activity = "TestRunnerActivity" + self.device.launch_activity( + self.app_name, + activity_name=activity, + e10s=e10s, + moz_env=env, + extra_args=args, + url=test_url, + ) + return self.pid + + @property + def pid(self): + """ + Determine the pid of the remote process (or the first process with + the same name). + """ + procs = self.device.get_process_list() + # limit the comparison to the first 75 characters due to a + # limitation in processname length in android. + pids = [proc[0] for proc in procs if proc[1] == self.app_name[:75]] + if pids is None or len(pids) < 1: + return 0 + return pids[0] + + def read_stdout(self): + """ + Fetch the full remote log file, log any new content and return True if new + content is processed. 
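The pid property above relies on Android truncating process names. A standalone sketch of that matching rule, where the (pid, name) tuples stand in for whatever ADBDevice.get_process_list() returns (its exact shape is defined elsewhere in adb.py):

procs = [(1234, "org.mozilla.geckoview_example"), (5678, "system_server")]
app_name = "org.mozilla.geckoview_example"
# Only the first 75 characters of the process name are significant.
pids = [pid for pid, name in procs if name == app_name[:75]]
print(pids[0] if pids else 0)  # 1234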
+ """ + try: + new_log_content = self.device.get_file( + self.remote_log_file, offset=self.stdout_len + ) + except ADBTimeoutError: + raise + except Exception as e: + self.log.error( + "%s | exception reading log: %s" % (self.last_test_seen, str(e)) + ) + return False + if not new_log_content: + return False + + self.stdout_len += len(new_log_content) + new_log_content = six.ensure_str(new_log_content, errors="replace") + + self.log_buffer += new_log_content + lines = self.log_buffer.split("\n") + lines = [l for l in lines if l] + + if lines: + if self.log_buffer.endswith("\n"): + # all lines are complete; no need to buffer + self.log_buffer = "" + else: + # keep the last (unfinished) line in the buffer + self.log_buffer = lines[-1] + del lines[-1] + if not lines: + return False + + for line in lines: + # This passes the line to the logger (to be logged or buffered) + if isinstance(line, six.text_type): + # if line is unicode - let's encode it to bytes + parsed_messages = self.message_logger.write( + line.encode("UTF-8", "replace") + ) + else: + # if line is bytes type, write it as it is + parsed_messages = self.message_logger.write(line) + + for message in parsed_messages: + if isinstance(message, dict): + if message.get("action") == "test_start": + self.last_test_seen = message["test"] + elif message.get("action") == "test_end": + self.last_test_seen = "{} (finished)".format(message["test"]) + elif message.get("action") == "suite_end": + self.last_test_seen = "Last test finished" + elif message.get("action") == "log": + line = message["message"].strip() + m = re.match(r".*:\s*(\d*)", line) + if m: + try: + val = int(m.group(1)) + if "Passed:" in line: + self.counts["pass"] += val + self.last_test_seen = "Last test finished" + elif "Failed:" in line: + self.counts["fail"] += val + elif "Todo:" in line: + self.counts["todo"] += val + except ADBTimeoutError: + raise + except Exception: + pass + + return True + + def wait(self, timeout=None): + """ + Wait for the remote process to end (or for its activity to go to background). + While waiting, periodically retrieve the process output and print it. + If the process is still running but no output is received in *timeout* + seconds, return False; else, once the process exits/goes to background, + return True. + """ + self.log_buffer = "" + self.stdout_len = 0 + + timer = 0 + output_timer = 0 + interval = 10 + status = True + top = self.app_name + + # wait for log creation on startup + retries = 0 + while retries < 20 and not self.device.is_file(self.remote_log_file): + retries += 1 + time.sleep(1) + if self.device.is_file(self.remote_log_file): + # We must change the remote log's permissions so that the shell can read it. + self.device.chmod(self.remote_log_file, mask="666") + else: + self.log.warning( + "Failed wait for remote log: %s missing?" 
% self.remote_log_file + ) + + while top == self.app_name: + has_output = self.read_stdout() + if has_output: + output_timer = 0 + if self.counts["pass"] > 0: + interval = 0.5 + time.sleep(interval) + timer += interval + output_timer += interval + if timeout and output_timer > timeout: + status = False + break + if not has_output: + top = self.device.get_top_activity(timeout=60) + if top is None: + self.log.info("Failed to get top activity, retrying, once...") + top = self.device.get_top_activity(timeout=60) + + # Flush anything added to stdout during the sleep + self.read_stdout() + self.log.info("wait for %s complete; top activity=%s" % (self.app_name, top)) + if top == self.app_name: + self.log.info("%s unexpectedly found running. Killing..." % self.app_name) + self.kill() + if not status: + self.log.error( + "TEST-UNEXPECTED-FAIL | %s | " + "application timed out after %d seconds with no output" + % (self.last_test_seen, int(timeout)) + ) + return status + + def kill(self): + """ + End a troublesome remote process: Trigger ANR and breakpad dumps, then + force the application to end. + """ + + # Trigger an ANR report with "kill -3" (SIGQUIT) + try: + self.device.pkill(self.app_name, sig=3, attempts=1) + except ADBTimeoutError: + raise + except: # NOQA: E722 + pass + time.sleep(3) + + # Trigger a breakpad dump with "kill -6" (SIGABRT) + try: + self.device.pkill(self.app_name, sig=6, attempts=1) + except ADBTimeoutError: + raise + except: # NOQA: E722 + pass + + # Wait for process to end + retries = 0 + while retries < 3: + if self.device.process_exist(self.app_name): + self.log.info( + "%s still alive after SIGABRT: waiting..." % self.app_name + ) + time.sleep(5) + else: + break + retries += 1 + if self.device.process_exist(self.app_name): + try: + self.device.pkill(self.app_name, sig=9, attempts=1) + except ADBTimeoutError: + raise + except: # NOQA: E722 + self.log.error("%s still alive after SIGKILL!" % self.app_name) + if self.device.process_exist(self.app_name): + self.device.stop_application(self.app_name) + + # Test harnesses use the MOZ_CRASHREPORTER environment variables to suppress + # the interactive crash reporter, but that may not always be effective; + # check for and cleanup errant crashreporters. + crashreporter = "%s.CrashReporter" % self.app_name + if self.device.process_exist(crashreporter): + self.log.warning( + "%s unexpectedly found running. Killing..." % crashreporter + ) + try: + self.device.pkill(crashreporter) + except ADBTimeoutError: + raise + except: # NOQA: E722 + pass + if self.device.process_exist(crashreporter): + self.log.error("%s still running!!" % crashreporter) + + @staticmethod + def elf_arm(filename): + """ + Determine if the specified file is an ARM binary. + """ + data = open(filename, "rb").read(20) + return data[:4] == "\x7fELF" and ord(data[18]) == 40 # EM_ARM diff --git a/testing/mozbase/mozdevice/mozdevice/version_codes.py b/testing/mozbase/mozdevice/mozdevice/version_codes.py new file mode 100644 index 0000000000..c1d56c7b84 --- /dev/null +++ b/testing/mozbase/mozdevice/mozdevice/version_codes.py @@ -0,0 +1,70 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +""" +VERSION CODES of the android releases. + +See http://developer.android.com/reference/android/os/Build.VERSION_CODES.html. 
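One note on the elf_arm() helper above: under Python 3, read() on a file opened in binary mode returns bytes, so the str comparison and the ord() call no longer behave as intended. A bytes-aware variant, offered only as a sketch:

def elf_arm_py3(filename):
    with open(filename, "rb") as f:
        data = f.read(20)
    # b"\x7fELF" magic, then the low byte of e_machine at offset 18 (EM_ARM == 40).
    return len(data) >= 19 and data[:4] == b"\x7fELF" and data[18] == 40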
+""" +# Magic version number for a current development build, which has +# not yet turned into an official release. +CUR_DEVELOPMENT = 10000 + +# October 2008: The original, first, version of Android +BASE = 1 +# February 2009: First Android update, officially called 1.1 +BASE_1_1 = 2 +# May 2009: Android 1.5 +CUPCAKE = 3 +# September 2009: Android 1.6 +DONUT = 4 +# November 2009: Android 2.0 +ECLAIR = 5 +# December 2009: Android 2.0.1 +ECLAIR_0_1 = 6 +# January 2010: Android 2.1 +ECLAIR_MR1 = 7 +# June 2010: Android 2.2 +FROYO = 8 +# November 2010: Android 2.3 +GINGERBREAD = 9 +# February 2011: Android 2.3.3 +GINGERBREAD_MR1 = 10 +# February 2011: Android 3.0 +HONEYCOMB = 11 +# May 2011: Android 3.1 +HONEYCOMB_MR1 = 12 +# June 2011: Android 3.2 +HONEYCOMB_MR2 = 13 +# October 2011: Android 4.0 +ICE_CREAM_SANDWICH = 14 +# December 2011: Android 4.0.3 +ICE_CREAM_SANDWICH_MR1 = 15 +# June 2012: Android 4.1 +JELLY_BEAN = 16 +# November 2012: Android 4.2 +JELLY_BEAN_MR1 = 17 +# July 2013: Android 4.3 +JELLY_BEAN_MR2 = 18 +# October 2013: Android 4.4 +KITKAT = 19 +# Android 4.4W +KITKAT_WATCH = 20 +# Lollilop +LOLLIPOP = 21 +LOLLIPOP_MR1 = 22 +# Marshmallow +M = 23 +# Nougat +N = 24 +N_MR1 = 25 +# Oreo +O = 26 +O_MR1 = 27 +# Pie +P = 28 +# 10 +Q = 29 +# 11 +R = 30 diff --git a/testing/mozbase/mozdevice/setup.cfg b/testing/mozbase/mozdevice/setup.cfg new file mode 100644 index 0000000000..2a9acf13da --- /dev/null +++ b/testing/mozbase/mozdevice/setup.cfg @@ -0,0 +1,2 @@ +[bdist_wheel] +universal = 1 diff --git a/testing/mozbase/mozdevice/setup.py b/testing/mozbase/mozdevice/setup.py new file mode 100644 index 0000000000..91ce63d9f6 --- /dev/null +++ b/testing/mozbase/mozdevice/setup.py @@ -0,0 +1,34 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +from setuptools import setup + +PACKAGE_NAME = "mozdevice" +PACKAGE_VERSION = "4.1.1" + +deps = ["mozlog >= 6.0"] + +setup( + name=PACKAGE_NAME, + version=PACKAGE_VERSION, + description="Mozilla-authored device management", + long_description="see https://firefox-source-docs.mozilla.org/mozbase/index.html", + classifiers=[ + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3.5", + ], + # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers + keywords="", + author="Mozilla Automation and Testing Team", + author_email="tools@lists.mozilla.org", + url="https://wiki.mozilla.org/Auto-tools/Projects/Mozbase", + license="MPL", + packages=["mozdevice"], + include_package_data=True, + zip_safe=False, + install_requires=deps, + entry_points=""" + # -*- Entry points: -*- + """, +) diff --git a/testing/mozbase/mozdevice/tests/conftest.py b/testing/mozbase/mozdevice/tests/conftest.py new file mode 100644 index 0000000000..831090a428 --- /dev/null +++ b/testing/mozbase/mozdevice/tests/conftest.py @@ -0,0 +1,236 @@ +import sys +from random import randint, seed +from unittest.mock import patch + +import mozdevice +import pytest +from six import StringIO + +# set up required module-level variables/objects +seed(1488590) + + +def random_tcp_port(): + """Returns a pseudo-random integer generated from a seed. + + :returns: int: pseudo-randomly generated integer + """ + return randint(8000, 12000) + + +@pytest.fixture(autouse=True) +def mock_command_output(monkeypatch): + """Monkeypatches the ADBDevice.command_output() method call. 
+ + Instead of calling the concrete method implemented in adb.py::ADBDevice, + this method simply returns a string representation of the command that was + received. + + As an exception, if the command begins with "forward tcp:0 ", this method + returns a mock port number. + + :param object monkeypatch: pytest provided fixture for mocking. + """ + + def command_output_wrapper(object, cmd, timeout): + """Actual monkeypatch implementation of the command_output method call. + + :param object object: placeholder object representing ADBDevice + :param str cmd: command to be executed + :param timeout: unused parameter to represent timeout threshold + :returns: string - string representation of command to be executed + int - mock port number (only used when cmd begins with "forward tcp:0 ") + """ + + if cmd[0] == "forward" and cmd[1] == "tcp:0": + return 7777 + + print(str(cmd)) + return str(cmd) + + monkeypatch.setattr(mozdevice.ADBDevice, "command_output", command_output_wrapper) + + +@pytest.fixture(autouse=True) +def mock_shell_output(monkeypatch): + """Monkeypatches the ADBDevice.shell_output() method call. + + Instead of returning the output of an adb call, this method will + return appropriate string output. Content of the string output is + in line with the calling method's expectations. + + :param object monkeypatch: pytest provided fixture for mocking. + """ + + def shell_output_wrapper( + object, cmd, env=None, cwd=None, timeout=None, enable_run_as=False + ): + """Actual monkeypatch implementation of the shell_output method call. + + :param object object: placeholder object representing ADBDevice + :param str cmd: command to be executed + :param env: contains the environment variable + :type env: dict or None + :param cwd: The directory from which to execute. + :type cwd: str or None + :param timeout: unused parameter tp represent timeout threshold + :param enable_run_as: bool determining if run_as is to be used + :returns: string - string representation of a simulated call to adb + """ + if "pm list package error" in cmd: + return "Error: Could not access the Package Manager" + elif "pm list package none" in cmd: + return "" + elif "pm list package" in cmd: + apps = ["org.mozilla.fennec", "org.mozilla.geckoview_example"] + return ("package:{}\n" * len(apps)).format(*apps) + else: + print(str(cmd)) + return str(cmd) + + monkeypatch.setattr(mozdevice.ADBDevice, "shell_output", shell_output_wrapper) + + +@pytest.fixture(autouse=True) +def mock_is_path_internal_storage(monkeypatch): + """Monkeypatches the ADBDevice.is_path_internal_storage() method call. + + Instead of returning the outcome of whether the path provided is + internal storage or external, this will always return True. + + :param object monkeypatch: pytest provided fixture for mocking. + """ + + def is_path_internal_storage_wrapper(object, path, timeout=None): + """Actual monkeypatch implementation of the is_path_internal_storage() call. + + :param str path: The path to test. + :param timeout: The maximum time in + seconds for any spawned adb process to complete before + throwing an ADBTimeoutError. This timeout is per adb call. The + total time spent may exceed this value. If it is not + specified, the value set in the ADBDevice constructor is used. 
+ :returns: boolean + + :raises: * ADBTimeoutError + * ADBError + """ + if "internal_storage" in path: + return True + return False + + monkeypatch.setattr( + mozdevice.ADBDevice, + "is_path_internal_storage", + is_path_internal_storage_wrapper, + ) + + +@pytest.fixture(autouse=True) +def mock_enable_run_as_for_path(monkeypatch): + """Monkeypatches the ADBDevice.enable_run_as_for_path(path) method. + + Always return True + + :param object monkeypatch: pytest provided fixture for mocking. + """ + + def enable_run_as_for_path_wrapper(object, path): + """Actual monkeypatch implementation of the enable_run_as_for_path() call. + + :param str path: The path to test. + :returns: boolean + """ + return True + + monkeypatch.setattr( + mozdevice.ADBDevice, "enable_run_as_for_path", enable_run_as_for_path_wrapper + ) + + +@pytest.fixture(autouse=True) +def mock_shell_bool(monkeypatch): + """Monkeypatches the ADBDevice.shell_bool() method call. + + Instead of returning the output of an adb call, this method will + return appropriate string output. Content of the string output is + in line with the calling method's expectations. + + :param object monkeypatch: pytest provided fixture for mocking. + """ + + def shell_bool_wrapper( + object, cmd, env=None, cwd=None, timeout=None, enable_run_as=False + ): + """Actual monkeypatch implementation of the shell_bool method call. + + :param object object: placeholder object representing ADBDevice + :param str cmd: command to be executed + :param env: contains the environment variable + :type env: dict or None + :param cwd: The directory from which to execute. + :type cwd: str or None + :param timeout: unused parameter tp represent timeout threshold + :param enable_run_as: bool determining if run_as is to be used + :returns: string - string representation of a simulated call to adb + """ + print(cmd) + return str(cmd) + + monkeypatch.setattr(mozdevice.ADBDevice, "shell_bool", shell_bool_wrapper) + + +@pytest.fixture(autouse=True) +def mock_adb_object(): + """Patches the __init__ method call when instantiating ADBDevice. + + ADBDevice normally requires instantiated objects in order to execute + its commands. + + With a pytest-mock patch, we are able to mock the initialization of + the ADBDevice object. By yielding the instantiated mock object, + unit tests can be run that call methods that require an instantiated + object. + + :yields: ADBDevice - mock instance of ADBDevice object + """ + with patch.object(mozdevice.ADBDevice, "__init__", lambda self: None): + yield mozdevice.ADBDevice() + + +@pytest.fixture +def redirect_stdout_and_assert(): + """Redirects the stdout pipe temporarily to a StringIO stream. + + This is useful to assert on methods that do not return + a value, such as most ADBDevice methods. + + The original stdout pipe is preserved throughout the process. + + :returns: _wrapper method + """ + + def _wrapper(func, **kwargs): + """Implements the stdout sleight-of-hand. + + After preserving the original sys.stdout, it is switched + to use cStringIO.StringIO. + + Method with no return value is called, and the stdout + pipe is switched back to the original sys.stdout. + + The expected outcome is received as part of the kwargs. + This is asserted against a sanitized output from the method + under test. 
+ + :param object func: method under test + :param dict kwargs: dictionary of function parameters + """ + original_stdout = sys.stdout + sys.stdout = testing_stdout = StringIO() + expected_text = kwargs.pop("text") + func(**kwargs) + sys.stdout = original_stdout + assert expected_text in testing_stdout.getvalue().rstrip() + + return _wrapper diff --git a/testing/mozbase/mozdevice/tests/manifest.toml b/testing/mozbase/mozdevice/tests/manifest.toml new file mode 100644 index 0000000000..22b338ca95 --- /dev/null +++ b/testing/mozbase/mozdevice/tests/manifest.toml @@ -0,0 +1,10 @@ +[DEFAULT] +subsuite = "mozbase" + +["test_chown.py"] + +["test_escape_command_line.py"] + +["test_is_app_installed.py"] + +["test_socket_connection.py"] diff --git a/testing/mozbase/mozdevice/tests/test_chown.py b/testing/mozbase/mozdevice/tests/test_chown.py new file mode 100644 index 0000000000..1bbfcc5d8e --- /dev/null +++ b/testing/mozbase/mozdevice/tests/test_chown.py @@ -0,0 +1,67 @@ +#!/usr/bin/env python + +import logging +from unittest.mock import patch + +import mozunit +import pytest + + +@pytest.mark.parametrize("boolean_value", [True, False]) +def test_set_chown_r_attribute( + mock_adb_object, redirect_stdout_and_assert, boolean_value +): + mock_adb_object._chown_R = boolean_value + assert mock_adb_object._chown_R == boolean_value + + +def test_chown_path_internal(mock_adb_object, redirect_stdout_and_assert): + """Tests whether attempt to chown internal path is ignored""" + with patch.object(logging, "getLogger") as mock_log: + mock_adb_object._logger = mock_log + + testing_parameters = { + "owner": "someuser", + "path": "internal_storage", + } + expected = "Ignoring attempt to chown external storage" + mock_adb_object.chown(**testing_parameters) + assert "".join(mock_adb_object._logger.method_calls[0][1]) != "" + assert "".join(mock_adb_object._logger.method_calls[0][1]) == expected + + +def test_chown_one_path(mock_adb_object, redirect_stdout_and_assert): + """Tests the path where only one path is provided.""" + # set up mock logging and self._chown_R attribute. + with patch.object(logging, "getLogger") as mock_log: + mock_adb_object._logger = mock_log + mock_adb_object._chown_R = True + + testing_parameters = { + "owner": "someuser", + "path": "/system", + } + command = "chown {owner} {path}".format(**testing_parameters) + testing_parameters["text"] = command + redirect_stdout_and_assert(mock_adb_object.chown, **testing_parameters) + + +def test_chown_one_path_with_group(mock_adb_object, redirect_stdout_and_assert): + """Tests the path where group is provided.""" + # set up mock logging and self._chown_R attribute. 
+ with patch.object(logging, "getLogger") as mock_log: + mock_adb_object._logger = mock_log + mock_adb_object._chown_R = True + + testing_parameters = { + "owner": "someuser", + "path": "/system", + "group": "group_2", + } + command = "chown {owner}.{group} {path}".format(**testing_parameters) + testing_parameters["text"] = command + redirect_stdout_and_assert(mock_adb_object.chown, **testing_parameters) + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/mozdevice/tests/test_escape_command_line.py b/testing/mozbase/mozdevice/tests/test_escape_command_line.py new file mode 100644 index 0000000000..112dd936c5 --- /dev/null +++ b/testing/mozbase/mozdevice/tests/test_escape_command_line.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python + +import mozunit + + +def test_escape_command_line(mock_adb_object, redirect_stdout_and_assert): + """Test _escape_command_line.""" + cases = { + # expected output : test input + "adb shell ls -l": ["adb", "shell", "ls", "-l"], + "adb shell 'ls -l'": ["adb", "shell", "ls -l"], + "-e 'if (true)'": ["-e", "if (true)"], + "-e 'if (x === \"hello\")'": ["-e", 'if (x === "hello")'], + "-e 'if (x === '\"'\"'hello'\"'\"')'": ["-e", "if (x === 'hello')"], + } + for expected, input in cases.items(): + assert mock_adb_object._escape_command_line(input) == expected + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/mozdevice/tests/test_is_app_installed.py b/testing/mozbase/mozdevice/tests/test_is_app_installed.py new file mode 100644 index 0000000000..a51836bc02 --- /dev/null +++ b/testing/mozbase/mozdevice/tests/test_is_app_installed.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python + +import mozunit +import pytest +from mozdevice import ADBError + + +def test_is_app_installed(mock_adb_object): + """Tests that is_app_installed returns True if app is installed.""" + assert mock_adb_object.is_app_installed("org.mozilla.geckoview_example") + + +def test_is_app_installed_not_installed(mock_adb_object): + """Tests that is_app_installed returns False if provided app_name + does not resolve.""" + assert not mock_adb_object.is_app_installed("some_random_name") + + +def test_is_app_installed_partial_name(mock_adb_object): + """Tests that is_app_installed returns False if provided app_name + is only a partial match.""" + assert not mock_adb_object.is_app_installed("fennec") + + +def test_is_app_installed_package_manager_error(mock_adb_object): + """Tests that is_app_installed is able to raise an exception.""" + with pytest.raises(ADBError): + mock_adb_object.is_app_installed("error") + + +def test_is_app_installed_no_installed_package_found(mock_adb_object): + """Tests that is_app_installed is able to handle scenario + where no installed packages are found.""" + assert not mock_adb_object.is_app_installed("none") + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/mozdevice/tests/test_socket_connection.py b/testing/mozbase/mozdevice/tests/test_socket_connection.py new file mode 100644 index 0000000000..1182737546 --- /dev/null +++ b/testing/mozbase/mozdevice/tests/test_socket_connection.py @@ -0,0 +1,124 @@ +#!/usr/bin/env python + +import mozunit +import pytest +from conftest import random_tcp_port + + +@pytest.fixture(params=["tcp:{}".format(random_tcp_port()) for _ in range(5)]) +def select_test_port(request): + """Generate a list of ports to be used for testing.""" + yield request.param + + +def test_list_socket_connections_reverse(mock_adb_object): + assert [("['reverse',", "'--list']")] == 
mock_adb_object.list_socket_connections( + "reverse" + ) + + +def test_list_socket_connections_forward(mock_adb_object): + assert [("['forward',", "'--list']")] == mock_adb_object.list_socket_connections( + "forward" + ) + + +def test_create_socket_connection_reverse( + mock_adb_object, select_test_port, redirect_stdout_and_assert +): + _expected = "['reverse', '{0}', '{0}']".format(select_test_port) + redirect_stdout_and_assert( + mock_adb_object.create_socket_connection, + direction="reverse", + local=select_test_port, + remote=select_test_port, + text=_expected, + ) + + +def test_create_socket_connection_forward( + mock_adb_object, select_test_port, redirect_stdout_and_assert +): + _expected = "['forward', '{0}', '{0}']".format(select_test_port) + redirect_stdout_and_assert( + mock_adb_object.create_socket_connection, + direction="forward", + local=select_test_port, + remote=select_test_port, + text=_expected, + ) + + +def test_create_socket_connection_forward_adb_assigned_port( + mock_adb_object, select_test_port +): + result = mock_adb_object.create_socket_connection( + direction="forward", local="tcp:0", remote=select_test_port + ) + assert isinstance(result, int) and result == 7777 + + +def test_remove_socket_connections_reverse(mock_adb_object, redirect_stdout_and_assert): + _expected = "['reverse', '--remove-all']" + redirect_stdout_and_assert( + mock_adb_object.remove_socket_connections, direction="reverse", text=_expected + ) + + +def test_remove_socket_connections_forward(mock_adb_object, redirect_stdout_and_assert): + _expected = "['forward', '--remove-all']" + redirect_stdout_and_assert( + mock_adb_object.remove_socket_connections, direction="forward", text=_expected + ) + + +def test_legacy_forward(mock_adb_object, select_test_port, redirect_stdout_and_assert): + _expected = "['forward', '{0}', '{0}']".format(select_test_port) + redirect_stdout_and_assert( + mock_adb_object.forward, + local=select_test_port, + remote=select_test_port, + text=_expected, + ) + + +def test_legacy_forward_adb_assigned_port(mock_adb_object, select_test_port): + result = mock_adb_object.forward(local="tcp:0", remote=select_test_port) + assert isinstance(result, int) and result == 7777 + + +def test_legacy_reverse(mock_adb_object, select_test_port, redirect_stdout_and_assert): + _expected = "['reverse', '{0}', '{0}']".format(select_test_port) + redirect_stdout_and_assert( + mock_adb_object.reverse, + local=select_test_port, + remote=select_test_port, + text=_expected, + ) + + +def test_validate_port_invalid_prefix(mock_adb_object): + with pytest.raises(ValueError): + mock_adb_object._validate_port("{}".format("invalid"), is_local=True) + + +@pytest.mark.xfail +def test_validate_port_non_numerical_port_identifier(mock_adb_object): + with pytest.raises(AttributeError): + mock_adb_object._validate_port( + "{}".format("tcp:this:is:not:a:number"), is_local=True + ) + + +def test_validate_port_identifier_length_short(mock_adb_object): + with pytest.raises(ValueError): + mock_adb_object._validate_port("{}".format("tcp"), is_local=True) + + +def test_validate_direction(mock_adb_object): + with pytest.raises(ValueError): + mock_adb_object._validate_direction("{}".format("bad direction")) + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/mozfile/mozfile/__init__.py b/testing/mozbase/mozfile/mozfile/__init__.py new file mode 100644 index 0000000000..5d45755ac7 --- /dev/null +++ b/testing/mozbase/mozfile/mozfile/__init__.py @@ -0,0 +1,6 @@ +# flake8: noqa +# This Source Code Form is 
subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +from .mozfile import * diff --git a/testing/mozbase/mozfile/mozfile/mozfile.py b/testing/mozbase/mozfile/mozfile/mozfile.py new file mode 100644 index 0000000000..892f8ee20f --- /dev/null +++ b/testing/mozbase/mozfile/mozfile/mozfile.py @@ -0,0 +1,691 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# We don't import all modules at the top for performance reasons. See Bug 1008943 + +import errno +import os +import re +import stat +import sys +import time +import warnings +from contextlib import contextmanager +from textwrap import dedent + +from six.moves import urllib + +__all__ = [ + "extract_tarball", + "extract_zip", + "extract", + "is_url", + "load", + "load_source", + "copy_contents", + "match", + "move", + "remove", + "rmtree", + "tree", + "which", + "NamedTemporaryFile", + "TemporaryDirectory", +] + +# utilities for extracting archives + + +def extract_tarball(src, dest, ignore=None): + """extract a .tar file""" + + import tarfile + + def _is_within_directory(directory, target): + real_directory = os.path.realpath(directory) + real_target = os.path.realpath(target) + prefix = os.path.commonprefix([real_directory, real_target]) + return prefix == real_directory + + with tarfile.open(src) as bundle: + namelist = [] + + for m in bundle: + # Mitigation for CVE-2007-4559, Python's tarfile library will allow + # writing files outside of the intended destination. + member_path = os.path.join(dest, m.name) + if not _is_within_directory(dest, member_path): + raise RuntimeError( + dedent( + f""" + Tar bundle '{src}' may be maliciously crafted to escape the destination! + The following path was detected: + + {m.name} + """ + ) + ) + if m.issym(): + link_path = os.path.join(os.path.dirname(member_path), m.linkname) + if not _is_within_directory(dest, link_path): + raise RuntimeError( + dedent( + f""" + Tar bundle '{src}' may be maliciously crafted to escape the destination! + The following path was detected: + + {m.name} + """ + ) + ) + + if m.mode & (stat.S_ISUID | stat.S_ISGID): + raise RuntimeError( + dedent( + f""" + Tar bundle '{src}' may be maliciously crafted to setuid/setgid! + The following path was detected: + + {m.name} + """ + ) + ) + + if ignore and any(match(m.name, i) for i in ignore): + continue + bundle.extract(m, path=dest) + namelist.append(m.name) + + return namelist + + +def extract_zip(src, dest, ignore=None): + """extract a zip file""" + + import zipfile + + if isinstance(src, zipfile.ZipFile): + bundle = src + else: + try: + bundle = zipfile.ZipFile(src) + except Exception: + print("src: %s" % src) + raise + + namelist = bundle.namelist() + + for name in namelist: + if ignore and any(match(name, i) for i in ignore): + continue + + bundle.extract(name, dest) + filename = os.path.realpath(os.path.join(dest, name)) + mode = bundle.getinfo(name).external_attr >> 16 & 0x1FF + # Only update permissions if attributes are set. Otherwise fallback to the defaults. 
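+ # (external_attr stores the Unix st_mode in its upper 16 bits; the 0x1FF
+ # mask keeps just the rwx permission bits.)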
+ if mode: + os.chmod(filename, mode) + bundle.close() + return namelist + + +def extract(src, dest=None, ignore=None): + """ + Takes in a tar or zip file and extracts it to dest + + If dest is not specified, extracts to os.path.dirname(src) + + Returns the list of top level files that were extracted + """ + + import tarfile + import zipfile + + assert os.path.exists(src), "'%s' does not exist" % src + + if dest is None: + dest = os.path.dirname(src) + elif not os.path.isdir(dest): + os.makedirs(dest) + assert not os.path.isfile(dest), "dest cannot be a file" + + if tarfile.is_tarfile(src): + namelist = extract_tarball(src, dest, ignore=ignore) + elif zipfile.is_zipfile(src): + namelist = extract_zip(src, dest, ignore=ignore) + else: + raise Exception("mozfile.extract: no archive format found for '%s'" % src) + + # namelist returns paths with forward slashes even in windows + top_level_files = [ + os.path.join(dest, name.rstrip("/")) + for name in namelist + if len(name.rstrip("/").split("/")) == 1 + ] + + # namelist doesn't include folders, append these to the list + for name in namelist: + index = name.find("/") + if index != -1: + root = os.path.join(dest, name[:index]) + if root not in top_level_files: + top_level_files.append(root) + + return top_level_files + + +# utilities for removal of files and directories + + +def rmtree(dir): + """Deprecated wrapper method to remove a directory tree. + + Ensure to update your code to use mozfile.remove() directly + + :param dir: directory to be removed + """ + + warnings.warn( + "mozfile.rmtree() is deprecated in favor of mozfile.remove()", + PendingDeprecationWarning, + stacklevel=2, + ) + return remove(dir) + + +def _call_windows_retry(func, args=(), retry_max=5, retry_delay=0.5): + """ + It's possible to see spurious errors on Windows due to various things + keeping a handle to the directory open (explorer, virus scanners, etc) + So we try a few times if it fails with a known error. + retry_delay is multiplied by the number of failed attempts to increase + the likelihood of success in subsequent attempts. + """ + retry_count = 0 + while True: + try: + func(*args) + except OSError as e: + # Error codes are defined in: + # http://docs.python.org/2/library/errno.html#module-errno + if e.errno not in (errno.EACCES, errno.ENOTEMPTY): + raise + + if retry_count == retry_max: + raise + + retry_count += 1 + + print( + '%s() failed for "%s". Reason: %s (%s). Retrying...' + % (func.__name__, args, e.strerror, e.errno) + ) + time.sleep(retry_count * retry_delay) + else: + # If no exception has been thrown it should be done + break + + +def remove(path): + """Removes the specified file, link, or directory tree. + + This is a replacement for shutil.rmtree that works better under + windows. It does the following things: + + - check path access for the current user before trying to remove + - retry operations on some known errors due to various things keeping + a handle on file paths - like explorer, virus scanners, etc. The + known errors are errno.EACCES and errno.ENOTEMPTY, and it will + retry up to 5 five times with a delay of (failed_attempts * 0.5) seconds + between each attempt. + + Note that no error will be raised if the given path does not exists. 
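+
+ Example usage (the path below is hypothetical)::
+
+ mozfile.remove("/tmp/scratch_profile")  # hypothetical path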
+ + :param path: path to be removed + """ + + import shutil + + def _call_with_windows_retry(*args, **kwargs): + try: + _call_windows_retry(*args, **kwargs) + except OSError as e: + # The file or directory to be removed doesn't exist anymore + if e.errno != errno.ENOENT: + raise + + def _update_permissions(path): + """Sets specified pemissions depending on filetype""" + if os.path.islink(path): + # Path is a symlink which we don't have to modify + # because it should already have all the needed permissions + return + + stats = os.stat(path) + + if os.path.isfile(path): + mode = stats.st_mode | stat.S_IWUSR + elif os.path.isdir(path): + mode = stats.st_mode | stat.S_IWUSR | stat.S_IXUSR + else: + # Not supported type + return + + _call_with_windows_retry(os.chmod, (path, mode)) + + if not os.path.lexists(path): + return + + """ + On Windows, adds '\\\\?\\' to paths which match ^[A-Za-z]:\\.* to access + files or directories that exceed MAX_PATH(260) limitation or that ends + with a period. + """ + if ( + sys.platform in ("win32", "cygwin") + and len(path) >= 3 + and path[1] == ":" + and path[2] == "\\" + ): + path = "\\\\?\\%s" % path + + if os.path.isfile(path) or os.path.islink(path): + # Verify the file or link is read/write for the current user + _update_permissions(path) + _call_with_windows_retry(os.remove, (path,)) + + elif os.path.isdir(path): + # Verify the directory is read/write/execute for the current user + _update_permissions(path) + + # We're ensuring that every nested item has writable permission. + for root, dirs, files in os.walk(path): + for entry in dirs + files: + _update_permissions(os.path.join(root, entry)) + _call_with_windows_retry(shutil.rmtree, (path,)) + + +def copy_contents(srcdir, dstdir, ignore_dangling_symlinks=False): + """ + Copy the contents of the srcdir into the dstdir, preserving + subdirectories. + + If an existing file of the same name exists in dstdir, it will be overwritten. + """ + import shutil + + # dirs_exist_ok was introduced in Python 3.8 + # On earlier versions, or Windows, use the verbose mechanism. + # We use it on Windows because _call_with_windows_retry doesn't allow + # named arguments to be passed. + if (sys.version_info.major < 3 or sys.version_info.minor < 8) or (os.name == "nt"): + names = os.listdir(srcdir) + if not os.path.isdir(dstdir): + os.makedirs(dstdir) + errors = [] + for name in names: + srcname = os.path.join(srcdir, name) + dstname = os.path.join(dstdir, name) + try: + if os.path.islink(srcname): + linkto = os.readlink(srcname) + os.symlink(linkto, dstname) + elif os.path.isdir(srcname): + copy_contents(srcname, dstname) + else: + _call_windows_retry(shutil.copy2, (srcname, dstname)) + except OSError as why: + errors.append((srcname, dstname, str(why))) + except Exception as err: + errors.extend(err) + try: + _call_windows_retry(shutil.copystat, (srcdir, dstdir)) + except OSError as why: + if why.winerror is None: + errors.extend((srcdir, dstdir, str(why))) + if errors: + raise Exception(errors) + else: + shutil.copytree( + srcdir, + dstdir, + dirs_exist_ok=True, + ignore_dangling_symlinks=ignore_dangling_symlinks, + ) + + +def move(src, dst): + """ + Move a file or directory path. + + This is a replacement for shutil.move that works better under windows, + retrying operations on some known errors due to various things keeping + a handle on file paths. 
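+
+ Example usage (paths are hypothetical)::
+
+ mozfile.move("logs/run.log", "logs/run.log.bak")  # hypothetical paths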
+ """ + import shutil + + _call_windows_retry(shutil.move, (src, dst)) + + +def depth(directory): + """returns the integer depth of a directory or path relative to '/'""" + + directory = os.path.abspath(directory) + level = 0 + while True: + directory, remainder = os.path.split(directory) + level += 1 + if not remainder: + break + return level + + +def tree(directory, sort_key=lambda x: x.lower()): + """Display tree directory structure for `directory`.""" + vertical_line = "│" + item_marker = "├" + last_child = "└" + + retval = [] + indent = [] + last = {} + top = depth(directory) + + for dirpath, dirnames, filenames in os.walk(directory, topdown=True): + abspath = os.path.abspath(dirpath) + basename = os.path.basename(abspath) + parent = os.path.dirname(abspath) + level = depth(abspath) - top + + # sort articles of interest + for resource in (dirnames, filenames): + resource[:] = sorted(resource, key=sort_key) + + if level > len(indent): + indent.append(vertical_line) + indent = indent[:level] + + if dirnames: + files_end = item_marker + last[abspath] = dirnames[-1] + else: + files_end = last_child + + if last.get(parent) == os.path.basename(abspath): + # last directory of parent + dirpath_mark = last_child + indent[-1] = " " + elif not indent: + dirpath_mark = "" + else: + dirpath_mark = item_marker + + # append the directory and piece of tree structure + # if the top-level entry directory, print as passed + retval.append( + "%s%s%s" + % ("".join(indent[:-1]), dirpath_mark, basename if retval else directory) + ) + # add the files + if filenames: + last_file = filenames[-1] + retval.extend( + [ + ( + "%s%s%s" + % ( + "".join(indent), + files_end if filename == last_file else item_marker, + filename, + ) + ) + for index, filename in enumerate(filenames) + ] + ) + + return "\n".join(retval) + + +def which(cmd, mode=os.F_OK | os.X_OK, path=None, exts=None, extra_search_dirs=()): + """A wrapper around `shutil.which` to make the behavior on Windows + consistent with other platforms. + + On non-Windows platforms, this is a direct call to `shutil.which`. On + Windows, this: + + * Ensures that `cmd` without an extension will be found. Previously it was + only found if it had an extension in `PATHEXT`. + * Ensures the absolute path to the binary is returned. Previously if the + binary was found in `cwd`, a relative path was returned. + * Checks the Windows registry if shutil.which doesn't come up with anything. + + The arguments are the same as the ones in `shutil.which`. In addition there + is an `exts` argument that only has an effect on Windows. This is used to + set a custom value for PATHEXT and is formatted as a list of file + extensions. + + extra_search_dirs is a convenience argument. If provided, the strings in + the sequence will be appended to the END of the given `path`. + """ + from shutil import which as shutil_which + + if isinstance(path, (list, tuple)): + path = os.pathsep.join(path) + + if not path: + path = os.environ.get("PATH", os.defpath) + + if extra_search_dirs: + path = os.pathsep.join([path] + list(extra_search_dirs)) + + if sys.platform != "win32": + return shutil_which(cmd, mode=mode, path=path) + + oldexts = os.environ.get("PATHEXT", "") + if not exts: + exts = oldexts.split(os.pathsep) + + # This ensures that `cmd` without any extensions will be found. + # See: https://bugs.python.org/issue31405 + if "." 
not in exts: + exts.append(".") + + os.environ["PATHEXT"] = os.pathsep.join(exts) + try: + path = shutil_which(cmd, mode=mode, path=path) + if path: + return os.path.abspath(path.rstrip(".")) + finally: + if oldexts: + os.environ["PATHEXT"] = oldexts + else: + del os.environ["PATHEXT"] + + # If we've gotten this far, we need to check for registered executables + # before giving up. + try: + import winreg + except ImportError: + import _winreg as winreg + if not cmd.lower().endswith(".exe"): + cmd += ".exe" + try: + ret = winreg.QueryValue( + winreg.HKEY_LOCAL_MACHINE, + r"SOFTWARE\Microsoft\Windows\CurrentVersion\App Paths\%s" % cmd, + ) + return os.path.abspath(ret) if ret else None + except winreg.error: + return None + + +# utilities for temporary resources + + +class NamedTemporaryFile(object): + """ + Like tempfile.NamedTemporaryFile except it works on Windows + in the case where you open the created file a second time. + + This behaves very similarly to tempfile.NamedTemporaryFile but may + not behave exactly the same. For example, this function does not + prevent fd inheritance by children. + + Example usage: + + with NamedTemporaryFile() as fh: + fh.write(b'foobar') + + print('Filename: %s' % fh.name) + + see https://bugzilla.mozilla.org/show_bug.cgi?id=821362 + """ + + def __init__( + self, mode="w+b", bufsize=-1, suffix="", prefix="tmp", dir=None, delete=True + ): + import tempfile + + fd, path = tempfile.mkstemp(suffix, prefix, dir, "t" in mode) + os.close(fd) + + self.file = open(path, mode) + self._path = path + self._delete = delete + self._unlinked = False + + def __getattr__(self, k): + return getattr(self.__dict__["file"], k) + + def __iter__(self): + return self.__dict__["file"] + + def __enter__(self): + self.file.__enter__() + return self + + def __exit__(self, exc, value, tb): + self.file.__exit__(exc, value, tb) + if self.__dict__["_delete"]: + os.unlink(self.__dict__["_path"]) + self._unlinked = True + + def __del__(self): + if self.__dict__["_unlinked"]: + return + self.file.__exit__(None, None, None) + if self.__dict__["_delete"]: + os.unlink(self.__dict__["_path"]) + + +@contextmanager +def TemporaryDirectory(): + """ + create a temporary directory using tempfile.mkdtemp, and then clean it up. + + Example usage: + with TemporaryDirectory() as tmp: + open(os.path.join(tmp, "a_temp_file"), "w").write("data") + + """ + + import shutil + import tempfile + + tempdir = tempfile.mkdtemp() + try: + yield tempdir + finally: + shutil.rmtree(tempdir) + + +# utilities dealing with URLs + + +def is_url(thing): + """ + Return True if thing looks like a URL. + """ + + parsed = urllib.parse.urlparse(thing) + if "scheme" in parsed: + return len(parsed.scheme) >= 2 + else: + return len(parsed[0]) >= 2 + + +def load(resource): + """ + open a file or URL for reading. If the passed resource string is not a URL, + or begins with 'file://', return a ``file``. 
Otherwise, return the + result of urllib.urlopen() + """ + + # handle file URLs separately due to python stdlib limitations + if resource.startswith("file://"): + resource = resource[len("file://") :] + + if not is_url(resource): + # if no scheme is given, it is a file path + return open(resource) + + return urllib.request.urlopen(resource) + + +# see https://docs.python.org/3/whatsnew/3.12.html#imp +def load_source(modname, filename): + import importlib.machinery + import importlib.util + + loader = importlib.machinery.SourceFileLoader(modname, filename) + spec = importlib.util.spec_from_file_location(modname, filename, loader=loader) + module = importlib.util.module_from_spec(spec) + sys.modules[module.__name__] = module + loader.exec_module(module) + return module + + +# We can't depend on mozpack.path here, so copy the 'match' function over. + +re_cache = {} +# Python versions < 3.7 return r'\/' for re.escape('/'). +if re.escape("/") == "/": + MATCH_STAR_STAR_RE = re.compile(r"(^|/)\\\*\\\*/") + MATCH_STAR_STAR_END_RE = re.compile(r"(^|/)\\\*\\\*$") +else: + MATCH_STAR_STAR_RE = re.compile(r"(^|\\\/)\\\*\\\*\\\/") + MATCH_STAR_STAR_END_RE = re.compile(r"(^|\\\/)\\\*\\\*$") + + +def match(path, pattern): + """ + Return whether the given path matches the given pattern. + An asterisk can be used to match any string, including the null string, in + one part of the path: + + ``foo`` matches ``*``, ``f*`` or ``fo*o`` + + However, an asterisk matching a subdirectory may not match the null string: + + ``foo/bar`` does *not* match ``foo/*/bar`` + + If the pattern matches one of the ancestor directories of the path, the + patch is considered matching: + + ``foo/bar`` matches ``foo`` + + Two adjacent asterisks can be used to match files and zero or more + directories and subdirectories. + + ``foo/bar`` matches ``foo/**/bar``, or ``**/bar`` + """ + if not pattern: + return True + if pattern not in re_cache: + p = re.escape(pattern) + p = MATCH_STAR_STAR_RE.sub(r"\1(?:.+/)?", p) + p = MATCH_STAR_STAR_END_RE.sub(r"(?:\1.+)?", p) + p = p.replace(r"\*", "[^/]*") + "(?:/.*)?$" + re_cache[pattern] = re.compile(p) + return re_cache[pattern].match(path) is not None diff --git a/testing/mozbase/mozfile/setup.cfg b/testing/mozbase/mozfile/setup.cfg new file mode 100644 index 0000000000..2a9acf13da --- /dev/null +++ b/testing/mozbase/mozfile/setup.cfg @@ -0,0 +1,2 @@ +[bdist_wheel] +universal = 1 diff --git a/testing/mozbase/mozfile/setup.py b/testing/mozbase/mozfile/setup.py new file mode 100644 index 0000000000..172df3e68e --- /dev/null +++ b/testing/mozbase/mozfile/setup.py @@ -0,0 +1,34 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +from setuptools import setup + +PACKAGE_NAME = "mozfile" +PACKAGE_VERSION = "3.0.0" + +setup( + name=PACKAGE_NAME, + version=PACKAGE_VERSION, + description="Library of file utilities for use in Mozilla testing", + long_description="see https://firefox-source-docs.mozilla.org/mozbase/index.html", + classifiers=[ + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)", + ], + keywords="mozilla", + author="Mozilla Automation and Tools team", + author_email="tools@lists.mozilla.org", + url="https://wiki.mozilla.org/Auto-tools/Projects/Mozbase", + license="MPL", + packages=["mozfile"], + include_package_data=True, + zip_safe=False, + install_requires=["six >= 1.13.0"], + tests_require=["wptserve"], +) diff --git a/testing/mozbase/mozfile/tests/files/missing_file_attributes.zip b/testing/mozbase/mozfile/tests/files/missing_file_attributes.zip new file mode 100644 index 0000000000..2b5409e89c Binary files /dev/null and b/testing/mozbase/mozfile/tests/files/missing_file_attributes.zip differ diff --git a/testing/mozbase/mozfile/tests/files/which/baz b/testing/mozbase/mozfile/tests/files/which/baz new file mode 100755 index 0000000000..e69de29bb2 diff --git a/testing/mozbase/mozfile/tests/files/which/baz.exe b/testing/mozbase/mozfile/tests/files/which/baz.exe new file mode 100755 index 0000000000..e69de29bb2 diff --git a/testing/mozbase/mozfile/tests/files/which/registered/quux.exe b/testing/mozbase/mozfile/tests/files/which/registered/quux.exe new file mode 100755 index 0000000000..e69de29bb2 diff --git a/testing/mozbase/mozfile/tests/files/which/unix/baz.exe b/testing/mozbase/mozfile/tests/files/which/unix/baz.exe new file mode 100755 index 0000000000..e69de29bb2 diff --git a/testing/mozbase/mozfile/tests/files/which/unix/file b/testing/mozbase/mozfile/tests/files/which/unix/file new file mode 100644 index 0000000000..e69de29bb2 diff --git a/testing/mozbase/mozfile/tests/files/which/unix/foo b/testing/mozbase/mozfile/tests/files/which/unix/foo new file mode 100755 index 0000000000..e69de29bb2 diff --git a/testing/mozbase/mozfile/tests/files/which/win/bar b/testing/mozbase/mozfile/tests/files/which/win/bar new file mode 100755 index 0000000000..e69de29bb2 diff --git a/testing/mozbase/mozfile/tests/files/which/win/baz.exe b/testing/mozbase/mozfile/tests/files/which/win/baz.exe new file mode 100755 index 0000000000..e69de29bb2 diff --git a/testing/mozbase/mozfile/tests/files/which/win/foo b/testing/mozbase/mozfile/tests/files/which/win/foo new file mode 100755 index 0000000000..e69de29bb2 diff --git a/testing/mozbase/mozfile/tests/files/which/win/foo.exe b/testing/mozbase/mozfile/tests/files/which/win/foo.exe new file mode 100755 index 0000000000..e69de29bb2 diff --git a/testing/mozbase/mozfile/tests/manifest.toml b/testing/mozbase/mozfile/tests/manifest.toml new file mode 100644 index 0000000000..643b9c4c6e --- /dev/null +++ b/testing/mozbase/mozfile/tests/manifest.toml @@ -0,0 +1,18 @@ +[DEFAULT] +subsuite = "mozbase" + +["test_extract.py"] + +["test_load.py"] + +["test_move_remove.py"] + +["test_tempdir.py"] + +["test_tempfile.py"] + +["test_tree.py"] + +["test_url.py"] + +["test_which.py"] diff --git a/testing/mozbase/mozfile/tests/stubs.py b/testing/mozbase/mozfile/tests/stubs.py new file mode 100644 index 
0000000000..3c1bd47207 --- /dev/null +++ b/testing/mozbase/mozfile/tests/stubs.py @@ -0,0 +1,56 @@ +import os +import shutil +import tempfile + +# stub file paths +files = [ + ("foo.txt",), + ( + "foo", + "bar.txt", + ), + ( + "foo", + "bar", + "fleem.txt", + ), + ( + "foobar", + "fleem.txt", + ), + ("bar.txt",), + ( + "nested_tree", + "bar", + "fleem.txt", + ), + ("readonly.txt",), +] + + +def create_empty_stub(): + tempdir = tempfile.mkdtemp() + return tempdir + + +def create_stub(tempdir=None): + """create a stub directory""" + + tempdir = tempdir or tempfile.mkdtemp() + try: + for path in files: + fullpath = os.path.join(tempdir, *path) + dirname = os.path.dirname(fullpath) + if not os.path.exists(dirname): + os.makedirs(dirname) + contents = path[-1] + f = open(fullpath, "w") + f.write(contents) + f.close() + return tempdir + except Exception: + try: + shutil.rmtree(tempdir) + except Exception: + pass + raise diff --git a/testing/mozbase/mozfile/tests/test_copycontents.py b/testing/mozbase/mozfile/tests/test_copycontents.py new file mode 100644 index 0000000000..b829d7b3a4 --- /dev/null +++ b/testing/mozbase/mozfile/tests/test_copycontents.py @@ -0,0 +1,126 @@ +#!/usr/bin/env python + +import os +import shutil +import unittest + +import mozfile +import mozunit +import stubs + + +class MozfileCopyContentsTestCase(unittest.TestCase): + """Test our ability to copy the contents of directories""" + + def _directory_is_subset(self, set_, subset_): + """ + Confirm that all the contents of 'subset_' are contained in 'set_' + """ + names = os.listdir(subset_) + for name in names: + full_set_path = os.path.join(set_, name) + full_subset_path = os.path.join(subset_, name) + if os.path.isdir(full_subset_path): + self.assertTrue(os.path.isdir(full_set_path)) + self._directory_is_subset(full_set_path, full_subset_path) + elif os.path.islink(full_subset_path): + self.assertTrue(os.path.islink(full_set_path)) + else: + self.assertTrue(os.stat(full_set_path)) + + def _directories_are_equal(self, dir1, dir2): + """ + Confirm that the contents of 'dir1' are the same as 'dir2' + """ + names1 = os.listdir(dir1) + names2 = os.listdir(dir2) + self.assertTrue(len(names1) == len(names2)) + for name in names1: + self.assertTrue(name in names2) + dir1_path = os.path.join(dir1, name) + dir2_path = os.path.join(dir2, name) + if os.path.isdir(dir1_path): + self.assertTrue(os.path.isdir(dir2_path)) + self._directories_are_equal(dir1_path, dir2_path) + elif os.path.islink(dir1_path): + self.assertTrue(os.path.islink(dir2_path)) + else: + self.assertTrue(os.stat(dir2_path)) + + def test_copy_empty_directory(self): + tempdir = stubs.create_empty_stub() + dstdir = stubs.create_empty_stub() + self.assertTrue(os.path.isdir(tempdir)) + + mozfile.copy_contents(tempdir, dstdir) + self._directories_are_equal(dstdir, tempdir) + + if os.path.isdir(tempdir): + shutil.rmtree(tempdir) + if os.path.isdir(dstdir): + shutil.rmtree(dstdir) + + def test_copy_full_directory(self): + tempdir = stubs.create_stub() + dstdir = stubs.create_empty_stub() + self.assertTrue(os.path.isdir(tempdir)) + + mozfile.copy_contents(tempdir, dstdir) + self._directories_are_equal(dstdir, tempdir) + + if os.path.isdir(tempdir): + shutil.rmtree(tempdir) + if os.path.isdir(dstdir): + shutil.rmtree(dstdir) + + def test_copy_full_directory_with_existing_file(self): + tempdir = stubs.create_stub() + dstdir = stubs.create_empty_stub() + + filename = "i_dont_exist_in_tempdir" + f = open(os.path.join(dstdir, filename), "w") + f.write("Hello World") + f.close() + 
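+ # copy_contents() only overwrites destination files that also exist in the
+ # source tree, so this destination-only file should survive the copy.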
+ self.assertTrue(os.path.isdir(tempdir)) + + mozfile.copy_contents(tempdir, dstdir) + self._directory_is_subset(dstdir, tempdir) + self.assertTrue(os.path.exists(os.path.join(dstdir, filename))) + + if os.path.isdir(tempdir): + shutil.rmtree(tempdir) + if os.path.isdir(dstdir): + shutil.rmtree(dstdir) + + def test_copy_full_directory_with_overlapping_file(self): + tempdir = stubs.create_stub() + dstdir = stubs.create_empty_stub() + + filename = "i_do_exist_in_tempdir" + for d in [tempdir, dstdir]: + f = open(os.path.join(d, filename), "w") + f.write("Hello " + d) + f.close() + + self.assertTrue(os.path.isdir(tempdir)) + self.assertTrue(os.path.exists(os.path.join(tempdir, filename))) + self.assertTrue(os.path.exists(os.path.join(dstdir, filename))) + + line = open(os.path.join(dstdir, filename), "r").readlines()[0] + self.assertTrue(line == "Hello " + dstdir) + + mozfile.copy_contents(tempdir, dstdir) + + line = open(os.path.join(dstdir, filename), "r").readlines()[0] + self.assertTrue(line == "Hello " + tempdir) + self._directories_are_equal(tempdir, dstdir) + + if os.path.isdir(tempdir): + shutil.rmtree(tempdir) + if os.path.isdir(dstdir): + shutil.rmtree(dstdir) + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/mozfile/tests/test_extract.py b/testing/mozbase/mozfile/tests/test_extract.py new file mode 100644 index 0000000000..c2675d77f7 --- /dev/null +++ b/testing/mozbase/mozfile/tests/test_extract.py @@ -0,0 +1,152 @@ +#!/usr/bin/env python + +import os +import tarfile +import tempfile +import zipfile + +import mozfile +import mozunit +import pytest +import stubs + + +@pytest.fixture +def ensure_directory_contents(): + """ensure the directory contents match""" + + def inner(directory): + for f in stubs.files: + path = os.path.join(directory, *f) + exists = os.path.exists(path) + if not exists: + print("%s does not exist" % (os.path.join(f))) + assert exists + if exists: + contents = open(path).read().strip() + assert contents == f[-1] + + return inner + + +@pytest.fixture(scope="module") +def tarpath(tmpdir_factory): + """create a stub tarball for testing""" + tmpdir = tmpdir_factory.mktemp("test_extract") + + tempdir = tmpdir.join("stubs").strpath + stubs.create_stub(tempdir) + filename = tmpdir.join("bundle.tar").strpath + archive = tarfile.TarFile(filename, mode="w") + for path in stubs.files: + archive.add(os.path.join(tempdir, *path), arcname=os.path.join(*path)) + archive.close() + + assert os.path.exists(filename) + return filename + + +@pytest.fixture(scope="module") +def zippath(tmpdir_factory): + """create a stub zipfile for testing""" + tmpdir = tmpdir_factory.mktemp("test_extract") + + tempdir = tmpdir.join("stubs").strpath + stubs.create_stub(tempdir) + filename = tmpdir.join("bundle.zip").strpath + archive = zipfile.ZipFile(filename, mode="w") + for path in stubs.files: + archive.write(os.path.join(tempdir, *path), arcname=os.path.join(*path)) + archive.close() + + assert os.path.exists(filename) + return filename + + +@pytest.fixture(scope="module", params=["tar", "zip"]) +def bundlepath(request, tarpath, zippath): + if request.param == "tar": + return tarpath + else: + return zippath + + +def test_extract(tmpdir, bundlepath, ensure_directory_contents): + """test extracting a zipfile""" + dest = tmpdir.mkdir("dest").strpath + mozfile.extract(bundlepath, dest) + ensure_directory_contents(dest) + + +def test_extract_zipfile_missing_file_attributes(tmpdir): + """if files do not have attributes set the default permissions have to be inherited.""" 
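+ # Strategy: write a plain reference file in the destination to sample the
+ # user's default mode (honouring the umask), then compare every extracted
+ # entry against that reference mode.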
+ _zipfile = os.path.join( + os.path.dirname(__file__), "files", "missing_file_attributes.zip" + ) + assert os.path.exists(_zipfile) + dest = tmpdir.mkdir("dest").strpath + + # Get the default file permissions for the user + fname = os.path.join(dest, "foo") + with open(fname, "w"): + pass + default_stmode = os.stat(fname).st_mode + + files = mozfile.extract_zip(_zipfile, dest) + for filename in files: + assert os.stat(os.path.join(dest, filename)).st_mode == default_stmode + + +def test_extract_non_archive(tarpath, zippath): + """test the generalized extract function""" + # test extracting some non-archive; this should fail + fd, filename = tempfile.mkstemp() + os.write(fd, b"This is not a zipfile or tarball") + os.close(fd) + exception = None + + try: + dest = tempfile.mkdtemp() + mozfile.extract(filename, dest) + except Exception as exc: + exception = exc + finally: + os.remove(filename) + os.rmdir(dest) + + assert isinstance(exception, Exception) + + +def test_extract_ignore(tmpdir, bundlepath): + dest = tmpdir.mkdir("dest").strpath + ignore = ("foo", "**/fleem.txt", "read*.txt") + mozfile.extract(bundlepath, dest, ignore=ignore) + + assert sorted(os.listdir(dest)) == ["bar.txt", "foo.txt"] + + +def test_tarball_escape(tmpdir): + """Ensures that extracting a tarball can't write outside of the intended + destination directory. + """ + workdir = tmpdir.mkdir("workdir") + os.chdir(workdir) + + # Generate a "malicious" bundle. + with open("bad.txt", "w") as fh: + fh.write("pwned!") + + def change_name(tarinfo): + tarinfo.name = "../" + tarinfo.name + return tarinfo + + with tarfile.open("evil.tar", "w:xz") as tar: + tar.add("bad.txt", filter=change_name) + + with pytest.raises(RuntimeError): + mozfile.extract_tarball("evil.tar", workdir) + assert not os.path.exists(tmpdir.join("bad.txt")) + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/mozfile/tests/test_load.py b/testing/mozbase/mozfile/tests/test_load.py new file mode 100755 index 0000000000..7a3896e33b --- /dev/null +++ b/testing/mozbase/mozfile/tests/test_load.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python + +""" +tests for mozfile.load +""" + +import mozunit +import pytest +from mozfile import load +from wptserve.handlers import handler +from wptserve.server import WebTestHttpd + + +@pytest.fixture(name="httpd_url") +def fixture_httpd_url(): + """Yield a started WebTestHttpd server.""" + + @handler + def example(request, response): + """Example request handler.""" + body = b"example" + return ( + 200, + [("Content-type", "text/plain"), ("Content-length", len(body))], + body, + ) + + httpd = WebTestHttpd(host="127.0.0.1", routes=[("GET", "*", example)]) + + httpd.start() + yield httpd.get_url() + httpd.stop() + + +def test_http(httpd_url): + """Test with WebTestHttpd and a http:// URL.""" + content = load(httpd_url).read() + assert content == b"example" + + +@pytest.fixture(name="temporary_file") +def fixture_temporary_file(tmpdir): + """Yield a path to a temporary file.""" + foobar = tmpdir.join("foobar.txt") + foobar.write("hello world") + + yield str(foobar) + + foobar.remove() + + +def test_file_path(temporary_file): + """Test loading from a file path.""" + assert load(temporary_file).read() == "hello world" + + +def test_file_url(temporary_file): + """Test loading from a file URL.""" + assert load("file://%s" % temporary_file).read() == "hello world" + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/mozfile/tests/test_move_remove.py 
b/testing/mozbase/mozfile/tests/test_move_remove.py new file mode 100644 index 0000000000..0679c6c3fa --- /dev/null +++ b/testing/mozbase/mozfile/tests/test_move_remove.py @@ -0,0 +1,253 @@ +#!/usr/bin/env python + +import errno +import os +import shutil +import stat +import threading +import time +import unittest +from contextlib import contextmanager + +import mozfile +import mozinfo +import mozunit +import stubs + + +def mark_readonly(path): + """Removes all write permissions from given file/directory. + + :param path: path of directory/file of which modes must be changed + """ + mode = os.stat(path)[stat.ST_MODE] + os.chmod(path, mode & ~stat.S_IWUSR & ~stat.S_IWGRP & ~stat.S_IWOTH) + + +class FileOpenCloseThread(threading.Thread): + """Helper thread for asynchronous file handling""" + + def __init__(self, path, delay, delete=False): + threading.Thread.__init__(self) + self.file_opened = threading.Event() + self.delay = delay + self.path = path + self.delete = delete + + def run(self): + with open(self.path): + self.file_opened.set() + time.sleep(self.delay) + if self.delete: + try: + os.remove(self.path) + except Exception: + pass + + +@contextmanager +def wait_file_opened_in_thread(*args, **kwargs): + thread = FileOpenCloseThread(*args, **kwargs) + thread.start() + thread.file_opened.wait() + try: + yield thread + finally: + thread.join() + + +class MozfileRemoveTestCase(unittest.TestCase): + """Test our ability to remove directories and files""" + + def setUp(self): + # Generate a stub + self.tempdir = stubs.create_stub() + + def tearDown(self): + if os.path.isdir(self.tempdir): + shutil.rmtree(self.tempdir) + + def test_remove_directory(self): + """Test the removal of a directory""" + self.assertTrue(os.path.isdir(self.tempdir)) + mozfile.remove(self.tempdir) + self.assertFalse(os.path.exists(self.tempdir)) + + def test_remove_directory_with_open_file(self): + """Test removing a directory with an open file""" + # Open a file in the generated stub + filepath = os.path.join(self.tempdir, *stubs.files[1]) + f = open(filepath, "w") + f.write("foo-bar") + + # keep file open and then try removing the dir-tree + if mozinfo.isWin: + # On the Windows family WindowsError should be raised. 
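+ # (WindowsError is an alias of OSError on Python 3, so asserting on
+ # OSError covers it.)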
+ self.assertRaises(OSError, mozfile.remove, self.tempdir) + self.assertTrue(os.path.exists(self.tempdir)) + else: + # Folder should be deleted on all other platforms + mozfile.remove(self.tempdir) + self.assertFalse(os.path.exists(self.tempdir)) + + def test_remove_closed_file(self): + """Test removing a closed file""" + # Open a file in the generated stub + filepath = os.path.join(self.tempdir, *stubs.files[1]) + with open(filepath, "w") as f: + f.write("foo-bar") + + # Folder should be deleted on all platforms + mozfile.remove(self.tempdir) + self.assertFalse(os.path.exists(self.tempdir)) + + def test_removing_open_file_with_retry(self): + """Test removing a file in use with retry""" + filepath = os.path.join(self.tempdir, *stubs.files[1]) + + with wait_file_opened_in_thread(filepath, 0.2): + # on windows first attempt will fail, + # and it will be retried until the thread leave the handle + mozfile.remove(filepath) + + # Check deletion was successful + self.assertFalse(os.path.exists(filepath)) + + def test_removing_already_deleted_file_with_retry(self): + """Test removing a meanwhile removed file with retry""" + filepath = os.path.join(self.tempdir, *stubs.files[1]) + + with wait_file_opened_in_thread(filepath, 0.2, True): + # on windows first attempt will fail, and before + # the retry the opened file will be deleted in the thread + mozfile.remove(filepath) + + # Check deletion was successful + self.assertFalse(os.path.exists(filepath)) + + def test_remove_readonly_tree(self): + """Test removing a read-only directory""" + + dirpath = os.path.join(self.tempdir, "nested_tree") + mark_readonly(dirpath) + + # However, mozfile should change write permissions and remove dir. + mozfile.remove(dirpath) + + self.assertFalse(os.path.exists(dirpath)) + + def test_remove_readonly_file(self): + """Test removing read-only files""" + filepath = os.path.join(self.tempdir, *stubs.files[1]) + mark_readonly(filepath) + + # However, mozfile should change write permission and then remove file. 
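+ # (remove() re-adds stat.S_IWUSR via os.chmod before unlinking.)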
+ mozfile.remove(filepath) + + self.assertFalse(os.path.exists(filepath)) + + @unittest.skipIf(mozinfo.isWin, "Symlinks are not supported on Windows") + def test_remove_symlink(self): + """Test removing a symlink""" + file_path = os.path.join(self.tempdir, *stubs.files[1]) + symlink_path = os.path.join(self.tempdir, "symlink") + + os.symlink(file_path, symlink_path) + self.assertTrue(os.path.islink(symlink_path)) + + # The linked folder and files should not be deleted + mozfile.remove(symlink_path) + self.assertFalse(os.path.exists(symlink_path)) + self.assertTrue(os.path.exists(file_path)) + + @unittest.skipIf(mozinfo.isWin, "Symlinks are not supported on Windows") + def test_remove_symlink_in_subfolder(self): + """Test removing a folder with an contained symlink""" + file_path = os.path.join(self.tempdir, *stubs.files[0]) + dir_path = os.path.dirname(os.path.join(self.tempdir, *stubs.files[1])) + symlink_path = os.path.join(dir_path, "symlink") + + os.symlink(file_path, symlink_path) + self.assertTrue(os.path.islink(symlink_path)) + + # The folder with the contained symlink will be deleted but not the + # original linked file + mozfile.remove(dir_path) + self.assertFalse(os.path.exists(dir_path)) + self.assertFalse(os.path.exists(symlink_path)) + self.assertTrue(os.path.exists(file_path)) + + @unittest.skipIf(mozinfo.isWin, "Symlinks are not supported on Windows") + def test_remove_broken_symlink(self): + """Test removing a folder with an contained symlink""" + file_path = os.path.join(self.tempdir, "readonly.txt") + working_link = os.path.join(self.tempdir, "link_to_readonly.txt") + broken_link = os.path.join(self.tempdir, "broken_link") + os.symlink(file_path, working_link) + os.symlink(os.path.join(self.tempdir, "broken.txt"), broken_link) + + self.assertTrue(os.path.exists(file_path)) + self.assertTrue(os.path.islink(working_link)) + self.assertTrue(os.path.islink(broken_link)) + + mozfile.remove(working_link) + self.assertFalse(os.path.lexists(working_link)) + self.assertTrue(os.path.exists(file_path)) + + mozfile.remove(broken_link) + self.assertFalse(os.path.lexists(broken_link)) + + @unittest.skipIf( + mozinfo.isWin or not os.geteuid(), + "Symlinks are not supported on Windows and cannot run test as root", + ) + def test_remove_symlink_for_system_path(self): + """Test removing a symlink which points to a system folder""" + symlink_path = os.path.join(self.tempdir, "symlink") + + os.symlink(os.path.dirname(self.tempdir), symlink_path) + self.assertTrue(os.path.islink(symlink_path)) + + # The folder with the contained symlink will be deleted but not the + # original linked file + mozfile.remove(symlink_path) + self.assertFalse(os.path.exists(symlink_path)) + + def test_remove_path_that_does_not_exists(self): + not_existing_path = os.path.join(self.tempdir, "I_do_not_not_exists") + try: + mozfile.remove(not_existing_path) + except OSError as exc: + if exc.errno == errno.ENOENT: + self.fail("removing non existing path must not raise error") + raise + + +class MozFileMoveTestCase(unittest.TestCase): + def setUp(self): + # Generate a stub + self.tempdir = stubs.create_stub() + self.addCleanup(mozfile.rmtree, self.tempdir) + + def test_move_file(self): + file_path = os.path.join(self.tempdir, *stubs.files[1]) + moved_path = file_path + ".moved" + self.assertTrue(os.path.isfile(file_path)) + self.assertFalse(os.path.exists(moved_path)) + mozfile.move(file_path, moved_path) + self.assertFalse(os.path.exists(file_path)) + self.assertTrue(os.path.isfile(moved_path)) + + def 
test_move_file_with_retry(self): + file_path = os.path.join(self.tempdir, *stubs.files[1]) + moved_path = file_path + ".moved" + + with wait_file_opened_in_thread(file_path, 0.2): + # first move attempt should fail on windows and be retried + mozfile.move(file_path, moved_path) + self.assertFalse(os.path.exists(file_path)) + self.assertTrue(os.path.isfile(moved_path)) + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/mozfile/tests/test_tempdir.py b/testing/mozbase/mozfile/tests/test_tempdir.py new file mode 100644 index 0000000000..ba16b478b6 --- /dev/null +++ b/testing/mozbase/mozfile/tests/test_tempdir.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python + +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +""" +tests for mozfile.TemporaryDirectory +""" + +import os +import unittest + +import mozunit +from mozfile import TemporaryDirectory + + +class TestTemporaryDirectory(unittest.TestCase): + def test_removed(self): + """ensure that a TemporaryDirectory gets removed""" + path = None + with TemporaryDirectory() as tmp: + path = tmp + self.assertTrue(os.path.isdir(tmp)) + tmpfile = os.path.join(tmp, "a_temp_file") + open(tmpfile, "w").write("data") + self.assertTrue(os.path.isfile(tmpfile)) + self.assertFalse(os.path.isdir(path)) + self.assertFalse(os.path.exists(path)) + + def test_exception(self): + """ensure that TemporaryDirectory handles exceptions""" + path = None + with self.assertRaises(Exception): + with TemporaryDirectory() as tmp: + path = tmp + self.assertTrue(os.path.isdir(tmp)) + raise Exception("oops") + self.assertFalse(os.path.isdir(path)) + self.assertFalse(os.path.exists(path)) + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/mozfile/tests/test_tempfile.py b/testing/mozbase/mozfile/tests/test_tempfile.py new file mode 100644 index 0000000000..3e250d6a76 --- /dev/null +++ b/testing/mozbase/mozfile/tests/test_tempfile.py @@ -0,0 +1,105 @@ +#!/usr/bin/env python + +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +""" +tests for mozfile.NamedTemporaryFile +""" +import os +import unittest + +import mozfile +import mozunit +import six + + +class TestNamedTemporaryFile(unittest.TestCase): + """test our fix for NamedTemporaryFile""" + + def test_named_temporary_file(self): + """Ensure the fix for re-opening a NamedTemporaryFile works + + Refer to https://bugzilla.mozilla.org/show_bug.cgi?id=818777 + and https://bugzilla.mozilla.org/show_bug.cgi?id=821362 + """ + + test_string = b"A simple test" + with mozfile.NamedTemporaryFile() as temp: + # Test we can write to file + temp.write(test_string) + # Forced flush, so that we can read later + temp.flush() + + # Test we can open the file again on all platforms + self.assertEqual(open(temp.name, "rb").read(), test_string) + + def test_iteration(self): + """ensure the line iterator works""" + + # make a file and write to it + tf = mozfile.NamedTemporaryFile() + notes = [b"doe", b"rae", b"mi"] + for note in notes: + tf.write(b"%s\n" % note) + tf.flush() + + # now read from it + tf.seek(0) + lines = [line.rstrip(b"\n") for line in tf.readlines()] + self.assertEqual(lines, notes) + + # now read from it iteratively + lines = [] + for line in tf: + lines.append(line.strip()) + self.assertEqual(lines, []) # because we did not seek(0) + tf.seek(0) + lines = [] + for line in tf: + lines.append(line.strip()) + self.assertEqual(lines, notes) + + def test_delete(self): + """ensure ``delete=True/False`` works as expected""" + + # make a deleteable file; ensure it gets cleaned up + path = None + with mozfile.NamedTemporaryFile(delete=True) as tf: + path = tf.name + self.assertTrue(isinstance(path, six.string_types)) + self.assertFalse(os.path.exists(path)) + + # it is also deleted when __del__ is called + # here we will do so explicitly + tf = mozfile.NamedTemporaryFile(delete=True) + path = tf.name + self.assertTrue(os.path.exists(path)) + del tf + self.assertFalse(os.path.exists(path)) + + # Now the same thing but we won't delete the file + path = None + try: + with mozfile.NamedTemporaryFile(delete=False) as tf: + path = tf.name + self.assertTrue(os.path.exists(path)) + finally: + if path and os.path.exists(path): + os.remove(path) + + path = None + try: + tf = mozfile.NamedTemporaryFile(delete=False) + path = tf.name + self.assertTrue(os.path.exists(path)) + del tf + self.assertTrue(os.path.exists(path)) + finally: + if path and os.path.exists(path): + os.remove(path) + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/mozfile/tests/test_tree.py b/testing/mozbase/mozfile/tests/test_tree.py new file mode 100644 index 0000000000..556c1b9139 --- /dev/null +++ b/testing/mozbase/mozfile/tests/test_tree.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python +# coding=UTF-8 + +import os +import shutil +import tempfile +import unittest + +import mozunit +from mozfile import tree + + +class TestTree(unittest.TestCase): + """Test the tree function.""" + + def test_unicode_paths(self): + """Test creating tree structure from a Unicode path.""" + try: + tmpdir = tempfile.mkdtemp(suffix="tmp🍪") + os.mkdir(os.path.join(tmpdir, "dir🍪")) + with open(os.path.join(tmpdir, "file🍪"), "w") as f: + f.write("foo") + + self.assertEqual("{}\n├file🍪\n└dir🍪".format(tmpdir), tree(tmpdir)) + finally: + shutil.rmtree(tmpdir) + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/mozfile/tests/test_url.py b/testing/mozbase/mozfile/tests/test_url.py new file mode 100755 index 0000000000..a19f5f16a8 --- /dev/null +++ b/testing/mozbase/mozfile/tests/test_url.py @@ 
-0,0 +1,23 @@ +#!/usr/bin/env python + +""" +tests for is_url +""" +import unittest + +import mozunit +from mozfile import is_url + + +class TestIsUrl(unittest.TestCase): + """test the is_url function""" + + def test_is_url(self): + self.assertTrue(is_url("http://mozilla.org")) + self.assertFalse(is_url("/usr/bin/mozilla.org")) + self.assertTrue(is_url("file:///usr/bin/mozilla.org")) + self.assertFalse(is_url("c:\foo\bar")) + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/mozfile/tests/test_which.py b/testing/mozbase/mozfile/tests/test_which.py new file mode 100644 index 0000000000..b02f13ccdf --- /dev/null +++ b/testing/mozbase/mozfile/tests/test_which.py @@ -0,0 +1,63 @@ +# Any copyright is dedicated to the Public Domain. +# https://creativecommons.org/publicdomain/zero/1.0/ + +import os +import sys + +import mozunit +import six +from mozfile import which + +here = os.path.abspath(os.path.dirname(__file__)) + + +def test_which(monkeypatch): + cwd = os.path.join(here, "files", "which") + monkeypatch.chdir(cwd) + + if sys.platform == "win32": + if six.PY3: + import winreg + else: + import _winreg as winreg + bindir = os.path.join(cwd, "win") + monkeypatch.setenv("PATH", bindir) + monkeypatch.setattr(winreg, "QueryValue", (lambda k, sk: None)) + + assert which("foo.exe").lower() == os.path.join(bindir, "foo.exe").lower() + assert which("foo").lower() == os.path.join(bindir, "foo.exe").lower() + assert ( + which("foo", exts=[".FOO", ".BAR"]).lower() + == os.path.join(bindir, "foo").lower() + ) + assert os.environ.get("PATHEXT") != [".FOO", ".BAR"] + assert which("foo.txt") is None + + assert which("bar").lower() == os.path.join(bindir, "bar").lower() + assert which("baz").lower() == os.path.join(cwd, "baz.exe").lower() + + registered_dir = os.path.join(cwd, "registered") + quux = os.path.join(registered_dir, "quux.exe").lower() + + def mock_registry(key, subkey): + assert subkey == ( + r"SOFTWARE\Microsoft\Windows\CurrentVersion\App Paths\quux.exe" + ) + return quux + + monkeypatch.setattr(winreg, "QueryValue", mock_registry) + assert which("quux").lower() == quux + assert which("quux.exe").lower() == quux + + else: + bindir = os.path.join(cwd, "unix") + monkeypatch.setenv("PATH", bindir) + assert which("foo") == os.path.join(bindir, "foo") + assert which("baz") is None + assert which("baz", exts=[".EXE"]) is None + assert "PATHEXT" not in os.environ + assert which("file") is None + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/mozgeckoprofiler/mozgeckoprofiler/__init__.py b/testing/mozbase/mozgeckoprofiler/mozgeckoprofiler/__init__.py new file mode 100644 index 0000000000..ce0337db09 --- /dev/null +++ b/testing/mozbase/mozgeckoprofiler/mozgeckoprofiler/__init__.py @@ -0,0 +1,17 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +""" +mozgeckoprofiler has utilities to symbolicate and load gecko profiles. 
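+
+ symbolicate_profile_json() rewrites a profile JSON file in place, resolving
+ symbols from a local crashreporter-symbols directory and falling back to a
+ remote symbolication server for symbols that are not found locally.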
+""" +from .profiling import save_gecko_profile, symbolicate_profile_json +from .symbolication import ProfileSymbolicator +from .viewgeckoprofile import view_gecko_profile + +__all__ = [ + "save_gecko_profile", + "symbolicate_profile_json", + "ProfileSymbolicator", + "view_gecko_profile", +] diff --git a/testing/mozbase/mozgeckoprofiler/mozgeckoprofiler/dump_syms_mac b/testing/mozbase/mozgeckoprofiler/mozgeckoprofiler/dump_syms_mac new file mode 100755 index 0000000000..e9b8edf879 Binary files /dev/null and b/testing/mozbase/mozgeckoprofiler/mozgeckoprofiler/dump_syms_mac differ diff --git a/testing/mozbase/mozgeckoprofiler/mozgeckoprofiler/profiling.py b/testing/mozbase/mozgeckoprofiler/mozgeckoprofiler/profiling.py new file mode 100644 index 0000000000..5a8d9b0269 --- /dev/null +++ b/testing/mozbase/mozgeckoprofiler/mozgeckoprofiler/profiling.py @@ -0,0 +1,85 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +import json +import os +import shutil +import tempfile + +from mozlog import get_proxy_logger + +from .symbolication import ProfileSymbolicator + +LOG = get_proxy_logger("profiler") + + +def save_gecko_profile(profile, filename): + with open(filename, "w") as f: + json.dump(profile, f) + + +def symbolicate_profile_json(profile_path, objdir_path): + """ + Symbolicate a single JSON profile. + """ + temp_dir = tempfile.mkdtemp() + missing_symbols_zip = os.path.join(temp_dir, "missingsymbols.zip") + + firefox_symbol_path = os.path.join(objdir_path, "dist", "crashreporter-symbols") + if not os.path.isdir(firefox_symbol_path): + os.mkdir(firefox_symbol_path) + + windows_symbol_path = os.path.join(temp_dir, "windows") + os.mkdir(windows_symbol_path) + + symbol_paths = {"FIREFOX": firefox_symbol_path, "WINDOWS": windows_symbol_path} + + symbolicator = ProfileSymbolicator( + { + # Trace-level logging (verbose) + "enableTracing": 0, + # Fallback server if symbol is not found locally + "remoteSymbolServer": "https://symbolication.services.mozilla.com/symbolicate/v4", + # Maximum number of symbol files to keep in memory + "maxCacheEntries": 2000000, + # Frequency of checking for recent symbols to + # cache (in hours) + "prefetchInterval": 12, + # Oldest file age to prefetch (in hours) + "prefetchThreshold": 48, + # Maximum number of library versions to pre-fetch + # per library + "prefetchMaxSymbolsPerLib": 3, + # Default symbol lookup directories + "defaultApp": "FIREFOX", + "defaultOs": "WINDOWS", + # Paths to .SYM files, expressed internally as a + # mapping of app or platform names to directories + # Note: App & OS names from requests are converted + # to all-uppercase internally + "symbolPaths": symbol_paths, + } + ) + + LOG.info( + "Symbolicating the performance profile... This could take a couple " + "of minutes." + ) + + try: + with open(profile_path, "r", encoding="utf-8") as profile_file: + profile = json.load(profile_file) + symbolicator.dump_and_integrate_missing_symbols(profile, missing_symbols_zip) + symbolicator.symbolicate_profile(profile) + # Overwrite the profile in place. 
+ save_gecko_profile(profile, profile_path) + except MemoryError: + LOG.error( + "Ran out of memory while trying" + " to symbolicate profile {0}".format(profile_path) + ) + except Exception as e: + LOG.error("Encountered an exception during profile symbolication") + LOG.error(e) + + shutil.rmtree(temp_dir) diff --git a/testing/mozbase/mozgeckoprofiler/mozgeckoprofiler/symFileManager.py b/testing/mozbase/mozgeckoprofiler/mozgeckoprofiler/symFileManager.py new file mode 100644 index 0000000000..92cfcf3230 --- /dev/null +++ b/testing/mozbase/mozgeckoprofiler/mozgeckoprofiler/symFileManager.py @@ -0,0 +1,353 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +import itertools +import os +import re +import threading +import time +from bisect import bisect + +from mozlog import get_proxy_logger + +LOG = get_proxy_logger("profiler") + +# Libraries to keep prefetched +PREFETCHED_LIBS = ["xul.pdb", "firefox.pdb"] + + +class SymbolInfo: + def __init__(self, addressMap): + self.sortedAddresses = sorted(addressMap.keys()) + self.sortedSymbols = [addressMap[address] for address in self.sortedAddresses] + self.entryCount = len(self.sortedAddresses) + + # TODO: Add checks for address < funcEnd ? + def Lookup(self, address): + nearest = bisect(self.sortedAddresses, address) - 1 + if nearest < 0: + return None + return self.sortedSymbols[nearest] + + def GetEntryCount(self): + return self.entryCount + + +# Singleton for .sym / .nmsym file cache management + + +class SymFileManager: + """This class fetches symbols from files and caches the results. + + options (obj) + symbolPaths : dictionary + Paths to .SYM files, expressed internally as a mapping of app or platform + names to directories. App & OS names from requests are converted to + all-uppercase internally + e.g. 
{ "FIREFOX": "/tmp/path" } + maxCacheEntries : number + Maximum number of symbol files to keep in memory + prefetchInterval : number + Frequency of checking for recent symbols to cache (in hours) + prefetchThreshold : number + Oldest file age to prefetch (in hours) + prefetchMaxSymbolsPerLib : (number) + Maximum number of library versions to pre-fetch per library + """ + + sCache = {} + sCacheCount = 0 + sCacheLock = threading.Lock() + sMruSymbols = [] + + sOptions = {} + sCallbackTimer = None + + def __init__(self, options): + self.sOptions = options + + def GetLibSymbolMap(self, libName, breakpadId, symbolSources): + # Empty lib name means client couldn't associate frame with any lib + if libName == "": + return None + + # Check cache first + libSymbolMap = None + self.sCacheLock.acquire() + try: + if libName in self.sCache and breakpadId in self.sCache[libName]: + libSymbolMap = self.sCache[libName][breakpadId] + self.UpdateMruList(libName, breakpadId) + finally: + self.sCacheLock.release() + + if libSymbolMap is None: + LOG.debug("Need to fetch PDB file for " + libName + " " + breakpadId) + + # Guess the name of the .sym or .nmsym file on disk + if libName[-4:] == ".pdb": + symFileNameWithoutExtension = re.sub(r"\.[^\.]+$", "", libName) + else: + symFileNameWithoutExtension = libName + + # Look in the symbol dirs for this .sym or .nmsym file + for extension, source in itertools.product( + [".sym", ".nmsym"], symbolSources + ): + symFileName = symFileNameWithoutExtension + extension + pathSuffix = ( + os.sep + libName + os.sep + breakpadId + os.sep + symFileName + ) + path = self.sOptions["symbolPaths"][source] + pathSuffix + libSymbolMap = self.FetchSymbolsFromFile(path) + if libSymbolMap: + break + + if not libSymbolMap: + LOG.debug("No matching sym files, tried " + str(symbolSources)) + return None + + LOG.debug( + "Storing libSymbolMap under [" + libName + "][" + breakpadId + "]" + ) + self.sCacheLock.acquire() + try: + self.MaybeEvict(libSymbolMap.GetEntryCount()) + if libName not in self.sCache: + self.sCache[libName] = {} + self.sCache[libName][breakpadId] = libSymbolMap + self.sCacheCount += libSymbolMap.GetEntryCount() + self.UpdateMruList(libName, breakpadId) + LOG.debug( + str(self.sCacheCount) + + " symbols in cache after fetching symbol file" + ) + finally: + self.sCacheLock.release() + + return libSymbolMap + + def FetchSymbolsFromFile(self, path): + try: + symFile = open(path, "r") + except Exception as e: + LOG.debug("Error opening file " + path + ": " + str(e)) + return None + + LOG.debug("Parsing SYM file at " + path) + + try: + symbolMap = {} + lineNum = 0 + publicCount = 0 + funcCount = 0 + if path.endswith(".sym"): + for line in symFile: + lineNum += 1 + if line[0:7] == "PUBLIC ": + line = line.rstrip() + fields = line.split(" ") + if len(fields) < 4: + LOG.debug("Line " + str(lineNum) + " is messed") + continue + if fields[1] == "m": + address = int(fields[2], 16) + symbolMap[address] = " ".join(fields[4:]) + else: + address = int(fields[1], 16) + symbolMap[address] = " ".join(fields[3:]) + publicCount += 1 + elif line[0:5] == "FUNC ": + line = line.rstrip() + fields = line.split(" ") + if len(fields) < 5: + LOG.debug("Line " + str(lineNum) + " is messed") + continue + if fields[1] == "m": + address = int(fields[2], 16) + symbolMap[address] = " ".join(fields[5:]) + else: + address = int(fields[1], 16) + symbolMap[address] = " ".join(fields[4:]) + funcCount += 1 + elif path.endswith(".nmsym"): + addressLength = 0 + for line in symFile: + lineNum += 1 + if 
line.startswith(" "): + continue + if addressLength == 0: + addressLength = line.find(" ") + address = int(line[0:addressLength], 16) + # Some lines have the form + # "address space letter space symbol", + # some have the form "address space symbol". + # The letter has a meaning, but we ignore it. + if line[addressLength + 2] == " ": + symbol = line[addressLength + 3 :].rstrip() + else: + symbol = line[addressLength + 1 :].rstrip() + symbolMap[address] = symbol + publicCount += 1 + except Exception: + LOG.error("Error parsing SYM file " + path) + return None + + logString = "Found " + str(len(symbolMap)) + " unique entries from " + logString += ( + str(publicCount) + " PUBLIC lines, " + str(funcCount) + " FUNC lines" + ) + LOG.debug(logString) + + return SymbolInfo(symbolMap) + + def PrefetchRecentSymbolFiles(self): + """This method runs in a loop. Use the options "prefetchThreshold" to adjust""" + global PREFETCHED_LIBS + + LOG.info("Prefetching recent symbol files") + # Schedule next timer callback + interval = self.sOptions["prefetchInterval"] * 60 * 60 + self.sCallbackTimer = threading.Timer(interval, self.PrefetchRecentSymbolFiles) + self.sCallbackTimer.start() + + thresholdTime = time.time() - self.sOptions["prefetchThreshold"] * 60 * 60 + symDirsToInspect = {} + for pdbName in PREFETCHED_LIBS: + symDirsToInspect[pdbName] = [] + topLibPath = self.sOptions["symbolPaths"]["FIREFOX"] + os.sep + pdbName + + try: + symbolDirs = os.listdir(topLibPath) + for symbolDir in symbolDirs: + candidatePath = topLibPath + os.sep + symbolDir + mtime = os.path.getmtime(candidatePath) + if mtime > thresholdTime: + symDirsToInspect[pdbName].append((mtime, candidatePath)) + except Exception as e: + LOG.error("Error while pre-fetching: " + str(e)) + + LOG.info( + "Found " + + str(len(symDirsToInspect[pdbName])) + + " new " + + pdbName + + " recent dirs" + ) + + # Only prefetch the most recent N entries + symDirsToInspect[pdbName].sort(reverse=True) + symDirsToInspect[pdbName] = symDirsToInspect[pdbName][ + : self.sOptions["prefetchMaxSymbolsPerLib"] + ] + + # Don't fetch symbols already in cache. + # Ideally, mutex would be held from check to insert in self.sCache, + # but we don't want to hold the lock during I/O. This won't cause + # inconsistencies. 
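+        # Worst case another thread caches the same library while we read it
+        # from disk; the duplicate is simply skipped when it is inserted below.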
+ self.sCacheLock.acquire() + try: + for pdbName in symDirsToInspect: + for mtime, symbolDirPath in symDirsToInspect[pdbName]: + pdbId = os.path.basename(symbolDirPath) + if pdbName in self.sCache and pdbId in self.sCache[pdbName]: + symDirsToInspect[pdbName].remove((mtime, symbolDirPath)) + finally: + self.sCacheLock.release() + + # Read all new symbol files in at once + fetchedSymbols = {} + fetchedCount = 0 + for pdbName in symDirsToInspect: + # The corresponding symbol file name ends with .sym + symFileName = re.sub(r"\.[^\.]+$", ".sym", pdbName) + + for mtime, symbolDirPath in symDirsToInspect[pdbName]: + pdbId = os.path.basename(symbolDirPath) + symbolFilePath = symbolDirPath + os.sep + symFileName + symbolInfo = self.FetchSymbolsFromFile(symbolFilePath) + if symbolInfo: + # Stop if the prefetched items are bigger than the cache + if ( + fetchedCount + symbolInfo.GetEntryCount() + > self.sOptions["maxCacheEntries"] + ): + break + fetchedSymbols[(pdbName, pdbId)] = symbolInfo + fetchedCount += symbolInfo.GetEntryCount() + else: + LOG.error("Couldn't fetch .sym file symbols for " + symbolFilePath) + continue + + # Insert new symbols into global symbol cache + self.sCacheLock.acquire() + try: + # Make room for the new symbols + self.MaybeEvict(fetchedCount) + + for pdbName, pdbId in fetchedSymbols: + if pdbName not in self.sCache: + self.sCache[pdbName] = {} + + if pdbId in self.sCache[pdbName]: + continue + + newSymbolFile = fetchedSymbols[(pdbName, pdbId)] + self.sCache[pdbName][pdbId] = newSymbolFile + self.sCacheCount += newSymbolFile.GetEntryCount() + + # Move new symbols to front of MRU list to give them a chance + self.UpdateMruList(pdbName, pdbId) + + finally: + self.sCacheLock.release() + + LOG.info("Finished prefetching recent symbol files") + + def UpdateMruList(self, pdbName, pdbId): + libId = (pdbName, pdbId) + if libId in self.sMruSymbols: + self.sMruSymbols.remove(libId) + self.sMruSymbols.insert(0, libId) + + def MaybeEvict(self, freeEntriesNeeded): + maxCacheSize = self.sOptions["maxCacheEntries"] + LOG.debug( + "Cache occupancy before MaybeEvict: " + + str(self.sCacheCount) + + "/" + + str(maxCacheSize) + ) + + if ( + self.sCacheCount == 0 + or self.sCacheCount + freeEntriesNeeded <= maxCacheSize + ): + # No need to lock mutex here, this doesn't need to be 100% + return + + # If adding the new entries would exceed the max cache size, + # evict so that cache is at 70% capacity after new entries added + numOldEntriesAfterEvict = max(0, (0.70 * maxCacheSize) - freeEntriesNeeded) + numToEvict = self.sCacheCount - numOldEntriesAfterEvict + + # Evict symbols until evict quota is met, starting with least recently + # used + for pdbName, pdbId in reversed(self.sMruSymbols): + if numToEvict <= 0: + break + + evicteeCount = self.sCache[pdbName][pdbId].GetEntryCount() + + del self.sCache[pdbName][pdbId] + self.sCacheCount -= evicteeCount + self.sMruSymbols.pop() + + numToEvict -= evicteeCount + + LOG.debug( + "Cache occupancy after MaybeEvict: " + + str(self.sCacheCount) + + "/" + + str(maxCacheSize) + ) diff --git a/testing/mozbase/mozgeckoprofiler/mozgeckoprofiler/symbolication.py b/testing/mozbase/mozgeckoprofiler/mozgeckoprofiler/symbolication.py new file mode 100644 index 0000000000..ecec5c1d9d --- /dev/null +++ b/testing/mozbase/mozgeckoprofiler/mozgeckoprofiler/symbolication.py @@ -0,0 +1,360 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. 
If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +import hashlib +import http.client +import os +import platform +import subprocess +import zipfile +from distutils import spawn + +import six +from mozlog import get_proxy_logger + +from .symbolicationRequest import SymbolicationRequest +from .symFileManager import SymFileManager + +LOG = get_proxy_logger("profiler") + +if six.PY2: + # Import for Python 2 + from cStringIO import StringIO as sio + from urllib2 import urlopen +else: + # Import for Python 3 + from io import BytesIO as sio + from urllib.request import urlopen + + # Symbolication is broken when using type 'str' in python 2.7, so we use 'basestring'. + # But for python 3.0 compatibility, 'basestring' isn't defined, but the 'str' type works. + # So we force 'basestring' to 'str'. + basestring = str + + +class SymbolError(Exception): + pass + + +class OSXSymbolDumper: + def __init__(self): + self.dump_syms_bin = os.path.join(os.path.dirname(__file__), "dump_syms_mac") + if not os.path.exists(self.dump_syms_bin): + raise SymbolError("No dump_syms_mac binary in this directory") + + def store_symbols( + self, lib_path, expected_breakpad_id, output_filename_without_extension + ): + """ + Returns the filename at which the .sym file was created, or None if no + symbols were dumped. + """ + output_filename = output_filename_without_extension + ".sym" + + def get_archs(filename): + """ + Find the list of architectures present in a Mach-O file. + """ + return ( + subprocess.Popen(["lipo", "-info", filename], stdout=subprocess.PIPE) + .communicate()[0] + .split(b":")[2] + .strip() + .split() + ) + + def process_file(arch): + proc = subprocess.Popen( + [self.dump_syms_bin, "-a", arch, lib_path], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + stdout, stderr = proc.communicate() + if proc.returncode != 0: + return None + + module = stdout.splitlines()[0] + bits = module.split(b" ", 4) + if len(bits) != 5: + return None + _, platform, cpu_arch, actual_breakpad_id, debug_file = bits + + if str(actual_breakpad_id, "utf-8") != expected_breakpad_id: + return None + + with open(output_filename, "wb") as f: + f.write(stdout) + return output_filename + + for arch in get_archs(lib_path): + result = process_file(arch) + if result is not None: + return result + return None + + +class LinuxSymbolDumper: + def __init__(self): + self.nm = spawn.find_executable("nm") + if not self.nm: + raise SymbolError("Could not find nm, necessary for symbol dumping") + + def store_symbols(self, lib_path, breakpad_id, output_filename_without_extension): + """ + Returns the filename at which the .sym file was created, or None if no + symbols were dumped. + """ + output_filename = output_filename_without_extension + ".nmsym" + + proc = subprocess.Popen( + [self.nm, "--demangle", lib_path], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + stdout, stderr = proc.communicate() + + if proc.returncode != 0: + return + + with open(output_filename, "wb") as f: + f.write(stdout) + + # Append nm -D output to the file. On Linux, most system libraries + # have no "normal" symbols, but they have "dynamic" symbols, which + # nm -D shows. 
+ proc = subprocess.Popen( + [self.nm, "--demangle", "-D", lib_path], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + stdout, stderr = proc.communicate() + if proc.returncode == 0: + f.write(stdout) + return output_filename + + +class ProfileSymbolicator: + """This class orchestrates symbolication for a Gecko profile. + + It can be used by multiple pieces of testing infrastructure that generate Gecko + performance profiles. + + Args: + options (obj): See SymFileManager for details on these options. + """ + + def __init__(self, options): + self.options = options + self.sym_file_manager = SymFileManager(self.options) + self.symbol_dumper = self.get_symbol_dumper() + + def get_symbol_dumper(self): + try: + if platform.system() == "Darwin": + return OSXSymbolDumper() + elif platform.system() == "Linux": + return LinuxSymbolDumper() + except SymbolError: + return None + + def integrate_symbol_zip_from_url(self, symbol_zip_url): + if self.have_integrated(symbol_zip_url): + return + LOG.info( + "Retrieving symbol zip from {symbol_zip_url}...".format( + symbol_zip_url=symbol_zip_url + ) + ) + try: + io = urlopen(symbol_zip_url, None, 30) + with zipfile.ZipFile(sio(io.read())) as zf: + self.integrate_symbol_zip(zf) + self._create_file_if_not_exists(self._marker_file(symbol_zip_url)) + except (IOError, http.client.IncompleteRead): + LOG.info("Symbol zip request failed.") + + def integrate_symbol_zip_from_file(self, filename): + if self.have_integrated(filename): + return + with open(filename, "rb") as f: + with zipfile.ZipFile(f) as zf: + self.integrate_symbol_zip(zf) + self._create_file_if_not_exists(self._marker_file(filename)) + + def _create_file_if_not_exists(self, filename): + try: + os.makedirs(os.path.dirname(filename)) + except OSError: + pass + try: + open(filename, "a").close() + except IOError: + pass + + def integrate_symbol_zip(self, symbol_zip_file): + symbol_zip_file.extractall(self.options["symbolPaths"]["FIREFOX"]) + + def _marker_file(self, symbol_zip_url): + marker_dir = os.path.join(self.options["symbolPaths"]["FIREFOX"], ".markers") + return os.path.join( + marker_dir, hashlib.sha1(symbol_zip_url.encode("utf-8")).hexdigest() + ) + + def have_integrated(self, symbol_zip_url): + return os.path.isfile(self._marker_file(symbol_zip_url)) + + def get_unknown_modules_in_profile(self, profile_json): + if "libs" not in profile_json: + return [] + shared_libraries = profile_json["libs"] + memoryMap = [] + for lib in shared_libraries: + memoryMap.append([lib["debugName"], lib["breakpadId"]]) + + rawRequest = { + "stacks": [[]], + "memoryMap": memoryMap, + "version": 4, + "symbolSources": ["FIREFOX", "WINDOWS"], + } + request = SymbolicationRequest(self.sym_file_manager, rawRequest) + if not request.isValidRequest: + return [] + request.Symbolicate(0) # This sets request.knownModules + + unknown_modules = [] + for i, lib in enumerate(shared_libraries): + if not request.knownModules[i]: + unknown_modules.append(lib) + return unknown_modules + + def dump_and_integrate_missing_symbols(self, profile_json, symbol_zip_path): + if not self.symbol_dumper: + return + + unknown_modules = self.get_unknown_modules_in_profile(profile_json) + if not unknown_modules: + return + + # We integrate the dumped symbols by dumping them directly into our + # symbol directory. + output_dir = self.options["symbolPaths"]["FIREFOX"] + + # Additionally, we add all dumped symbol files to the missingsymbols + # zip file. 
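+        # The zip is opened in append mode, so dumped symbol files accumulate
+        # across runs and can be re-extracted instead of being dumped again.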
+ with zipfile.ZipFile(symbol_zip_path, "a", zipfile.ZIP_DEFLATED) as zf: + for lib in unknown_modules: + self.dump_and_integrate_symbols_for_lib(lib, output_dir, zf) + + def dump_and_integrate_symbols_for_lib(self, lib, output_dir, zip): + name = lib["debugName"] + expected_name_without_extension = os.path.join(name, lib["breakpadId"], name) + for extension in [".sym", ".nmsym"]: + expected_name = expected_name_without_extension + extension + if expected_name in zip.namelist(): + # No need to dump the symbols again if we already have it in + # the missingsymbols zip file from a previous run. + zip.extract(expected_name, output_dir) + return + + lib_path = lib["path"] + if not os.path.exists(lib_path): + return + + output_filename_without_extension = os.path.join( + output_dir, expected_name_without_extension + ) + store_path = os.path.dirname(output_filename_without_extension) + if not os.path.exists(store_path): + os.makedirs(store_path) + + # Dump the symbols. + sym_file = self.symbol_dumper.store_symbols( + lib_path, lib["breakpadId"], output_filename_without_extension + ) + if sym_file: + rootlen = len(os.path.join(output_dir, "_")) - 1 + output_filename = sym_file[rootlen:] + if output_filename not in zip.namelist(): + zip.write(sym_file, output_filename) + + def symbolicate_profile(self, profile_json): + if "libs" not in profile_json: + return + + shared_libraries = profile_json["libs"] + addresses = self._find_addresses(profile_json) + symbols_to_resolve = self._assign_symbols_to_libraries( + addresses, shared_libraries + ) + symbolication_table = self._resolve_symbols(symbols_to_resolve) + self._substitute_symbols(profile_json, symbolication_table) + + for process in profile_json["processes"]: + self.symbolicate_profile(process) + + def _find_addresses(self, profile_json): + addresses = set() + for thread in profile_json["threads"]: + if isinstance(thread, basestring): + continue + for s in thread["stringTable"]: + if s[0:2] == "0x": + addresses.add(s) + return addresses + + def _substitute_symbols(self, profile_json, symbolication_table): + for thread in profile_json["threads"]: + if isinstance(thread, basestring): + continue + for i, s in enumerate(thread["stringTable"]): + thread["stringTable"][i] = symbolication_table.get(s, s) + + def _get_containing_library(self, address, libs): + left = 0 + right = len(libs) - 1 + while left <= right: + mid = (left + right) // 2 + if address >= libs[mid]["end"]: + left = mid + 1 + elif address < libs[mid]["start"]: + right = mid - 1 + else: + return libs[mid] + return None + + def _assign_symbols_to_libraries(self, addresses, shared_libraries): + libs_with_symbols = {} + for address in addresses: + lib = self._get_containing_library(int(address, 0), shared_libraries) + if not lib: + continue + if lib["start"] not in libs_with_symbols: + libs_with_symbols[lib["start"]] = {"library": lib, "symbols": set()} + libs_with_symbols[lib["start"]]["symbols"].add(address) + # pylint: disable=W1656 + return libs_with_symbols.values() + + def _resolve_symbols(self, symbols_to_resolve): + memoryMap = [] + processedStack = [] + all_symbols = [] + for moduleIndex, library_with_symbols in enumerate(symbols_to_resolve): + lib = library_with_symbols["library"] + symbols = library_with_symbols["symbols"] + memoryMap.append([lib["debugName"], lib["breakpadId"]]) + all_symbols += symbols + for symbol in symbols: + processedStack.append([moduleIndex, int(symbol, 0) - lib["start"]]) + + rawRequest = { + "stacks": [processedStack], + "memoryMap": memoryMap, + 
"version": 4, + "symbolSources": ["FIREFOX", "WINDOWS"], + } + request = SymbolicationRequest(self.sym_file_manager, rawRequest) + if not request.isValidRequest: + return {} + symbolicated_stack = request.Symbolicate(0) + return dict(zip(all_symbols, symbolicated_stack)) diff --git a/testing/mozbase/mozgeckoprofiler/mozgeckoprofiler/symbolicationRequest.py b/testing/mozbase/mozgeckoprofiler/mozgeckoprofiler/symbolicationRequest.py new file mode 100644 index 0000000000..1b277abbde --- /dev/null +++ b/testing/mozbase/mozgeckoprofiler/mozgeckoprofiler/symbolicationRequest.py @@ -0,0 +1,331 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +import json +import re + +import six +from mozlog import get_proxy_logger + +LOG = get_proxy_logger("profiler") + +# Precompiled regex for validating lib names +# Empty lib name means client couldn't associate frame with any lib +gLibNameRE = re.compile("[0-9a-zA-Z_+\-\.]*$") + +# Maximum number of times a request can be forwarded to a different server +# for symbolication. Also prevents loops. +MAX_FORWARDED_REQUESTS = 3 + +if six.PY2: + # Import for Python 2 + from urllib2 import Request, urlopen +else: + # Import for Python 3 + from urllib.request import Request, urlopen + + # Symbolication is broken when using type 'str' in python 2.7, so we use 'basestring'. + # But for python 3.0 compatibility, 'basestring' isn't defined, but the 'str' type works. + # So we force 'basestring' to 'str'. + basestring = str + + +class ModuleV3: + def __init__(self, libName, breakpadId): + self.libName = libName + self.breakpadId = breakpadId + + +def getModuleV3(libName, breakpadId): + if not isinstance(libName, basestring) or not gLibNameRE.match(libName): + LOG.debug("Bad library name: " + str(libName)) + return None + + if not isinstance(breakpadId, basestring): + LOG.debug("Bad breakpad id: " + str(breakpadId)) + return None + + return ModuleV3(libName, breakpadId) + + +class SymbolicationRequest: + def __init__(self, symFileManager, rawRequests): + self.Reset() + self.symFileManager = symFileManager + self.stacks = [] + self.combinedMemoryMap = [] + self.knownModules = [] + self.symbolSources = [] + self.ParseRequests(rawRequests) + + def Reset(self): + self.symFileManager = None + self.isValidRequest = False + self.combinedMemoryMap = [] + self.knownModules = [] + self.stacks = [] + self.forwardCount = 0 + + def ParseRequests(self, rawRequests): + self.isValidRequest = False + + try: + if not isinstance(rawRequests, dict): + LOG.debug("Request is not a dictionary") + return + + if "version" not in rawRequests: + LOG.debug("Request is missing 'version' field") + return + version = rawRequests["version"] + if version != 4: + LOG.debug("Invalid version: %s" % version) + return + + if "forwarded" in rawRequests: + if not isinstance(rawRequests["forwarded"], (int, int)): + LOG.debug("Invalid 'forwards' field: %s" % rawRequests["forwarded"]) + return + self.forwardCount = rawRequests["forwarded"] + + # Client specifies which sets of symbols should be used + if "symbolSources" in rawRequests: + try: + sourceList = [x.upper() for x in rawRequests["symbolSources"]] + for source in sourceList: + if source in self.symFileManager.sOptions["symbolPaths"]: + self.symbolSources.append(source) + else: + LOG.debug("Unrecognized symbol source: " + source) + continue + except Exception: + self.symbolSources = [] + pass + + if not 
self.symbolSources: + self.symbolSources.append(self.symFileManager.sOptions["defaultApp"]) + self.symbolSources.append(self.symFileManager.sOptions["defaultOs"]) + + if "memoryMap" not in rawRequests: + LOG.debug("Request is missing 'memoryMap' field") + return + memoryMap = rawRequests["memoryMap"] + if not isinstance(memoryMap, list): + LOG.debug("'memoryMap' field in request is not a list") + + if "stacks" not in rawRequests: + LOG.debug("Request is missing 'stacks' field") + return + stacks = rawRequests["stacks"] + if not isinstance(stacks, list): + LOG.debug("'stacks' field in request is not a list") + return + + # Check memory map is well-formatted + # We try to be more permissive here with the modules. If a module is not + # well-formatted, we ignore that one by adding a None to the clean memory map. We have + # to add a None instead of simply omitting that module because the indexes of the + # modules in the memory map has to match the indexes of the shared libraries in the + # profile data. + cleanMemoryMap = [] + for module in memoryMap: + if not isinstance(module, list): + LOG.debug("Entry in memory map is not a list: " + str(module)) + cleanMemoryMap.append(None) + continue + + if len(module) != 2: + LOG.debug( + "Entry in memory map is not a 2 item list: " + str(module) + ) + cleanMemoryMap.append(None) + continue + moduleV3 = getModuleV3(*module) + + if moduleV3 is None: + LOG.debug("Failed to get Module V3.") + + cleanMemoryMap.append(moduleV3) + + self.combinedMemoryMap = cleanMemoryMap + self.knownModules = [False] * len(self.combinedMemoryMap) + + # Check stack is well-formatted + for stack in stacks: + if not isinstance(stack, list): + LOG.debug("stack is not a list") + return + for entry in stack: + if not isinstance(entry, list): + LOG.debug("stack entry is not a list") + return + if len(entry) != 2: + LOG.debug("stack entry doesn't have exactly 2 elements") + return + + self.stacks.append(stack) + + except Exception as e: + LOG.debug("Exception while parsing request: " + str(e)) + return + + self.isValidRequest = True + + def ForwardRequest(self, indexes, stack, modules, symbolicatedStack): + LOG.debug("Forwarding " + str(len(stack)) + " PCs for symbolication") + + try: + url = self.symFileManager.sOptions["remoteSymbolServer"] + rawModules = [] + moduleToIndex = {} + newIndexToOldIndex = {} + for moduleIndex, m in modules: + l = [m.libName, m.breakpadId] + newModuleIndex = len(rawModules) + rawModules.append(l) + moduleToIndex[m] = newModuleIndex + newIndexToOldIndex[newModuleIndex] = moduleIndex + + rawStack = [] + for entry in stack: + moduleIndex = entry[0] + offset = entry[1] + module = self.combinedMemoryMap[moduleIndex] + if module is None: + continue + newIndex = moduleToIndex[module] + rawStack.append([newIndex, offset]) + + requestVersion = 4 + while True: + requestObj = { + "symbolSources": self.symbolSources, + "stacks": [rawStack], + "memoryMap": rawModules, + "forwarded": self.forwardCount + 1, + "version": requestVersion, + } + requestJson = json.dumps(requestObj).encode() + headers = {"Content-Type": "application/json"} + requestHandle = Request(url, requestJson, headers) + try: + response = urlopen(requestHandle) + except Exception as e: + if requestVersion == 4: + # Try again with version 3 + requestVersion = 3 + continue + raise e + succeededVersion = requestVersion + break + + except Exception as e: + LOG.error("Exception while forwarding request: " + str(e)) + return + + try: + responseJson = json.loads(response.read()) + except Exception as e: 
+ LOG.error( + "Exception while reading server response to forwarded" + " request: " + str(e) + ) + return + + try: + if succeededVersion == 4: + responseKnownModules = responseJson["knownModules"] + for newIndex, known in enumerate(responseKnownModules): + if known and newIndex in newIndexToOldIndex: + self.knownModules[newIndexToOldIndex[newIndex]] = True + + responseSymbols = responseJson["symbolicatedStacks"][0] + else: + responseSymbols = responseJson[0] + if len(responseSymbols) != len(stack): + LOG.error( + str(len(responseSymbols)) + + " symbols in response, " + + str(len(stack)) + + " PCs in request!" + ) + return + + for index in range(0, len(stack)): + symbol = responseSymbols[index] + originalIndex = indexes[index] + symbolicatedStack[originalIndex] = symbol + except Exception as e: + LOG.error( + "Exception while parsing server response to forwarded" + " request: " + str(e) + ) + return + + def Symbolicate(self, stackNum): + # Check if we should forward requests when required sym files don't + # exist + shouldForwardRequests = False + if ( + self.symFileManager.sOptions["remoteSymbolServer"] + and self.forwardCount < MAX_FORWARDED_REQUESTS + ): + shouldForwardRequests = True + + # Symbolicate each PC + pcIndex = -1 + symbolicatedStack = [] + missingSymFiles = [] + unresolvedIndexes = [] + unresolvedStack = [] + unresolvedModules = [] + stack = self.stacks[stackNum] + + for moduleIndex, module in enumerate(self.combinedMemoryMap): + if module is None: + continue + + if not self.symFileManager.GetLibSymbolMap( + module.libName, module.breakpadId, self.symbolSources + ): + missingSymFiles.append((module.libName, module.breakpadId)) + if shouldForwardRequests: + unresolvedModules.append((moduleIndex, module)) + else: + self.knownModules[moduleIndex] = True + + for entry in stack: + pcIndex += 1 + moduleIndex = entry[0] + offset = entry[1] + if moduleIndex == -1: + symbolicatedStack.append(hex(offset)) + continue + module = self.combinedMemoryMap[moduleIndex] + if module is None: + continue + + if (module.libName, module.breakpadId) in missingSymFiles: + if shouldForwardRequests: + unresolvedIndexes.append(pcIndex) + unresolvedStack.append(entry) + symbolicatedStack.append(hex(offset) + " (in " + module.libName + ")") + continue + + functionName = None + libSymbolMap = self.symFileManager.GetLibSymbolMap( + module.libName, module.breakpadId, self.symbolSources + ) + functionName = libSymbolMap.Lookup(offset) + + if functionName is None: + functionName = hex(offset) + symbolicatedStack.append(functionName + " (in " + module.libName + ")") + + # Ask another server for help symbolicating unresolved addresses + if len(unresolvedStack) > 0 or len(unresolvedModules) > 0: + self.ForwardRequest( + unresolvedIndexes, unresolvedStack, unresolvedModules, symbolicatedStack + ) + + return symbolicatedStack diff --git a/testing/mozbase/mozgeckoprofiler/mozgeckoprofiler/viewgeckoprofile.py b/testing/mozbase/mozgeckoprofiler/mozgeckoprofiler/viewgeckoprofile.py new file mode 100644 index 0000000000..95c73cf503 --- /dev/null +++ b/testing/mozbase/mozgeckoprofiler/mozgeckoprofiler/viewgeckoprofile.py @@ -0,0 +1,136 @@ +#!/usr/bin/env python + +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
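For orientation, a minimal sketch of driving SymFileManager and SymbolicationRequest (defined above) directly. All paths, breakpad ids and addresses below are hypothetical, and the option values are illustrative, not prescribed defaults:

    from mozgeckoprofiler.symFileManager import SymFileManager
    from mozgeckoprofiler.symbolicationRequest import SymbolicationRequest

    options = {
        "symbolPaths": {
            "FIREFOX": "/tmp/symbols/firefox",   # hypothetical directories
            "WINDOWS": "/tmp/symbols/windows",
        },
        "defaultApp": "FIREFOX",
        "defaultOs": "WINDOWS",
        "maxCacheEntries": 2000000,
        "prefetchInterval": 12,        # hours
        "prefetchThreshold": 48,       # hours
        "prefetchMaxSymbolsPerLib": 3,
        "remoteSymbolServer": "",      # empty string disables request forwarding
    }

    raw_request = {
        "version": 4,
        "memoryMap": [["xul.pdb", "44E4EC8C2F41492B9369D6B9A059577C2"]],  # made-up id
        "stacks": [[[0, 0x1A45]]],     # one stack of [moduleIndex, offset] pairs
        "symbolSources": ["FIREFOX", "WINDOWS"],
    }

    request = SymbolicationRequest(SymFileManager(options), raw_request)
    if request.isValidRequest:
        # Returns a list of "symbol (in lib)" strings, or hex offsets when
        # no matching .sym/.nmsym file is found.
        print(request.Symbolicate(0))
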
+ +import argparse +import os +import socket +import sys +import webbrowser + +import six +from mozlog import commandline, get_proxy_logger +from mozlog.commandline import add_logging_group + +here = os.path.abspath(os.path.dirname(__file__)) +LOG = get_proxy_logger("profiler") + +if six.PY2: + # Import for Python 2 + from urllib import quote + + from SimpleHTTPServer import SimpleHTTPRequestHandler + from SocketServer import TCPServer +else: + # Import for Python 3 + from http.server import SimpleHTTPRequestHandler + from socketserver import TCPServer + from urllib.parse import quote + + +class ProfileServingHTTPRequestHandler(SimpleHTTPRequestHandler): + """Extends the basic SimpleHTTPRequestHandler (which serves a directory + of files) to include request headers required by profiler.firefox.com""" + + def end_headers(self): + self.send_header("Access-Control-Allow-Origin", "https://profiler.firefox.com") + SimpleHTTPRequestHandler.end_headers(self) + + +class ViewGeckoProfile(object): + """Container class for ViewGeckoProfile""" + + def __init__(self, gecko_profile_data_path): + self.gecko_profile_data_path = gecko_profile_data_path + self.gecko_profile_dir = os.path.dirname(gecko_profile_data_path) + self.profiler_url = "https://profiler.firefox.com/from-url/" + self.httpd = None + self.host = "127.0.0.1" + self.port = None + self.oldcwd = os.getcwd() + + def setup_http_server(self): + # pick a free port + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + sock.bind(("", 0)) + self.port = sock.getsockname()[1] + sock.close() + + # Temporarily change the directory to the profile directory. + os.chdir(self.gecko_profile_dir) + self.httpd = TCPServer((self.host, self.port), ProfileServingHTTPRequestHandler) + + def handle_single_request(self): + self.httpd.handle_request() + # Go back to the old cwd, which some infrastructure may be relying on. + os.chdir(self.oldcwd) + + def encode_url(self): + # Encode url i.e.: https://profiler.firefox.com/from-url/http... + file_url = "http://{}:{}/{}".format( + self.host, self.port, os.path.basename(self.gecko_profile_data_path) + ) + + self.profiler_url = self.profiler_url + quote(file_url, safe="") + LOG.info("Temporarily serving the profile from: %s" % file_url) + + def open_profile_in_browser(self): + # Open the file in the user's preferred browser. + LOG.info("Opening the profile: %s" % self.profiler_url) + webbrowser.open_new_tab(self.profiler_url) + + +def create_parser(mach_interface=False): + parser = argparse.ArgumentParser() + add_arg = parser.add_argument + + add_arg( + "-p", + "--profile-zip", + required=True, + dest="profile_zip", + help="path to the gecko profiles zip file to open in profiler.firefox.com", + ) + + add_logging_group(parser) + return parser + + +def verify_options(parser, args): + ctx = vars(args) + + if not os.path.isfile(args.profile_zip): + parser.error("{profile_zip} does not exist!".format(**ctx)) + + +def parse_args(argv=None): + parser = create_parser() + args = parser.parse_args(argv) + verify_options(parser, args) + return args + + +def view_gecko_profile(profile_path): + """ + Open a gecko profile in the user's default browser. This function opens + up a special URL to profiler.firefox.com and serves up the local profile. 
+ """ + view_gecko_profile = ViewGeckoProfile(profile_path) + + view_gecko_profile.setup_http_server() + view_gecko_profile.encode_url() + view_gecko_profile.open_profile_in_browser() + view_gecko_profile.handle_single_request() + + +def start_from_command_line(): + args = parse_args(sys.argv[1:]) + commandline.setup_logging("view-gecko-profile", args, {"tbpl": sys.stdout}) + + view_gecko_profile(args.profile_zip) + + +if __name__ == "__main__": + start_from_command_line() diff --git a/testing/mozbase/mozgeckoprofiler/setup.py b/testing/mozbase/mozgeckoprofiler/setup.py new file mode 100644 index 0000000000..0c7949cae9 --- /dev/null +++ b/testing/mozbase/mozgeckoprofiler/setup.py @@ -0,0 +1,32 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +from setuptools import setup + +PACKAGE_NAME = "mozgeckoprofiler" +PACKAGE_VERSION = "1.0.0" + +setup( + name=PACKAGE_NAME, + version=PACKAGE_VERSION, + description="Library to generate and view performance data in the Firefox Profiler", + long_description="see https://firefox-source-docs.mozilla.org/mozgeckoprofiler/index.html", + classifiers=[ + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.5", + "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)", + ], + keywords="mozilla", + author="Mozilla Automation and Tools team", + author_email="tools@lists.mozilla.org", + url="https://wiki.mozilla.org/Auto-tools/Projects/Mozbase", + license="MPL", + packages=["mozgeckoprofiler"], + include_package_data=True, + zip_safe=False, + install_requires=[], + tests_require=[], +) diff --git a/testing/mozbase/mozgeckoprofiler/tests/manifest.toml b/testing/mozbase/mozgeckoprofiler/tests/manifest.toml new file mode 100644 index 0000000000..95bae86eab --- /dev/null +++ b/testing/mozbase/mozgeckoprofiler/tests/manifest.toml @@ -0,0 +1,4 @@ +[DEFAULT] +subsuite = "mozbase" + +["test_view_gecko_profiler.py"] diff --git a/testing/mozbase/mozgeckoprofiler/tests/test_view_gecko_profiler.py b/testing/mozbase/mozgeckoprofiler/tests/test_view_gecko_profiler.py new file mode 100644 index 0000000000..76dd0f4594 --- /dev/null +++ b/testing/mozbase/mozgeckoprofiler/tests/test_view_gecko_profiler.py @@ -0,0 +1,105 @@ +#!/usr/bin/env python + +import io +import os +import re +import shutil +import tempfile +import threading +import time +import unittest +from unittest import mock + +import mozunit +import requests +import six +from mozgeckoprofiler import view_gecko_profile + +if six.PY2: + # Import for Python 2 + from urllib import unquote +else: + # Import for Python 3 + from urllib.parse import unquote + + +def access_profiler_link(file_url, response): + """Attempts to access the profile in a loop for 5 seconds. + + This is run from a separate thread. + """ + timeout = 5 # seconds + start = time.time() + + while time.time() - start < timeout: + # Poll the server to try and get a response. + result = requests.get(url=file_url) + if result.ok: + # Return the text back in a list. 
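+            # (A one-element list is used so this worker thread can hand the
+            # response body back to the test that started it.)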
+ response[0] = result.text + return + time.sleep(0.1) + + response[0] = "Accessing the profiler link timed out after %s seconds" % timeout + + +class TestViewGeckoProfile(unittest.TestCase): + """Tests the opening local profiles in the Firefox Profiler.""" + + def setUp(self): + self.firefox_profiler_url = None + self.thread = None + self.response = [None] + + def test_view_gecko_profile(self): + # Create a temporary fake performance profile. + temp_dir = tempfile.mkdtemp() + profile_path = os.path.join(temp_dir, "fakeprofile.json") + with io.open(profile_path, "w") as f: + f.write("FAKE_PROFILE") + + # Mock the open_new_tab function so that we know when the view_gecko_profile + # function has done all of its work, and we can assert ressult of the + # user behavior. + def mocked_open_new_tab(firefox_profiler_url): + self.firefox_profiler_url = firefox_profiler_url + encoded_file_url = firefox_profiler_url.split("/")[-1] + decoded_file_url = unquote(encoded_file_url) + # Extract the actual file from the path. + self.thread = threading.Thread( + target=access_profiler_link, args=(decoded_file_url, self.response) + ) + print("firefox_profiler_url %s" % firefox_profiler_url) + print("encoded_file_url %s" % encoded_file_url) + print("decoded_file_url %s" % decoded_file_url) + self.thread.start() + + with mock.patch("webbrowser.open_new_tab", new=mocked_open_new_tab): + # Run the test + view_gecko_profile(profile_path) + + self.thread.join() + + # Compare the URLs, but replace the PORT value supplied, as that is dynamic. + expected_url = ( + "https://profiler.firefox.com/from-url/" + "http%3A%2F%2F127.0.0.1%3A{PORT}%2Ffakeprofile.json" + ) + actual_url = re.sub("%3A\d+%2F", "%3A{PORT}%2F", self.firefox_profiler_url) + + self.assertEqual( + actual_url, + expected_url, + "The URL generated was correct for the Firefox Profiler.", + ) + self.assertEqual( + self.response[0], + "FAKE_PROFILE", + "The response from the serve provided the profile contents.", + ) + + shutil.rmtree(temp_dir) + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/mozhttpd/mozhttpd/__init__.py b/testing/mozbase/mozhttpd/mozhttpd/__init__.py new file mode 100644 index 0000000000..65c860f9c5 --- /dev/null +++ b/testing/mozbase/mozhttpd/mozhttpd/__init__.py @@ -0,0 +1,47 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +""" +Mozhttpd is a simple http webserver written in python, designed expressly +for use in automated testing scenarios. It is designed to both serve static +content and provide simple web services. + +The server is based on python standard library modules such as +SimpleHttpServer, urlparse, etc. The ThreadingMixIn is used to +serve each request on a discrete thread. + +Some existing uses of mozhttpd include Peptest_, Eideticker_, and Talos_. + +.. _Peptest: https://github.com/mozilla/peptest/ + +.. _Eideticker: https://github.com/mozilla/eideticker/ + +.. 
_Talos: http://hg.mozilla.org/build/ + +The following simple example creates a basic HTTP server which serves +content from the current directory, defines a single API endpoint +`/api/resource/` and then serves requests indefinitely: + +:: + + import mozhttpd + + @mozhttpd.handlers.json_response + def resource_get(request, objid): + return (200, { 'id': objid, + 'query': request.query }) + + + httpd = mozhttpd.MozHttpd(port=8080, docroot='.', + urlhandlers = [ { 'method': 'GET', + 'path': '/api/resources/([^/]+)/?', + 'function': resource_get } ]) + print "Serving '%s' at %s:%s" % (httpd.docroot, httpd.host, httpd.port) + httpd.start(block=True) + +""" +from .handlers import json_response +from .mozhttpd import MozHttpd, Request, RequestHandler, main + +__all__ = ["MozHttpd", "Request", "RequestHandler", "main", "json_response"] diff --git a/testing/mozbase/mozhttpd/mozhttpd/handlers.py b/testing/mozbase/mozhttpd/mozhttpd/handlers.py new file mode 100644 index 0000000000..44f657031a --- /dev/null +++ b/testing/mozbase/mozhttpd/mozhttpd/handlers.py @@ -0,0 +1,20 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +import json + + +def json_response(func): + """Translates results of 'func' into a JSON response.""" + + def wrap(*a, **kw): + (code, data) = func(*a, **kw) + json_data = json.dumps(data) + return ( + code, + {"Content-type": "application/json", "Content-Length": len(json_data)}, + json_data, + ) + + return wrap diff --git a/testing/mozbase/mozhttpd/mozhttpd/mozhttpd.py b/testing/mozbase/mozhttpd/mozhttpd/mozhttpd.py new file mode 100755 index 0000000000..dd4e606a55 --- /dev/null +++ b/testing/mozbase/mozhttpd/mozhttpd/mozhttpd.py @@ -0,0 +1,350 @@ +#!/usr/bin/env python + +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +import errno +import logging +import os +import posixpath +import re +import socket +import sys +import threading +import time +import traceback +from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser + +import moznetwork +from six import ensure_binary, iteritems +from six.moves.BaseHTTPServer import HTTPServer +from six.moves.SimpleHTTPServer import SimpleHTTPRequestHandler +from six.moves.socketserver import ThreadingMixIn +from six.moves.urllib.parse import unquote, urlsplit + + +class EasyServer(ThreadingMixIn, HTTPServer): + allow_reuse_address = True + acceptable_errors = (errno.EPIPE, errno.ECONNABORTED) + + def handle_error(self, request, client_address): + error = sys.exc_info()[1] + + if ( + isinstance(error, socket.error) + and isinstance(error.args, tuple) + and error.args[0] in self.acceptable_errors + ) or (isinstance(error, IOError) and error.errno in self.acceptable_errors): + pass # remote hang up before the result is sent + else: + logging.error(error) + # The error can be ambiguous just the short description is logged, so we + # dump a stack trace to discover its origin. 
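+            # print_exc() writes the full traceback to stderr, alongside the
+            # short logging.error() message above.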
+ traceback.print_exc() + + +class Request(object): + """Details of a request.""" + + # attributes from urlsplit that this class also sets + uri_attrs = ("scheme", "netloc", "path", "query", "fragment") + + def __init__(self, uri, headers, rfile=None): + self.uri = uri + self.headers = headers + parsed = urlsplit(uri) + for i, attr in enumerate(self.uri_attrs): + setattr(self, attr, parsed[i]) + try: + body_len = int(self.headers.get("Content-length", 0)) + except ValueError: + body_len = 0 + if body_len and rfile: + self.body = rfile.read(body_len) + else: + self.body = None + + +class RequestHandler(SimpleHTTPRequestHandler): + docroot = os.getcwd() # current working directory at time of import + proxy_host_dirs = False + request_log = [] + log_requests = False + request = None + + def __init__(self, *args, **kwargs): + SimpleHTTPRequestHandler.__init__(self, *args, **kwargs) + self.extensions_map[".svg"] = "image/svg+xml" + + def _try_handler(self, method): + if self.log_requests: + self.request_log.append( + {"method": method, "path": self.request.path, "time": time.time()} + ) + + handlers = [ + handler for handler in self.urlhandlers if handler["method"] == method + ] + for handler in handlers: + m = re.match(handler["path"], self.request.path) + if m: + (response_code, headerdict, data) = handler["function"]( + self.request, *m.groups() + ) + self.send_response(response_code) + for keyword, value in iteritems(headerdict): + self.send_header(keyword, value) + self.end_headers() + self.wfile.write(ensure_binary(data)) + + return True + + return False + + def _find_path(self): + """Find the on-disk path to serve this request from, + using self.path_mappings and self.docroot. + Return (url_path, disk_path).""" + path_components = list(filter(None, self.request.path.split("/"))) + for prefix, disk_path in iteritems(self.path_mappings): + prefix_components = list(filter(None, prefix.split("/"))) + if len(path_components) < len(prefix_components): + continue + if path_components[: len(prefix_components)] == prefix_components: + return ("/".join(path_components[len(prefix_components) :]), disk_path) + if self.docroot: + return self.request.path, self.docroot + return None + + def parse_request(self): + retval = SimpleHTTPRequestHandler.parse_request(self) + self.request = Request(self.path, self.headers, self.rfile) + return retval + + def do_GET(self): + if not self._try_handler("GET"): + res = self._find_path() + if res: + self.path, self.disk_root = res + # don't include query string and fragment, and prepend + # host directory if required. + if self.request.netloc and self.proxy_host_dirs: + self.path = "/" + self.request.netloc + self.path + SimpleHTTPRequestHandler.do_GET(self) + else: + self.send_response(404) + self.end_headers() + self.wfile.write(b"") + + def do_POST(self): + # if we don't have a match, we always fall through to 404 (this may + # not be "technically" correct if we have a local file at the same + # path as the resource but... meh) + if not self._try_handler("POST"): + self.send_response(404) + self.end_headers() + self.wfile.write(b"") + + def do_DEL(self): + # if we don't have a match, we always fall through to 404 (this may + # not be "technically" correct if we have a local file at the same + # path as the resource but... 
meh) + if not self._try_handler("DEL"): + self.send_response(404) + self.end_headers() + self.wfile.write(b"") + + def translate_path(self, path): + # this is taken from SimpleHTTPRequestHandler.translate_path(), + # except we serve from self.docroot instead of os.getcwd(), and + # parse_request()/do_GET() have already stripped the query string and + # fragment and mangled the path for proxying, if required. + path = posixpath.normpath(unquote(self.path)) + words = path.split("/") + words = list(filter(None, words)) + path = self.disk_root + for word in words: + drive, word = os.path.splitdrive(word) + head, word = os.path.split(word) + if word in (os.curdir, os.pardir): + continue + path = os.path.join(path, word) + return path + + # I found on my local network that calls to this were timing out + # I believe all of these calls are from log_message + def address_string(self): + return "a.b.c.d" + + # This produces a LOT of noise + def log_message(self, format, *args): + pass + + +class MozHttpd(object): + """ + :param host: Host from which to serve (default 127.0.0.1) + :param port: Port from which to serve (default 8888) + :param docroot: Server root (default os.getcwd()) + :param urlhandlers: Handlers to specify behavior against method and path match (default None) + :param path_mappings: A dict mapping URL prefixes to additional on-disk paths. + :param proxy_host_dirs: Toggle proxy behavior (default False) + :param log_requests: Toggle logging behavior (default False) + + Very basic HTTP server class. Takes a docroot (path on the filesystem) + and a set of urlhandler dictionaries of the form: + + :: + + { + 'method': HTTP method (string): GET, POST, or DEL, + 'path': PATH_INFO (regular expression string), + 'function': function of form fn(arg1, arg2, arg3, ..., request) + } + + and serves HTTP. For each request, MozHttpd will either return a file + off the docroot, or dispatch to a handler function (if both path and + method match). + + Note that one of docroot or urlhandlers may be None (in which case no + local files or handlers, respectively, will be used). If both docroot or + urlhandlers are None then MozHttpd will default to serving just the local + directory. + + MozHttpd also handles proxy requests (i.e. with a full URI on the request + line). By default files are served from docroot according to the request + URI's path component, but if proxy_host_dirs is True, files are served + from //. + + For example, the request "GET http://foo.bar/dir/file.html" would + (assuming no handlers match) serve /dir/file.html if + proxy_host_dirs is False, or /foo.bar/dir/file.html if it is + True. + """ + + def __init__( + self, + host="127.0.0.1", + port=0, + docroot=None, + urlhandlers=None, + path_mappings=None, + proxy_host_dirs=False, + log_requests=False, + ): + self.host = host + self.port = int(port) + self.docroot = docroot + if not (urlhandlers or docroot or path_mappings): + self.docroot = os.getcwd() + self.proxy_host_dirs = proxy_host_dirs + self.httpd = None + self.urlhandlers = urlhandlers or [] + self.path_mappings = path_mappings or {} + self.log_requests = log_requests + self.request_log = [] + + class RequestHandlerInstance(RequestHandler): + docroot = self.docroot + urlhandlers = self.urlhandlers + path_mappings = self.path_mappings + proxy_host_dirs = self.proxy_host_dirs + request_log = self.request_log + log_requests = self.log_requests + + self.handler_class = RequestHandlerInstance + + def start(self, block=False): + """ + Starts the server. 
+ + If `block` is True, the call will not return. If `block` is False, the + server will be started on a separate thread that can be terminated by + a call to stop(). + """ + self.httpd = EasyServer((self.host, self.port), self.handler_class) + if block: + self.httpd.serve_forever() + else: + self.server = threading.Thread(target=self.httpd.serve_forever) + self.server.setDaemon(True) # don't hang on exit + self.server.start() + + def stop(self): + """ + Stops the server. + + If the server is not running, this method has no effect. + """ + if self.httpd: + # FIXME: There is no shutdown() method in Python 2.4... + try: + self.httpd.shutdown() + except AttributeError: + pass + self.httpd = None + + def get_url(self, path="/"): + """ + Returns a URL that can be used for accessing the server (e.g. http://192.168.1.3:4321/) + + :param path: Path to append to URL (e.g. if path were /foobar.html you would get a URL like + http://192.168.1.3:4321/foobar.html). Default is `/`. + """ + if not self.httpd: + return None + + return "http://%s:%s%s" % (self.host, self.httpd.server_port, path) + + __del__ = stop + + +def main(args=sys.argv[1:]): + # parse command line options + parser = ArgumentParser( + description="Basic python webserver.", + formatter_class=ArgumentDefaultsHelpFormatter, + ) + parser.add_argument( + "-p", + "--port", + dest="port", + type=int, + default=8888, + help="port to run the server on", + ) + parser.add_argument( + "-H", "--host", dest="host", default="127.0.0.1", help="host address" + ) + parser.add_argument( + "-i", + "--external-ip", + action="store_true", + dest="external_ip", + default=False, + help="find and use external ip for host", + ) + parser.add_argument( + "-d", + "--docroot", + dest="docroot", + default=os.getcwd(), + help="directory to serve files from", + ) + args = parser.parse_args() + + if args.external_ip: + host = moznetwork.get_lan_ip() + else: + host = args.host + + # create the server + server = MozHttpd(host=host, port=args.port, docroot=args.docroot) + + print("Serving '%s' at %s:%s" % (server.docroot, server.host, server.port)) + server.start(block=True) + + +if __name__ == "__main__": + main() diff --git a/testing/mozbase/mozhttpd/setup.py b/testing/mozbase/mozhttpd/setup.py new file mode 100644 index 0000000000..4d4f689113 --- /dev/null +++ b/testing/mozbase/mozhttpd/setup.py @@ -0,0 +1,34 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. 
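Tying the pieces of mozhttpd.py together, a minimal usage sketch of the MozHttpd class and the urlhandler format described in its docstring above. The endpoint name is made up for illustration; port 0 lets the OS pick a free port:

    import mozhttpd

    @mozhttpd.handlers.json_response
    def hello(request):
        # Echo the query string back as JSON.
        return (200, {"query": request.query})

    httpd = mozhttpd.MozHttpd(
        port=0,                 # let the OS choose a free port
        docroot=".",            # also serve files from the current directory
        urlhandlers=[
            {"method": "GET", "path": "/api/hello/?", "function": hello},
        ],
    )
    httpd.start(block=False)    # serve on a background thread
    print(httpd.get_url("/api/hello/"))
    httpd.stop()
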
+ +from setuptools import setup + +PACKAGE_VERSION = "0.7.1" +deps = ["moznetwork >= 0.24", "mozinfo >= 1.0.0", "six >= 1.13.0"] + +setup( + name="mozhttpd", + version=PACKAGE_VERSION, + description="Python webserver intended for use with Mozilla testing", + long_description="see https://firefox-source-docs.mozilla.org/mozbase/index.html", + classifiers=[ + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 2 :: Only", + ], + # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers + keywords="mozilla", + author="Mozilla Automation and Testing Team", + author_email="tools@lists.mozilla.org", + url="https://wiki.mozilla.org/Auto-tools/Projects/Mozbase", + license="MPL", + packages=["mozhttpd"], + include_package_data=True, + zip_safe=False, + install_requires=deps, + entry_points=""" + # -*- Entry points: -*- + [console_scripts] + mozhttpd = mozhttpd:main + """, +) diff --git a/testing/mozbase/mozhttpd/tests/api.py b/testing/mozbase/mozhttpd/tests/api.py new file mode 100644 index 0000000000..c2fce58be9 --- /dev/null +++ b/testing/mozbase/mozhttpd/tests/api.py @@ -0,0 +1,381 @@ +#!/usr/bin/env python + +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +import collections +import json +import os + +import mozhttpd +import mozunit +import pytest +from six import ensure_binary, ensure_str +from six.moves.urllib.error import HTTPError +from six.moves.urllib.request import ( + HTTPHandler, + ProxyHandler, + Request, + build_opener, + install_opener, + urlopen, +) + + +def httpd_url(httpd, path, querystr=None): + """Return the URL to a started MozHttpd server for the given info.""" + + url = "http://127.0.0.1:{port}{path}".format( + port=httpd.httpd.server_port, + path=path, + ) + + if querystr is not None: + url = "{url}?{querystr}".format( + url=url, + querystr=querystr, + ) + + return url + + +@pytest.fixture(name="num_requests") +def fixture_num_requests(): + """Return a defaultdict to count requests to HTTP handlers.""" + return collections.defaultdict(int) + + +@pytest.fixture(name="try_get") +def fixture_try_get(num_requests): + """Return a function to try GET requests to the server.""" + + def try_get(httpd, querystr): + """Try GET requests to the server.""" + + num_requests["get_handler"] = 0 + + f = urlopen(httpd_url(httpd, "/api/resource/1", querystr)) + + assert f.getcode() == 200 + assert json.loads(f.read()) == {"called": 1, "id": "1", "query": querystr} + assert num_requests["get_handler"] == 1 + + return try_get + + +@pytest.fixture(name="try_post") +def fixture_try_post(num_requests): + """Return a function to try POST calls to the server.""" + + def try_post(httpd, querystr): + """Try POST calls to the server.""" + + num_requests["post_handler"] = 0 + + postdata = {"hamburgers": "1234"} + + f = urlopen( + httpd_url(httpd, "/api/resource/", querystr), + data=ensure_binary(json.dumps(postdata)), + ) + + assert f.getcode() == 201 + assert json.loads(f.read()) == { + "called": 1, + "data": postdata, + "query": querystr, + } + assert num_requests["post_handler"] == 1 + + return try_post + + +@pytest.fixture(name="try_del") +def fixture_try_del(num_requests): + """Return a function to try DEL calls to the server.""" + + def try_del(httpd, querystr): + """Try DEL calls to the server.""" + + num_requests["del_handler"] = 0 + + opener = build_opener(HTTPHandler) + request = 
Request(httpd_url(httpd, "/api/resource/1", querystr)) + request.get_method = lambda: "DEL" + f = opener.open(request) + + assert f.getcode() == 200 + assert json.loads(f.read()) == {"called": 1, "id": "1", "query": querystr} + assert num_requests["del_handler"] == 1 + + return try_del + + +@pytest.fixture(name="httpd_no_urlhandlers") +def fixture_httpd_no_urlhandlers(): + """Yields a started MozHttpd server with no URL handlers.""" + httpd = mozhttpd.MozHttpd(port=0) + httpd.start(block=False) + yield httpd + httpd.stop() + + +@pytest.fixture(name="httpd_with_docroot") +def fixture_httpd_with_docroot(num_requests): + """Yields a started MozHttpd server with docroot set.""" + + @mozhttpd.handlers.json_response + def get_handler(request, objid): + """Handler for HTTP GET requests.""" + + num_requests["get_handler"] += 1 + + return ( + 200, + { + "called": num_requests["get_handler"], + "id": objid, + "query": request.query, + }, + ) + + httpd = mozhttpd.MozHttpd( + port=0, + docroot=os.path.dirname(os.path.abspath(__file__)), + urlhandlers=[ + { + "method": "GET", + "path": "/api/resource/([^/]+)/?", + "function": get_handler, + } + ], + ) + + httpd.start(block=False) + yield httpd + httpd.stop() + + +@pytest.fixture(name="httpd") +def fixture_httpd(num_requests): + """Yield a started MozHttpd server.""" + + @mozhttpd.handlers.json_response + def get_handler(request, objid): + """Handler for HTTP GET requests.""" + + num_requests["get_handler"] += 1 + + return ( + 200, + { + "called": num_requests["get_handler"], + "id": objid, + "query": request.query, + }, + ) + + @mozhttpd.handlers.json_response + def post_handler(request): + """Handler for HTTP POST requests.""" + + num_requests["post_handler"] += 1 + + return ( + 201, + { + "called": num_requests["post_handler"], + "data": json.loads(request.body), + "query": request.query, + }, + ) + + @mozhttpd.handlers.json_response + def del_handler(request, objid): + """Handler for HTTP DEL requests.""" + + num_requests["del_handler"] += 1 + + return ( + 200, + { + "called": num_requests["del_handler"], + "id": objid, + "query": request.query, + }, + ) + + httpd = mozhttpd.MozHttpd( + port=0, + urlhandlers=[ + { + "method": "GET", + "path": "/api/resource/([^/]+)/?", + "function": get_handler, + }, + { + "method": "POST", + "path": "/api/resource/?", + "function": post_handler, + }, + { + "method": "DEL", + "path": "/api/resource/([^/]+)/?", + "function": del_handler, + }, + ], + ) + + httpd.start(block=False) + yield httpd + httpd.stop() + + +def test_api(httpd, try_get, try_post, try_del): + # GET requests + try_get(httpd, "") + try_get(httpd, "?foo=bar") + + # POST requests + try_post(httpd, "") + try_post(httpd, "?foo=bar") + + # DEL requests + try_del(httpd, "") + try_del(httpd, "?foo=bar") + + # GET: By default we don't serve any files if we just define an API + with pytest.raises(HTTPError) as exc_info: + urlopen(httpd_url(httpd, "/")) + + assert exc_info.value.code == 404 + + +def test_nonexistent_resources(httpd_no_urlhandlers): + # GET: Return 404 for non-existent endpoint + with pytest.raises(HTTPError) as excinfo: + urlopen(httpd_url(httpd_no_urlhandlers, "/api/resource/")) + assert excinfo.value.code == 404 + + # POST: POST should also return 404 + with pytest.raises(HTTPError) as excinfo: + urlopen( + httpd_url(httpd_no_urlhandlers, "/api/resource/"), + data=ensure_binary(json.dumps({})), + ) + assert excinfo.value.code == 404 + + # DEL: DEL should also return 404 + opener = build_opener(HTTPHandler) + request = 
Request(httpd_url(httpd_no_urlhandlers, "/api/resource/")) + request.get_method = lambda: "DEL" + + with pytest.raises(HTTPError) as excinfo: + opener.open(request) + assert excinfo.value.code == 404 + + +def test_api_with_docroot(httpd_with_docroot, try_get): + f = urlopen(httpd_url(httpd_with_docroot, "/")) + assert f.getcode() == 200 + assert "Directory listing for" in ensure_str(f.read()) + + # Make sure API methods still work + try_get(httpd_with_docroot, "") + try_get(httpd_with_docroot, "?foo=bar") + + +def index_contents(host): + """Return the expected index contents for the given host.""" + return "{host} index".format(host=host) + + +@pytest.fixture(name="hosts") +def fixture_hosts(): + """Returns a tuple of hosts.""" + return ("mozilla.com", "mozilla.org") + + +@pytest.fixture(name="docroot") +def fixture_docroot(tmpdir): + """Returns a path object to a temporary docroot directory.""" + docroot = tmpdir.mkdir("docroot") + index_file = docroot.join("index.html") + index_file.write(index_contents("*")) + + yield docroot + + docroot.remove() + + +@pytest.fixture(name="httpd_with_proxy_handler") +def fixture_httpd_with_proxy_handler(docroot): + """Yields a started MozHttpd server for the proxy test.""" + + httpd = mozhttpd.MozHttpd(port=0, docroot=str(docroot)) + httpd.start(block=False) + + port = httpd.httpd.server_port + proxy_support = ProxyHandler( + { + "http": "http://127.0.0.1:{port:d}".format(port=port), + } + ) + install_opener(build_opener(proxy_support)) + + yield httpd + + httpd.stop() + + # Reset proxy opener in case it changed + install_opener(None) + + +def test_proxy(httpd_with_proxy_handler, hosts): + for host in hosts: + f = urlopen("http://{host}/".format(host=host)) + assert f.getcode() == 200 + assert f.read() == ensure_binary(index_contents("*")) + + +@pytest.fixture(name="httpd_with_proxy_host_dirs") +def fixture_httpd_with_proxy_host_dirs(docroot, hosts): + for host in hosts: + index_file = docroot.mkdir(host).join("index.html") + index_file.write(index_contents(host)) + + httpd = mozhttpd.MozHttpd(port=0, docroot=str(docroot), proxy_host_dirs=True) + + httpd.start(block=False) + + port = httpd.httpd.server_port + proxy_support = ProxyHandler( + {"http": "http://127.0.0.1:{port:d}".format(port=port)} + ) + install_opener(build_opener(proxy_support)) + + yield httpd + + httpd.stop() + + # Reset proxy opener in case it changed + install_opener(None) + + +def test_proxy_separate_directories(httpd_with_proxy_host_dirs, hosts): + for host in hosts: + f = urlopen("http://{host}/".format(host=host)) + assert f.getcode() == 200 + assert f.read() == ensure_binary(index_contents(host)) + + unproxied_host = "notmozilla.org" + + with pytest.raises(HTTPError) as excinfo: + urlopen("http://{host}/".format(host=unproxied_host)) + + assert excinfo.value.code == 404 + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/mozhttpd/tests/baseurl.py b/testing/mozbase/mozhttpd/tests/baseurl.py new file mode 100644 index 0000000000..4bf923a8d7 --- /dev/null +++ b/testing/mozbase/mozhttpd/tests/baseurl.py @@ -0,0 +1,33 @@ +import mozhttpd +import mozunit +import pytest + + +@pytest.fixture(name="httpd") +def fixture_httpd(): + """Yields a started MozHttpd server.""" + httpd = mozhttpd.MozHttpd(port=0) + httpd.start(block=False) + yield httpd + httpd.stop() + + +def test_base_url(httpd): + port = httpd.httpd.server_port + + want = "http://127.0.0.1:{}/".format(port) + got = httpd.get_url() + assert got == want + + want = 
"http://127.0.0.1:{}/cheezburgers.html".format(port) + got = httpd.get_url(path="/cheezburgers.html") + assert got == want + + +def test_base_url_when_not_started(): + httpd = mozhttpd.MozHttpd(port=0) + assert httpd.get_url() is None + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/mozhttpd/tests/basic.py b/testing/mozbase/mozhttpd/tests/basic.py new file mode 100644 index 0000000000..a9dcf109e0 --- /dev/null +++ b/testing/mozbase/mozhttpd/tests/basic.py @@ -0,0 +1,50 @@ +#!/usr/bin/env python + +import os + +import mozfile +import mozhttpd +import mozunit +import pytest + + +@pytest.fixture(name="files") +def fixture_files(): + """Return a list of tuples with name and binary_string.""" + return [("small", os.urandom(128)), ("large", os.urandom(16384))] + + +@pytest.fixture(name="docroot") +def fixture_docroot(tmpdir, files): + """Yield a str path to docroot.""" + docroot = tmpdir.mkdir("docroot") + + for name, binary_string in files: + filename = docroot.join(name) + filename.write_binary(binary_string) + + yield str(docroot) + + docroot.remove() + + +@pytest.fixture(name="httpd_url") +def fixture_httpd_url(docroot): + """Yield the URL to a started MozHttpd server.""" + httpd = mozhttpd.MozHttpd(docroot=docroot) + httpd.start() + yield httpd.get_url() + httpd.stop() + + +def test_basic(httpd_url, files): + """Test that mozhttpd can serve files.""" + + # Retrieve file and check contents matchup + for name, binary_string in files: + retrieved_content = mozfile.load(httpd_url + name).read() + assert retrieved_content == binary_string + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/mozhttpd/tests/filelisting.py b/testing/mozbase/mozhttpd/tests/filelisting.py new file mode 100644 index 0000000000..195059a261 --- /dev/null +++ b/testing/mozbase/mozhttpd/tests/filelisting.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python + +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +import os +import re + +import mozhttpd +import mozunit +import pytest +from six import ensure_str +from six.moves.urllib.request import urlopen + + +@pytest.fixture(name="docroot") +def fixture_docroot(): + """Returns a docroot path.""" + return os.path.dirname(os.path.abspath(__file__)) + + +@pytest.fixture(name="httpd") +def fixture_httpd(docroot): + """Yields a started MozHttpd server.""" + httpd = mozhttpd.MozHttpd(port=0, docroot=docroot) + httpd.start(block=False) + yield httpd + httpd.stop() + + +@pytest.mark.parametrize( + "path", + [ + pytest.param("", id="no_params"), + pytest.param("?foo=bar&fleem=&foo=fleem", id="with_params"), + ], +) +def test_filelist(httpd, docroot, path): + f = urlopen( + "http://{host}:{port}/{path}".format( + host="127.0.0.1", port=httpd.httpd.server_port, path=path + ) + ) + + filelist = os.listdir(docroot) + + pattern = "\<[a-zA-Z0-9\-\_\.\=\"'\/\\\%\!\@\#\$\^\&\*\(\) :;]*\>" + + for line in f.readlines(): + subbed_lined = re.sub(pattern, "", ensure_str(line).strip("\n")) + webline = subbed_lined.strip("/").strip().strip("@") + + if ( + webline + and not webline.startswith("Directory listing for") + and not webline.startswith("= 22000: + major = 11 + os_version = "%d.%d" % (major, minor) +elif system.startswith(("MINGW", "MSYS_NT")): + # windows/mingw python build (msys) + info["os"] = "win" + os_version = version = unknown +elif system == "Linux": + # Attempt to use distro package to determine Linux distribution first. + # Failing that, fall back to use the platform method. + # Note that platform.linux_distribution() will be deprecated as of 3.8 + # and this block will be removed once support for 2.7/3.5 is dropped. + try: + from distro import linux_distribution + except ImportError: + from platform import linux_distribution + + output = linux_distribution() + (distribution, os_version, codename) = tuple(str(item.title()) for item in output) + + if not processor: + processor = machine + if not distribution: + distribution = "lfs" + if not os_version: + os_version = release + if not codename: + codename = "unknown" + version = "%s %s" % (distribution, os_version) + + if os.environ.get("WAYLAND_DISPLAY"): + info["display"] = "wayland" + elif os.environ.get("DISPLAY"): + info["display"] = "x11" + + info["os"] = "linux" + info["linux_distro"] = distribution +elif system in ["DragonFly", "FreeBSD", "NetBSD", "OpenBSD"]: + info["os"] = "bsd" + version = os_version = sys.platform +elif system == "Darwin": + (release, versioninfo, machine) = platform.mac_ver() + version = "OS X %s" % release + versionNums = release.split(".")[:2] + os_version = "%s.%s" % (versionNums[0], versionNums[1]) + info["os"] = "mac" +elif sys.platform in ("solaris", "sunos5"): + info["os"] = "unix" + os_version = version = sys.platform +else: + os_version = version = unknown + +info["apple_silicon"] = False +if ( + info["os"] == "mac" + and float(os_version) > 10.15 + and processor == "arm" + and bits == "64bit" +): + info["apple_silicon"] = True + +info["apple_catalina"] = False +if info["os"] == "mac" and float(os_version) == 10.15: + info["apple_catalina"] = True + +info["win10_2009"] = False +if info["os"] == "win" and version == "10.0.19045": + info["win10_2009"] = True + +info["win11_2009"] = False +if info["os"] == "win" and version == "10.0.22621": + info["win11_2009"] = True + +info["version"] = version +info["os_version"] = StringVersion(os_version) +info["is_ubuntu"] = "Ubuntu" in version + + +# processor type and bits +if processor in ["i386", "i686"]: + if bits == 
"32bit": + processor = "x86" + elif bits == "64bit": + processor = "x86_64" +elif processor.upper() == "AMD64": + bits = "64bit" + processor = "x86_64" +elif processor.upper() == "ARM64": + bits = "64bit" + processor = "aarch64" +elif processor == "Power Macintosh": + processor = "ppc" +elif processor == "arm" and bits == "64bit": + processor = "aarch64" + +bits = re.search(r"(\d+)bit", bits).group(1) +info.update( + { + "processor": processor, + "bits": int(bits), + } +) + +# we want to transition to this instead of using `!debug`, etc. +info["arch"] = info["processor"] + + +if info["os"] == "linux": + import ctypes + import errno + + PR_SET_SECCOMP = 22 + SECCOMP_MODE_FILTER = 2 + ctypes.CDLL(find_library("c"), use_errno=True).prctl( + PR_SET_SECCOMP, SECCOMP_MODE_FILTER, 0 + ) + info["has_sandbox"] = ctypes.get_errno() == errno.EFAULT +else: + info["has_sandbox"] = True + +# standard value of choices, for easy inspection +choices = { + "os": ["linux", "bsd", "win", "mac", "unix"], + "bits": [32, 64], + "processor": ["x86", "x86_64", "ppc"], +} + + +def sanitize(info): + """Do some sanitization of input values, primarily + to handle universal Mac builds.""" + if "processor" in info and info["processor"] == "universal-x86-x86_64": + # If we're running on OS X 10.6 or newer, assume 64-bit + if release[:4] >= "10.6": # Note this is a string comparison + info["processor"] = "x86_64" + info["bits"] = 64 + else: + info["processor"] = "x86" + info["bits"] = 32 + + +# method for updating information + + +def update(new_info): + """ + Update the info. + + :param new_info: Either a dict containing the new info or a path/url + to a json file containing the new info. + """ + from six import string_types + + if isinstance(new_info, string_types): + # lazy import + import json + + import mozfile + + f = mozfile.load(new_info) + new_info = json.loads(f.read()) + f.close() + + info.update(new_info) + sanitize(info) + globals().update(info) + + # convenience data for os access + for os_name in choices["os"]: + globals()["is" + os_name.title()] = info["os"] == os_name + # unix is special + if isLinux or isBsd: # noqa + globals()["isUnix"] = True + + +def find_and_update_from_json(*dirs, **kwargs): + """Find a mozinfo.json file, load it, and update global symbol table. + + This method will first check the relevant objdir directory for the + necessary mozinfo.json file, if the current script is being run from a + Mozilla objdir. + + If the objdir directory did not supply the necessary data, this method + will then look for the required mozinfo.json file from the provided + tuple of directories. + + If file is found, the global symbols table is updated via a helper method. + + If no valid files are found, this method no-ops unless the raise_exception + kwargs is provided with explicit boolean value of True. + + :param tuple dirs: Directories in which to look for the file. + :param dict kwargs: optional values: + raise_exception: if True, exceptions are raised. + False by default. + :returns: None: default behavior if mozinfo.json cannot be found. + json_path: string representation of mozinfo.json path. + :raises: IOError: if raise_exception is True and file is not found. 
+ """ + # First, see if we're in an objdir + try: + from mozboot.mozconfig import MozconfigFindException + from mozbuild.base import BuildEnvironmentNotFoundException, MozbuildObject + + build = MozbuildObject.from_environment() + json_path = _os.path.join(build.topobjdir, "mozinfo.json") + if _os.path.isfile(json_path): + update(json_path) + return json_path + except ImportError: + pass + except (BuildEnvironmentNotFoundException, MozconfigFindException): + pass + + for d in dirs: + d = _os.path.abspath(d) + json_path = _os.path.join(d, "mozinfo.json") + if _os.path.isfile(json_path): + update(json_path) + return json_path + + # by default, exceptions are suppressed. Set this to True if otherwise + # desired. + if kwargs.get("raise_exception", False): + raise IOError("mozinfo.json could not be found.") + return None + + +def output_to_file(path): + import json + + with open(path, "w") as f: + f.write(json.dumps(info)) + + +update({}) + +# exports +__all__ = list(info.keys()) +__all__ += ["is" + os_name.title() for os_name in choices["os"]] +__all__ += [ + "info", + "unknown", + "main", + "choices", + "update", + "find_and_update_from_json", + "output_to_file", + "StringVersion", +] + + +def main(args=None): + # parse the command line + from optparse import OptionParser + + parser = OptionParser(description=__doc__) + for key in choices: + parser.add_option( + "--%s" % key, + dest=key, + action="store_true", + default=False, + help="display choices for %s" % key, + ) + options, args = parser.parse_args() + + # args are JSON blobs to override info + if args: + # lazy import + import json + + for arg in args: + if _os.path.exists(arg): + string = open(arg).read() + else: + string = arg + update(json.loads(string)) + + # print out choices if requested + flag = False + for key, value in options.__dict__.items(): + if value is True: + print( + "%s choices: %s" + % (key, " ".join([str(choice) for choice in choices[key]])) + ) + flag = True + if flag: + return + + # otherwise, print out all info + for key, value in info.items(): + print("%s: %s" % (key, value)) + + +if __name__ == "__main__": + main() diff --git a/testing/mozbase/mozinfo/mozinfo/string_version.py b/testing/mozbase/mozinfo/mozinfo/string_version.py new file mode 100644 index 0000000000..fc1c5b46c6 --- /dev/null +++ b/testing/mozbase/mozinfo/mozinfo/string_version.py @@ -0,0 +1,73 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +import re + +import six + + +class StringVersion(six.text_type): + """ + A string version that can be compared with comparison operators. + """ + + # Pick out numeric and non-numeric parts (a match group for each type). + pat = re.compile(r"(\d+)|([^\d.]+)") + + def __init__(self, vstring): + super(StringVersion, self).__init__() + + # We'll use unicode internally. + # This check is mainly for python2 strings (which are bytes). + if isinstance(vstring, bytes): + vstring = vstring.decode("ascii") + + self.vstring = vstring + + # Store parts as strings to ease comparisons. + self.version = [] + parts = self.pat.findall(vstring) + # Pad numeric parts with leading zeros for ordering. 
+ for i, obj in enumerate(parts): + if obj[0]: + self.version.append(obj[0].zfill(8)) + else: + self.version.append(obj[1]) + + def __str__(self): + return self.vstring + + def __repr__(self): + return "StringVersion ('%s')" % str(self) + + def _cmp(self, other): + if not isinstance(other, StringVersion): + other = StringVersion(other) + + if self.version == other.version: + return 0 + if self.version < other.version: + return -1 + if self.version > other.version: + return 1 + + def __hash__(self): + # pylint --py3k: W1641 + return hash(self.version) + + # operator overloads + def __eq__(self, other): + return self._cmp(other) == 0 + + def __lt__(self, other): + return self._cmp(other) < 0 + + def __le__(self, other): + return self._cmp(other) <= 0 + + def __gt__(self, other): + return self._cmp(other) > 0 + + def __ge__(self, other): + return self._cmp(other) >= 0 diff --git a/testing/mozbase/mozinfo/setup.cfg b/testing/mozbase/mozinfo/setup.cfg new file mode 100644 index 0000000000..3c6e79cf31 --- /dev/null +++ b/testing/mozbase/mozinfo/setup.cfg @@ -0,0 +1,2 @@ +[bdist_wheel] +universal=1 diff --git a/testing/mozbase/mozinfo/setup.py b/testing/mozbase/mozinfo/setup.py new file mode 100644 index 0000000000..87db88d1e4 --- /dev/null +++ b/testing/mozbase/mozinfo/setup.py @@ -0,0 +1,41 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +from setuptools import setup + +PACKAGE_VERSION = "1.2.3" + +# dependencies +deps = [ + "distro >= 1.4.0", + "mozfile >= 0.12", +] + +setup( + name="mozinfo", + version=PACKAGE_VERSION, + description="Library to get system information for use in Mozilla testing", + long_description="see https://firefox-source-docs.mozilla.org/mozbase/index.html", + classifiers=[ + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.5", + "Development Status :: 5 - Production/Stable", + ], + # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers + keywords="mozilla", + author="Mozilla Automation and Testing Team", + author_email="tools@lists.mozilla.org", + url="https://wiki.mozilla.org/Auto-tools/Projects/Mozbase", + license="MPL", + packages=["mozinfo"], + include_package_data=True, + zip_safe=False, + install_requires=deps, + entry_points=""" + # -*- Entry points: -*- + [console_scripts] + mozinfo = mozinfo:main + """, +) diff --git a/testing/mozbase/mozinfo/tests/manifest.toml b/testing/mozbase/mozinfo/tests/manifest.toml new file mode 100644 index 0000000000..147e23872e --- /dev/null +++ b/testing/mozbase/mozinfo/tests/manifest.toml @@ -0,0 +1,4 @@ +[DEFAULT] +subsuite = "mozbase" + +["test.py"] diff --git a/testing/mozbase/mozinfo/tests/test.py b/testing/mozbase/mozinfo/tests/test.py new file mode 100644 index 0000000000..f1d971d317 --- /dev/null +++ b/testing/mozbase/mozinfo/tests/test.py @@ -0,0 +1,176 @@ +#!/usr/bin/env python +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. 
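Before the unit tests, a brief sketch of how the mozinfo module above is typically consumed. The names used (os, processor, bits, os_version, the isMac flag, info, update) all come from the code above; the printed values are platform-dependent and the update() key is an arbitrary example.

import mozinfo

# Module-level globals mirror the info dict that is built at import time.
print(mozinfo.os, mozinfo.processor, mozinfo.bits)  # e.g. "linux", "x86_64", 64
print(mozinfo.info["os_version"])                   # a StringVersion instance

# os_version is a StringVersion, so "10.10" compares greater than "10.2".
if mozinfo.isMac and mozinfo.os_version >= "10.15":
    print("Catalina or later")

# update() merges new keys into info and re-exports them as globals.
mozinfo.update({"foo": 123})
assert mozinfo.info["foo"] == 123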
+ +import json +import os +import sys +from importlib import reload +from unittest import mock + +import mozinfo +import mozunit +import pytest + + +@pytest.fixture(autouse=True) +def on_every_test(): + # per-test set up + reload(mozinfo) + + # When running from an objdir mozinfo will use a build generated json file + # instead of the ones created for testing. Prevent that from happening. + # See bug 896038 for details. + sys.modules["mozbuild"] = None + + yield + + # per-test tear down + del sys.modules["mozbuild"] + + +def test_basic(): + """Test that mozinfo has a few attributes.""" + assert mozinfo.os is not None + # should have isFoo == True where os == "foo" + assert getattr(mozinfo, "is" + mozinfo.os[0].upper() + mozinfo.os[1:]) + + +def test_update(): + """Test that mozinfo.update works.""" + mozinfo.update({"foo": 123}) + assert mozinfo.info["foo"] == 123 + + +def test_update_file(tmpdir): + """Test that mozinfo.update can load a JSON file.""" + j = os.path.join(tmpdir, "mozinfo.json") + with open(j, "w") as f: + f.write(json.dumps({"foo": "xyz"})) + mozinfo.update(j) + assert mozinfo.info["foo"] == "xyz" + + +def test_update_file_invalid_json(tmpdir): + """Test that mozinfo.update handles invalid JSON correctly""" + j = os.path.join(tmpdir, "test.json") + with open(j, "w") as f: + f.write('invalid{"json":') + with pytest.raises(ValueError): + mozinfo.update([j]) + + +def test_find_and_update_file(tmpdir): + """Test that mozinfo.find_and_update_from_json can + find mozinfo.json in a directory passed to it.""" + j = os.path.join(tmpdir, "mozinfo.json") + with open(j, "w") as f: + f.write(json.dumps({"foo": "abcdefg"})) + assert mozinfo.find_and_update_from_json(tmpdir) == j + assert mozinfo.info["foo"] == "abcdefg" + + +def test_find_and_update_file_no_argument(): + """Test that mozinfo.find_and_update_from_json no-ops on not being + given any arguments. + """ + assert mozinfo.find_and_update_from_json() is None + + +def test_find_and_update_file_invalid_json(tmpdir): + """Test that mozinfo.find_and_update_from_json can + handle invalid JSON""" + j = os.path.join(tmpdir, "mozinfo.json") + with open(j, "w") as f: + f.write('invalid{"json":') + with pytest.raises(ValueError): + mozinfo.find_and_update_from_json(tmpdir) + + +def test_find_and_update_file_raise_exception(): + """Test that mozinfo.find_and_update_from_json raises + an IOError when exceptions are unsuppressed. + """ + with pytest.raises(IOError): + mozinfo.find_and_update_from_json(raise_exception=True) + + +def test_find_and_update_file_suppress_exception(): + """Test that mozinfo.find_and_update_from_json suppresses + an IOError exception if a False boolean value is + provided as the only argument. + """ + assert mozinfo.find_and_update_from_json(raise_exception=False) is None + + +def test_find_and_update_file_mozbuild(tmpdir): + """Test that mozinfo.find_and_update_from_json can + find mozinfo.json using the mozbuild module.""" + j = os.path.join(tmpdir, "mozinfo.json") + with open(j, "w") as f: + f.write(json.dumps({"foo": "123456"})) + m = mock.MagicMock() + # Mock the value of MozbuildObject.from_environment().topobjdir. 
+ m.MozbuildObject.from_environment.return_value.topobjdir = tmpdir + + mocked_modules = { + "mozbuild": m, + "mozbuild.base": m, + "mozbuild.mozconfig": m, + } + with mock.patch.dict(sys.modules, mocked_modules): + assert mozinfo.find_and_update_from_json() == j + assert mozinfo.info["foo"] == "123456" + + +def test_output_to_file(tmpdir): + """Test that mozinfo.output_to_file works.""" + path = os.path.join(tmpdir, "mozinfo.json") + mozinfo.output_to_file(path) + assert open(path).read() == json.dumps(mozinfo.info) + + +def test_os_version_is_a_StringVersion(): + assert isinstance(mozinfo.os_version, mozinfo.StringVersion) + + +def test_compare_to_string(): + version = mozinfo.StringVersion("10.10") + + assert version > "10.2" + assert "11" > version + assert version >= "10.10" + assert "10.11" >= version + assert version == "10.10" + assert "10.10" == version + assert version != "10.2" + assert "11" != version + assert version < "11.8.5" + assert "10.2" < version + assert version <= "11" + assert "10.10" <= version + + # Can have non-numeric versions (Bug 1654915) + assert version != mozinfo.StringVersion("Testing") + assert mozinfo.StringVersion("Testing") != version + assert mozinfo.StringVersion("") == "" + assert "" == mozinfo.StringVersion("") + + a = mozinfo.StringVersion("1.2.5a") + b = mozinfo.StringVersion("1.2.5b") + assert a < b + assert b > a + + # Make sure we can compare against unicode (for python 2). + assert a == "1.2.5a" + assert "1.2.5a" == a + + +def test_to_string(): + assert "10.10" == str(mozinfo.StringVersion("10.10")) + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/mozinstall/mozinstall/__init__.py b/testing/mozbase/mozinstall/mozinstall/__init__.py new file mode 100644 index 0000000000..09c6d10a3d --- /dev/null +++ b/testing/mozbase/mozinstall/mozinstall/__init__.py @@ -0,0 +1,6 @@ +# flake8: noqa +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +from .mozinstall import * diff --git a/testing/mozbase/mozinstall/mozinstall/mozinstall.py b/testing/mozbase/mozinstall/mozinstall/mozinstall.py new file mode 100644 index 0000000000..7ff5b52d18 --- /dev/null +++ b/testing/mozbase/mozinstall/mozinstall/mozinstall.py @@ -0,0 +1,443 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +import os +import plistlib +import shutil +import subprocess +import sys +import tarfile +import tempfile +import time +import zipfile +from optparse import OptionParser + +import mozfile +import mozinfo +import requests +from six import PY3, reraise + +try: + import pefile + + has_pefile = True +except ImportError: + has_pefile = False + + +TIMEOUT_UNINSTALL = 60 + + +class InstallError(Exception): + """Thrown when installation fails. Includes traceback if available.""" + + +class InvalidBinary(Exception): + """Thrown when the binary cannot be found after the installation.""" + + +class InvalidSource(Exception): + """Thrown when the specified source is not a recognized file type. + + Supported types: + Linux: tar.gz, tar.bz2 + Mac: dmg + Windows: zip, exe + + """ + + +class UninstallError(Exception): + """Thrown when uninstallation fails. 
Includes traceback if available.""" + + +def _readPlist(path): + if PY3: + with open(path, "rb") as fp: + return plistlib.load(fp) + return plistlib.readPlist(path) + + +def get_binary(path, app_name): + """Find the binary in the specified path, and return its path. If binary is + not found throw an InvalidBinary exception. + + :param path: Path within to search for the binary + :param app_name: Application binary without file extension to look for + """ + binary = None + + # On OS X we can get the real binary from the app bundle + if mozinfo.isMac: + plist = "%s/Contents/Info.plist" % path + if not os.path.isfile(plist): + raise InvalidBinary("%s/Contents/Info.plist not found" % path) + + binary = os.path.join( + path, "Contents/MacOS/", _readPlist(plist)["CFBundleExecutable"] + ) + + else: + app_name = app_name.lower() + + if mozinfo.isWin: + app_name = app_name + ".exe" + + for root, dirs, files in os.walk(path): + for filename in files: + # os.access evaluates to False for some reason, so not using it + if filename.lower() == app_name: + binary = os.path.realpath(os.path.join(root, filename)) + break + + if not binary: + # The expected binary has not been found. + raise InvalidBinary('"%s" does not contain a valid binary.' % path) + + return binary + + +def install(src, dest): + """Install a zip, exe, tar.gz, tar.bz2 or dmg file, and return the path of + the installation folder. + + :param src: Path to the install file + :param dest: Path to install to (to ensure we do not overwrite any existent + files the folder should not exist yet) + """ + if not is_installer(src): + msg = "{} is not a valid installer file".format(src) + if "://" in src: + try: + return _install_url(src, dest) + except Exception: + exc, val, tb = sys.exc_info() + error = InvalidSource("{} ({})".format(msg, val)) + reraise(InvalidSource, error, tb) + raise InvalidSource(msg) + + src = os.path.realpath(src) + dest = os.path.realpath(dest) + + did_we_create = False + if not os.path.exists(dest): + did_we_create = True + os.makedirs(dest) + + trbk = None + try: + install_dir = None + if src.lower().endswith(".dmg"): + install_dir = _install_dmg(src, dest) + elif src.lower().endswith(".exe"): + install_dir = _install_exe(src, dest) + elif src.lower().endswith(".msix"): + install_dir = _install_msix(src) + elif zipfile.is_zipfile(src) or tarfile.is_tarfile(src): + install_dir = mozfile.extract(src, dest)[0] + + return install_dir + + except BaseException: + cls, exc, trbk = sys.exc_info() + if did_we_create: + try: + # try to uninstall this properly + uninstall(dest) + except Exception: + # uninstall may fail, let's just try to clean the folder + # in this case + try: + mozfile.remove(dest) + except Exception: + pass + if issubclass(cls, Exception): + error = InstallError('Failed to install "%s (%s)"' % (src, str(exc))) + reraise(InstallError, error, trbk) + # any other kind of exception like KeyboardInterrupt is just re-raised. + reraise(cls, exc, trbk) + + finally: + # trbk won't get GC'ed due to circular reference + # http://docs.python.org/library/sys.html#sys.exc_info + del trbk + + +def is_installer(src): + """Tests if the given file is a valid installer package. + + Supported types: + Linux: tar.gz, tar.bz2 + Mac: dmg + Windows: zip, exe + + On Windows pefile will be used to determine if the executable is the + right type, if it is installed on the system. + + :param src: Path to the install file. 
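
    Example (illustrative; assumes the named files exist on disk):

        is_installer("firefox.tar.bz2")  # True on Linux, False on Mac/Windows
        is_installer("firefox.dmg")      # True only on Mac
        is_installer("readme.txt")       # False on every platform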
+ """ + src = os.path.realpath(src) + + if not os.path.isfile(src): + return False + + if mozinfo.isLinux: + return tarfile.is_tarfile(src) + elif mozinfo.isMac: + return src.lower().endswith(".dmg") + elif mozinfo.isWin: + if zipfile.is_zipfile(src): + return True + + if os.access(src, os.X_OK) and src.lower().endswith(".exe"): + if has_pefile: + # try to determine if binary is actually a gecko installer + pe_data = pefile.PE(src) + data = {} + for info in getattr(pe_data, "FileInfo", []): + if info.Key == "StringFileInfo": + for string in info.StringTable: + data.update(string.entries) + return "BuildID" not in data + else: + # pefile not available, just assume a proper binary was passed in + return True + + return False + + +def uninstall(install_folder): + """Uninstalls the application in the specified path. If it has been + installed via an installer on Windows, use the uninstaller first. + + :param install_folder: Path of the installation folder + + """ + # Uninstallation for MSIX applications is totally different than + # any other installs... + if "WindowsApps" in install_folder: + # At the time of writing, the package installation directory is always + # the package full name, so this assumption is valid (for now....). + packageFullName = install_folder.split("WindowsApps\\")[1].split("\\")[0] + cmd = f"powershell.exe Remove-AppxPackage -Package {packageFullName}" + subprocess.check_call(cmd) + return + + install_folder = os.path.realpath(install_folder) + assert os.path.isdir(install_folder), ( + 'installation folder "%s" exists.' % install_folder + ) + + # On Windows we have to use the uninstaller. If it's not available fallback + # to the directory removal code + if mozinfo.isWin: + uninstall_folder = "%s\\uninstall" % install_folder + log_file = "%s\\uninstall.log" % uninstall_folder + + if os.path.isfile(log_file): + trbk = None + try: + cmdArgs = ["%s\\uninstall\\helper.exe" % install_folder, "/S"] + result = subprocess.call(cmdArgs) + if result != 0: + raise Exception("Execution of uninstaller failed.") + + # The uninstaller spawns another process so the subprocess call + # returns immediately. We have to wait until the uninstall + # folder has been removed or until we run into a timeout. + end_time = time.time() + TIMEOUT_UNINSTALL + while os.path.exists(uninstall_folder): + time.sleep(1) + + if time.time() > end_time: + raise Exception("Failure removing uninstall folder.") + + except Exception as ex: + cls, exc, trbk = sys.exc_info() + error = UninstallError( + "Failed to uninstall %s (%s)" % (install_folder, str(ex)) + ) + reraise(UninstallError, error, trbk) + + finally: + # trbk won't get GC'ed due to circular reference + # http://docs.python.org/library/sys.html#sys.exc_info + del trbk + + # Ensure that we remove any trace of the installation. Even the uninstaller + # on Windows leaves files behind we have to explicitely remove. + mozfile.remove(install_folder) + + +def _install_url(url, dest): + """Saves a url to a temporary file, and passes that through to the + install function. 
+ + :param url: Url to the install file + :param dest: Path to install to (to ensure we do not overwrite any existent + files the folder should not exist yet) + """ + r = requests.get(url, stream=True) + name = tempfile.mkstemp()[1] + try: + with open(name, "w+b") as fh: + for chunk in r.iter_content(chunk_size=16 * 1024): + fh.write(chunk) + result = install(name, dest) + finally: + mozfile.remove(name) + return result + + +def _install_dmg(src, dest): + """Extract a dmg file into the destination folder and return the + application folder. + + src -- DMG image which has to be extracted + dest -- the path to extract to + + """ + appDir = None + try: + # According to the Apple doc, the hdiutil output is stable and is based on the tab + # separators + # Therefor, $3 should give us the mounted path + appDir = ( + subprocess.check_output( + 'hdiutil attach -nobrowse -noautoopen "%s"' + "|grep /Volumes/" + "|awk 'BEGIN{FS=\"\t\"} {print $3}'" % str(src), + shell=True, + ) + .strip() + .decode("ascii") + ) + + for appFile in os.listdir(appDir): + if appFile.endswith(".app"): + appName = appFile + break + + mounted_path = os.path.join(appDir, appName) + + dest = os.path.join(dest, appName) + + # copytree() would fail if dest already exists. + if os.path.exists(dest): + raise InstallError('App bundle "%s" already exists.' % dest) + + shutil.copytree(mounted_path, dest, False) + + finally: + if appDir: + subprocess.check_call('hdiutil detach "%s" -quiet' % appDir, shell=True) + + return dest + + +def _install_exe(src, dest): + """Run the MSI installer to silently install the application into the + destination folder. Return the folder path. + + Arguments: + src -- MSI installer to be executed + dest -- the path to install to + + """ + # The installer doesn't automatically create a sub folder. Lets guess the + # best name from the src file name + filename = os.path.basename(src) + dest = os.path.join(dest, filename.split(".")[0]) + + # possibly gets around UAC in vista (still need to run as administrator) + os.environ["__compat_layer"] = "RunAsInvoker" + cmd = '"%s" /extractdir=%s' % (src, os.path.realpath(dest)) + + subprocess.check_call(cmd) + + return dest + + +def _get_msix_install_location(pkg): + with zipfile.ZipFile(pkg) as zf: + # First, we pull the app identity out of the AppxManifest... 
+ with zf.open("AppxManifest.xml") as am: + for line in am.readlines(): + line = line.decode("utf-8") + if "= 0.7", + "mozfile >= 1.0", + "requests", + "six >= 1.13.0", +] + +setup( + name="mozInstall", + version=PACKAGE_VERSION, + description="package for installing and uninstalling Mozilla applications", + long_description="see https://firefox-source-docs.mozilla.org/mozbase/index.html", + # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers + classifiers=[ + "Environment :: Console", + "Intended Audience :: Developers", + "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)", + "Natural Language :: English", + "Operating System :: OS Independent", + "Topic :: Software Development :: Libraries :: Python Modules", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + ], + keywords="mozilla", + author="Mozilla Automation and Tools team", + author_email="tools@lists.mozilla.org", + url="https://wiki.mozilla.org/Auto-tools/Projects/Mozbase", + license="MPL 2.0", + packages=["mozinstall"], + include_package_data=True, + zip_safe=False, + install_requires=deps, + # we have to generate two more executables for those systems that cannot run as Administrator + # and the filename containing "install" triggers the UAC + entry_points=""" + # -*- Entry points: -*- + [console_scripts] + mozinstall = mozinstall:install_cli + mozuninstall = mozinstall:uninstall_cli + moz_add_to_system = mozinstall:install_cli + moz_remove_from_system = mozinstall:uninstall_cli + """, +) diff --git a/testing/mozbase/mozinstall/tests/conftest.py b/testing/mozbase/mozinstall/tests/conftest.py new file mode 100644 index 0000000000..132547a96b --- /dev/null +++ b/testing/mozbase/mozinstall/tests/conftest.py @@ -0,0 +1,14 @@ +import pytest + + +@pytest.fixture +def get_installer(request): + def _get_installer(extension): + """Get path to the installer for the specified extension.""" + stub_dir = request.node.fspath.dirpath("installer_stubs") + + # We had to remove firefox.exe since it is not valid for mozinstall 1.12 and higher + # Bug 1157352 - We should grab a firefox.exe from the build process or download it + return stub_dir.join("firefox.{}".format(extension)).strpath + + return _get_installer diff --git a/testing/mozbase/mozinstall/tests/installer_stubs/firefox.dmg b/testing/mozbase/mozinstall/tests/installer_stubs/firefox.dmg new file mode 100644 index 0000000000..dd9c779dfa Binary files /dev/null and b/testing/mozbase/mozinstall/tests/installer_stubs/firefox.dmg differ diff --git a/testing/mozbase/mozinstall/tests/installer_stubs/firefox.tar.bz2 b/testing/mozbase/mozinstall/tests/installer_stubs/firefox.tar.bz2 new file mode 100644 index 0000000000..cb046a0e7f Binary files /dev/null and b/testing/mozbase/mozinstall/tests/installer_stubs/firefox.tar.bz2 differ diff --git a/testing/mozbase/mozinstall/tests/installer_stubs/firefox.zip b/testing/mozbase/mozinstall/tests/installer_stubs/firefox.zip new file mode 100644 index 0000000000..7c3f61a5e9 Binary files /dev/null and b/testing/mozbase/mozinstall/tests/installer_stubs/firefox.zip differ diff --git a/testing/mozbase/mozinstall/tests/manifest.toml b/testing/mozbase/mozinstall/tests/manifest.toml new file mode 100644 index 0000000000..43c0d29fdf --- /dev/null +++ b/testing/mozbase/mozinstall/tests/manifest.toml @@ -0,0 +1,12 @@ +[DEFAULT] +subsuite = "mozbase" + +["test_binary.py"] +skip-if = ["os == 'mac'"] + +["test_install.py"] +skip-if = ["os == 'mac'"] # intermittent + +["test_is_installer.py"] + 
+["test_uninstall.py"] diff --git a/testing/mozbase/mozinstall/tests/test_binary.py b/testing/mozbase/mozinstall/tests/test_binary.py new file mode 100644 index 0000000000..6454c78ef5 --- /dev/null +++ b/testing/mozbase/mozinstall/tests/test_binary.py @@ -0,0 +1,50 @@ +import os + +import mozinfo +import mozinstall +import mozunit +import pytest + + +@pytest.mark.skipif( + mozinfo.isWin, + reason="Bug 1157352 - New firefox.exe needed for mozinstall 1.12 and higher.", +) +def test_get_binary(tmpdir, get_installer): + """Test to retrieve binary from install path.""" + if mozinfo.isLinux: + installdir = mozinstall.install(get_installer("tar.bz2"), tmpdir.strpath) + binary = os.path.join(installdir, "firefox") + + assert mozinstall.get_binary(installdir, "firefox") == binary + + elif mozinfo.isWin: + installdir_exe = mozinstall.install( + get_installer("exe"), tmpdir.join("exe").strpath + ) + binary_exe = os.path.join(installdir_exe, "core", "firefox.exe") + + assert mozinstall.get_binary(installdir_exe, "firefox") == binary_exe + + installdir_zip = mozinstall.install( + get_installer("zip"), tmpdir.join("zip").strpath + ) + binary_zip = os.path.join(installdir_zip, "firefox.exe") + + assert mozinstall.get_binary(installdir_zip, "firefox") == binary_zip + + elif mozinfo.isMac: + installdir = mozinstall.install(get_installer("dmg"), tmpdir.strpath) + binary = os.path.join(installdir, "Contents", "MacOS", "firefox") + + assert mozinstall.get_binary(installdir, "firefox") == binary + + +def test_get_binary_error(tmpdir): + """Test that an InvalidBinary error is raised.""" + with pytest.raises(mozinstall.InvalidBinary): + mozinstall.get_binary(tmpdir.strpath, "firefox") + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/mozinstall/tests/test_install.py b/testing/mozbase/mozinstall/tests/test_install.py new file mode 100644 index 0000000000..2dceb2cc78 --- /dev/null +++ b/testing/mozbase/mozinstall/tests/test_install.py @@ -0,0 +1,90 @@ +import subprocess + +import mozinfo +import mozinstall +import mozunit +import pytest + + +@pytest.mark.skipif( + mozinfo.isWin, + reason="Bug 1157352 - New firefox.exe needed for mozinstall 1.12 and higher.", +) +def test_is_installer(request, get_installer): + """Test that we can identify a correct installer.""" + if mozinfo.isLinux: + assert mozinstall.is_installer(get_installer("tar.bz2")) + + if mozinfo.isWin: + # test zip installer + assert mozinstall.is_installer(get_installer("zip")) + + # test exe installer + assert mozinstall.is_installer(get_installer("exe")) + + try: + # test stub browser file + # without pefile on the system this test will fail + import pefile # noqa + + stub_exe = ( + request.node.fspath.dirpath("build_stub").join("firefox.exe").strpath + ) + assert not mozinstall.is_installer(stub_exe) + except ImportError: + pass + + if mozinfo.isMac: + assert mozinstall.is_installer(get_installer("dmg")) + + +def test_invalid_source_error(get_installer): + """Test that InvalidSource error is raised with an incorrect installer.""" + if mozinfo.isLinux: + with pytest.raises(mozinstall.InvalidSource): + mozinstall.install(get_installer("dmg"), "firefox") + + elif mozinfo.isWin: + with pytest.raises(mozinstall.InvalidSource): + mozinstall.install(get_installer("tar.bz2"), "firefox") + + elif mozinfo.isMac: + with pytest.raises(mozinstall.InvalidSource): + mozinstall.install(get_installer("tar.bz2"), "firefox") + + # Test an invalid url handler + with pytest.raises(mozinstall.InvalidSource): + 
mozinstall.install("file://foo.bar", "firefox") + + +@pytest.mark.skipif( + mozinfo.isWin, + reason="Bug 1157352 - New firefox.exe needed for mozinstall 1.12 and higher.", +) +def test_install(tmpdir, get_installer): + """Test to install an installer.""" + if mozinfo.isLinux: + installdir = mozinstall.install(get_installer("tar.bz2"), tmpdir.strpath) + assert installdir == tmpdir.join("firefox").strpath + + elif mozinfo.isWin: + installdir_exe = mozinstall.install( + get_installer("exe"), tmpdir.join("exe").strpath + ) + assert installdir_exe == tmpdir.join("exe", "firefox").strpath + + installdir_zip = mozinstall.install( + get_installer("zip"), tmpdir.join("zip").strpath + ) + assert installdir_zip == tmpdir.join("zip", "firefox").strpath + + elif mozinfo.isMac: + installdir = mozinstall.install(get_installer("dmg"), tmpdir.strpath) + assert installdir == tmpdir.realpath().join("Firefox Stub.app").strpath + + mounted_images = subprocess.check_output(["hdiutil", "info"]).decode("ascii") + assert get_installer("dmg") not in mounted_images + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/mozinstall/tests/test_is_installer.py b/testing/mozbase/mozinstall/tests/test_is_installer.py new file mode 100644 index 0000000000..057c29f968 --- /dev/null +++ b/testing/mozbase/mozinstall/tests/test_is_installer.py @@ -0,0 +1,40 @@ +import mozinfo +import mozinstall +import mozunit +import pytest + + +@pytest.mark.skipif( + mozinfo.isWin, + reason="Bug 1157352 - New firefox.exe needed for mozinstall 1.12 and higher.", +) +def test_is_installer(request, get_installer): + """Test that we can identify a correct installer.""" + if mozinfo.isLinux: + assert mozinstall.is_installer(get_installer("tar.bz2")) + + if mozinfo.isWin: + # test zip installer + assert mozinstall.is_installer(get_installer("zip")) + + # test exe installer + assert mozinstall.is_installer(get_installer("exe")) + + try: + # test stub browser file + # without pefile on the system this test will fail + import pefile # noqa + + stub_exe = ( + request.node.fspath.dirpath("build_stub").join("firefox.exe").strpath + ) + assert not mozinstall.is_installer(stub_exe) + except ImportError: + pass + + if mozinfo.isMac: + assert mozinstall.is_installer(get_installer("dmg")) + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/mozinstall/tests/test_uninstall.py b/testing/mozbase/mozinstall/tests/test_uninstall.py new file mode 100644 index 0000000000..45298a834d --- /dev/null +++ b/testing/mozbase/mozinstall/tests/test_uninstall.py @@ -0,0 +1,39 @@ +import mozinfo +import mozinstall +import mozunit +import py +import pytest + + +@pytest.mark.skipif( + mozinfo.isWin, + reason="Bug 1157352 - New firefox.exe needed for mozinstall 1.12 and higher.", +) +def test_uninstall(tmpdir, get_installer): + """Test to uninstall an installed binary.""" + if mozinfo.isLinux: + installdir = mozinstall.install(get_installer("tar.bz2"), tmpdir.strpath) + mozinstall.uninstall(installdir) + assert not py.path.local(installdir).check() + + elif mozinfo.isWin: + installdir_exe = mozinstall.install( + get_installer("exe"), tmpdir.join("exe").strpath + ) + mozinstall.uninstall(installdir_exe) + assert not py.path.local(installdir).check() + + installdir_zip = mozinstall.install( + get_installer("zip"), tmpdir.join("zip").strpath + ) + mozinstall.uninstall(installdir_zip) + assert not py.path.local(installdir).check() + + elif mozinfo.isMac: + installdir = mozinstall.install(get_installer("dmg"), tmpdir.strpath) + 
mozinstall.uninstall(installdir) + assert not py.path.local(installdir).check() + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/mozleak/mozleak/__init__.py b/testing/mozbase/mozleak/mozleak/__init__.py new file mode 100644 index 0000000000..206806da0c --- /dev/null +++ b/testing/mozbase/mozleak/mozleak/__init__.py @@ -0,0 +1,12 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +""" +mozleak is a library for extracting memory leaks from leak logs files. +""" + +from .leaklog import process_leak_log +from .lsan import LSANLeaks + +__all__ = ["process_leak_log", "LSANLeaks"] diff --git a/testing/mozbase/mozleak/mozleak/leaklog.py b/testing/mozbase/mozleak/mozleak/leaklog.py new file mode 100644 index 0000000000..8a3ee5aee3 --- /dev/null +++ b/testing/mozbase/mozleak/mozleak/leaklog.py @@ -0,0 +1,255 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +import os +import re + +from geckoprocesstypes import process_types + + +def _get_default_logger(): + from mozlog import get_default_logger + + log = get_default_logger(component="mozleak") + + if not log: + import logging + + log = logging.getLogger(__name__) + return log + + +def process_single_leak_file( + leakLogFileName, + processType, + leakThreshold, + ignoreMissingLeaks, + log=None, + stackFixer=None, + scope=None, + allowed=None, +): + """Process a single leak log.""" + + # | |Per-Inst Leaked| Total Rem| + # 0 |TOTAL | 17 192| 419115886 2| + # 833 |nsTimerImpl | 60 120| 24726 2| + # 930 |Foo | 32 8| 100 1| + lineRe = re.compile( + r"^\s*\d+ \|" + r"(?P[^|]+)\|" + r"\s*(?P-?\d+)\s+(?P-?\d+)\s*\|" + r"\s*-?\d+\s+(?P-?\d+)" + ) + # The class name can contain spaces. We remove trailing whitespace later. + + log = log or _get_default_logger() + + if allowed is None: + allowed = {} + + processString = "%s process:" % processType + crashedOnPurpose = False + totalBytesLeaked = None + leakedObjectAnalysis = [] + leakedObjectNames = [] + recordLeakedObjects = False + header = [] + log.info("leakcheck | Processing leak log file %s" % leakLogFileName) + + with open(leakLogFileName, "r") as leaks: + for line in leaks: + if line.find("purposefully crash") > -1: + crashedOnPurpose = True + matches = lineRe.match(line) + if not matches: + # eg: the leak table header row + strippedLine = line.rstrip() + logLine = stackFixer(strippedLine) if stackFixer else strippedLine + if recordLeakedObjects: + log.info(logLine) + else: + header.append(logLine) + continue + name = matches.group("name").rstrip() + size = int(matches.group("size")) + bytesLeaked = int(matches.group("bytesLeaked")) + numLeaked = int(matches.group("numLeaked")) + # Output the raw line from the leak log table if it is for an object + # row that has been leaked. + if numLeaked != 0: + # If this is the TOTAL line, first output the header lines. + if name == "TOTAL": + for logLine in header: + log.info(logLine) + log.info(line.rstrip()) + # If this is the TOTAL line, we're done with the header lines, + # whether or not it leaked. 
+ if name == "TOTAL": + header = [] + # Analyse the leak log, but output later or it will interrupt the + # leak table + if name == "TOTAL": + # Multiple default processes can end up writing their bloat views into a single + # log, particularly on B2G. Eventually, these should be split into multiple + # logs (bug 1068869), but for now, we report the largest leak. + if totalBytesLeaked is not None: + log.warning( + "leakcheck | %s " + "multiple BloatView byte totals found" % processString + ) + else: + totalBytesLeaked = 0 + if bytesLeaked > totalBytesLeaked: + totalBytesLeaked = bytesLeaked + # Throw out the information we had about the previous bloat + # view. + leakedObjectNames = [] + leakedObjectAnalysis = [] + recordLeakedObjects = True + else: + recordLeakedObjects = False + if (size < 0 or bytesLeaked < 0 or numLeaked < 0) and leakThreshold >= 0: + log.error( + "TEST-UNEXPECTED-FAIL | leakcheck | %s negative leaks caught!" + % processString + ) + continue + if name != "TOTAL" and numLeaked != 0 and recordLeakedObjects: + leakedObjectNames.append(name) + leakedObjectAnalysis.append((numLeaked, name)) + + for numLeaked, name in leakedObjectAnalysis: + leak_allowed = False + if name in allowed: + limit = leak_allowed[name] + leak_allowed = limit is None or numLeaked <= limit + + log.mozleak_object( + processType, numLeaked, name, scope=scope, allowed=leak_allowed + ) + + log.mozleak_total( + processType, + totalBytesLeaked, + leakThreshold, + leakedObjectNames, + scope=scope, + induced_crash=crashedOnPurpose, + ignore_missing=ignoreMissingLeaks, + ) + + +def process_leak_log( + leak_log_file, + leak_thresholds=None, + ignore_missing_leaks=None, + log=None, + stack_fixer=None, + scope=None, + allowed=None, +): + """Process the leak log, including separate leak logs created + by child processes. + + Use this function if you want an additional PASS/FAIL summary. + It must be used with the |XPCOM_MEM_BLOAT_LOG| environment variable. + + The base of leak_log_file for a non-default process needs to end with + _proctype_pid12345.log + "proctype" is a string denoting the type of the process, which should + be the result of calling XRE_GeckoProcessTypeToString(). 12345 is + a series of digits that is the pid for the process. The .log is + optional. + + All other file names are treated as being for default processes. + + leak_thresholds should be a dict mapping process types to leak thresholds, + in bytes. If a process type is not present in the dict the threshold + will be 0. If the threshold is a negative number we additionally ignore + the case where there's negative leaks. + + allowed - A dictionary mapping process types to dictionaries containing + the number of objects of that type which are allowed to leak. + + scope - An identifier for the set of tests run during the browser session + (e.g. a directory name) + + ignore_missing_leaks should be a list of process types. If a process + creates a leak log without a TOTAL, then we report an error if it isn't + in the list ignore_missing_leaks. + + Returns a list of files that were processed. The caller is responsible for + cleaning these up. 
+ """ + log = log or _get_default_logger() + + processed_files = [] + + leakLogFile = leak_log_file + if not os.path.exists(leakLogFile): + log.warning("leakcheck | refcount logging is off, so leaks can't be detected!") + return processed_files + + log.info( + "leakcheck | Processing log file %s%s" + % (leakLogFile, (" for scope %s" % scope) if scope is not None else "") + ) + + leakThresholds = leak_thresholds or {} + ignoreMissingLeaks = ignore_missing_leaks or [] + + # This list is based on XRE_GeckoProcessTypeToString. ipdlunittest processes likely + # are not going to produce leak logs we will ever see. + + knownProcessTypes = [ + p.string_name for p in process_types if p.string_name != "ipdlunittest" + ] + + for processType in knownProcessTypes: + log.info( + "TEST-INFO | leakcheck | %s process: leak threshold set at %d bytes" + % (processType, leakThresholds.get(processType, 0)) + ) + + for processType in leakThresholds: + if processType not in knownProcessTypes: + log.error( + "TEST-UNEXPECTED-FAIL | leakcheck | " + "Unknown process type %s in leakThresholds" % processType + ) + + (leakLogFileDir, leakFileBase) = os.path.split(leakLogFile) + if leakFileBase[-4:] == ".log": + leakFileBase = leakFileBase[:-4] + fileNameRegExp = re.compile(r"_([a-z]*)_pid\d*.log$") + else: + fileNameRegExp = re.compile(r"_([a-z]*)_pid\d*$") + + for fileName in os.listdir(leakLogFileDir): + if fileName.find(leakFileBase) != -1: + thisFile = os.path.join(leakLogFileDir, fileName) + m = fileNameRegExp.search(fileName) + if m: + processType = m.group(1) + else: + processType = "default" + if processType not in knownProcessTypes: + log.error( + "TEST-UNEXPECTED-FAIL | leakcheck | " + "Leak log with unknown process type %s" % processType + ) + leakThreshold = leakThresholds.get(processType, 0) + process_single_leak_file( + thisFile, + processType, + leakThreshold, + processType in ignoreMissingLeaks, + log=log, + stackFixer=stack_fixer, + scope=scope, + allowed=allowed, + ) + processed_files.append(thisFile) + return processed_files diff --git a/testing/mozbase/mozleak/mozleak/lsan.py b/testing/mozbase/mozleak/mozleak/lsan.py new file mode 100644 index 0000000000..f6555eff2d --- /dev/null +++ b/testing/mozbase/mozleak/mozleak/lsan.py @@ -0,0 +1,220 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import re + + +class LSANLeaks(object): + + """ + Parses the log when running an LSAN build, looking for interesting stack frames + in allocation stacks + """ + + def __init__( + self, + logger, + scope=None, + allowed=None, + maxNumRecordedFrames=None, + allowAll=False, + ): + self.logger = logger + self.inReport = False + self.fatalError = False + self.symbolizerError = False + self.foundFrames = set() + self.recordMoreFrames = None + self.currStack = None + self.maxNumRecordedFrames = maxNumRecordedFrames if maxNumRecordedFrames else 4 + self.summaryData = None + self.scope = scope + self.allowedMatch = None + self.allowAll = allowAll + self.sawError = False + + # Don't various allocation-related stack frames, as they do not help much to + # distinguish different leaks. 
+ unescapedSkipList = [ + "malloc", + "js_malloc", + "malloc_", + "__interceptor_malloc", + "moz_xmalloc", + "calloc", + "js_calloc", + "calloc_", + "__interceptor_calloc", + "moz_xcalloc", + "realloc", + "js_realloc", + "realloc_", + "__interceptor_realloc", + "moz_xrealloc", + "new", + "js::MallocProvider", + ] + self.skipListRegExp = re.compile( + "^" + "|".join([re.escape(f) for f in unescapedSkipList]) + "$" + ) + + self.startRegExp = re.compile( + "==\d+==ERROR: LeakSanitizer: detected memory leaks" + ) + self.fatalErrorRegExp = re.compile( + "==\d+==LeakSanitizer has encountered a fatal error." + ) + self.symbolizerOomRegExp = re.compile( + "LLVMSymbolizer: error reading file: Cannot allocate memory" + ) + self.stackFrameRegExp = re.compile(" #\d+ 0x[0-9a-f]+ in ([^(= self.maxNumRecordedFrames: + self.recordMoreFrames = False + + def _cleanFrame(self, frame): + # Rust frames aren't properly demangled and in particular can contain + # some trailing junk of the form ::h[a-f0-9]{16} that changes with + # compiler versions; see bug 1507350. + return self.rustRegexp.sub("", frame) diff --git a/testing/mozbase/mozleak/setup.cfg b/testing/mozbase/mozleak/setup.cfg new file mode 100644 index 0000000000..3c6e79cf31 --- /dev/null +++ b/testing/mozbase/mozleak/setup.cfg @@ -0,0 +1,2 @@ +[bdist_wheel] +universal=1 diff --git a/testing/mozbase/mozleak/setup.py b/testing/mozbase/mozleak/setup.py new file mode 100644 index 0000000000..0c1ecb74a2 --- /dev/null +++ b/testing/mozbase/mozleak/setup.py @@ -0,0 +1,29 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +from setuptools import setup + +PACKAGE_NAME = "mozleak" +PACKAGE_VERSION = "1.0.0" + + +setup( + name=PACKAGE_NAME, + version=PACKAGE_VERSION, + description="Library for extracting memory leaks from leak logs files", + long_description="see https://firefox-source-docs.mozilla.org/mozbase/index.html", + classifiers=[ + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3.5", + ], + # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers + keywords="mozilla", + author="Mozilla Automation and Tools team", + author_email="tools@lists.mozilla.org", + url="https://wiki.mozilla.org/Auto-tools/Projects/Mozbase", + license="MPL", + packages=["mozleak"], + zip_safe=False, + install_requires=[], +) diff --git a/testing/mozbase/mozleak/tests/manifest.toml b/testing/mozbase/mozleak/tests/manifest.toml new file mode 100644 index 0000000000..133b0581e6 --- /dev/null +++ b/testing/mozbase/mozleak/tests/manifest.toml @@ -0,0 +1,4 @@ +[DEFAULT] +subsuite = "mozbase" + +["test_lsan.py"] diff --git a/testing/mozbase/mozleak/tests/test_lsan.py b/testing/mozbase/mozleak/tests/test_lsan.py new file mode 100644 index 0000000000..6a55a555b7 --- /dev/null +++ b/testing/mozbase/mozleak/tests/test_lsan.py @@ -0,0 +1,30 @@ +import mozunit +import pytest +from mozleak import lsan + + +@pytest.mark.parametrize( + ("input_", "expected"), + [ + ( + "alloc_system::platform::_$LT$impl$u20$core..alloc.." + "GlobalAlloc$u20$for$u20$alloc_system..System$GT$::" + "alloc::h5a1f0db41e296502", + "alloc_system::platform::_$LT$impl$u20$core..alloc.." + "GlobalAlloc$u20$for$u20$alloc_system..System$GT$::alloc", + ), + ( + "alloc_system::platform::_$LT$impl$u20$core..alloc.." 
+ "GlobalAlloc$u20$for$u20$alloc_system..System$GT$::alloc", + "alloc_system::platform::_$LT$impl$u20$core..alloc.." + "GlobalAlloc$u20$for$u20$alloc_system..System$GT$::alloc", + ), + ], +) +def test_clean(input_, expected): + leaks = lsan.LSANLeaks(None) + assert leaks._cleanFrame(input_) == expected + + +if __name__ == "__main__": + mozunit.main() diff --git a/testing/mozbase/mozlog/mozlog/__init__.py b/testing/mozbase/mozlog/mozlog/__init__.py new file mode 100644 index 0000000000..82d40b5c55 --- /dev/null +++ b/testing/mozbase/mozlog/mozlog/__init__.py @@ -0,0 +1,34 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +""" +Mozlog aims to standardize log handling and formatting within Mozilla. + +It implements a JSON-based structured logging protocol with convenience +facilities for recording test results. + +The old unstructured module is deprecated. It simply wraps Python's +logging_ module and adds a few convenience methods for logging test +results and events. +""" + +import sys + +from . import commandline, structuredlog, unstructured +from .proxy import get_proxy_logger +from .structuredlog import get_default_logger, set_default_logger + +# Backwards compatibility shim for consumers that use mozlog.structured +structured = sys.modules[__name__] +sys.modules["{}.structured".format(__name__)] = structured + +__all__ = [ + "commandline", + "structuredlog", + "unstructured", + "get_default_logger", + "set_default_logger", + "get_proxy_logger", + "structured", +] diff --git a/testing/mozbase/mozlog/mozlog/capture.py b/testing/mozbase/mozlog/mozlog/capture.py new file mode 100644 index 0000000000..75717d62c8 --- /dev/null +++ b/testing/mozbase/mozlog/mozlog/capture.py @@ -0,0 +1,96 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +import sys +import threading +from io import BytesIO + + +class LogThread(threading.Thread): + def __init__(self, queue, logger, level): + self.queue = queue + self.log_func = getattr(logger, level) + threading.Thread.__init__(self, name="Thread-Log") + self.daemon = True + + def run(self): + while True: + try: + msg = self.queue.get() + except (EOFError, IOError): + break + if msg is None: + break + else: + self.log_func(msg) + + +class LoggingWrapper(BytesIO): + """Wrapper for file like objects to redirect output to logger + instead""" + + def __init__(self, queue, prefix=None): + BytesIO.__init__(self) + self.queue = queue + self.prefix = prefix + self.buffer = self + + def write(self, data): + if isinstance(data, bytes): + try: + data = data.decode("utf8") + except UnicodeDecodeError: + data = data.decode("unicode_escape") + + if data.endswith("\n"): + data = data[:-1] + if data.endswith("\r"): + data = data[:-1] + if not data: + return + if self.prefix is not None: + data = "%s: %s" % (self.prefix, data) + self.queue.put(data) + + def flush(self): + pass + + +class CaptureIO(object): + def __init__(self, logger, do_capture, mp_context=None): + if mp_context is None: + import multiprocessing as mp_context + self.logger = logger + self.do_capture = do_capture + self.logging_queue = None + self.logging_thread = None + self.original_stdio = None + self.mp_context = mp_context + + def __enter__(self): + if self.do_capture: + self.original_stdio = (sys.stdout, sys.stderr) + self.logging_queue = self.mp_context.Queue() + self.logging_thread = LogThread(self.logging_queue, self.logger, "info") + sys.stdout = LoggingWrapper(self.logging_queue, prefix="STDOUT") + sys.stderr = LoggingWrapper(self.logging_queue, prefix="STDERR") + self.logging_thread.start() + + def __exit__(self, *args, **kwargs): + if self.do_capture: + sys.stdout, sys.stderr = self.original_stdio + if self.logging_queue is not None: + self.logger.info("Closing logging queue") + self.logging_queue.put(None) + if self.logging_thread is not None: + self.logging_thread.join(10) + while not self.logging_queue.empty(): + try: + self.logger.warning( + "Dropping log message: %r", self.logging_queue.get() + ) + except Exception: + pass + self.logging_queue.close() + self.logger.info("queue closed") diff --git a/testing/mozbase/mozlog/mozlog/commandline.py b/testing/mozbase/mozlog/mozlog/commandline.py new file mode 100644 index 0000000000..51e9ea6929 --- /dev/null +++ b/testing/mozbase/mozlog/mozlog/commandline.py @@ -0,0 +1,344 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import argparse +import optparse +import os +import sys +from collections import defaultdict + +import six + +from . 
import formatters, handlers +from .structuredlog import StructuredLogger, set_default_logger + +log_formatters = { + "raw": ( + formatters.JSONFormatter, + "Raw structured log messages " "(provided by mozlog)", + ), + "unittest": ( + formatters.UnittestFormatter, + "Unittest style output " "(provided by mozlog)", + ), + "xunit": ( + formatters.XUnitFormatter, + "xUnit compatible XML " "(provided by mozlog)", + ), + "html": (formatters.HTMLFormatter, "HTML report " "(provided by mozlog)"), + "mach": (formatters.MachFormatter, "Human-readable output " "(provided by mozlog)"), + "tbpl": (formatters.TbplFormatter, "TBPL style log format " "(provided by mozlog)"), + "grouped": ( + formatters.GroupingFormatter, + "Grouped summary of test results " "(provided by mozlog)", + ), + "errorsummary": (formatters.ErrorSummaryFormatter, argparse.SUPPRESS), +} + +TEXT_FORMATTERS = ("raw", "mach") +"""a subset of formatters for non test harnesses related applications""" + + +DOCS_URL = "https://firefox-source-docs.mozilla.org/mozbase/mozlog.html" + + +def level_filter_wrapper(formatter, level): + return handlers.LogLevelFilter(formatter, level) + + +def verbose_wrapper(formatter, verbose): + formatter.verbose = verbose + return formatter + + +def compact_wrapper(formatter, compact): + formatter.compact = compact + return formatter + + +def buffer_handler_wrapper(handler, buffer_limit): + if buffer_limit == "UNLIMITED": + buffer_limit = None + else: + buffer_limit = int(buffer_limit) + return handlers.BufferHandler(handler, buffer_limit) + + +def screenshot_wrapper(formatter, enable_screenshot): + formatter.enable_screenshot = enable_screenshot + return formatter + + +def valgrind_handler_wrapper(handler): + return handlers.ValgrindHandler(handler) + + +def default_formatter_options(log_type, overrides): + formatter_option_defaults = {"raw": {"level": "debug"}} + rv = {"verbose": False, "level": "info"} + rv.update(formatter_option_defaults.get(log_type, {})) + + if overrides is not None: + rv.update(overrides) + + return rv + + +fmt_options = { + #